From 06410d504312e508b8aee0b303798921d01953e3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 8 Mar 2023 22:59:55 +0800 Subject: [PATCH 001/475] Clean up `XXXDataset` classes --- basics/base_dataset.py | 11 +- configs/{acoustic => obsolete}/cascade.yaml | 0 src/diffsinger_task.py | 377 ++++++-------------- src/naive_task.py | 4 +- tts/tasks/fs2_utils.py | 178 --------- tts/tasks/pe.py | 155 -------- 6 files changed, 122 insertions(+), 603 deletions(-) rename configs/{acoustic => obsolete}/cascade.yaml (100%) delete mode 100644 tts/tasks/fs2_utils.py delete mode 100644 tts/tasks/pe.py diff --git a/basics/base_dataset.py b/basics/base_dataset.py index c7d96e0fa..ffea96bb4 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -1,9 +1,12 @@ -import torch -from utils.hparams import hparams -import numpy as np import os -class BaseDataset(torch.utils.data.Dataset): +import numpy as np +from torch.utils.data import Dataset + +from utils.hparams import hparams + + +class BaseDataset(Dataset): ''' Base class for datasets. 1. *ordered_indices*: diff --git a/configs/acoustic/cascade.yaml b/configs/obsolete/cascade.yaml similarity index 100% rename from configs/acoustic/cascade.yaml rename to configs/obsolete/cascade.yaml diff --git a/src/diffsinger_task.py b/src/diffsinger_task.py index 7b150241d..6248b9031 100644 --- a/src/diffsinger_task.py +++ b/src/diffsinger_task.py @@ -1,24 +1,25 @@ +import glob +import importlib +import os + +import numpy as np import torch +import torch.nn.functional as F import utils -from utils.hparams import hparams -from .diff.net import DiffNet -from .diff.diffusion import GaussianDiffusion, OfflineGaussianDiffusion -from .diffspeech_task import DiffSpeechTask -from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder +import matplotlib +from basics.base_dataset import BaseDataset from modules.fastspeech.pe import PitchExtractor -from modules.fastspeech.fs2 import FastSpeech2 -from modules.diffsinger_midi.fs2 import FastSpeech2MIDI from modules.fastspeech.tts_modules import mel2ph_to_dur - +from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder +from utils.cwt import get_lf0_cwt +from utils.hparams import hparams +from utils.indexed_datasets import IndexedDataset +from utils.pitch_utils import denorm_f0, norm_interp_f0 from .diff.candidate_decoder import FFT -from utils.pitch_utils import denorm_f0 -from tts.tasks.fs2_utils import FastSpeechDataset -from tts.tasks.fs2 import FastSpeech2Task - -import numpy as np -import os -import torch.nn.functional as F +from .diff.diffusion import GaussianDiffusion +from .diff.net import DiffNet +from .diffspeech_task import DiffSpeechTask DIFF_DECODERS = { 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), @@ -26,11 +27,13 @@ hp['hidden_size'], hp['dec_layers'], hp['dec_ffn_kernel_size'], hp['num_heads']), } +matplotlib.use('Agg') + class DiffSingerTask(DiffSpeechTask): def __init__(self): super(DiffSingerTask, self).__init__() - self.dataset_cls = FastSpeechDataset + self.dataset_cls = AcousticDataset self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() if hparams.get('pe_enable') is not None and hparams['pe_enable']: self.pe = PitchExtractor().cuda() @@ -99,161 +102,78 @@ def validation_step(self, sample, batch_idx): return outputs -class ShallowDiffusionOfflineDataset(FastSpeechDataset): - def __getitem__(self, index): - sample = super(ShallowDiffusionOfflineDataset, self).__getitem__(index) - item = self._get_item(index) - - if self.prefix != 'train' and hparams['fs2_ckpt'] != '': - 
fs2_ckpt = os.path.dirname(hparams['fs2_ckpt']) - item_name = item['item_name'] - fs2_mel = torch.Tensor(np.load(f'{fs2_ckpt}/P_mels_npy/{item_name}.npy')) # ~M generated by FFT-singer. - sample['fs2_mel'] = fs2_mel - return sample - - def collater(self, samples): - batch = super(ShallowDiffusionOfflineDataset, self).collater(samples) - if self.prefix != 'train' and hparams['fs2_ckpt'] != '': - batch['fs2_mels'] = utils.collate_2d([s['fs2_mel'] for s in samples], 0.0) - return batch - - -class DiffSingerOfflineTask(DiffSingerTask): - def __init__(self): - super(DiffSingerOfflineTask, self).__init__() - self.dataset_cls = ShallowDiffusionOfflineDataset - - def build_tts_model(self): - mel_bins = hparams['audio_num_mel_bins'] - self.model = OfflineGaussianDiffusion( - phone_encoder=self.phone_encoder, - out_dims=mel_bins, denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), - timesteps=hparams['timesteps'], - K_step=hparams['K_step'], - loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], - ) - # if hparams['fs2_ckpt'] != '': - # utils.load_ckpt(self.model.fs2, hparams['fs2_ckpt'], 'model', strict=True) - # self.model.fs2.decoder = None - - def run_model(self, model, sample, return_output=False, infer=False): - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample['energy'] - fs2_mel = None #sample['fs2_mels'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - if hparams['pitch_type'] == 'cwt': - cwt_spec = sample[f'cwt_spec'] - f0_mean = sample['f0_mean'] - f0_std = sample['f0_std'] - sample['f0_cwt'] = f0 = model.cwt2f0_norm(cwt_spec, f0_mean, f0_std, mel2ph) - - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, - ref_mels=[target, fs2_mel], f0=f0, uv=uv, energy=energy, infer=infer) - - losses = {} - if 'diff_loss' in output: - losses['mel'] = output['diff_loss'] - # self.add_dur_loss(output['dur'], mel2ph, txt_tokens, losses=losses) - # if hparams['use_pitch_embed']: - # self.add_pitch_loss(output, sample, losses) - if hparams['use_energy_embed']: - self.add_energy_loss(output['energy_pred'], energy, losses) - - if not return_output: - return losses - else: - return losses, output - - def validation_step(self, sample, batch_idx): - outputs = {} - txt_tokens = sample['txt_tokens'] # [B, T_t] - - target = sample['mels'] # [B, T_s, 80] - energy = sample['energy'] - # fs2_mel = sample['fs2_mels'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - uv = sample['uv'] - - outputs['losses'] = {} - - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) - - - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - fs2_mel = sample['fs2_mels'] - model_out = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=uv, energy=energy, - ref_mels=[None, fs2_mel], infer=True) - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel - else: - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) - pred_f0 = 
model_out.get('f0_denorm') - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') - self.plot_mel(batch_idx, sample['mels'], fs2_mel, name=f'fs2mel_{batch_idx}') - return outputs - - def test_step(self, sample, batch_idx): - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - txt_tokens = sample['txt_tokens'] - energy = sample['energy'] - if hparams['profile_infer']: - pass +class AcousticDataset(BaseDataset): + def __init__(self, prefix, shuffle=False): + super().__init__(shuffle) + self.data_dir = hparams['binary_data_dir'] + self.prefix = prefix + self.hparams = hparams + self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') + self.indexed_ds = None + # self.name2spk_id={} + + # pitch stats + f0_stats_fn = f'{self.data_dir}/train_f0s_mean_std.npy' + if os.path.exists(f0_stats_fn): + hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = np.load(f0_stats_fn) + hparams['f0_mean'] = float(hparams['f0_mean']) + hparams['f0_std'] = float(hparams['f0_std']) else: - mel2ph, uv, f0 = None, None, None - if hparams['use_gt_dur']: - mel2ph = sample['mel2ph'] - if hparams['use_gt_f0']: - f0 = sample['f0'] - uv = sample['uv'] - fs2_mel = sample['fs2_mels'] - outputs = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=uv, ref_mels=[None, fs2_mel], energy=energy, - infer=True) - sample['outputs'] = self.model.out2mel(outputs['mel_out']) - sample['mel2ph_pred'] = outputs['mel2ph'] + hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = None, None - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - sample['f0'] = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - sample['f0_pred'] = self.pe(sample['outputs'])['f0_denorm_pred'] # pe predict from Pred mel + if prefix == 'test': + if hparams['test_input_dir'] != '': + self.indexed_ds, self.sizes = self.load_test_inputs(hparams['test_input_dir']) else: - sample['f0'] = denorm_f0(sample['f0'], sample['uv'], hparams) - sample['f0_pred'] = outputs.get('f0_denorm') - return self.after_infer(sample) + if hparams['num_test_samples'] > 0: + self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] + self.sizes = [self.sizes[i] for i in self.avail_idxs] + if hparams['pitch_type'] == 'cwt': + _, hparams['cwt_scales'] = get_lf0_cwt(np.ones(10)) -class MIDIDataset(FastSpeechDataset): def __getitem__(self, index): - sample = super(MIDIDataset, self).__getitem__(index) + hparams = self.hparams item = self._get_item(index) - sample['f0_midi'] = torch.FloatTensor(item['f0_midi']) - sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] - - return sample - - def collater(self, samples): - batch = super(MIDIDataset, self).collater(samples) - batch['f0_midi'] = utils.collate_1d([s['f0_midi'] for s in samples], 0.0) - batch['pitch_midi'] = utils.collate_1d([s['pitch_midi'] for s in samples], 0) - # print((batch['pitch_midi'] == f0_to_coarse(batch['f0_midi'])).all()) - return batch - - -class OpencpopDataset(FastSpeechDataset): - def __getitem__(self, index): - sample = super(OpencpopDataset, self).__getitem__(index) + max_frames = hparams['max_frames'] + spec = torch.Tensor(item['mel'])[:max_frames] + # energy = (spec.exp() ** 2).sum(-1).sqrt() + mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None + f0, uv = 
norm_interp_f0(item["f0"][:max_frames], hparams) + phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) + pitch = torch.LongTensor(item.get("pitch"))[:max_frames] + sample = { + "id": index, + "item_name": item['item_name'], + "text": item['txt'], + "txt_token": phone, + "mel": spec, + "pitch": pitch, + "f0": f0, + "uv": uv, + "mel2ph": mel2ph, + "mel_nonpadding": spec.abs().sum(-1) > 0, + } + if self.hparams['use_energy_embed']: + sample['energy'] = item['energy'] + if self.hparams.get('use_key_shift_embed', False): + sample['key_shift'] = item['key_shift'] + if self.hparams.get('use_speed_embed', False): + sample['speed'] = item['speed'] + if self.hparams['use_spk_embed']: + sample["spk_embed"] = torch.Tensor(item['spk_embed']) + if self.hparams['use_spk_id']: + sample["spk_id"] = item['spk_id'] + if self.hparams['pitch_type'] == 'cwt': + cwt_spec = torch.Tensor(item['cwt_spec'])[:max_frames] + f0_mean = item.get('f0_mean', item.get('cwt_mean')) + f0_std = item.get('f0_std', item.get('cwt_std')) + sample.update({"cwt_spec": cwt_spec, "f0_mean": f0_mean, "f0_std": f0_std}) + elif self.hparams['pitch_type'] == 'ph': + f0_phlevel_sum = torch.zeros_like(phone).float().scatter_add(0, mel2ph - 1, f0) + f0_phlevel_num = torch.zeros_like(phone).float().scatter_add( + 0, mel2ph - 1, torch.ones_like(f0)).clamp_min(1) + sample["f0_ph"] = f0_phlevel_sum / f0_phlevel_num item = self._get_item(index) sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] @@ -265,11 +185,39 @@ def collater(self, samples): from preprocessing.opencpop import File2Batch return File2Batch.processed_input2batch(samples) + def _get_item(self, index): + if hasattr(self, 'avail_idxs') and self.avail_idxs is not None: + index = self.avail_idxs[index] + if self.indexed_ds is None: + self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') + return self.indexed_ds[index] + + def load_test_inputs(self, test_input_dir, spk_id=0): + inp_wav_paths = glob.glob(f'{test_input_dir}/*.wav') + glob.glob(f'{test_input_dir}/*.mp3') + sizes = [] + items = [] + + binarizer_cls = hparams.get("binarizer_cls", 'basics.base_binarizer.BaseBinarizer') + pkg = ".".join(binarizer_cls.split(".")[:-1]) + cls_name = binarizer_cls.split(".")[-1] + binarizer_cls = getattr(importlib.import_module(pkg), cls_name) + binarization_args = hparams['binarization_args'] + + for wav_fn in inp_wav_paths: + item_name = os.path.basename(wav_fn) + ph = txt = tg_fn = '' + wav_fn = wav_fn + encoder = None + item = binarizer_cls.process_item(item_name, ph, txt, tg_fn, wav_fn, spk_id, encoder, binarization_args) + items.append(item) + sizes.append(item['len']) + return items, sizes + class DiffSingerMIDITask(DiffSingerTask): def __init__(self): super(DiffSingerMIDITask, self).__init__() - self.dataset_cls = OpencpopDataset + self.dataset_cls = AcousticDataset def run_model(self, model, sample, return_output=False, infer=False): ''' @@ -394,102 +342,3 @@ def add_dur_loss(self, dur_pred, mel2ph, txt_tokens, wdb, losses=None): sent_dur_g = dur_gt.sum(-1) sdur_loss = F.mse_loss((sent_dur_p + 1).log(), (sent_dur_g + 1).log(), reduction='mean') losses['sdur'] = sdur_loss.mean() * hparams['lambda_sent_dur'] - - -class AuxDecoderMIDITask(FastSpeech2Task): - def __init__(self): - super().__init__() - # self.dataset_cls = MIDIDataset - self.dataset_cls = OpencpopDataset - - def build_tts_model(self): - if hparams.get('use_midi') is not None and 
hparams['use_midi']: - self.model = FastSpeech2MIDI(self.phone_encoder) - else: - self.model = FastSpeech2(self.phone_encoder) - - def run_model(self, model, sample, return_output=False): - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample['energy'] - - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - if hparams['pitch_type'] == 'cwt': - cwt_spec = sample[f'cwt_spec'] - f0_mean = sample['f0_mean'] - f0_std = sample['f0_std'] - sample['f0_cwt'] = f0 = model.cwt2f0_norm(cwt_spec, f0_mean, f0_std, mel2ph) - - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, - ref_mels=target, f0=f0, uv=uv, energy=energy, infer=False, pitch_midi=sample['pitch_midi'], - midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur')) - - losses = {} - self.add_mel_loss(output['mel_out'], target, losses) - self.add_dur_loss(output['dur'], mel2ph, txt_tokens, sample['word_boundary'], losses=losses) - if hparams['use_pitch_embed']: - self.add_pitch_loss(output, sample, losses) - if hparams['use_energy_embed']: - self.add_energy_loss(output['energy_pred'], energy, losses) - if not return_output: - return losses - else: - return losses, output - - def add_dur_loss(self, dur_pred, mel2ph, txt_tokens, wdb, losses=None): - """ - :param dur_pred: [B, T], float, log scale - :param mel2ph: [B, T] - :param txt_tokens: [B, T] - :param losses: - :return: - """ - B, T = txt_tokens.shape - nonpadding = (txt_tokens != 0).float() - dur_gt = mel2ph_to_dur(mel2ph, T).float() * nonpadding - is_sil = torch.zeros_like(txt_tokens).bool() - for p in self.sil_ph: - is_sil = is_sil | (txt_tokens == self.phone_encoder.encode(p)[0]) - is_sil = is_sil.float() # [B, T_txt] - - # phone duration loss - if hparams['dur_loss'] == 'mse': - losses['pdur'] = F.mse_loss(dur_pred, (dur_gt + 1).log(), reduction='none') - losses['pdur'] = (losses['pdur'] * nonpadding).sum() / nonpadding.sum() - dur_pred = (dur_pred.exp() - 1).clamp(min=0) - else: - raise NotImplementedError - - # use linear scale for sent and word duration - if hparams['lambda_word_dur'] > 0: - idx = F.pad(wdb.cumsum(axis=1), (1, 0))[:, :-1] - # word_dur_g = dur_gt.new_zeros([B, idx.max() + 1]).scatter_(1, idx, midi_dur) # midi_dur can be implied by add gt-ph_dur - word_dur_p = dur_pred.new_zeros([B, idx.max() + 1]).scatter_add(1, idx, dur_pred) - word_dur_g = dur_gt.new_zeros([B, idx.max() + 1]).scatter_add(1, idx, dur_gt) - wdur_loss = F.mse_loss((word_dur_p + 1).log(), (word_dur_g + 1).log(), reduction='none') - word_nonpadding = (word_dur_g > 0).float() - wdur_loss = (wdur_loss * word_nonpadding).sum() / word_nonpadding.sum() - losses['wdur'] = wdur_loss * hparams['lambda_word_dur'] - if hparams['lambda_sent_dur'] > 0: - sent_dur_p = dur_pred.sum(-1) - sent_dur_g = dur_gt.sum(-1) - sdur_loss = F.mse_loss((sent_dur_p + 1).log(), (sent_dur_g + 1).log(), reduction='mean') - losses['sdur'] = sdur_loss.mean() * hparams['lambda_sent_dur'] - - def validation_step(self, sample, batch_idx): - outputs = {} - outputs['losses'] = {} - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True) - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - mel_out = self.model.out2mel(model_out['mel_out']) - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - self.plot_mel(batch_idx, sample['mels'], 
mel_out) - self.plot_dur(batch_idx, sample, model_out) - if hparams['use_pitch_embed']: - self.plot_pitch(batch_idx, sample, model_out) - return outputs diff --git a/src/naive_task.py b/src/naive_task.py index 7e6983cc9..f2cd52419 100644 --- a/src/naive_task.py +++ b/src/naive_task.py @@ -1,12 +1,12 @@ from utils.hparams import hparams -from .diffsinger_task import DiffSingerMIDITask, OpencpopDataset +from .diffsinger_task import DiffSingerMIDITask, AcousticDataset from utils.pitch_utils import denorm_f0 import utils class NaiveTask(DiffSingerMIDITask): def __init__(self): super(NaiveTask, self).__init__() - self.dataset_cls = OpencpopDataset + self.dataset_cls = AcousticDataset def run_model(self, model, sample, return_output=False, infer=False): ''' diff --git a/tts/tasks/fs2_utils.py b/tts/tasks/fs2_utils.py deleted file mode 100644 index b05e2932e..000000000 --- a/tts/tasks/fs2_utils.py +++ /dev/null @@ -1,178 +0,0 @@ -import matplotlib - -matplotlib.use('Agg') - -import glob -import importlib -from utils.cwt import get_lf0_cwt -import os -import torch.optim -import torch.utils.data -from utils.indexed_datasets import IndexedDataset -from utils.pitch_utils import norm_interp_f0 -import numpy as np -from basics.base_dataset import BaseDataset -import torch -import torch.optim -import torch.utils.data -import utils -import torch.distributions -from utils.hparams import hparams - - -class FastSpeechDataset(BaseDataset): - def __init__(self, prefix, shuffle=False): - super().__init__(shuffle) - self.data_dir = hparams['binary_data_dir'] - self.prefix = prefix - self.hparams = hparams - self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') - self.indexed_ds = None - # self.name2spk_id={} - - # pitch stats - f0_stats_fn = f'{self.data_dir}/train_f0s_mean_std.npy' - if os.path.exists(f0_stats_fn): - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = np.load(f0_stats_fn) - hparams['f0_mean'] = float(hparams['f0_mean']) - hparams['f0_std'] = float(hparams['f0_std']) - else: - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = None, None - - if prefix == 'test': - if hparams['test_input_dir'] != '': - self.indexed_ds, self.sizes = self.load_test_inputs(hparams['test_input_dir']) - else: - if hparams['num_test_samples'] > 0: - self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] - self.sizes = [self.sizes[i] for i in self.avail_idxs] - - if hparams['pitch_type'] == 'cwt': - _, hparams['cwt_scales'] = get_lf0_cwt(np.ones(10)) - - def _get_item(self, index): - if hasattr(self, 'avail_idxs') and self.avail_idxs is not None: - index = self.avail_idxs[index] - if self.indexed_ds is None: - self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') - return self.indexed_ds[index] - - def __getitem__(self, index): - hparams = self.hparams - item = self._get_item(index) - max_frames = hparams['max_frames'] - spec = torch.Tensor(item['mel'])[:max_frames] - # energy = (spec.exp() ** 2).sum(-1).sqrt() - mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None - f0, uv = norm_interp_f0(item["f0"][:max_frames], hparams) - phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) - pitch = torch.LongTensor(item.get("pitch"))[:max_frames] - # print(item.keys(), item['mel'].shape, spec.shape) - sample = { - "id": index, - "item_name": item['item_name'], - "text": item['txt'], - "txt_token": phone, - "mel": spec, - "pitch": pitch, - "f0": f0, - "uv": uv, - "mel2ph": mel2ph, - 
"mel_nonpadding": spec.abs().sum(-1) > 0, - } - if self.hparams['use_energy_embed']: - sample['energy'] = item['energy'] - if self.hparams.get('use_key_shift_embed', False): - sample['key_shift'] = item['key_shift'] - if self.hparams.get('use_speed_embed', False): - sample['speed'] = item['speed'] - if self.hparams['use_spk_embed']: - sample["spk_embed"] = torch.Tensor(item['spk_embed']) - if self.hparams['use_spk_id']: - sample["spk_id"] = item['spk_id'] - # sample['spk_id'] = 0 - # for key in self.name2spk_id.keys(): - # if key in item['item_name']: - # sample['spk_id'] = self.name2spk_id[key] - # break - if self.hparams['pitch_type'] == 'cwt': - cwt_spec = torch.Tensor(item['cwt_spec'])[:max_frames] - f0_mean = item.get('f0_mean', item.get('cwt_mean')) - f0_std = item.get('f0_std', item.get('cwt_std')) - sample.update({"cwt_spec": cwt_spec, "f0_mean": f0_mean, "f0_std": f0_std}) - elif self.hparams['pitch_type'] == 'ph': - f0_phlevel_sum = torch.zeros_like(phone).float().scatter_add(0, mel2ph - 1, f0) - f0_phlevel_num = torch.zeros_like(phone).float().scatter_add( - 0, mel2ph - 1, torch.ones_like(f0)).clamp_min(1) - sample["f0_ph"] = f0_phlevel_sum / f0_phlevel_num - return sample - - def collater(self, samples): - if len(samples) == 0: - return {} - id = torch.LongTensor([s['id'] for s in samples]) - item_names = [s['item_name'] for s in samples] - text = [s['text'] for s in samples] - txt_tokens = utils.collate_1d([s['txt_token'] for s in samples], 0) - f0 = utils.collate_1d([s['f0'] for s in samples], 0.0) - pitch = utils.collate_1d([s['pitch'] for s in samples]) - uv = utils.collate_1d([s['uv'] for s in samples]) - energy = utils.collate_1d([s['energy'] for s in samples], 0.0) - mel2ph = utils.collate_1d([s['mel2ph'] for s in samples], 0.0) \ - if samples[0]['mel2ph'] is not None else None - mels = utils.collate_2d([s['mel'] for s in samples], 0.0) - txt_lengths = torch.LongTensor([s['txt_token'].numel() for s in samples]) - mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples]) - - batch = { - 'id': id, - 'item_name': item_names, - 'nsamples': len(samples), - 'text': text, - 'txt_tokens': txt_tokens, - 'txt_lengths': txt_lengths, - 'mels': mels, - 'mel_lengths': mel_lengths, - 'mel2ph': mel2ph, - 'energy': energy, - 'pitch': pitch, - 'f0': f0, - 'uv': uv, - } - - if self.hparams['use_spk_embed']: - spk_embed = torch.stack([s['spk_embed'] for s in samples]) - batch['spk_embed'] = spk_embed - if self.hparams['use_spk_id']: - spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) - batch['spk_ids'] = spk_ids - if self.hparams['pitch_type'] == 'cwt': - cwt_spec = utils.collate_2d([s['cwt_spec'] for s in samples]) - f0_mean = torch.Tensor([s['f0_mean'] for s in samples]) - f0_std = torch.Tensor([s['f0_std'] for s in samples]) - batch.update({'cwt_spec': cwt_spec, 'f0_mean': f0_mean, 'f0_std': f0_std}) - elif self.hparams['pitch_type'] == 'ph': - batch['f0'] = utils.collate_1d([s['f0_ph'] for s in samples]) - - return batch - - def load_test_inputs(self, test_input_dir, spk_id=0): - inp_wav_paths = glob.glob(f'{test_input_dir}/*.wav') + glob.glob(f'{test_input_dir}/*.mp3') - sizes = [] - items = [] - - binarizer_cls = hparams.get("binarizer_cls", 'basics.base_binarizer.BaseBinarizer') - pkg = ".".join(binarizer_cls.split(".")[:-1]) - cls_name = binarizer_cls.split(".")[-1] - binarizer_cls = getattr(importlib.import_module(pkg), cls_name) - binarization_args = hparams['binarization_args'] - - for wav_fn in inp_wav_paths: - item_name = os.path.basename(wav_fn) - ph = txt 
= tg_fn = '' - wav_fn = wav_fn - encoder = None - item = binarizer_cls.process_item(item_name, ph, txt, tg_fn, wav_fn, spk_id, encoder, binarization_args) - items.append(item) - sizes.append(item['len']) - return items, sizes diff --git a/tts/tasks/pe.py b/tts/tasks/pe.py deleted file mode 100644 index 10e816b03..000000000 --- a/tts/tasks/pe.py +++ /dev/null @@ -1,155 +0,0 @@ -import matplotlib -matplotlib.use('Agg') - -import torch -import numpy as np -import os - -from basics.base_dataset import BaseDataset -from tts.tasks.fs2 import FastSpeech2Task -from modules.fastspeech.pe import PitchExtractor -import utils -from utils.indexed_datasets import IndexedDataset -from utils.hparams import hparams -from utils.plot import f0_to_figure -from utils.pitch_utils import norm_interp_f0, denorm_f0 - - -class PeDataset(BaseDataset): - def __init__(self, prefix, shuffle=False): - super().__init__(shuffle) - self.data_dir = hparams['binary_data_dir'] - self.prefix = prefix - self.hparams = hparams - self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') - self.indexed_ds = None - - # pitch stats - f0_stats_fn = f'{self.data_dir}/train_f0s_mean_std.npy' - if os.path.exists(f0_stats_fn): - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = np.load(f0_stats_fn) - hparams['f0_mean'] = float(hparams['f0_mean']) - hparams['f0_std'] = float(hparams['f0_std']) - else: - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = None, None - - if prefix == 'test': - if hparams['num_test_samples'] > 0: - self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] - self.sizes = [self.sizes[i] for i in self.avail_idxs] - - def _get_item(self, index): - if hasattr(self, 'avail_idxs') and self.avail_idxs is not None: - index = self.avail_idxs[index] - if self.indexed_ds is None: - self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') - return self.indexed_ds[index] - - def __getitem__(self, index): - hparams = self.hparams - item = self._get_item(index) - max_frames = hparams['max_frames'] - spec = torch.Tensor(item['mel'])[:max_frames] - # mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None - f0, uv = norm_interp_f0(item["f0"][:max_frames], hparams) - pitch = torch.LongTensor(item.get("pitch"))[:max_frames] - # print(item.keys(), item['mel'].shape, spec.shape) - sample = { - "id": index, - "item_name": item['item_name'], - "text": item['txt'], - "mel": spec, - "pitch": pitch, - "f0": f0, - "uv": uv, - # "mel2ph": mel2ph, - # "mel_nonpadding": spec.abs().sum(-1) > 0, - } - return sample - - def collater(self, samples): - if len(samples) == 0: - return {} - id = torch.LongTensor([s['id'] for s in samples]) - item_names = [s['item_name'] for s in samples] - text = [s['text'] for s in samples] - f0 = utils.collate_1d([s['f0'] for s in samples], 0.0) - pitch = utils.collate_1d([s['pitch'] for s in samples]) - uv = utils.collate_1d([s['uv'] for s in samples]) - mels = utils.collate_2d([s['mel'] for s in samples], 0.0) - mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples]) - # mel2ph = utils.collate_1d([s['mel2ph'] for s in samples], 0.0) \ - # if samples[0]['mel2ph'] is not None else None - # mel_nonpaddings = utils.collate_1d([s['mel_nonpadding'].float() for s in samples], 0.0) - - batch = { - 'id': id, - 'item_name': item_names, - 'nsamples': len(samples), - 'text': text, - 'mels': mels, - 'mel_lengths': mel_lengths, - 'pitch': pitch, - # 'mel2ph': mel2ph, - # 'mel_nonpaddings': mel_nonpaddings, 
- 'f0': f0, - 'uv': uv, - } - return batch - - -class PitchExtractionTask(FastSpeech2Task): - def __init__(self): - super().__init__() - self.dataset_cls = PeDataset - - def build_tts_model(self): - self.model = PitchExtractor(conv_layers=hparams['pitch_extractor_conv_layers']) - - # def build_scheduler(self, optimizer): - # return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=0.5) - def _training_step(self, sample, batch_idx, _): - loss_output = self.run_model(self.model, sample) - total_loss = sum([v for v in loss_output.values() if isinstance(v, torch.Tensor) and v.requires_grad]) - loss_output['batch_size'] = sample['mels'].size()[0] - return total_loss, loss_output - - def validation_step(self, sample, batch_idx): - outputs = {} - outputs['losses'] = {} - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=True) - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - self.plot_pitch(batch_idx, model_out, sample) - return outputs - - def run_model(self, model, sample, return_output=False, infer=False): - f0 = sample['f0'] - uv = sample['uv'] - output = model(sample['mels']) - losses = {} - self.add_pitch_loss(output, sample, losses) - if not return_output: - return losses - else: - return losses, output - - def plot_pitch(self, batch_idx, model_out, sample): - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) - self.logger.experiment.add_figure( - f'f0_{batch_idx}', - f0_to_figure(gt_f0[0], None, model_out['f0_denorm_pred'][0]), - self.global_step) - - def add_pitch_loss(self, output, sample, losses): - # mel2ph = sample['mel2ph'] # [B, T_s] - mel = sample['mels'] - f0 = sample['f0'] - uv = sample['uv'] - # nonpadding = (mel2ph != 0).float() if hparams['pitch_type'] == 'frame' \ - # else (sample['txt_tokens'] != 0).float() - nonpadding = (mel.abs().sum(-1) > 0).float() # sample['mel_nonpaddings'] - # print(nonpadding[0][-8:], nonpadding.shape) - self.add_f0_loss(output['pitch_pred'], f0, uv, losses, nonpadding=nonpadding) \ No newline at end of file From 2a7a0189025494b23f3bc24f6314ba5a2b2efc79 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 00:24:13 +0800 Subject: [PATCH 002/475] Clean up `XXXTask` classes --- basics/base_task.py | 46 ++ configs/acoustic/nomidi.yaml | 2 +- tts/tasks/fs2.py => src/acoustic_task.py | 576 ++++++++++++----------- src/diffsinger_task.py | 344 -------------- src/diffspeech_task.py | 124 ----- src/naive_task.py | 88 ---- src/task.py | 94 ---- tts/tasks/tts.py | 123 ----- 8 files changed, 351 insertions(+), 1046 deletions(-) rename tts/tasks/fs2.py => src/acoustic_task.py (50%) delete mode 100644 src/diffsinger_task.py delete mode 100644 src/diffspeech_task.py delete mode 100644 src/naive_task.py delete mode 100644 src/task.py delete mode 100644 tts/tasks/tts.py diff --git a/basics/base_task.py b/basics/base_task.py index abedf8185..4eaaf589d 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -177,6 +177,52 @@ def configure_optimizers(self): self.scheduler = self.build_scheduler(optm) return [optm] + def build_dataloader(self, dataset, shuffle, max_tokens=None, max_sentences=None, + required_batch_size_multiple=-1, endless=False, batch_by_size=True): + devices_cnt = torch.cuda.device_count() + if devices_cnt == 0: + devices_cnt = 1 + if required_batch_size_multiple == -1: + required_batch_size_multiple = devices_cnt + + def 
shuffle_batches(batches): + np.random.shuffle(batches) + return batches + + if max_tokens is not None: + max_tokens *= devices_cnt + if max_sentences is not None: + max_sentences *= devices_cnt + indices = dataset.ordered_indices() + if batch_by_size: + batch_sampler = utils.batch_by_size( + indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, + required_batch_size_multiple=required_batch_size_multiple, + ) + else: + batch_sampler = [] + for i in range(0, len(indices), max_sentences): + batch_sampler.append(indices[i:i + max_sentences]) + + if shuffle: + batches = shuffle_batches(list(batch_sampler)) + if endless: + batches = [b for _ in range(1000) for b in shuffle_batches(list(batch_sampler))] + else: + batches = batch_sampler + if endless: + batches = [b for _ in range(1000) for b in batches] + num_workers = dataset.num_workers + if self.trainer.use_ddp: + num_replicas = dist.get_world_size() + rank = dist.get_rank() + batches = [x[rank::num_replicas] for x in batches if len(x) % num_replicas == 0] + return torch.utils.data.DataLoader(dataset, + collate_fn=dataset.collater, + batch_sampler=batches, + num_workers=num_workers, + pin_memory=False) + def test_start(self): pass diff --git a/configs/acoustic/nomidi.yaml b/configs/acoustic/nomidi.yaml index e71629e80..1ead10aed 100644 --- a/configs/acoustic/nomidi.yaml +++ b/configs/acoustic/nomidi.yaml @@ -1,7 +1,7 @@ base_config: - configs/basics/fs2.yaml -task_cls: src.naive_task.NaiveTask +task_cls: src.acoustic_task.NaiveTask datasets: [ 'opencpop', ] diff --git a/tts/tasks/fs2.py b/src/acoustic_task.py similarity index 50% rename from tts/tasks/fs2.py rename to src/acoustic_task.py index 1b6690b74..5abe331fd 100644 --- a/tts/tasks/fs2.py +++ b/src/acoustic_task.py @@ -1,35 +1,174 @@ -import matplotlib - -matplotlib.use('Agg') - -from utils import audio -import matplotlib.pyplot as plt -from data_gen.data_gen_utils import get_pitch_parselmouth -from tts.tasks.fs2_utils import FastSpeechDataset -from utils.cwt import cwt2f0 -from utils.pl_utils import data_loader +import glob +import importlib import os from multiprocessing.pool import Pool -from tqdm import tqdm -from modules.fastspeech.tts_modules import mel2ph_to_dur -from utils.hparams import hparams -from utils.plot import spec_to_figure, dur_to_figure, f0_to_figure -from utils.pitch_utils import denorm_f0 -from modules.fastspeech.fs2 import FastSpeech2 -from tts.tasks.tts import TtsTask + +import matplotlib +import matplotlib.pyplot as plt +import numpy as np import torch +import torch.distributions +import torch.distributions +import torch.distributions import torch.optim +import torch.optim +import torch.optim +import torch.utils.data import torch.utils.data -import torch.nn.functional as F +import torch.utils.data +from tqdm import tqdm + import utils -import torch.distributions -import numpy as np -from modules.commons.ssim import ssim +from basics.base_dataset import BaseDataset +from basics.base_task import BaseTask +from data_gen.data_gen_utils import get_pitch_parselmouth +from modules.fastspeech.tts_modules import mel2ph_to_dur +from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder +from utils import audio +from utils.cwt import get_lf0_cwt +from utils.hparams import hparams +from utils.indexed_datasets import IndexedDataset +from utils.phoneme_utils import build_phoneme_list +from utils.pitch_utils import denorm_f0 +from utils.pitch_utils import norm_interp_f0 +from utils.pl_utils import data_loader +from utils.plot import 
spec_to_figure +from utils.text_encoder import TokenTextEncoder +from .diff.candidate_decoder import FFT +from .diff.diffusion import GaussianDiffusion +from .diff.net import DiffNet + +DIFF_DECODERS = { + 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), + 'fft': lambda hp: FFT( + hp['hidden_size'], hp['dec_layers'], hp['dec_ffn_kernel_size'], hp['num_heads']), +} +matplotlib.use('Agg') + + +class AcousticDataset(BaseDataset): + def __init__(self, prefix, shuffle=False): + super().__init__(shuffle) + self.data_dir = hparams['binary_data_dir'] + self.prefix = prefix + self.hparams = hparams + self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') + self.indexed_ds = None + # self.name2spk_id={} + + # pitch stats + f0_stats_fn = f'{self.data_dir}/train_f0s_mean_std.npy' + if os.path.exists(f0_stats_fn): + hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = np.load(f0_stats_fn) + hparams['f0_mean'] = float(hparams['f0_mean']) + hparams['f0_std'] = float(hparams['f0_std']) + else: + hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = None, None + + if prefix == 'test': + if hparams['test_input_dir'] != '': + self.indexed_ds, self.sizes = self.load_test_inputs(hparams['test_input_dir']) + else: + if hparams['num_test_samples'] > 0: + self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] + self.sizes = [self.sizes[i] for i in self.avail_idxs] -class FastSpeech2Task(TtsTask): + if hparams['pitch_type'] == 'cwt': + _, hparams['cwt_scales'] = get_lf0_cwt(np.ones(10)) + + def __getitem__(self, index): + hparams = self.hparams + item = self._get_item(index) + max_frames = hparams['max_frames'] + spec = torch.Tensor(item['mel'])[:max_frames] + # energy = (spec.exp() ** 2).sum(-1).sqrt() + mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None + f0, uv = norm_interp_f0(item["f0"][:max_frames], hparams) + phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) + pitch = torch.LongTensor(item.get("pitch"))[:max_frames] + sample = { + "id": index, + "item_name": item['item_name'], + "text": item['txt'], + "txt_token": phone, + "mel": spec, + "pitch": pitch, + "f0": f0, + "uv": uv, + "mel2ph": mel2ph, + "mel_nonpadding": spec.abs().sum(-1) > 0, + } + if self.hparams['use_energy_embed']: + sample['energy'] = item['energy'] + if self.hparams.get('use_key_shift_embed', False): + sample['key_shift'] = item['key_shift'] + if self.hparams.get('use_speed_embed', False): + sample['speed'] = item['speed'] + if self.hparams['use_spk_embed']: + sample["spk_embed"] = torch.Tensor(item['spk_embed']) + if self.hparams['use_spk_id']: + sample["spk_id"] = item['spk_id'] + if self.hparams['pitch_type'] == 'cwt': + cwt_spec = torch.Tensor(item['cwt_spec'])[:max_frames] + f0_mean = item.get('f0_mean', item.get('cwt_mean')) + f0_std = item.get('f0_std', item.get('cwt_std')) + sample.update({"cwt_spec": cwt_spec, "f0_mean": f0_mean, "f0_std": f0_std}) + elif self.hparams['pitch_type'] == 'ph': + f0_phlevel_sum = torch.zeros_like(phone).float().scatter_add(0, mel2ph - 1, f0) + f0_phlevel_num = torch.zeros_like(phone).float().scatter_add( + 0, mel2ph - 1, torch.ones_like(f0)).clamp_min(1) + sample["f0_ph"] = f0_phlevel_sum / f0_phlevel_num + item = self._get_item(index) + sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] + sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] + sample['is_slur'] = 
torch.LongTensor(item['is_slur'])[:hparams['max_frames']] + sample['word_boundary'] = torch.LongTensor(item['word_boundary'])[:hparams['max_frames']] + return sample + + def collater(self, samples): + from preprocessing.opencpop import File2Batch + return File2Batch.processed_input2batch(samples) + + def _get_item(self, index): + if hasattr(self, 'avail_idxs') and self.avail_idxs is not None: + index = self.avail_idxs[index] + if self.indexed_ds is None: + self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') + return self.indexed_ds[index] + + def load_test_inputs(self, test_input_dir, spk_id=0): + inp_wav_paths = glob.glob(f'{test_input_dir}/*.wav') + glob.glob(f'{test_input_dir}/*.mp3') + sizes = [] + items = [] + + binarizer_cls = hparams.get("binarizer_cls", 'basics.base_binarizer.BaseBinarizer') + pkg = ".".join(binarizer_cls.split(".")[:-1]) + cls_name = binarizer_cls.split(".")[-1] + binarizer_cls = getattr(importlib.import_module(pkg), cls_name) + binarization_args = hparams['binarization_args'] + + for wav_fn in inp_wav_paths: + item_name = os.path.basename(wav_fn) + ph = txt = tg_fn = '' + wav_fn = wav_fn + encoder = None + item = binarizer_cls.process_item(item_name, ph, txt, tg_fn, wav_fn, spk_id, encoder, binarization_args) + items.append(item) + sizes.append(item['len']) + return items, sizes + +class AcousticTask(BaseTask): def __init__(self): - super(FastSpeech2Task, self).__init__() - self.dataset_cls = FastSpeechDataset + super().__init__() + self.dataset_cls = AcousticDataset + self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() + self.phone_encoder = self.build_phone_encoder() + self.padding_idx = self.phone_encoder.pad() + self.eos_idx = self.phone_encoder.eos() + self.seg_idx = self.phone_encoder.seg() + self.saving_result_pool = None + self.saving_results_futures = None + self.stats = {} self.mse_loss_fn = torch.nn.MSELoss() mel_losses = hparams['mel_loss'].split("|") self.loss_and_lambda = {} @@ -44,6 +183,42 @@ def __init__(self): self.loss_and_lambda[l] = lbd print("| Mel losses:", self.loss_and_lambda) self.sil_ph = self.phone_encoder.sil_phonemes() + self.logged_gt_wav = set() + + @staticmethod + def build_phone_encoder(): + phone_list = build_phoneme_list() + return TokenTextEncoder(vocab_list=phone_list, replace_oov=',') + + def build_model(self): + mel_bins = hparams['audio_num_mel_bins'] + self.model = GaussianDiffusion( + phone_encoder=self.phone_encoder, + out_dims=mel_bins, denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), + timesteps=hparams['timesteps'], + K_step=hparams['K_step'], + loss_type=hparams['diff_loss_type'], + spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], + ) + if hparams.get('fs2_ckpt', '') != '': + utils.load_ckpt(self.model.fs2, hparams['fs2_ckpt'], 'model', strict=True) + for k, v in self.model.fs2.named_parameters(): + v.requires_grad = False + if hparams['load_ckpt'] != '': + self.load_ckpt(hparams['load_ckpt'], strict=True) + utils.print_arch(self.model) + return self.model + + def build_optimizer(self, model): + self.optimizer = optimizer = torch.optim.AdamW( + filter(lambda p: p.requires_grad, model.parameters()), + lr=hparams['lr'], + betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), + weight_decay=hparams['weight_decay']) + return optimizer + + def build_scheduler(self, optimizer): + return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) @data_loader def train_dataloader(self): @@ -62,35 +237,98 @@ def 
test_dataloader(self): return self.build_dataloader(test_dataset, False, self.max_eval_tokens, self.max_eval_sentences, batch_by_size=False) - def build_tts_model(self): - self.model = FastSpeech2(self.phone_encoder) + def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): + if optimizer is None: + return + optimizer.step() + optimizer.zero_grad() + if self.scheduler is not None: + self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) + + def run_model(self, model, sample, return_output=False, infer=False): + ''' + steps: + 1. run the full model, calc the main loss + 2. calculate loss for dur_predictor, pitch_predictor, energy_predictor + ''' + txt_tokens = sample['txt_tokens'] # [B, T_t] + target = sample['mels'] # [B, T_s, 80] + mel2ph = sample['mel2ph'] # [B, T_s] + f0 = sample['f0'] + uv = sample['uv'] + energy = sample.get('energy') + key_shift = sample.get('key_shift') + speed = sample.get('speed') + + if infer: + if hparams['use_spk_id']: + spk_embed = model.fs2.spk_embed(sample['spk_ids'])[:, None, :] + elif hparams['use_spk_embed']: + spk_embed = sample['spk_embed'] + else: + spk_embed = None + output = model(txt_tokens, mel2ph=mel2ph, spk_mix_embed=spk_embed,ref_mels=target, + f0=f0, uv=uv, energy=energy, key_shift=key_shift, speed=speed, infer=infer) + else: + spk_embed = sample.get('spk_ids') if hparams['use_spk_id'] else sample.get('spk_embed') + output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, ref_mels=target, + f0=f0, uv=uv, energy=energy, key_shift=key_shift, speed=speed, infer=infer) - def build_model(self): - self.build_tts_model() - if hparams['load_ckpt'] != '': - self.load_ckpt(hparams['load_ckpt'], strict=True) - utils.print_arch(self.model) - return self.model + losses = {} + if 'diff_loss' in output: + losses['mel'] = output['diff_loss'] + if not return_output: + return losses + else: + return losses, output def _training_step(self, sample, batch_idx, _): - loss_output = self.run_model(self.model, sample) - total_loss = sum([v for v in loss_output.values() if isinstance(v, torch.Tensor) and v.requires_grad]) - loss_output['batch_size'] = sample['txt_tokens'].size()[0] - return total_loss, loss_output + log_outputs = self.run_model(self.model, sample) + total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) + log_outputs['batch_size'] = sample['txt_tokens'].size()[0] + log_outputs['lr'] = self.scheduler.get_lr()[0] + return total_loss, log_outputs def validation_step(self, sample, batch_idx): outputs = {} - outputs['losses'] = {} - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True) + txt_tokens = sample['txt_tokens'] # [B, T_t] + + target = sample['mels'] # [B, T_s, 80] + energy = sample.get('energy') + key_shift = sample.get('key_shift') + speed = sample.get('speed') + # fs2_mel = sample['fs2_mels'] + # spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') + mel2ph = sample['mel2ph'] + f0 = sample['f0'] + + outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) + outputs['total_loss'] = sum(outputs['losses'].values()) outputs['nsamples'] = sample['nsamples'] - mel_out = self.model.out2mel(model_out['mel_out']) outputs = utils.tensors_to_scalars(outputs) if batch_idx < hparams['num_valid_plots']: - self.plot_mel(batch_idx, sample['mels'], mel_out) - self.plot_dur(batch_idx, sample, model_out) - if hparams['use_pitch_embed']: - self.plot_pitch(batch_idx, 
sample, model_out) + if hparams['use_spk_id']: + spk_embed = self.model.fs2.spk_embed(sample['spk_ids'])[:, None, :] + elif hparams['use_spk_embed']: + spk_embed = sample['spk_embed'] + else: + spk_embed = None + model_out = self.model( + txt_tokens, spk_mix_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=None, energy=energy, + key_shift=key_shift, speed=speed, ref_mels=None, pitch_midi=sample['pitch_midi'], + midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur'), infer=True + ) + + if hparams.get('pe_enable') is not None and hparams['pe_enable']: + gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel + pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel + else: + gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) + pred_f0 = gt_f0 + self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) + self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') + return outputs def _validation_end(self, outputs): @@ -106,184 +344,22 @@ def _validation_end(self, outputs): all_losses_meter['total_loss'].update(output['total_loss'], n) return {k: round(v.avg, 4) for k, v in all_losses_meter.items()} - def run_model(self, model, sample, return_output=False): - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample['energy'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - if hparams['pitch_type'] == 'cwt': - cwt_spec = sample[f'cwt_spec'] - f0_mean = sample['f0_mean'] - f0_std = sample['f0_std'] - sample['f0_cwt'] = f0 = model.cwt2f0_norm(cwt_spec, f0_mean, f0_std, mel2ph) - - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, - ref_mels=target, f0=f0, uv=uv, energy=energy, infer=False) - - losses = {} - self.add_mel_loss(output['mel_out'], target, losses) - self.add_dur_loss(output['dur'], mel2ph, txt_tokens, losses=losses) - if hparams['use_pitch_embed']: - self.add_pitch_loss(output, sample, losses) - if hparams['use_energy_embed']: - self.add_energy_loss(output['energy_pred'], energy, losses) - if not return_output: - return losses - else: - return losses, output - - ############ - # losses - ############ - def add_mel_loss(self, mel_out, target, losses, postfix='', mel_mix_loss=None): - if mel_mix_loss is None: - for loss_name, lbd in self.loss_and_lambda.items(): - if 'l1' == loss_name: - l = self.l1_loss(mel_out, target) - elif 'mse' == loss_name: - raise NotImplementedError - elif 'ssim' == loss_name: - l = self.ssim_loss(mel_out, target) - elif 'gdl' == loss_name: - raise NotImplementedError - losses[f'{loss_name}{postfix}'] = l * lbd - else: - raise NotImplementedError - - def l1_loss(self, decoder_output, target): - # decoder_output : B x T x n_mel - # target : B x T x n_mel - l1_loss = F.l1_loss(decoder_output, target, reduction='none') - weights = self.weights_nonzero_speech(target) - l1_loss = (l1_loss * weights).sum() / weights.sum() - return l1_loss - - def ssim_loss(self, decoder_output, target, bias=6.0): - # decoder_output : B x T x n_mel - # target : B x T x n_mel - assert decoder_output.shape == target.shape - weights = self.weights_nonzero_speech(target) - decoder_output = decoder_output[:, None] + bias - target = target[:, None] + bias - ssim_loss = 1 - ssim(decoder_output, target, size_average=False) - ssim_loss = (ssim_loss * weights).sum() / weights.sum() - return 
ssim_loss - - def add_dur_loss(self, dur_pred, mel2ph, txt_tokens, losses=None): - """ - - :param dur_pred: [B, T], float, log scale - :param mel2ph: [B, T] - :param txt_tokens: [B, T] - :param losses: - :return: - """ - B, T = txt_tokens.shape - nonpadding = (txt_tokens != 0).float() - dur_gt = mel2ph_to_dur(mel2ph, T).float() * nonpadding - is_sil = torch.zeros_like(txt_tokens).bool() - for p in self.sil_ph: - is_sil = is_sil | (txt_tokens == self.phone_encoder.encode(p)[0]) - is_sil = is_sil.float() # [B, T_txt] - - # phone duration loss - if hparams['dur_loss'] == 'mse': - losses['pdur'] = F.mse_loss(dur_pred, (dur_gt + 1).log(), reduction='none') - losses['pdur'] = (losses['pdur'] * nonpadding).sum() / nonpadding.sum() - dur_pred = (dur_pred.exp() - 1).clamp(min=0) - elif hparams['dur_loss'] == 'mog': - return NotImplementedError - elif hparams['dur_loss'] == 'crf': - losses['pdur'] = -self.model.dur_predictor.crf( - dur_pred, dur_gt.long().clamp(min=0, max=31), mask=nonpadding > 0, reduction='mean') - losses['pdur'] = losses['pdur'] * hparams['lambda_ph_dur'] - - # use linear scale for sent and word duration - if hparams['lambda_word_dur'] > 0: - word_id = (is_sil.cumsum(-1) * (1 - is_sil)).long() - word_dur_p = dur_pred.new_zeros([B, word_id.max() + 1]).scatter_add(1, word_id, dur_pred)[:, 1:] - word_dur_g = dur_gt.new_zeros([B, word_id.max() + 1]).scatter_add(1, word_id, dur_gt)[:, 1:] - wdur_loss = F.mse_loss((word_dur_p + 1).log(), (word_dur_g + 1).log(), reduction='none') - word_nonpadding = (word_dur_g > 0).float() - wdur_loss = (wdur_loss * word_nonpadding).sum() / word_nonpadding.sum() - losses['wdur'] = wdur_loss * hparams['lambda_word_dur'] - if hparams['lambda_sent_dur'] > 0: - sent_dur_p = dur_pred.sum(-1) - sent_dur_g = dur_gt.sum(-1) - sdur_loss = F.mse_loss((sent_dur_p + 1).log(), (sent_dur_g + 1).log(), reduction='mean') - losses['sdur'] = sdur_loss.mean() * hparams['lambda_sent_dur'] - - def add_pitch_loss(self, output, sample, losses): - if hparams['pitch_type'] == 'ph': - nonpadding = (sample['txt_tokens'] != 0).float() - pitch_loss_fn = F.l1_loss if hparams['pitch_loss'] == 'l1' else F.mse_loss - losses['f0'] = (pitch_loss_fn(output['pitch_pred'][:, :, 0], sample['f0'], - reduction='none') * nonpadding).sum() \ - / nonpadding.sum() * hparams['lambda_f0'] - return - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - nonpadding = (mel2ph != 0).float() - if hparams['pitch_type'] == 'cwt': - cwt_spec = sample[f'cwt_spec'] - f0_mean = sample['f0_mean'] - f0_std = sample['f0_std'] - cwt_pred = output['cwt'][:, :, :10] - f0_mean_pred = output['f0_mean'] - f0_std_pred = output['f0_std'] - losses['C'] = self.cwt_loss(cwt_pred, cwt_spec) * hparams['lambda_f0'] - if hparams['use_uv']: - assert output['cwt'].shape[-1] == 11 - uv_pred = output['cwt'][:, :, -1] - losses['uv'] = (F.binary_cross_entropy_with_logits(uv_pred, uv, reduction='none') * nonpadding) \ - .sum() / nonpadding.sum() * hparams['lambda_uv'] - losses['f0_mean'] = F.l1_loss(f0_mean_pred, f0_mean) * hparams['lambda_f0'] - losses['f0_std'] = F.l1_loss(f0_std_pred, f0_std) * hparams['lambda_f0'] - if hparams['cwt_add_f0_loss']: - f0_cwt_ = self.model.cwt2f0_norm(cwt_pred, f0_mean_pred, f0_std_pred, mel2ph) - self.add_f0_loss(f0_cwt_[:, :, None], f0, uv, losses, nonpadding=nonpadding) - elif hparams['pitch_type'] == 'frame': - self.add_f0_loss(output['pitch_pred'], f0, uv, losses, nonpadding=nonpadding) - - def add_f0_loss(self, p_pred, f0, uv, losses, nonpadding): - assert p_pred[..., 
0].shape == f0.shape - if hparams['use_uv']: - assert p_pred[..., 1].shape == uv.shape - losses['uv'] = (F.binary_cross_entropy_with_logits( - p_pred[:, :, 1], uv, reduction='none') * nonpadding).sum() \ - / nonpadding.sum() * hparams['lambda_uv'] - nonpadding = nonpadding * (uv == 0).float() - - f0_pred = p_pred[:, :, 0] - if hparams['pitch_loss'] in ['l1', 'l2']: - pitch_loss_fn = F.l1_loss if hparams['pitch_loss'] == 'l1' else F.mse_loss - losses['f0'] = (pitch_loss_fn(f0_pred, f0, reduction='none') * nonpadding).sum() \ - / nonpadding.sum() * hparams['lambda_f0'] - elif hparams['pitch_loss'] == 'ssim': - return NotImplementedError - - def cwt_loss(self, cwt_p, cwt_g): - if hparams['cwt_loss'] == 'l1': - return F.l1_loss(cwt_p, cwt_g) - if hparams['cwt_loss'] == 'l2': - return F.mse_loss(cwt_p, cwt_g) - if hparams['cwt_loss'] == 'ssim': - return self.ssim_loss(cwt_p, cwt_g, 20) - - def add_energy_loss(self, energy_pred, energy, losses): - nonpadding = (energy != 0).float() - loss = (F.mse_loss(energy_pred, energy, reduction='none') * nonpadding).sum() / nonpadding.sum() - loss = loss * hparams['lambda_energy'] - losses['e'] = loss - ############ # validation plots ############ + def plot_wav(self, batch_idx, gt_mel, pred_mel, gt_f0=None, pred_f0=None): + gt_mel = gt_mel[0].cpu().numpy() + pred_mel = pred_mel[0].cpu().numpy() + gt_f0 = gt_f0[0].cpu().numpy() + pred_f0 = pred_f0[0].cpu().numpy() + if batch_idx not in self.logged_gt_wav: + gt_wav = self.vocoder.spec2wav(gt_mel, f0=gt_f0) + self.logger.experiment.add_audio(f'gt_{batch_idx}', gt_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) + self.logged_gt_wav.add(batch_idx) + pred_wav = self.vocoder.spec2wav(pred_mel, f0=pred_f0) + self.logger.experiment.add_audio(f'pred_{batch_idx}', pred_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) + def plot_mel(self, batch_idx, spec, spec_out, name=None): name = f'mel_{batch_idx}' if name is None else name vmin = hparams['mel_vmin'] @@ -291,50 +367,14 @@ def plot_mel(self, batch_idx, spec, spec_out, name=None): spec_cat = torch.cat([(spec_out - spec).abs() + vmin, spec, spec_out], -1) self.logger.experiment.add_figure(name, spec_to_figure(spec_cat[0], vmin, vmax), self.global_step) - def plot_dur(self, batch_idx, sample, model_out): - T_txt = sample['txt_tokens'].shape[1] - dur_gt = mel2ph_to_dur(sample['mel2ph'], T_txt)[0] - dur_pred = self.model.dur_predictor.out2dur(model_out['dur']).float() - txt = self.phone_encoder.decode(sample['txt_tokens'][0].cpu().numpy()) - txt = txt.split(" ") - self.logger.experiment.add_figure( - f'dur_{batch_idx}', dur_to_figure(dur_gt, dur_pred, txt), self.global_step) - - def plot_pitch(self, batch_idx, sample, model_out): - f0 = sample['f0'] - if hparams['pitch_type'] == 'ph': - mel2ph = sample['mel2ph'] - f0 = self.expand_f0_ph(f0, mel2ph) - f0_pred = self.expand_f0_ph(model_out['pitch_pred'][:, :, 0], mel2ph) - self.logger.experiment.add_figure( - f'f0_{batch_idx}', f0_to_figure(f0[0], None, f0_pred[0]), self.global_step) - return - f0 = denorm_f0(f0, sample['uv'], hparams) - if hparams['pitch_type'] == 'cwt': - # cwt - cwt_out = model_out['cwt'] - cwt_spec = cwt_out[:, :, :10] - cwt = torch.cat([cwt_spec, sample['cwt_spec']], -1) - self.logger.experiment.add_figure(f'cwt_{batch_idx}', spec_to_figure(cwt[0]), self.global_step) - # f0 - f0_pred = cwt2f0(cwt_spec, model_out['f0_mean'], model_out['f0_std'], hparams['cwt_scales']) - if hparams['use_uv']: - assert cwt_out.shape[-1] == 11 - uv_pred = 
cwt_out[:, :, -1] > 0 - f0_pred[uv_pred > 0] = 0 - f0_cwt = denorm_f0(sample['f0_cwt'], sample['uv'], hparams) - self.logger.experiment.add_figure( - f'f0_{batch_idx}', f0_to_figure(f0[0], f0_cwt[0], f0_pred[0]), self.global_step) - elif hparams['pitch_type'] == 'frame': - # f0 - uv_pred = model_out['pitch_pred'][:, :, 1] > 0 - pitch_pred = denorm_f0(model_out['pitch_pred'][:, :, 0], uv_pred, hparams) - self.logger.experiment.add_figure( - f'f0_{batch_idx}', f0_to_figure(f0[0], None, pitch_pred[0]), self.global_step) - ############ # infer ############ + def test_start(self): + self.saving_result_pool = Pool(8) + self.saving_results_futures = [] + self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() + def test_step(self, sample, batch_idx): spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') txt_tokens = sample['txt_tokens'] @@ -366,6 +406,12 @@ def test_step(self, sample, batch_idx): sample['f0_pred'] = outputs.get('f0_denorm') return self.after_infer(sample) + def test_end(self, outputs): + self.saving_result_pool.close() + [f.get() for f in tqdm(self.saving_results_futures)] + self.saving_result_pool.join() + return {} + def after_infer(self, predictions): if self.saving_result_pool is None and not hparams['profile_infer']: self.saving_result_pool = Pool(min(int(os.getenv('N_PROC', os.cpu_count())), 16)) @@ -493,17 +539,3 @@ def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, me plt.tight_layout() plt.savefig(f'{gen_dir}/plot/{base_fn}.png', format='png', dpi=1000) plt.close(fig) - - ############## - # utils - ############## - @staticmethod - def expand_f0_ph(f0, mel2ph): - f0 = denorm_f0(f0, None, hparams) - f0 = F.pad(f0, [1, 0]) - f0 = torch.gather(f0, 1, mel2ph) # [B, T_mel] - return f0 - - -if __name__ == '__main__': - FastSpeech2Task.start() diff --git a/src/diffsinger_task.py b/src/diffsinger_task.py deleted file mode 100644 index 6248b9031..000000000 --- a/src/diffsinger_task.py +++ /dev/null @@ -1,344 +0,0 @@ -import glob -import importlib -import os - -import numpy as np -import torch -import torch.nn.functional as F - -import utils -import matplotlib -from basics.base_dataset import BaseDataset -from modules.fastspeech.pe import PitchExtractor -from modules.fastspeech.tts_modules import mel2ph_to_dur -from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder -from utils.cwt import get_lf0_cwt -from utils.hparams import hparams -from utils.indexed_datasets import IndexedDataset -from utils.pitch_utils import denorm_f0, norm_interp_f0 -from .diff.candidate_decoder import FFT -from .diff.diffusion import GaussianDiffusion -from .diff.net import DiffNet -from .diffspeech_task import DiffSpeechTask - -DIFF_DECODERS = { - 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), - 'fft': lambda hp: FFT( - hp['hidden_size'], hp['dec_layers'], hp['dec_ffn_kernel_size'], hp['num_heads']), -} - -matplotlib.use('Agg') - - -class DiffSingerTask(DiffSpeechTask): - def __init__(self): - super(DiffSingerTask, self).__init__() - self.dataset_cls = AcousticDataset - self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - self.pe = PitchExtractor().cuda() - utils.load_ckpt(self.pe, hparams['pe_ckpt'], 'model', strict=True) - self.pe.eval() - - def build_tts_model(self): - # import torch - # from tqdm import tqdm - # v_min = torch.ones([80]) * 100 - # v_max = torch.ones([80]) * -100 - # for i, ds in enumerate(tqdm(self.dataset_cls('train'))): - # v_max 
= torch.max(torch.max(ds['mel'].reshape(-1, 80), 0)[0], v_max) - # v_min = torch.min(torch.min(ds['mel'].reshape(-1, 80), 0)[0], v_min) - # if i % 100 == 0: - # print(i, v_min, v_max) - # print('final', v_min, v_max) - mel_bins = hparams['audio_num_mel_bins'] - self.model = GaussianDiffusion( - phone_encoder=self.phone_encoder, - out_dims=mel_bins, denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), - timesteps=hparams['timesteps'], - K_step=hparams['K_step'], - loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], - ) - if hparams.get('fs2_ckpt', '') != '': - utils.load_ckpt(self.model.fs2, hparams['fs2_ckpt'], 'model', strict=True) - # self.model.fs2.decoder = None - for k, v in self.model.fs2.named_parameters(): - v.requires_grad = False - - def validation_step(self, sample, batch_idx): - outputs = {} - txt_tokens = sample['txt_tokens'] # [B, T_t] - - target = sample['mels'] # [B, T_s, 80] - energy = sample['energy'] - # fs2_mel = sample['fs2_mels'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - uv = sample['uv'] - - outputs['losses'] = {} - - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) - - - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - model_out = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=uv, energy=energy, ref_mels=None, infer=True) - - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel - else: - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) - pred_f0 = model_out.get('f0_denorm') - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') - self.plot_mel(batch_idx, sample['mels'], model_out['fs2_mel'], name=f'fs2mel_{batch_idx}') - return outputs - - -class AcousticDataset(BaseDataset): - def __init__(self, prefix, shuffle=False): - super().__init__(shuffle) - self.data_dir = hparams['binary_data_dir'] - self.prefix = prefix - self.hparams = hparams - self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') - self.indexed_ds = None - # self.name2spk_id={} - - # pitch stats - f0_stats_fn = f'{self.data_dir}/train_f0s_mean_std.npy' - if os.path.exists(f0_stats_fn): - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = np.load(f0_stats_fn) - hparams['f0_mean'] = float(hparams['f0_mean']) - hparams['f0_std'] = float(hparams['f0_std']) - else: - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = None, None - - if prefix == 'test': - if hparams['test_input_dir'] != '': - self.indexed_ds, self.sizes = self.load_test_inputs(hparams['test_input_dir']) - else: - if hparams['num_test_samples'] > 0: - self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] - self.sizes = [self.sizes[i] for i in self.avail_idxs] - - if hparams['pitch_type'] == 'cwt': - _, hparams['cwt_scales'] = get_lf0_cwt(np.ones(10)) - - def __getitem__(self, index): - hparams = self.hparams - item = self._get_item(index) - max_frames = hparams['max_frames'] - spec = 
torch.Tensor(item['mel'])[:max_frames] - # energy = (spec.exp() ** 2).sum(-1).sqrt() - mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None - f0, uv = norm_interp_f0(item["f0"][:max_frames], hparams) - phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) - pitch = torch.LongTensor(item.get("pitch"))[:max_frames] - sample = { - "id": index, - "item_name": item['item_name'], - "text": item['txt'], - "txt_token": phone, - "mel": spec, - "pitch": pitch, - "f0": f0, - "uv": uv, - "mel2ph": mel2ph, - "mel_nonpadding": spec.abs().sum(-1) > 0, - } - if self.hparams['use_energy_embed']: - sample['energy'] = item['energy'] - if self.hparams.get('use_key_shift_embed', False): - sample['key_shift'] = item['key_shift'] - if self.hparams.get('use_speed_embed', False): - sample['speed'] = item['speed'] - if self.hparams['use_spk_embed']: - sample["spk_embed"] = torch.Tensor(item['spk_embed']) - if self.hparams['use_spk_id']: - sample["spk_id"] = item['spk_id'] - if self.hparams['pitch_type'] == 'cwt': - cwt_spec = torch.Tensor(item['cwt_spec'])[:max_frames] - f0_mean = item.get('f0_mean', item.get('cwt_mean')) - f0_std = item.get('f0_std', item.get('cwt_std')) - sample.update({"cwt_spec": cwt_spec, "f0_mean": f0_mean, "f0_std": f0_std}) - elif self.hparams['pitch_type'] == 'ph': - f0_phlevel_sum = torch.zeros_like(phone).float().scatter_add(0, mel2ph - 1, f0) - f0_phlevel_num = torch.zeros_like(phone).float().scatter_add( - 0, mel2ph - 1, torch.ones_like(f0)).clamp_min(1) - sample["f0_ph"] = f0_phlevel_sum / f0_phlevel_num - item = self._get_item(index) - sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] - sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] - sample['is_slur'] = torch.LongTensor(item['is_slur'])[:hparams['max_frames']] - sample['word_boundary'] = torch.LongTensor(item['word_boundary'])[:hparams['max_frames']] - return sample - - def collater(self, samples): - from preprocessing.opencpop import File2Batch - return File2Batch.processed_input2batch(samples) - - def _get_item(self, index): - if hasattr(self, 'avail_idxs') and self.avail_idxs is not None: - index = self.avail_idxs[index] - if self.indexed_ds is None: - self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') - return self.indexed_ds[index] - - def load_test_inputs(self, test_input_dir, spk_id=0): - inp_wav_paths = glob.glob(f'{test_input_dir}/*.wav') + glob.glob(f'{test_input_dir}/*.mp3') - sizes = [] - items = [] - - binarizer_cls = hparams.get("binarizer_cls", 'basics.base_binarizer.BaseBinarizer') - pkg = ".".join(binarizer_cls.split(".")[:-1]) - cls_name = binarizer_cls.split(".")[-1] - binarizer_cls = getattr(importlib.import_module(pkg), cls_name) - binarization_args = hparams['binarization_args'] - - for wav_fn in inp_wav_paths: - item_name = os.path.basename(wav_fn) - ph = txt = tg_fn = '' - wav_fn = wav_fn - encoder = None - item = binarizer_cls.process_item(item_name, ph, txt, tg_fn, wav_fn, spk_id, encoder, binarization_args) - items.append(item) - sizes.append(item['len']) - return items, sizes - - -class DiffSingerMIDITask(DiffSingerTask): - def __init__(self): - super(DiffSingerMIDITask, self).__init__() - self.dataset_cls = AcousticDataset - - def run_model(self, model, sample, return_output=False, infer=False): - ''' - steps: - 1. run the full model, calc the main loss - 2. 
calculate loss for dur_predictor, pitch_predictor, energy_predictor - ''' - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample.get('energy') - - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - if hparams['pitch_type'] == 'cwt': - # NOTE: this part of script is *isolated* from other scripts, which means - # it may not be compatible with the current version. - pass - # cwt_spec = sample[f'cwt_spec'] - # f0_mean = sample['f0_mean'] - # f0_std = sample['f0_std'] - # sample['f0_cwt'] = f0 = model.cwt2f0_norm(cwt_spec, f0_mean, f0_std, mel2ph) - - # output == ret - # model == src.diff.diffusion.GaussianDiffusion - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, - ref_mels=target, f0=f0, uv=uv, energy=energy, infer=infer, pitch_midi=sample['pitch_midi'], - midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur')) - - losses = {} - if 'diff_loss' in output: - losses['mel'] = output['diff_loss'] - self.add_dur_loss(output['dur'], mel2ph, txt_tokens, sample['word_boundary'], losses=losses) - if hparams['use_pitch_embed']: - self.add_pitch_loss(output, sample, losses) - if hparams['use_energy_embed']: - self.add_energy_loss(output['energy_pred'], energy, losses) - if not return_output: - return losses - else: - return losses, output - - def validation_step(self, sample, batch_idx): - outputs = {} - txt_tokens = sample['txt_tokens'] # [B, T_t] - - target = sample['mels'] # [B, T_s, 80] - energy = sample.get('energy') - # fs2_mel = sample['fs2_mels'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - mel2ph = sample['mel2ph'] - - outputs['losses'] = {} - - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) - - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - model_out = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=None, uv=None, energy=energy, ref_mels=None, infer=True, - pitch_midi=sample['pitch_midi'], midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur')) - - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel - else: - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) - pred_f0 = model_out.get('f0_denorm') - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') - #self.plot_mel(batch_idx, sample['mels'], model_out['fs2_mel'], name=f'fs2mel_{batch_idx}') - if hparams['use_pitch_embed']: - self.plot_pitch(batch_idx, sample, model_out) - return outputs - - def add_dur_loss(self, dur_pred, mel2ph, txt_tokens, wdb, losses=None): - """ - the effect of each loss component: - hparams['dur_loss'] : align each phoneme - hparams['lambda_word_dur']: align each word - hparams['lambda_sent_dur']: align each sentence - - :param dur_pred: [B, T], float, log scale - :param mel2ph: [B, T] - :param txt_tokens: [B, T] - :param losses: - :return: - """ - B, T = txt_tokens.shape - nonpadding = (txt_tokens != 0).float() - dur_gt = mel2ph_to_dur(mel2ph, 
T).float() * nonpadding - is_sil = torch.zeros_like(txt_tokens).bool() - for p in self.sil_ph: - is_sil = is_sil | (txt_tokens == self.phone_encoder.encode(p)[0]) - is_sil = is_sil.float() # [B, T_txt] - - # phone duration loss - if hparams['dur_loss'] == 'mse': - losses['pdur'] = F.mse_loss(dur_pred, (dur_gt + 1).log(), reduction='none') - losses['pdur'] = (losses['pdur'] * nonpadding).sum() / nonpadding.sum() - losses['pdur'] = losses['pdur'] * hparams['lambda_ph_dur'] - dur_pred = (dur_pred.exp() - 1).clamp(min=0) - else: - raise NotImplementedError - - # use linear scale for sent and word duration - if hparams['lambda_word_dur'] > 0: - #idx = F.pad(wdb.cumsum(axis=1), (1, 0))[:, :-1] - idx = wdb.cumsum(axis=1) - # word_dur_g = dur_gt.new_zeros([B, idx.max() + 1]).scatter_(1, idx, midi_dur) # midi_dur can be implied by add gt-ph_dur - word_dur_p = dur_pred.new_zeros([B, idx.max() + 1]).scatter_add(1, idx, dur_pred) - word_dur_g = dur_gt.new_zeros([B, idx.max() + 1]).scatter_add(1, idx, dur_gt) - wdur_loss = F.mse_loss((word_dur_p + 1).log(), (word_dur_g + 1).log(), reduction='none') - word_nonpadding = (word_dur_g > 0).float() - wdur_loss = (wdur_loss * word_nonpadding).sum() / word_nonpadding.sum() - losses['wdur'] = wdur_loss * hparams['lambda_word_dur'] - if hparams['lambda_sent_dur'] > 0: - sent_dur_p = dur_pred.sum(-1) - sent_dur_g = dur_gt.sum(-1) - sdur_loss = F.mse_loss((sent_dur_p + 1).log(), (sent_dur_g + 1).log(), reduction='mean') - losses['sdur'] = sdur_loss.mean() * hparams['lambda_sent_dur'] diff --git a/src/diffspeech_task.py b/src/diffspeech_task.py deleted file mode 100644 index b94f7a0cb..000000000 --- a/src/diffspeech_task.py +++ /dev/null @@ -1,124 +0,0 @@ -import torch - -import utils -from utils.hparams import hparams -from .diff.net import DiffNet -from .diff.diffusion import GaussianDiffusion -from .task import DiffFsTask -from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder -from utils.pitch_utils import denorm_f0 -from tts.tasks.fs2_utils import FastSpeechDataset - -DIFF_DECODERS = { - 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), -} - - -class DiffSpeechTask(DiffFsTask): - def __init__(self): - super(DiffSpeechTask, self).__init__() - self.dataset_cls = FastSpeechDataset - self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() - self.logged_gt_wav = set() - - def build_tts_model(self): - mel_bins = hparams['audio_num_mel_bins'] - self.model = GaussianDiffusion( - phone_encoder=self.phone_encoder, - out_dims=mel_bins, denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), - timesteps=hparams['timesteps'], - K_step=hparams['K_step'], - loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], - ) - if hparams['fs2_ckpt'] != '': - utils.load_ckpt(self.model.fs2, hparams['fs2_ckpt'], 'model', strict=True) - # self.model.fs2.decoder = None - for k, v in self.model.fs2.named_parameters(): - if not 'predictor' in k: - v.requires_grad = False - - def build_optimizer(self, model): - self.optimizer = optimizer = torch.optim.AdamW( - filter(lambda p: p.requires_grad, model.parameters()), - lr=hparams['lr'], - betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), - weight_decay=hparams['weight_decay']) - return optimizer - - def run_model(self, model, sample, return_output=False, infer=False): - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - # mel2ph = sample['mel2ph'] if hparams['use_gt_dur'] else None # [B, T_s] - mel2ph = 
sample['mel2ph'] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample['energy'] - # fs2_mel = sample['fs2_mels'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - if hparams['pitch_type'] == 'cwt': - cwt_spec = sample[f'cwt_spec'] - f0_mean = sample['f0_mean'] - f0_std = sample['f0_std'] - sample['f0_cwt'] = f0 = model.cwt2f0_norm(cwt_spec, f0_mean, f0_std, mel2ph) - - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, - ref_mels=target, f0=f0, uv=uv, energy=energy, infer=infer) - - losses = {} - if 'diff_loss' in output: - losses['mel'] = output['diff_loss'] - self.add_dur_loss(output['dur'], mel2ph, txt_tokens, losses=losses) - if hparams['use_pitch_embed']: - self.add_pitch_loss(output, sample, losses) - if hparams['use_energy_embed']: - self.add_energy_loss(output['energy_pred'], energy, losses) - if not return_output: - return losses - else: - return losses, output - - def validation_step(self, sample, batch_idx): - outputs = {} - txt_tokens = sample['txt_tokens'] # [B, T_t] - - energy = sample['energy'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - uv = sample['uv'] - - outputs['losses'] = {} - - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) - - - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - # model_out = self.model( - # txt_tokens, spk_embed=spk_embed, mel2ph=None, f0=None, uv=None, energy=None, ref_mels=None, infer=True) - # self.plot_mel(batch_idx, model_out['mel_out'], model_out['fs2_mel'], name=f'diffspeech_vs_fs2_{batch_idx}') - model_out = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=uv, energy=energy, ref_mels=None, infer=True) - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, - pred_f0=model_out.get('f0_denorm')) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out']) - return outputs - - ############ - # validation plots - ############ - def plot_wav(self, batch_idx, gt_mel, pred_mel, gt_f0=None, pred_f0=None): - gt_mel = gt_mel[0].cpu().numpy() - pred_mel = pred_mel[0].cpu().numpy() - gt_f0 = gt_f0[0].cpu().numpy() - pred_f0 = pred_f0[0].cpu().numpy() - if batch_idx not in self.logged_gt_wav: - gt_wav = self.vocoder.spec2wav(gt_mel, f0=gt_f0) - self.logger.experiment.add_audio(f'gt_{batch_idx}', gt_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) - self.logged_gt_wav.add(batch_idx) - pred_wav = self.vocoder.spec2wav(pred_mel, f0=pred_f0) - self.logger.experiment.add_audio(f'pred_{batch_idx}', pred_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) diff --git a/src/naive_task.py b/src/naive_task.py deleted file mode 100644 index f2cd52419..000000000 --- a/src/naive_task.py +++ /dev/null @@ -1,88 +0,0 @@ -from utils.hparams import hparams -from .diffsinger_task import DiffSingerMIDITask, AcousticDataset -from utils.pitch_utils import denorm_f0 -import utils - -class NaiveTask(DiffSingerMIDITask): - def __init__(self): - super(NaiveTask, self).__init__() - self.dataset_cls = AcousticDataset - - def run_model(self, model, sample, return_output=False, infer=False): - ''' - steps: - 1. run the full model, calc the main loss - 2. 
calculate loss for dur_predictor, pitch_predictor, energy_predictor - ''' - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample.get('energy') - key_shift = sample.get('key_shift') - speed = sample.get('speed') - - if infer: - if hparams['use_spk_id']: - spk_embed = model.fs2.spk_embed(sample['spk_ids'])[:, None, :] - elif hparams['use_spk_embed']: - spk_embed = sample['spk_embed'] - else: - spk_embed = None - output = model(txt_tokens, mel2ph=mel2ph, spk_mix_embed=spk_embed,ref_mels=target, - f0=f0, uv=uv, energy=energy, key_shift=key_shift, speed=speed, infer=infer) - else: - spk_embed = sample.get('spk_ids') if hparams['use_spk_id'] else sample.get('spk_embed') - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, ref_mels=target, - f0=f0, uv=uv, energy=energy, key_shift=key_shift, speed=speed, infer=infer) - - losses = {} - if 'diff_loss' in output: - losses['mel'] = output['diff_loss'] - if not return_output: - return losses - else: - return losses, output - - def validation_step(self, sample, batch_idx): - outputs = {} - txt_tokens = sample['txt_tokens'] # [B, T_t] - - target = sample['mels'] # [B, T_s, 80] - energy = sample.get('energy') - key_shift = sample.get('key_shift') - speed = sample.get('speed') - # fs2_mel = sample['fs2_mels'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) - - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - if hparams['use_spk_id']: - spk_embed = self.model.fs2.spk_embed(sample['spk_ids'])[:, None, :] - elif hparams['use_spk_embed']: - spk_embed = sample['spk_embed'] - else: - spk_embed = None - model_out = self.model( - txt_tokens, spk_mix_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=None, energy=energy, - key_shift=key_shift, speed=speed, ref_mels=None, pitch_midi=sample['pitch_midi'], - midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur'), infer=True - ) - - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel - else: - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) - pred_f0 = gt_f0 - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') - - return outputs diff --git a/src/task.py b/src/task.py deleted file mode 100644 index 8bf7f0516..000000000 --- a/src/task.py +++ /dev/null @@ -1,94 +0,0 @@ -import torch - -import utils -from .diff.net import DiffNet -from tts.tasks.fs2 import FastSpeech2Task -from utils.hparams import hparams - - -DIFF_DECODERS = { - 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), -} - - -class DiffFsTask(FastSpeech2Task): - def build_tts_model(self): - ''' - NOTE: this function is *isolated* from other scripts, which means - it may not be compatible with the current version. 
- ''' - return - from src.diff.diffusion_ import GaussianDiffusion - mel_bins = hparams['audio_num_mel_bins'] - self.model = GaussianDiffusion( - phone_encoder=self.phone_encoder, - out_dims=mel_bins, denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), - timesteps=hparams['timesteps'], - loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], - ) - - def run_model(self, model, sample, return_output=False, infer=False): - ''' - NOTE: this function is *isolated* from other scripts, which means - it may not be compatible with the current version. - ''' - return - txt_tokens = sample['txt_tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] - f0 = sample['f0'] - uv = sample['uv'] - energy = sample['energy'] - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - if hparams['pitch_type'] == 'cwt': - cwt_spec = sample[f'cwt_spec'] - f0_mean = sample['f0_mean'] - f0_std = sample['f0_std'] - sample['f0_cwt'] = f0 = model.cwt2f0_norm(cwt_spec, f0_mean, f0_std, mel2ph) - - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, - ref_mels=target, f0=f0, uv=uv, energy=energy, infer=infer) - - losses = {} - if 'diff_loss' in output: - losses['mel'] = output['diff_loss'] - self.add_dur_loss(output['dur'], mel2ph, txt_tokens, losses=losses) - if hparams['use_pitch_embed']: - self.add_pitch_loss(output, sample, losses) - if hparams['use_energy_embed']: - self.add_energy_loss(output['energy_pred'], energy, losses) - if not return_output: - return losses - else: - return losses, output - - def _training_step(self, sample, batch_idx, _): - log_outputs = self.run_model(self.model, sample) - total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) - log_outputs['batch_size'] = sample['txt_tokens'].size()[0] - log_outputs['lr'] = self.scheduler.get_lr()[0] - return total_loss, log_outputs - - def validation_step(self, sample, batch_idx): - outputs = {} - outputs['losses'] = {} - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: - _, model_out = self.run_model(self.model, sample, return_output=True, infer=True) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out']) - return outputs - - def build_scheduler(self, optimizer): - return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) - - def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): - if optimizer is None: - return - optimizer.step() - optimizer.zero_grad() - if self.scheduler is not None: - self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) diff --git a/tts/tasks/tts.py b/tts/tasks/tts.py deleted file mode 100644 index d4196eb59..000000000 --- a/tts/tasks/tts.py +++ /dev/null @@ -1,123 +0,0 @@ -from multiprocessing.pool import Pool - -import matplotlib - -from modules.fastspeech.pe import PitchExtractor -from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder -from utils.phoneme_utils import build_phoneme_list -from utils.training_utils import RSQRTSchedule - -matplotlib.use('Agg') -import numpy as np -from tqdm import tqdm -import torch.distributed as dist - -from basics.base_task import BaseTask -from utils.hparams 
import hparams -from utils.text_encoder import TokenTextEncoder - -import torch -import torch.optim -import torch.utils.data -import utils - - -class TtsTask(BaseTask): - def __init__(self, *args, **kwargs): - self.vocoder = None - self.phone_encoder = self.build_phone_encoder() - self.padding_idx = self.phone_encoder.pad() - self.eos_idx = self.phone_encoder.eos() - self.seg_idx = self.phone_encoder.seg() - self.saving_result_pool = None - self.saving_results_futures = None - self.stats = {} - super().__init__(*args, **kwargs) - - def build_scheduler(self, optimizer): - return RSQRTSchedule(optimizer) - - def build_optimizer(self, model): - self.optimizer = optimizer = torch.optim.AdamW( - model.parameters(), - lr=hparams['lr']) - return optimizer - - def build_dataloader(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, endless=False, batch_by_size=True): - devices_cnt = torch.cuda.device_count() - if devices_cnt == 0: - devices_cnt = 1 - if required_batch_size_multiple == -1: - required_batch_size_multiple = devices_cnt - - def shuffle_batches(batches): - np.random.shuffle(batches) - return batches - - if max_tokens is not None: - max_tokens *= devices_cnt - if max_sentences is not None: - max_sentences *= devices_cnt - indices = dataset.ordered_indices() - if batch_by_size: - batch_sampler = utils.batch_by_size( - indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple, - ) - else: - batch_sampler = [] - for i in range(0, len(indices), max_sentences): - batch_sampler.append(indices[i:i + max_sentences]) - - if shuffle: - batches = shuffle_batches(list(batch_sampler)) - if endless: - batches = [b for _ in range(1000) for b in shuffle_batches(list(batch_sampler))] - else: - batches = batch_sampler - if endless: - batches = [b for _ in range(1000) for b in batches] - num_workers = dataset.num_workers - if self.trainer.use_ddp: - num_replicas = dist.get_world_size() - rank = dist.get_rank() - batches = [x[rank::num_replicas] for x in batches if len(x) % num_replicas == 0] - return torch.utils.data.DataLoader(dataset, - collate_fn=dataset.collater, - batch_sampler=batches, - num_workers=num_workers, - pin_memory=False) - - @staticmethod - def build_phone_encoder(): - phone_list = build_phoneme_list() - return TokenTextEncoder(vocab_list=phone_list, replace_oov=',') - - def test_start(self): - self.saving_result_pool = Pool(8) - self.saving_results_futures = [] - self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - self.pe = PitchExtractor().cuda() - utils.load_ckpt(self.pe, hparams['pe_ckpt'], 'model', strict=True) - self.pe.eval() - - def test_end(self, outputs): - self.saving_result_pool.close() - [f.get() for f in tqdm(self.saving_results_futures)] - self.saving_result_pool.join() - return {} - - ########## - # utils - ########## - def weights_nonzero_speech(self, target): - # target : B x T x mel - # Assign weight 1.0 to all labels except for padding (id=0). 
- dim = target.size(-1) - return target.abs().sum(-1, keepdim=True).ne(0).float().repeat(1, 1, dim) - - -if __name__ == '__main__': - TtsTask.start() From 474d3c64c5f336c377b3aee15398e085a67592ba Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 00:27:07 +0800 Subject: [PATCH 003/475] Fix config file --- configs/acoustic/nomidi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/acoustic/nomidi.yaml b/configs/acoustic/nomidi.yaml index 1ead10aed..b53356909 100644 --- a/configs/acoustic/nomidi.yaml +++ b/configs/acoustic/nomidi.yaml @@ -1,7 +1,7 @@ base_config: - configs/basics/fs2.yaml -task_cls: src.acoustic_task.NaiveTask +task_cls: src.acoustic_task.AcousticTask datasets: [ 'opencpop', ] From debec87a86fee5f7b54880f5eb50b6bbfb531d42 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 00:36:46 +0800 Subject: [PATCH 004/475] Clean up vocoders and useless code --- augmentation/spec_stretch.py | 2 +- basics/base_svs_infer.py | 2 +- {src/vocoders => basics}/base_vocoder.py | 0 configs/obsolete/hifigan.yaml | 122 -------- configs/obsolete/pe.yaml | 35 --- configs/obsolete/pwg.yaml | 111 -------- modules/fastspeech/tts_modules.py | 4 - preprocessing/opencpop.py | 2 +- src/acoustic_task.py | 5 +- src/diff/candidate_decoder.py | 98 ------- src/diff/diffusion (isolated).py | 339 ----------------------- src/vocoders/ddsp.py | 2 +- src/vocoders/hifigan.py | 115 -------- src/vocoders/nsf_hifigan.py | 2 +- src/vocoders/pwg.py | 137 --------- 15 files changed, 6 insertions(+), 970 deletions(-) rename {src/vocoders => basics}/base_vocoder.py (100%) delete mode 100644 configs/obsolete/hifigan.yaml delete mode 100644 configs/obsolete/pe.yaml delete mode 100644 configs/obsolete/pwg.yaml delete mode 100644 src/diff/candidate_decoder.py delete mode 100644 src/diff/diffusion (isolated).py delete mode 100644 src/vocoders/hifigan.py delete mode 100644 src/vocoders/pwg.py diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 6ed9ccaf5..f1d0884a9 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -6,7 +6,7 @@ from basics.base_augmentation import BaseAugmentation from data_gen.data_gen_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import LengthRegulator -from src.vocoders.base_vocoder import VOCODERS +from basics.base_vocoder import VOCODERS from utils.hparams import hparams from utils.pitch_utils import f0_to_coarse diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index ced52dfee..deaa06fee 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -5,7 +5,7 @@ import torch from pypinyin import lazy_pinyin -from src.vocoders.base_vocoder import VOCODERS +from basics.base_vocoder import VOCODERS from utils.hparams import set_hparams, hparams from utils.phoneme_utils import build_g2p_dictionary, build_phoneme_list from utils.text_encoder import TokenTextEncoder diff --git a/src/vocoders/base_vocoder.py b/basics/base_vocoder.py similarity index 100% rename from src/vocoders/base_vocoder.py rename to basics/base_vocoder.py diff --git a/configs/obsolete/hifigan.yaml b/configs/obsolete/hifigan.yaml deleted file mode 100644 index 846f16c19..000000000 --- a/configs/obsolete/hifigan.yaml +++ /dev/null @@ -1,122 +0,0 @@ -base_config: - - configs/basics/base.yaml -task_cls: src.vocoder.hifigan.HifiGanTask -resblock: "1" -adam_b1: 0.8 -adam_b2: 0.99 -upsample_rates: [ 8,8,2,2 ] -upsample_kernel_sizes: [ 16,16,4,4 ] -upsample_initial_channel: 128 
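As a hedged aside (not part of the original config): the comments in this file state that the product of the upsampling factors must equal the hop size, and the values above satisfy that, since 8 * 8 * 2 * 2 = 256 matches the hop_size: 256 declared in the feature extraction section below. A minimal Python sanity check, using only values copied from this file:

import math

upsample_rates = [8, 8, 2, 2]  # generator upsampling factors (from above)
hop_size = 256                 # mel hop size (from the feature extraction section below)

# Each mel frame must expand to exactly one hop of waveform samples,
# so the product of the per-stage upsampling factors must equal hop_size.
assert math.prod(upsample_rates) == hop_size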
-resblock_kernel_sizes: [ 3,7,11 ]
-resblock_dilation_sizes: [ [ 1,3,5 ], [ 1,3,5 ], [ 1,3,5 ] ]
-
-lambda_mel: 45.0
-
-binarization_args:
-  with_wav: true
-  with_spk_embed: false
-  with_align: false
-test_input_dir: ''
-
-###########
-# train and eval
-###########
-max_samples: 8192
-max_sentences: 16
-max_eval_sentences: 1
-max_updates: 1000000
-val_check_interval: 2000
-
-
-###########################################################
-# FEATURE EXTRACTION SETTING #
-###########################################################
-sampling_rate: 22050 # Sampling rate.
-fft_size: 1024 # FFT size.
-hop_size: 256 # Hop size.
-win_length: null # Window length.
-# If set to null, it will be the same as fft_size.
-window: "hann" # Window function.
-num_mels: 80 # Number of mel basis.
-fmin: 80 # Minimum freq in mel basis calculation.
-fmax: 7600 # Maximum frequency in mel basis calculation.
-format: "hdf5" # Feature file format. "npy" or "hdf5" is supported.
-
-###########################################################
-# GENERATOR NETWORK ARCHITECTURE SETTING #
-###########################################################
-generator_params:
-  lr: 0.0002 # Generator's learning rate.
-  in_channels: 1 # Number of input channels.
-  out_channels: 1 # Number of output channels.
-  kernel_size: 3 # Kernel size of dilated convolution.
-  layers: 30 # Number of residual block layers.
-  stacks: 3 # Number of stacks, i.e. dilation cycles.
-  residual_channels: 64 # Number of channels in residual conv.
-  gate_channels: 128 # Number of channels in gated conv.
-  skip_channels: 64 # Number of channels in skip conv.
-  aux_channels: 80 # Number of channels for auxiliary feature conv.
-  # Must be the same as num_mels.
-  aux_context_window: 0 # Context window size for auxiliary feature.
-  # If set to 2, previous 2 and future 2 frames will be considered.
-  dropout: 0.0 # Dropout rate. 0.0 means no dropout applied.
-  use_weight_norm: true # Whether to use weight norm.
-  # If set to true, it will be applied to all of the conv layers.
-  upsample_net: "ConvInUpsampleNetwork" # Upsampling network architecture.
-  upsample_params: # Upsampling network parameters.
-    upsample_scales: [4, 4, 4, 4] # Upsampling scales. Product of these must be the same as hop size.
-  use_pitch_embed: false
-
-###########################################################
-# DISCRIMINATOR NETWORK ARCHITECTURE SETTING #
-###########################################################
-discriminator_params:
-  in_channels: 1 # Number of input channels.
-  out_channels: 1 # Number of output channels.
-  kernel_size: 3 # Kernel size of conv layers.
-  layers: 10 # Number of conv layers.
-  conv_channels: 64 # Number of conv channels.
-  bias: true # Whether to use bias parameter in conv.
-  use_weight_norm: true # Whether to use weight norm.
-  # If set to true, it will be applied to all of the conv layers.
-  nonlinear_activation: "LeakyReLU" # Nonlinear function after each conv.
-  nonlinear_activation_params: # Nonlinear function parameters.
-    negative_slope: 0.2 # Alpha in LeakyReLU.
-
-###########################################################
-# STFT LOSS SETTING #
-###########################################################
-stft_loss_params:
-  fft_sizes: [1024, 2048, 512] # List of FFT sizes for STFT-based loss.
-  hop_sizes: [120, 240, 50] # List of hop sizes for STFT-based loss.
-  win_lengths: [600, 1200, 240] # List of window lengths for STFT-based loss.
- window: "hann_window" # Window function for STFT-based loss -use_mel_loss: false - -########################################################### -# ADVERSARIAL LOSS SETTING # -########################################################### -lambda_adv: 4.0 # Loss balancing coefficient. - -########################################################### -# OPTIMIZER & SCHEDULER SETTING # -########################################################### -generator_optimizer_params: - lr: 0.0001 # Generator's learning rate. - eps: 1.0e-6 # Generator's epsilon. - weight_decay: 0.0 # Generator's weight decay coefficient. -generator_scheduler_params: - step_size: 200000 # Generator's scheduler step size. - gamma: 0.5 # Generator's scheduler gamma. - # At each step size, lr will be multiplied by this parameter. -generator_grad_norm: 10 # Generator's gradient norm. -discriminator_optimizer_params: - lr: 0.0002 # Discriminator's learning rate. - eps: 1.0e-6 # Discriminator's epsilon. - weight_decay: 0.0 # Discriminator's weight decay coefficient. -discriminator_scheduler_params: - step_size: 200000 # Discriminator's scheduler step size. - gamma: 0.5 # Discriminator's scheduler gamma. - # At each step size, lr will be multiplied by this parameter. -discriminator_grad_norm: 1 # Discriminator's gradient norm. -disc_start_steps: 40000 # Number of steps to start to train discriminator. diff --git a/configs/obsolete/pe.yaml b/configs/obsolete/pe.yaml deleted file mode 100644 index 86d93d8c8..000000000 --- a/configs/obsolete/pe.yaml +++ /dev/null @@ -1,35 +0,0 @@ -base_config: - - configs/basics/fs2.yaml -task_cls: tts.tasks.pe.PitchExtractionTask - -max_frames: 8000 -audio_sample_rate: 24000 -hop_size: 128 # Hop size. -fft_size: 512 # FFT size. -win_size: 512 # FFT size. -fmin: 30 -fmax: 12000 -min_level_db: -120 - -raw_data_dir: 'tts/data/raw/ljspeech-1.1' -processed_data_dir: 'tts/data/processed/ljspeech' -binary_data_dir: 'xxx' -pre_align_cls: tts.data_gen.lj.pre_align.LJPreAlign - -mel_loss: l1 -num_test_samples: 20 -test_ids: [ 68, 70, 74, 87, 110, 172, 190, 215, 231, 294, - 316, 324, 402, 422, 485, 500, 505, 508, 509, 519 ] -use_energy_embed: false -test_num: 523 -valid_num: 348 - -pitch_type: frame -pitch_extractor_conv_layers: 2 - - -# config for experiments -max_tokens: 20000 -use_spk_embed: false -num_valid_plots: 10 -max_updates: 60000 \ No newline at end of file diff --git a/configs/obsolete/pwg.yaml b/configs/obsolete/pwg.yaml deleted file mode 100644 index 9f3459068..000000000 --- a/configs/obsolete/pwg.yaml +++ /dev/null @@ -1,111 +0,0 @@ -base_config: - - configs/basics/base.yaml -task_cls: src.vocoder.pwg.PwgTask - -binarization_args: - with_wav: true - with_spk_embed: false - with_align: false -test_input_dir: '' - -########### -# train and eval -########### -max_samples: 25600 -max_sentences: 5 -max_eval_sentences: 1 -max_updates: 1000000 -val_check_interval: 2000 - - -########################################################### -# FEATURE EXTRACTION SETTING # -########################################################### -sampling_rate: 22050 # Sampling rate. -fft_size: 1024 # FFT size. -hop_size: 256 # Hop size. -win_length: null # Window length. -# If set to null, it will be the same as fft_size. -window: "hann" # Window function. -num_mels: 80 # Number of mel basis. -fmin: 80 # Minimum freq in mel basis calculation. -fmax: 7600 # Maximum frequency in mel basis calculation. -format: "hdf5" # Feature file format. "npy" or "hdf5" is supported. 
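As a hedged aside (not part of the original config): the feature extraction settings above fully determine the STFT grid, since win_length falls back to fft_size when null and each mel frame advances by hop_size samples. A small Python sketch of the resulting timing, assuming only the values shown above:

sampling_rate = 22050
fft_size = 1024
hop_size = 256
win_length = None  # null in the config; falls back to fft_size per the comment above
win_length = win_length or fft_size

frames_per_second = sampling_rate / hop_size   # about 86.1 mel frames per second
window_ms = 1000 * win_length / sampling_rate  # about 46.4 ms analysis window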
-
-###########################################################
-# GENERATOR NETWORK ARCHITECTURE SETTING #
-###########################################################
-generator_params:
-  in_channels: 1 # Number of input channels.
-  out_channels: 1 # Number of output channels.
-  kernel_size: 3 # Kernel size of dilated convolution.
-  layers: 30 # Number of residual block layers.
-  stacks: 3 # Number of stacks, i.e. dilation cycles.
-  residual_channels: 64 # Number of channels in residual conv.
-  gate_channels: 128 # Number of channels in gated conv.
-  skip_channels: 64 # Number of channels in skip conv.
-  aux_channels: 80 # Number of channels for auxiliary feature conv.
-  # Must be the same as num_mels.
-  aux_context_window: 2 # Context window size for auxiliary feature.
-  # If set to 2, previous 2 and future 2 frames will be considered.
-  dropout: 0.0 # Dropout rate. 0.0 means no dropout applied.
-  use_weight_norm: true # Whether to use weight norm.
-  # If set to true, it will be applied to all of the conv layers.
-  upsample_net: "ConvInUpsampleNetwork" # Upsampling network architecture.
-  upsample_params: # Upsampling network parameters.
-    upsample_scales: [4, 4, 4, 4] # Upsampling scales. Product of these must be the same as hop size.
-  use_pitch_embed: false
-
-###########################################################
-# DISCRIMINATOR NETWORK ARCHITECTURE SETTING #
-###########################################################
-discriminator_params:
-  in_channels: 1 # Number of input channels.
-  out_channels: 1 # Number of output channels.
-  kernel_size: 3 # Kernel size of conv layers.
-  layers: 10 # Number of conv layers.
-  conv_channels: 64 # Number of conv channels.
-  bias: true # Whether to use bias parameter in conv.
-  use_weight_norm: true # Whether to use weight norm.
-  # If set to true, it will be applied to all of the conv layers.
-  nonlinear_activation: "LeakyReLU" # Nonlinear function after each conv.
-  nonlinear_activation_params: # Nonlinear function parameters.
-    negative_slope: 0.2 # Alpha in LeakyReLU.
-
-###########################################################
-# STFT LOSS SETTING #
-###########################################################
-stft_loss_params:
-  fft_sizes: [1024, 2048, 512] # List of FFT sizes for STFT-based loss.
-  hop_sizes: [120, 240, 50] # List of hop sizes for STFT-based loss.
-  win_lengths: [600, 1200, 240] # List of window lengths for STFT-based loss.
-  window: "hann_window" # Window function for STFT-based loss.
-use_mel_loss: false
-
-###########################################################
-# ADVERSARIAL LOSS SETTING #
-###########################################################
-lambda_adv: 4.0 # Loss balancing coefficient.
-
-###########################################################
-# OPTIMIZER & SCHEDULER SETTING #
-###########################################################
-generator_optimizer_params:
-  lr: 0.0001 # Generator's learning rate.
-  eps: 1.0e-6 # Generator's epsilon.
-  weight_decay: 0.0 # Generator's weight decay coefficient.
-generator_scheduler_params:
-  step_size: 200000 # Generator's scheduler step size.
-  gamma: 0.5 # Generator's scheduler gamma.
-  # At each step size, lr will be multiplied by this parameter.
-generator_grad_norm: 10 # Generator's gradient norm.
-discriminator_optimizer_params:
-  lr: 0.00005 # Discriminator's learning rate.
-  eps: 1.0e-6 # Discriminator's epsilon.
-  weight_decay: 0.0 # Discriminator's weight decay coefficient.
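As a hedged aside (not part of the original config): the scheduler comments above describe a step decay, where the learning rate is multiplied by gamma once every step_size optimizer steps. A minimal PyTorch sketch of that behavior with the generator values from this file; torch's built-in Adam is used purely as a stand-in optimizer, and the single dummy parameter exists only so an optimizer can be constructed:

import torch

param = torch.nn.Parameter(torch.zeros(1))        # dummy parameter, illustration only
optimizer = torch.optim.Adam([param], lr=0.0001)  # generator lr from above
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=200000, gamma=0.5)

# scheduler.step() is called once per training step; the lr is then
# 1e-4 for steps [0, 200000), 5e-5 for [200000, 400000), 2.5e-5 after that.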
-discriminator_scheduler_params: - step_size: 200000 # Discriminator's scheduler step size. - gamma: 0.5 # Discriminator's scheduler gamma. - # At each step size, lr will be multiplied by this parameter. -discriminator_grad_norm: 1 # Discriminator's gradient norm. -disc_start_steps: 40000 # Number of steps to start to train discriminator. diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index a8acc5a5f..3cb9dd3a8 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -262,10 +262,6 @@ def forward(self, xs): return xs -class EnergyPredictor(PitchPredictor): - pass - - def mel2ph_to_dur(mel2ph, T_txt, max_dur=None): B, _ = mel2ph.shape dur = mel2ph.new_zeros(B, T_txt + 1).scatter_add(1, mel2ph, torch.ones_like(mel2ph)) diff --git a/preprocessing/opencpop.py b/preprocessing/opencpop.py index 68e490232..d5c886b00 100644 --- a/preprocessing/opencpop.py +++ b/preprocessing/opencpop.py @@ -11,7 +11,7 @@ import utils from basics.base_binarizer import BinarizationError from data_gen.data_gen_utils import get_pitch_parselmouth -from src.vocoders.base_vocoder import VOCODERS +from basics.base_vocoder import VOCODERS from tts.data_gen.txt_processors.zh_g2pM import get_all_vowels from utils.hparams import hparams diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 5abe331fd..cb510ee07 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -23,7 +23,7 @@ from basics.base_task import BaseTask from data_gen.data_gen_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import mel2ph_to_dur -from src.vocoders.base_vocoder import get_vocoder_cls, BaseVocoder +from basics.base_vocoder import get_vocoder_cls, BaseVocoder from utils import audio from utils.cwt import get_lf0_cwt from utils.hparams import hparams @@ -34,14 +34,11 @@ from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder -from .diff.candidate_decoder import FFT from .diff.diffusion import GaussianDiffusion from .diff.net import DiffNet DIFF_DECODERS = { 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), - 'fft': lambda hp: FFT( - hp['hidden_size'], hp['dec_layers'], hp['dec_ffn_kernel_size'], hp['num_heads']), } matplotlib.use('Agg') diff --git a/src/diff/candidate_decoder.py b/src/diff/candidate_decoder.py deleted file mode 100644 index bccb47aad..000000000 --- a/src/diff/candidate_decoder.py +++ /dev/null @@ -1,98 +0,0 @@ -from modules.fastspeech.tts_modules import FastspeechDecoder -# from modules.fastspeech.fast_tacotron import DecoderRNN -# from modules.fastspeech.speedy_speech.speedy_speech import ConvBlocks -# from modules.fastspeech.conformer.conformer import ConformerDecoder -import torch -from torch.nn import functional as F -import torch.nn as nn -import math -from utils.hparams import hparams -from modules.commons.common_layers import Mish -Linear = nn.Linear - -class SinusoidalPosEmb(nn.Module): - def __init__(self, dim): - super().__init__() - self.dim = dim - - def forward(self, x): - device = x.device - half_dim = self.dim // 2 - emb = math.log(10000) / (half_dim - 1) - emb = torch.exp(torch.arange(half_dim, device=device) * -emb) - emb = x[:, None] * emb[None, :] - emb = torch.cat((emb.sin(), emb.cos()), dim=-1) - return emb - - -def Conv1d(*args, **kwargs): - layer = nn.Conv1d(*args, **kwargs) - nn.init.kaiming_normal_(layer.weight) - return layer - - -class FFT(FastspeechDecoder): # unused, because DiffSinger only uses FastspeechEncoder - # NOTE: 
this part of script is *isolated* from other scripts, which means - # it may not be compatible with the current version. - - def __init__(self, hidden_size=None, num_layers=None, kernel_size=None, num_heads=None): - super().__init__(hidden_size, num_layers, kernel_size, num_heads=num_heads) - dim = hparams['residual_channels'] - self.input_projection = Conv1d(hparams['audio_num_mel_bins'], dim, 1) - self.diffusion_embedding = SinusoidalPosEmb(dim) - self.mlp = nn.Sequential( - nn.Linear(dim, dim * 4), - Mish(), - nn.Linear(dim * 4, dim) - ) - self.get_mel_out = Linear(hparams['hidden_size'], 80, bias=True) - self.get_decode_inp = Linear(hparams['hidden_size'] + dim + dim, - hparams['hidden_size']) # hs + dim + 80 -> hs - - def forward(self, spec, diffusion_step, cond, padding_mask=None, attn_mask=None, return_hiddens=False): - """ - :param spec: [B, 1, 80, T] - :param diffusion_step: [B, 1] - :param cond: [B, M, T] - :return: - """ - x = spec[:, 0] - x = self.input_projection(x).permute([0, 2, 1]) # [B, T, residual_channel] - diffusion_step = self.diffusion_embedding(diffusion_step) - diffusion_step = self.mlp(diffusion_step) # [B, dim] - cond = cond.permute([0, 2, 1]) # [B, T, M] - - seq_len = cond.shape[1] # [T_mel] - time_embed = diffusion_step[:, None, :] # [B, 1, dim] - time_embed = time_embed.repeat([1, seq_len, 1]) # # [B, T, dim] - - decoder_inp = torch.cat([x, cond, time_embed], dim=-1) # [B, T, dim + H + dim] - decoder_inp = self.get_decode_inp(decoder_inp) # [B, T, H] - x = decoder_inp - - ''' - Required x: [B, T, C] - :return: [B, T, C] or [L, B, T, C] - ''' - padding_mask = x.abs().sum(-1).eq(0).data if padding_mask is None else padding_mask - nonpadding_mask_TB = 1 - padding_mask.transpose(0, 1).float()[:, :, None] # [T, B, 1] - if self.use_pos_embed: - positions = self.pos_embed_alpha * self.embed_positions(x[..., 0]) - x = x + positions - x = F.dropout(x, p=self.dropout, training=self.training) - # B x T x C -> T x B x C - x = x.transpose(0, 1) * nonpadding_mask_TB - hiddens = [] - for layer in self.layers: - x = layer(x, encoder_padding_mask=padding_mask, attn_mask=attn_mask) * nonpadding_mask_TB - hiddens.append(x) - if self.use_last_norm: - x = self.layer_norm(x) * nonpadding_mask_TB - if return_hiddens: - x = torch.stack(hiddens, 0) # [L, T, B, C] - x = x.transpose(1, 2) # [L, B, T, C] - else: - x = x.transpose(0, 1) # [B, T, C] - - x = self.get_mel_out(x).permute([0, 2, 1]) # [B, 80, T] - return x[:, None, :, :] \ No newline at end of file diff --git a/src/diff/diffusion (isolated).py b/src/diff/diffusion (isolated).py deleted file mode 100644 index fc8035be5..000000000 --- a/src/diff/diffusion (isolated).py +++ /dev/null @@ -1,339 +0,0 @@ -''' - NOTE: this script is *isolated* from other scripts, which means - it may not be compatible with the current version. 
-''' - -import math -import random -from functools import partial -from inspect import isfunction -from pathlib import Path -import numpy as np -import torch -import torch.nn.functional as F -from torch import nn -from tqdm import tqdm -from einops import rearrange - -from modules.fastspeech.fs2 import FastSpeech2 -from modules.diffsinger_midi.fs2 import FastSpeech2MIDI -from utils.hparams import hparams - - - -def exists(x): - return x is not None - - -def default(val, d): - if exists(val): - return val - return d() if isfunction(d) else d - - -def cycle(dl): - while True: - for data in dl: - yield data - - -def num_to_groups(num, divisor): - groups = num // divisor - remainder = num % divisor - arr = [divisor] * groups - if remainder > 0: - arr.append(remainder) - return arr - - -class Residual(nn.Module): - def __init__(self, fn): - super().__init__() - self.fn = fn - - def forward(self, x, *args, **kwargs): - return self.fn(x, *args, **kwargs) + x - - -class SinusoidalPosEmb(nn.Module): - def __init__(self, dim): - super().__init__() - self.dim = dim - - def forward(self, x): - device = x.device - half_dim = self.dim // 2 - emb = math.log(10000) / (half_dim - 1) - emb = torch.exp(torch.arange(half_dim, device=device) * -emb) - emb = x[:, None] * emb[None, :] - emb = torch.cat((emb.sin(), emb.cos()), dim=-1) - return emb - - -class Mish(nn.Module): - def forward(self, x): - return x * torch.tanh(F.softplus(x)) - - -class Upsample(nn.Module): - def __init__(self, dim): - super().__init__() - self.conv = nn.ConvTranspose2d(dim, dim, 4, 2, 1) - - def forward(self, x): - return self.conv(x) - - -class Downsample(nn.Module): - def __init__(self, dim): - super().__init__() - self.conv = nn.Conv2d(dim, dim, 3, 2, 1) - - def forward(self, x): - return self.conv(x) - - -class Rezero(nn.Module): - def __init__(self, fn): - super().__init__() - self.fn = fn - self.g = nn.Parameter(torch.zeros(1)) - - def forward(self, x): - return self.fn(x) * self.g - - -# building block modules - -class Block(nn.Module): - def __init__(self, dim, dim_out, groups=8): - super().__init__() - self.block = nn.Sequential( - nn.Conv2d(dim, dim_out, 3, padding=1), - nn.GroupNorm(groups, dim_out), - Mish() - ) - - def forward(self, x): - return self.block(x) - - -class ResnetBlock(nn.Module): - def __init__(self, dim, dim_out, *, time_emb_dim, groups=8): - super().__init__() - self.mlp = nn.Sequential( - Mish(), - nn.Linear(time_emb_dim, dim_out) - ) - - self.block1 = Block(dim, dim_out) - self.block2 = Block(dim_out, dim_out) - self.res_conv = nn.Conv2d(dim, dim_out, 1) if dim != dim_out else nn.Identity() - - def forward(self, x, time_emb): - h = self.block1(x) - h += self.mlp(time_emb)[:, :, None, None] - h = self.block2(h) - return h + self.res_conv(x) - - -class LinearAttention(nn.Module): - def __init__(self, dim, heads=4, dim_head=32): - super().__init__() - self.heads = heads - hidden_dim = dim_head * heads - self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) - self.to_out = nn.Conv2d(hidden_dim, dim, 1) - - def forward(self, x): - b, c, h, w = x.shape - qkv = self.to_qkv(x) - q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads=self.heads, qkv=3) - k = k.softmax(dim=-1) - context = torch.einsum('bhdn,bhen->bhde', k, v) - out = torch.einsum('bhde,bhdn->bhen', context, q) - out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w) - return self.to_out(out) - - -# gaussian diffusion trainer class - -def extract(a, t, x_shape): - b, *_ = t.shape - out = 
a.gather(-1, t) - return out.reshape(b, *((1,) * (len(x_shape) - 1))) - - -def noise_like(shape, device, repeat=False): - repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) - noise = lambda: torch.randn(shape, device=device) - return repeat_noise() if repeat else noise() - - -def cosine_beta_schedule(timesteps, s=0.008): - """ - cosine schedule - as proposed in https://openreview.net/forum?id=-NEXDKk8gZ - """ - steps = timesteps + 1 - x = np.linspace(0, steps, steps) - alphas_cumprod = np.cos(((x / steps) + s) / (1 + s) * np.pi * 0.5) ** 2 - alphas_cumprod = alphas_cumprod / alphas_cumprod[0] - betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1]) - return np.clip(betas, a_min=0, a_max=0.999) - - -class GaussianDiffusion(nn.Module): - def __init__(self, phone_encoder, out_dims, denoise_fn, - timesteps=1000, loss_type='l1', betas=None, spec_min=None, spec_max=None): - super().__init__() - self.denoise_fn = denoise_fn - if hparams.get('use_midi') is not None and hparams['use_midi']: - self.fs2 = FastSpeech2MIDI(phone_encoder, out_dims) - else: - self.fs2 = FastSpeech2(phone_encoder, out_dims) - self.fs2.decoder = None - self.mel_bins = out_dims - - if exists(betas): - betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas - else: - betas = cosine_beta_schedule(timesteps) - - alphas = 1. - betas - alphas_cumprod = np.cumprod(alphas, axis=0) - alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) - - timesteps, = betas.shape - self.num_timesteps = int(timesteps) - self.loss_type = loss_type - - to_torch = partial(torch.tensor, dtype=torch.float32) - - self.register_buffer('betas', to_torch(betas)) - self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) - self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) - - # calculations for diffusion q(x_t | x_{t-1}) and others - self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) - self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) - self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) - self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) - self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) - - # calculations for posterior q(x_{t-1} | x_t, x_0) - posterior_variance = betas * (1. - alphas_cumprod_prev) / (1. - alphas_cumprod) - # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t) - self.register_buffer('posterior_variance', to_torch(posterior_variance)) - # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain - self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) - self.register_buffer('posterior_mean_coef1', to_torch( - betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) - self.register_buffer('posterior_mean_coef2', to_torch( - (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod))) - - self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']]) - self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']]) - - def q_mean_variance(self, x_start, t): - mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start - variance = extract(1. 
- self.alphas_cumprod, t, x_start.shape) - log_variance = extract(self.log_one_minus_alphas_cumprod, t, x_start.shape) - return mean, variance, log_variance - - def predict_start_from_noise(self, x_t, t, noise): - return ( - extract(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - - extract(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise - ) - - def q_posterior(self, x_start, x_t, t): - posterior_mean = ( - extract(self.posterior_mean_coef1, t, x_t.shape) * x_start + - extract(self.posterior_mean_coef2, t, x_t.shape) * x_t - ) - posterior_variance = extract(self.posterior_variance, t, x_t.shape) - posterior_log_variance_clipped = extract(self.posterior_log_variance_clipped, t, x_t.shape) - return posterior_mean, posterior_variance, posterior_log_variance_clipped - - def p_mean_variance(self, x, t, cond, clip_denoised: bool): - noise_pred = self.denoise_fn(x, t, cond=cond) - x_recon = self.predict_start_from_noise(x, t=t, noise=noise_pred) - - if clip_denoised: - x_recon.clamp_(-1., 1.) - - model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) - return model_mean, posterior_variance, posterior_log_variance - - @torch.no_grad() - def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False): - b, *_, device = *x.shape, x.device - model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, cond=cond, clip_denoised=clip_denoised) - noise = noise_like(x.shape, device, repeat_noise) - # no noise when t == 0 - nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) - return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise - - def q_sample(self, x_start, t, noise=None): - noise = default(noise, lambda: torch.randn_like(x_start)) - return ( - extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + - extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise - ) - - def p_losses(self, x_start, t, cond, noise=None, nonpadding=None): - noise = default(noise, lambda: torch.randn_like(x_start)) - - x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) - x_recon = self.denoise_fn(x_noisy, t, cond) - - if self.loss_type == 'l1': - if nonpadding is not None: - loss = ((noise - x_recon).abs() * nonpadding.unsqueeze(1)).mean() - else: - # print('are you sure w/o nonpadding?') - loss = (noise - x_recon).abs().mean() - - elif self.loss_type == 'l2': - loss = F.mse_loss(noise, x_recon) - else: - raise NotImplementedError() - - return loss - - def forward(self, txt_tokens, mel2ph=None, spk_embed=None, - ref_mels=None, f0=None, uv=None, energy=None, infer=False): - b, *_, device = *txt_tokens.shape, txt_tokens.device - ret = self.fs2(txt_tokens, mel2ph, spk_embed, ref_mels, f0, uv, energy, - skip_decoder=True, infer=infer) - cond = ret['decoder_inp'].transpose(1, 2) - if not infer: - t = torch.randint(0, self.num_timesteps, (b,), device=device).long() - x = ref_mels - x = self.norm_spec(x) - x = x.transpose(1, 2)[:, None, :, :] # [B, 1, M, T] - nonpadding = (mel2ph != 0).float() - ret['diff_loss'] = self.p_losses(x, t, cond, nonpadding=nonpadding) - else: - t = self.num_timesteps - shape = (cond.shape[0], 1, self.mel_bins, cond.shape[2]) - x = torch.randn(shape, device=device) - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t): - x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) - x = x[:, 0].transpose(1, 2) - ret['mel_out'] = self.denorm_spec(x) - - return ret - - def norm_spec(self, x): - return (x - self.spec_min) / 
(self.spec_max - self.spec_min) * 2 - 1 - - def denorm_spec(self, x): - return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min - - def cwt2f0_norm(self, cwt_spec, mean, std, mel2ph): - return self.fs2.cwt2f0_norm(cwt_spec, mean, std, mel2ph) - - def out2mel(self, x): - return x diff --git a/src/vocoders/ddsp.py b/src/vocoders/ddsp.py index e1f58c041..57feb7919 100644 --- a/src/vocoders/ddsp.py +++ b/src/vocoders/ddsp.py @@ -4,7 +4,7 @@ import yaml import numpy as np from librosa.filters import mel as librosa_mel_fn -from src.vocoders.base_vocoder import BaseVocoder, register_vocoder +from basics.base_vocoder import BaseVocoder, register_vocoder from utils.hparams import hparams class DotDict(dict): diff --git a/src/vocoders/hifigan.py b/src/vocoders/hifigan.py deleted file mode 100644 index 374b3ef90..000000000 --- a/src/vocoders/hifigan.py +++ /dev/null @@ -1,115 +0,0 @@ -import glob -import json -import os -import re - -import librosa -import torch - -import utils -from modules.hifigan.hifigan import HifiGanGenerator -from utils.hparams import hparams, set_hparams -from src.vocoders.base_vocoder import register_vocoder -from src.vocoders.pwg import PWG -from src.vocoders.vocoder_utils import denoise - - -def load_model(config_path, file_path): - device = torch.device("cuda" if torch.cuda.is_available() else "cpu") - ext = os.path.splitext(file_path)[-1] - if ext == '.pth': - if '.yaml' in config_path: - config = set_hparams(config_path, global_hparams=False) - elif '.json' in config_path: - config = json.load(open(config_path, 'r', encoding='utf-8')) - model = torch.load(file_path, map_location="cpu") - elif ext == '.ckpt': - ckpt_dict = torch.load(file_path, map_location="cpu") - if '.yaml' in config_path: - config = set_hparams(config_path, global_hparams=False) - state = ckpt_dict["state_dict"]["model_gen"] - elif '.json' in config_path: - config = json.load(open(config_path, 'r', encoding='utf-8')) - state = ckpt_dict["generator"] - model = HifiGanGenerator(config) - model.load_state_dict(state, strict=True) - model.remove_weight_norm() - model = model.eval().to(device) - print(f"| Loaded model parameters from {file_path}.") - print(f"| HifiGAN device: {device}.") - return model, config, device - - -total_time = 0 - - -@register_vocoder -class HifiGAN(PWG): - def __init__(self): - base_dir = hparams['vocoder_ckpt'] - config_path = f'{base_dir}/config.yaml' - file_path = sorted(glob.glob(f'{base_dir}/model_ckpt_steps_*.*'), key= - lambda x: int(re.findall(f'{base_dir}/model_ckpt_steps_(\d+).*', x.replace('\\','/'))[0]))[-1] - print('| load HifiGAN: ', file_path) - self.model, self.config, self.device = load_model(config_path=config_path, file_path=file_path) - - def spec2wav_torch(self, mel, **kwargs): - if self.config['audio_sample_rate'] != hparams['audio_sample_rate']: - print('Mismatch parameters: hparams[\'audio_sample_rate\']=',hparams['audio_sample_rate'],'!=',self.config['audio_sample_rate'],'(vocoder)') - if self.config['audio_num_mel_bins'] != hparams['audio_num_mel_bins']: - print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=',hparams['audio_num_mel_bins'],'!=',self.config['audio_num_mel_bins'],'(vocoder)') - if self.config['fft_size'] != hparams['fft_size']: - print('Mismatch parameters: hparams[\'fft_size\']=',hparams['fft_size'],'!=',self.config['fft_size'],'(vocoder)') - if self.config['win_size'] != hparams['win_size']: - print('Mismatch parameters: hparams[\'win_size\']=',hparams['win_size'],'!=',self.config['win_size'],'(vocoder)') - if 
self.config['hop_size'] != hparams['hop_size']: - print('Mismatch parameters: hparams[\'hop_size\']=',hparams['hop_size'],'!=',self.config['hop_size'],'(vocoder)') - if self.config['fmin'] != hparams['fmin']: - print('Mismatch parameters: hparams[\'fmin\']=',hparams['fmin'],'!=',self.config['fmin'] ,'(vocoder)') - if self.config['fmax'] != hparams['fmax']: - print('Mismatch parameters: hparams[\'fmax\']=',hparams['fmax'],'!=',self.config['fmax'] ,'(vocoder)') - with torch.no_grad(): - c = mel.transpose(2, 1) - f0 = kwargs.get('f0') - if f0 is not None and hparams.get('use_nsf'): - y = self.model(c, f0).view(-1) - else: - y = self.model(c).view(-1) - return y - - def spec2wav(self, mel, **kwargs): - if self.config['audio_sample_rate'] != hparams['audio_sample_rate']: - print('Mismatch parameters: hparams[\'audio_sample_rate\']=',hparams['audio_sample_rate'],'!=',self.config['audio_sample_rate'],'(vocoder)') - if self.config['audio_num_mel_bins'] != hparams['audio_num_mel_bins']: - print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=',hparams['audio_num_mel_bins'],'!=',self.config['audio_num_mel_bins'],'(vocoder)') - if self.config['fft_size'] != hparams['fft_size']: - print('Mismatch parameters: hparams[\'fft_size\']=',hparams['fft_size'],'!=',self.config['fft_size'],'(vocoder)') - if self.config['win_size'] != hparams['win_size']: - print('Mismatch parameters: hparams[\'win_size\']=',hparams['win_size'],'!=',self.config['win_size'],'(vocoder)') - if self.config['hop_size'] != hparams['hop_size']: - print('Mismatch parameters: hparams[\'hop_size\']=',hparams['hop_size'],'!=',self.config['hop_size'],'(vocoder)') - if self.config['fmin'] != hparams['fmin']: - print('Mismatch parameters: hparams[\'fmin\']=',hparams['fmin'],'!=',self.config['fmin'] ,'(vocoder)') - if self.config['fmax'] != hparams['fmax']: - print('Mismatch parameters: hparams[\'fmax\']=',hparams['fmax'],'!=',self.config['fmax'] ,'(vocoder)') - device = self.device - with torch.no_grad(): - c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(device) - with utils.Timer('hifigan', print_time=hparams['profile_infer']): - f0 = kwargs.get('f0') - if f0 is not None and hparams.get('use_nsf'): - f0 = torch.FloatTensor(f0[None, :]).to(device) - y = self.model(c, f0).view(-1) - else: - y = self.model(c).view(-1) - wav_out = y.cpu().numpy() - if hparams.get('vocoder_denoise_c', 0.0) > 0: - wav_out = denoise(wav_out, v=hparams['vocoder_denoise_c']) - return wav_out - - # @staticmethod - # def wav2spec(wav_fn, **kwargs): - # wav, _ = librosa.core.load(wav_fn, sr=hparams['audio_sample_rate']) - # wav_torch = torch.FloatTensor(wav)[None, :] - # mel = mel_spectrogram(wav_torch, hparams).numpy()[0] - # return wav, mel.T diff --git a/src/vocoders/nsf_hifigan.py b/src/vocoders/nsf_hifigan.py index 443a04f84..22cd0babf 100644 --- a/src/vocoders/nsf_hifigan.py +++ b/src/vocoders/nsf_hifigan.py @@ -4,7 +4,7 @@ from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT -from src.vocoders.base_vocoder import BaseVocoder, register_vocoder +from basics.base_vocoder import BaseVocoder, register_vocoder from utils.hparams import hparams diff --git a/src/vocoders/pwg.py b/src/vocoders/pwg.py deleted file mode 100644 index ddc5f4f02..000000000 --- a/src/vocoders/pwg.py +++ /dev/null @@ -1,137 +0,0 @@ -import glob -import re -import librosa -import torch -import yaml -from sklearn.preprocessing import StandardScaler -from torch import nn -from modules.parallel_wavegan.models import 
ParallelWaveGANGenerator -from modules.parallel_wavegan.utils import read_hdf5 -from utils.hparams import hparams -from utils.pitch_utils import f0_to_coarse -from src.vocoders.base_vocoder import BaseVocoder, register_vocoder -import numpy as np - - -def load_pwg_model(config_path, checkpoint_path, stats_path): - # load config - with open(config_path, encoding='utf-8') as f: - config = yaml.load(f, Loader=yaml.Loader) - - # setup - if torch.cuda.is_available(): - device = torch.device("cuda") - else: - device = torch.device("cpu") - model = ParallelWaveGANGenerator(**config["generator_params"]) - - ckpt_dict = torch.load(checkpoint_path, map_location="cpu") - if 'state_dict' not in ckpt_dict: # official vocoder - model.load_state_dict(torch.load(checkpoint_path, map_location="cpu")["model"]["generator"]) - scaler = StandardScaler() - if config["format"] == "hdf5": - scaler.mean_ = read_hdf5(stats_path, "mean") - scaler.scale_ = read_hdf5(stats_path, "scale") - elif config["format"] == "npy": - scaler.mean_ = np.load(stats_path)[0] - scaler.scale_ = np.load(stats_path)[1] - else: - raise ValueError("support only hdf5 or npy format.") - else: # custom PWG vocoder - fake_task = nn.Module() - fake_task.model_gen = model - fake_task.load_state_dict(torch.load(checkpoint_path, map_location="cpu")["state_dict"], strict=False) - scaler = None - - model.remove_weight_norm() - model = model.eval().to(device) - print(f"| Loaded model parameters from {checkpoint_path}.") - print(f"| PWG device: {device}.") - return model, scaler, config, device - - -@register_vocoder -class PWG(BaseVocoder): - def __init__(self): - if hparams['vocoder_ckpt'] == '': # load LJSpeech PWG pretrained model - base_dir = 'wavegan_pretrained' - ckpts = glob.glob(f'{base_dir}/checkpoint-*steps.pkl') - ckpt = sorted(ckpts, key= - lambda x: int(re.findall(f'{base_dir}/checkpoint-(\d+)steps.pkl', x)[0]))[-1] - config_path = f'{base_dir}/config.yaml' - print('| load PWG: ', ckpt) - self.model, self.scaler, self.config, self.device = load_pwg_model( - config_path=config_path, - checkpoint_path=ckpt, - stats_path=f'{base_dir}/stats.h5', - ) - else: - base_dir = hparams['vocoder_ckpt'] - print(base_dir) - config_path = f'{base_dir}/config.yaml' - ckpt = sorted(glob.glob(f'{base_dir}/model_ckpt_steps_*.ckpt'), key= - lambda x: int(re.findall(f'{base_dir}/model_ckpt_steps_(\d+).ckpt', x)[0]))[-1] - print('| load PWG: ', ckpt) - self.scaler = None - self.model, _, self.config, self.device = load_pwg_model( - config_path=config_path, - checkpoint_path=ckpt, - stats_path=f'{base_dir}/stats.h5', - ) - - def spec2wav(self, mel, **kwargs): - # start generation - config = self.config - device = self.device - pad_size = (config["generator_params"]["aux_context_window"], - config["generator_params"]["aux_context_window"]) - c = mel - if self.scaler is not None: - c = self.scaler.transform(c) - - with torch.no_grad(): - z = torch.randn(1, 1, c.shape[0] * config["hop_size"]).to(device) - c = np.pad(c, (pad_size, (0, 0)), "edge") - c = torch.FloatTensor(c).unsqueeze(0).transpose(2, 1).to(device) - p = kwargs.get('f0') - if p is not None: - p = f0_to_coarse(p) - p = np.pad(p, (pad_size,), "edge") - p = torch.LongTensor(p[None, :]).to(device) - y = self.model(z, c, p).view(-1) - wav_out = y.cpu().numpy() - return wav_out - - @staticmethod - def wav2spec(wav_fn, return_linear=False): - from data_gen.data_gen_utils import process_utterance - res = process_utterance( - wav_fn, fft_size=hparams['fft_size'], - hop_size=hparams['hop_size'], - 
win_length=hparams['win_size'], - num_mels=hparams['audio_num_mel_bins'], - fmin=hparams['fmin'], - fmax=hparams['fmax'], - sample_rate=hparams['audio_sample_rate'], - loud_norm=hparams['loud_norm'], - min_level_db=hparams['min_level_db'], - return_linear=return_linear, vocoder='pwg', eps=float(hparams.get('wav2spec_eps', 1e-10))) - if return_linear: - return res[0], res[1].T, res[2].T # [T, 80], [T, n_fft] - else: - return res[0], res[1].T - - @staticmethod - def wav2mfcc(wav_fn): - fft_size = hparams['fft_size'] - hop_size = hparams['hop_size'] - win_length = hparams['win_size'] - sample_rate = hparams['audio_sample_rate'] - wav, _ = librosa.core.load(wav_fn, sr=sample_rate) - mfcc = librosa.feature.mfcc(y=wav, sr=sample_rate, n_mfcc=13, - n_fft=fft_size, hop_length=hop_size, - win_length=win_length, pad_mode="constant", power=1.0) - mfcc_delta = librosa.feature.delta(mfcc, order=1) - mfcc_delta_delta = librosa.feature.delta(mfcc, order=2) - mfcc = np.concatenate([mfcc, mfcc_delta, mfcc_delta_delta]).T - return mfcc From 15ac90715c07e4a52b5341f59d24e0a8f600f9a8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 00:49:12 +0800 Subject: [PATCH 005/475] Clean up `XXXTxtProcessor` classes --- modules/diffsinger_midi/fs2.py | 12 +-- preprocessing/opencpop.py | 2 +- .../txt_processors/base_text_processor.py | 8 -- tts/data_gen/txt_processors/en.py | 78 -------------- tts/data_gen/txt_processors/zh.py | 41 ------- tts/data_gen/txt_processors/zh_g2pM.py | 102 ------------------ utils/phoneme_utils.py | 46 ++++++-- 7 files changed, 42 insertions(+), 247 deletions(-) delete mode 100644 tts/data_gen/txt_processors/base_text_processor.py delete mode 100644 tts/data_gen/txt_processors/en.py delete mode 100644 tts/data_gen/txt_processors/zh.py delete mode 100644 tts/data_gen/txt_processors/zh_g2pM.py diff --git a/modules/diffsinger_midi/fs2.py b/modules/diffsinger_midi/fs2.py index b31d01632..698f7906b 100644 --- a/modules/diffsinger_midi/fs2.py +++ b/modules/diffsinger_midi/fs2.py @@ -1,16 +1,10 @@ from modules.commons.common_layers import * from modules.commons.common_layers import Embedding -from modules.fastspeech.tts_modules import FastspeechDecoder, DurationPredictor, LengthRegulator, PitchPredictor, \ - EnergyPredictor, FastspeechEncoder -from utils.cwt import cwt2f0 -from utils.hparams import hparams -from utils.pitch_utils import f0_to_coarse, denorm_f0, norm_f0 from modules.fastspeech.fs2 import FastSpeech2 -from utils.text_encoder import TokenTextEncoder -from tts.data_gen.txt_processors.zh_g2pM import get_all_vowels -from torch.nn import functional as F -import torch +from modules.fastspeech.tts_modules import FastspeechEncoder from training.diffsinger import Batch2Loss +from utils.hparams import hparams +from utils.phoneme_utils import get_all_vowels class FastspeechMIDIEncoder(FastspeechEncoder): diff --git a/preprocessing/opencpop.py b/preprocessing/opencpop.py index d5c886b00..0e1fac00f 100644 --- a/preprocessing/opencpop.py +++ b/preprocessing/opencpop.py @@ -12,8 +12,8 @@ from basics.base_binarizer import BinarizationError from data_gen.data_gen_utils import get_pitch_parselmouth from basics.base_vocoder import VOCODERS -from tts.data_gen.txt_processors.zh_g2pM import get_all_vowels from utils.hparams import hparams +from utils.phoneme_utils import get_all_vowels vowels = get_all_vowels() diff --git a/tts/data_gen/txt_processors/base_text_processor.py b/tts/data_gen/txt_processors/base_text_processor.py deleted file mode 100644 index 84a9772fe..000000000 --- 
a/tts/data_gen/txt_processors/base_text_processor.py +++ /dev/null @@ -1,8 +0,0 @@ -class BaseTxtProcessor: - @staticmethod - def sp_phonemes(): - return ['|'] - - @classmethod - def process(cls, txt, pre_align_args): - raise NotImplementedError diff --git a/tts/data_gen/txt_processors/en.py b/tts/data_gen/txt_processors/en.py deleted file mode 100644 index 7ae8befcf..000000000 --- a/tts/data_gen/txt_processors/en.py +++ /dev/null @@ -1,78 +0,0 @@ -import re -from data_gen.data_gen_utils import PUNCS -from g2p_en import G2p -import unicodedata -from g2p_en.expand import normalize_numbers -from nltk import pos_tag -from nltk.tokenize import TweetTokenizer - -from tts.data_gen.txt_processors.base_text_processor import BaseTxtProcessor - - -class EnG2p(G2p): - word_tokenize = TweetTokenizer().tokenize - - def __call__(self, text): - # preprocessing - words = EnG2p.word_tokenize(text) - tokens = pos_tag(words) # tuples of (word, tag) - - # steps - prons = [] - for word, pos in tokens: - if re.search("[a-z]", word) is None: - pron = [word] - - elif word in self.homograph2features: # Check homograph - pron1, pron2, pos1 = self.homograph2features[word] - if pos.startswith(pos1): - pron = pron1 - else: - pron = pron2 - elif word in self.cmu: # lookup CMU dict - pron = self.cmu[word][0] - else: # predict for oov - pron = self.predict(word) - - prons.extend(pron) - prons.extend([" "]) - - return prons[:-1] - - -class TxtProcessor(BaseTxtProcessor): - g2p = EnG2p() - - @staticmethod - def preprocess_text(text): - text = normalize_numbers(text) - text = ''.join(char for char in unicodedata.normalize('NFD', text) - if unicodedata.category(char) != 'Mn') # Strip accents - text = text.lower() - text = re.sub("[\'\"()]+", "", text) - text = re.sub("[-]+", " ", text) - text = re.sub(f"[^ a-z{PUNCS}]", "", text) - text = re.sub(f" ?([{PUNCS}]) ?", r"\1", text) # !! -> ! - text = re.sub(f"([{PUNCS}])+", r"\1", text) # !! -> ! - text = text.replace("i.e.", "that is") - text = text.replace("i.e.", "that is") - text = text.replace("etc.", "etc") - text = re.sub(f"([{PUNCS}])", r" \1 ", text) - text = re.sub(rf"\s+", r" ", text) - return text - - @classmethod - def process(cls, txt, pre_align_args): - txt = cls.preprocess_text(txt).strip() - phs = cls.g2p(txt) - phs_ = [] - n_word_sep = 0 - for p in phs: - if p.strip() == '': - phs_ += ['|'] - n_word_sep += 1 - else: - phs_ += p.split(" ") - phs = phs_ - assert n_word_sep + 1 == len(txt.split(" ")), (phs, f"\"{txt}\"") - return phs, txt diff --git a/tts/data_gen/txt_processors/zh.py b/tts/data_gen/txt_processors/zh.py deleted file mode 100644 index 58466959a..000000000 --- a/tts/data_gen/txt_processors/zh.py +++ /dev/null @@ -1,41 +0,0 @@ -import re -# from pypinyin import pinyin, Style -from data_gen.data_gen_utils import PUNCS -from tts.data_gen.txt_processors.base_text_processor import BaseTxtProcessor -from utils.text_norm import NSWNormalizer - - -class TxtProcessor(BaseTxtProcessor): - table = {ord(f): ord(t) for f, t in zip( - u':,。!?【】()%#@&1234567890', - u':,.!?[]()%#@&1234567890')} - - @staticmethod - def preprocess_text(text): - text = text.translate(TxtProcessor.table) - text = NSWNormalizer(text).normalize(remove_punc=False) - text = re.sub("[\'\"()]+", "", text) - text = re.sub("[-]+", " ", text) - text = re.sub(f"[^ A-Za-z\u4e00-\u9fff{PUNCS}]", "", text) - text = re.sub(f"([{PUNCS}])+", r"\1", text) # !! -> ! 
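-        # surround each punctuation mark with spaces, then strip all whitespace
-        # below (Chinese text carries no meaningful spaces, so only the
-        # characters and punctuation marks themselves remain)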
-        text = re.sub(f"([{PUNCS}])", r" \1 ", text)
-        text = re.sub(rf"\s+", r"", text)
-        return text
-
-    @classmethod
-    def process(cls, txt, pre_align_args):
-        txt = cls.preprocess_text(txt)
-        shengmu = pinyin(txt, style=Style.INITIALS)  # https://blog.csdn.net/zhoulei124/article/details/89055403
-        yunmu_finals = pinyin(txt, style=Style.FINALS)
-        yunmu_tone3 = pinyin(txt, style=Style.FINALS_TONE3)
-        yunmu = [[t[0] + '5'] if t[0] == f[0] else t for f, t in zip(yunmu_finals, yunmu_tone3)] \
-            if pre_align_args['use_tone'] else yunmu_finals
-
-        assert len(shengmu) == len(yunmu)
-        phs = ["|"]
-        for a, b, c in zip(shengmu, yunmu, yunmu_finals):
-            if a[0] == c[0]:
-                phs += [a[0], "|"]
-            else:
-                phs += [a[0], b[0], "|"]
-        return phs, txt
diff --git a/tts/data_gen/txt_processors/zh_g2pM.py b/tts/data_gen/txt_processors/zh_g2pM.py
deleted file mode 100644
index 9b91d8f94..000000000
--- a/tts/data_gen/txt_processors/zh_g2pM.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import re
-# import jieba
-# from pypinyin import pinyin, Style
-from data_gen.data_gen_utils import PUNCS
-from tts.data_gen.txt_processors import zh
-from g2pM import G2pM
-from utils.phoneme_utils import build_g2p_dictionary
-
-
-_initialized = False
-_ALL_CONSONANTS_SET = set()
-_ALL_VOWELS_SET = set()
-
-
-def _initialize_consonants_and_vowels():
-    # Currently we only support two-part consonant-vowel phoneme systems.
-    for _ph_list in build_g2p_dictionary().values():
-        _ph_count = len(_ph_list)
-        if _ph_count == 0 or _ph_list[0] in ['AP', 'SP']:
-            continue
-        elif len(_ph_list) == 1:
-            _ALL_VOWELS_SET.add(_ph_list[0])
-        else:
-            _ALL_CONSONANTS_SET.add(_ph_list[0])
-            _ALL_VOWELS_SET.add(_ph_list[1])
-
-
-def get_all_consonants():
-    global _initialized
-    if not _initialized:
-        _initialize_consonants_and_vowels()
-        _initialized = True
-    return sorted(_ALL_CONSONANTS_SET)
-
-
-def get_all_vowels():
-    global _initialized
-    if not _initialized:
-        _initialize_consonants_and_vowels()
-        _initialized = True
-    return sorted(_ALL_VOWELS_SET)
-
-
-class TxtProcessor(zh.TxtProcessor):
-    model = G2pM()
-
-    @staticmethod
-    def sp_phonemes():
-        return ['|', '#']
-
-    @classmethod
-    def process(cls, txt, pre_align_args):
-        txt = cls.preprocess_text(txt)
-        ph_list = cls.model(txt, tone=pre_align_args['use_tone'], char_split=True)
-        seg_list = '#'.join(jieba.cut(txt))
-        assert len(ph_list) == len([s for s in seg_list if s != '#']), (ph_list, seg_list)
-
-        # Insert word-boundary markers '#'
-        ph_list_ = []
-        seg_idx = 0
-        for p in ph_list:
-            p = p.replace("u:", "v")
-            if seg_list[seg_idx] == '#':
-                ph_list_.append('#')
-                seg_idx += 1
-            else:
-                ph_list_.append("|")
-            seg_idx += 1
-            if re.findall('[\u4e00-\u9fff]', p):
-                if pre_align_args['use_tone']:
-                    p = pinyin(p, style=Style.TONE3, strict=True)[0][0]
-                    if p[-1] not in ['1', '2', '3', '4', '5']:
-                        p = p + '5'
-                else:
-                    p = pinyin(p, style=Style.NORMAL, strict=True)[0][0]
-
-            finished = False
-            consonants = get_all_consonants()
-            if len([c.isalpha() for c in p]) > 1:
-                for shenmu in consonants:
-                    if p.startswith(shenmu) and not p.lstrip(shenmu).isnumeric():
-                        ph_list_ += [shenmu, p.lstrip(shenmu)]
-                        finished = True
-                        break
-            if not finished:
-                ph_list_.append(p)
-
-        ph_list = ph_list_
-
-        # Remove word-boundary markers around silence symbols: [..., '#', ',', '#', ...]
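-        # e.g. ['wo', '#', ',', '#', 'men'] -> ['wo', ',', 'men']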
-        sil_phonemes = list(PUNCS) + TxtProcessor.sp_phonemes()
-        ph_list_ = []
-        for i in range(0, len(ph_list), 1):
-            if ph_list[i] != '#' or (ph_list[i - 1] not in sil_phonemes and ph_list[i + 1] not in sil_phonemes):
-                ph_list_.append(ph_list[i])
-        ph_list = ph_list_
-        return ph_list, txt
-
-
-if __name__ == '__main__':
-    phs, txt = TxtProcessor.process('他来到了,网易杭研大厦', {'use_tone': True})
-    print(phs)
diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py
index f9376368a..0bc79540a 100644
--- a/utils/phoneme_utils.py
+++ b/utils/phoneme_utils.py
@@ -10,6 +10,11 @@
 _phoneme_list: list
 
 
+_initialized = False
+_ALL_CONSONANTS_SET = set()
+_ALL_VOWELS_SET = set()
+
+
 def _build_dict_and_list():
     from utils.hparams import hparams
     global _g2p_dictionary, _phoneme_list
@@ -26,19 +31,44 @@
     print('| load phoneme set:', _phoneme_list)
 
 
-def build_g2p_dictionary() -> dict:
-    global _has_cache
-    if not _has_cache:
+def _initialize_consonants_and_vowels():
+    # Currently we only support two-part consonant-vowel phoneme systems.
+    for _ph_list in build_g2p_dictionary().values():
+        _ph_count = len(_ph_list)
+        if _ph_count == 0 or _ph_list[0] in ['AP', 'SP']:
+            continue
+        elif len(_ph_list) == 1:
+            _ALL_VOWELS_SET.add(_ph_list[0])
+        else:
+            _ALL_CONSONANTS_SET.add(_ph_list[0])
+            _ALL_VOWELS_SET.add(_ph_list[1])
+
+
+def _initialize():
+    global _initialized
+    if not _initialized:
         _build_dict_and_list()
-        _has_cache = True
+        _initialize_consonants_and_vowels()
+        _initialized = True
+
+
+def get_all_consonants():
+    _initialize()
+    return sorted(_ALL_CONSONANTS_SET)
+
+
+def get_all_vowels():
+    _initialize()
+    return sorted(_ALL_VOWELS_SET)
+
+
+def build_g2p_dictionary() -> dict:
+    _initialize()
     return _g2p_dictionary
 
 
 def build_phoneme_list() -> list:
-    global _has_cache
-    if not _has_cache:
-        _build_dict_and_list()
-        _has_cache = True
+    _initialize()
     return _phoneme_list
From 742d8941cc61d3bff142fe613029af0374942d2d Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 9 Mar 2023 00:51:33 +0800
Subject: [PATCH 006/475] Delete useless workflow

---
 .github/workflows/main.yml | 44 --------------------------------------
 1 file changed, 44 deletions(-)
 delete mode 100644 .github/workflows/main.yml

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
deleted file mode 100644
index 66cdf6ee9..000000000
--- a/.github/workflows/main.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-on:
-  workflow_dispatch:
-    inputs:
-      version:
-        description: 'Version number'
-        default: '0.0.0'
-        required: true
-        type: string
-
-jobs:
-  pack-windows:
-    runs-on: ${{ matrix.os }}
-
-    strategy:
-      matrix:
-        os: [ 'windows-latest' ]
-
-    steps:
-      - uses: actions/checkout@v1
-
-      - name: Download Winpython
-        uses: carlosperate/download-file-action@v1.0.3
-        with:
-          file-url: https://github.com/winpython/winpython/releases/download/4.3.20210620/Winpython64-3.8.10.0dot.exe
-          file-name: Winpython64-3.8.10.0dot.exe
-          location: ${{ github.workspace }}
-
-      - name: Extract Winpython
-        run: 7z x -o"${{ github.workspace }}" "${{ github.workspace }}/Winpython64-3.8.10.0dot.exe"
-
-      - name: Install python packages
-        run: .\build.bat
-
-      - name: remove Winpython installer
-        run: del "${{ github.workspace }}/Winpython64-3.8.10.0dot.exe"
-
-      - name: pack
-        run: 7z a .\diffsinger-server.zip .\*
-
-      - name: upload
-        uses: actions/upload-artifact@v1.0.0
-        with:
-          name: diffsinger-server-cpu-${{ inputs.version }}.zip
-          path: ${{ github.workspace }}\diffsinger-server.zip
\ No newline at end of file
From d1cd3c641f929d5ada8a8296a56127f749cec6f5 
Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 00:54:44 +0800 Subject: [PATCH 007/475] Remove migration method from opencpop to opencpop-strict --- docs/README-SVS-custom-phonemes.md | 7 --- utils/phoneme_utils.py | 72 ++---------------------------- 2 files changed, 3 insertions(+), 76 deletions(-) diff --git a/docs/README-SVS-custom-phonemes.md b/docs/README-SVS-custom-phonemes.md index cf81b3a0c..f526688bc 100644 --- a/docs/README-SVS-custom-phonemes.md +++ b/docs/README-SVS-custom-phonemes.md @@ -117,10 +117,3 @@ Phoneme distribution* of Opencpop dataset on this dictionary is shown below. ![img](resources/phoneme_distribution.jpg) *`AP` and `SP` are not included. - -To migrate `ds` file from original dictionary to this strict dictionary, run the following command: - -```bash -python utils/phoneme_utils path/to/your/original.ds path/to/your/target.ds -``` - diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index 0bc79540a..768c32c78 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,8 +1,6 @@ -import argparse -import json - - -_has_cache = False +_initialized = False +_ALL_CONSONANTS_SET = set() +_ALL_VOWELS_SET = set() _g2p_dictionary = { 'AP': ['AP'], 'SP': ['SP'] @@ -10,11 +8,6 @@ _phoneme_list: list -_initialized = False -_ALL_CONSONANTS_SET = set() -_ALL_VOWELS_SET = set() - - def _build_dict_and_list(): from utils.hparams import hparams global _g2p_dictionary, _phoneme_list @@ -70,62 +63,3 @@ def build_g2p_dictionary() -> dict: def build_phoneme_list() -> list: _initialize() return _phoneme_list - - -def opencpop_old_to_strict(phonemes: list, slurs: list) -> list: - assert len(phonemes) == len(slurs), 'Length of phonemes mismatches length of slurs!' - new_phonemes = [p for p in phonemes] - i = 0 - while i < len(phonemes): - if phonemes[i] == 'i' and i > 0: - rep = None - if phonemes[i - 1] in ['zh', 'ch', 'sh', 'r']: - rep = 'ir' - elif phonemes[i - 1] in ['z', 'c', 's']: - rep = 'i0' - if rep is not None: - new_phonemes[i] = rep - i += 1 - while i < len(phonemes) and slurs[i] == '1': - new_phonemes[i] = rep - i += 1 - else: - i += 1 - elif phonemes[i] == 'e' and i > 0 and phonemes[i - 1] == 'y': - new_phonemes[i] = 'E' - i += 1 - while i < len(phonemes) and slurs[i] == '1': - new_phonemes[i] = 'E' - i += 1 - elif phonemes[i] == 'an' and i > 0 and phonemes[i - 1] == 'y': - new_phonemes[i] = 'En' - i += 1 - while i < len(phonemes) and slurs[i] == '1': - new_phonemes[i] = 'En' - i += 1 - else: - i += 1 - return new_phonemes - - -def opencpop_ds_old_to_strict(ds_params): - ds_params['ph_seq'] = ' '.join( - opencpop_old_to_strict(ds_params['ph_seq'].split(), ds_params['is_slur_seq'].split())) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description='Migrate ds file from old opencpop pinyin dictionary to new strict pinyin dictionary.') - parser.add_argument('input', type=str, help='Path to the input file') - parser.add_argument('output', type=str, help='Path to the output file') - args = parser.parse_args() - - with open(args.input, 'r', encoding='utf8') as f: - params = json.load(f) - if isinstance(params, list): - [opencpop_ds_old_to_strict(p) for p in params] - else: - opencpop_ds_old_to_strict(params) - - with open(args.output, 'w', encoding='utf8') as f: - json.dump(params, f, ensure_ascii=False, indent=2) From fed7c4583922b27ce49bb547e673071af3273674 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:00:34 +0800 Subject: [PATCH 008/475] Add main process checks --- utils/hparams.py | 5 ++--- 
utils/multiprocess_utils.py | 10 +++++++++- utils/phoneme_utils.py | 4 +++- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/utils/hparams.py b/utils/hparams.py index 064b2c4aa..e3bd34521 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -1,14 +1,13 @@ import argparse -import multiprocessing import os -import re import shutil import yaml +from utils.multiprocess_utils import is_main_process + global_print_hparams = True hparams = {} -is_main_process = not bool(re.match(r'Process-\d+', multiprocessing.current_process().name)) class Args: diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 24876c4ca..e77638296 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -1,6 +1,14 @@ import os +import re import traceback -from multiprocessing import Queue, Process +from multiprocessing import Queue, Process, current_process + +is_main_process = not bool(re.match(r'Process-\d+', current_process().name)) + + +def main_process_print(self, *args, sep=' ', end='\n', file=None): + if is_main_process: + print(self, *args, sep=sep, end=end, file=file) def chunked_worker(worker_id, map_func, args, results_queue=None, init_ctx_func=None): diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index 768c32c78..b2e62102c 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,3 +1,5 @@ +from utils.multiprocess_utils import main_process_print + _initialized = False _ALL_CONSONANTS_SET = set() _ALL_VOWELS_SET = set() @@ -21,7 +23,7 @@ def _build_dict_and_list(): for _list in _g2p_dictionary.values(): [_set.add(ph) for ph in _list] _phoneme_list = sorted(list(_set)) - print('| load phoneme set:', _phoneme_list) + main_process_print('| load phoneme set:', _phoneme_list) def _initialize_consonants_and_vowels(): From 4a74fc5ff3b1539aaba9d2dca5eb37fd098f5cbc Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:04:14 +0800 Subject: [PATCH 009/475] Remove `EnergyPredictor` imports --- modules/fastspeech/fs2.py | 36 ++---------------------------------- 1 file changed, 2 insertions(+), 34 deletions(-) diff --git a/modules/fastspeech/fs2.py b/modules/fastspeech/fs2.py index b8d4e9d18..eff1c6040 100644 --- a/modules/fastspeech/fs2.py +++ b/modules/fastspeech/fs2.py @@ -1,10 +1,10 @@ from modules.commons.common_layers import * from modules.commons.common_layers import Embedding from modules.fastspeech.tts_modules import FastspeechDecoder, DurationPredictor, LengthRegulator, PitchPredictor, \ - EnergyPredictor, FastspeechEncoder + FastspeechEncoder from utils.cwt import cwt2f0 from utils.hparams import hparams -from utils.pitch_utils import f0_to_coarse, denorm_f0, norm_f0 +from utils.pitch_utils import norm_f0 FS_ENCODERS = { 'fft': lambda hp, embed_tokens, d: FastspeechEncoder( @@ -76,14 +76,6 @@ def __init__(self, dictionary, out_dims=None): dropout_rate=hparams['predictor_dropout'], odim=2 if hparams['pitch_type'] == 'frame' else 1, padding=hparams['ffn_padding'], kernel_size=hparams['predictor_kernel']) - if hparams['use_energy_embed']: - self.energy_embed = Embedding(256, self.hidden_size, self.padding_idx) - self.energy_predictor = EnergyPredictor( - self.hidden_size, - n_chans=predictor_hidden, - n_layers=hparams['predictor_layers'], - dropout_rate=hparams['predictor_dropout'], odim=1, - padding=hparams['ffn_padding'], kernel_size=hparams['predictor_kernel']) def build_embedding(self, dictionary, embed_dim): num_embeddings = len(dictionary) @@ -148,30 +140,6 @@ def forward(self, txt_tokens, mel2ph=None, 
spk_embed=None, return ret - def add_dur(self, dur_input, mel2ph, txt_tokens, ret): - """ - NOTE: this part of script is *isolated* from other scripts, which means - it may not be compatible with the current version. - - :param dur_input: [B, T_txt, H] - :param mel2ph: [B, T_mel] - :param txt_tokens: [B, T_txt] - :param ret: - :return: - """ - return - src_padding = txt_tokens == 0 - dur_input = dur_input.detach() + hparams['predictor_grad'] * (dur_input - dur_input.detach()) - if mel2ph is None: - dur, xs = self.dur_predictor.inference(dur_input, src_padding) - ret['dur'] = xs - ret['dur_choice'] = dur - mel2ph = self.length_regulator(dur, src_padding).detach() - else: - ret['dur'] = self.dur_predictor(dur_input, src_padding) - ret['mel2ph'] = mel2ph - return mel2ph - def run_decoder(self, decoder_inp, tgt_nonpadding, ret, infer, **kwargs): x = decoder_inp # [B, T, H] x = self.decoder(x) From 2bad814111449eee4472c57c91b45feb1047c591 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:07:51 +0800 Subject: [PATCH 010/475] Redirect vocoder package --- basics/base_vocoder.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index 1f6c2a277..9a815ac4e 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -14,6 +14,8 @@ def get_vocoder_cls(hparams): else: vocoder_cls = hparams['vocoder'] pkg = ".".join(vocoder_cls.split(".")[:-1]) + if pkg == '': + pkg = 'src.vocoders' cls_name = vocoder_cls.split(".")[-1] vocoder_cls = getattr(importlib.import_module(pkg), cls_name) return vocoder_cls From 286e79c266fe15e59c4f6faf8ffdc453c5aefe6c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:10:56 +0800 Subject: [PATCH 011/475] Refactor vocoder loading logic --- basics/base_vocoder.py | 20 -------------------- src/acoustic_task.py | 3 ++- src/vocoders/ddsp.py | 3 ++- src/vocoders/nsf_hifigan.py | 3 ++- src/vocoders/vocoder_utils.py | 27 ++++++++++++++++----------- 5 files changed, 22 insertions(+), 34 deletions(-) diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index 9a815ac4e..3d136c7cd 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -1,26 +1,6 @@ -import importlib VOCODERS = {} -def register_vocoder(cls): - VOCODERS[cls.__name__.lower()] = cls - VOCODERS[cls.__name__] = cls - return cls - - -def get_vocoder_cls(hparams): - if hparams['vocoder'] in VOCODERS: - return VOCODERS[hparams['vocoder']] - else: - vocoder_cls = hparams['vocoder'] - pkg = ".".join(vocoder_cls.split(".")[:-1]) - if pkg == '': - pkg = 'src.vocoders' - cls_name = vocoder_cls.split(".")[-1] - vocoder_cls = getattr(importlib.import_module(pkg), cls_name) - return vocoder_cls - - class BaseVocoder: def spec2wav(self, mel, **kwargs): """ diff --git a/src/acoustic_task.py b/src/acoustic_task.py index cb510ee07..b8bcdb8f5 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -23,7 +23,8 @@ from basics.base_task import BaseTask from data_gen.data_gen_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import mel2ph_to_dur -from basics.base_vocoder import get_vocoder_cls, BaseVocoder +from basics.base_vocoder import BaseVocoder +from .vocoders.vocoder_utils import get_vocoder_cls from utils import audio from utils.cwt import get_lf0_cwt from utils.hparams import hparams diff --git a/src/vocoders/ddsp.py b/src/vocoders/ddsp.py index 57feb7919..e89b187e4 100644 --- a/src/vocoders/ddsp.py +++ b/src/vocoders/ddsp.py @@ -4,7 +4,8 @@ import yaml import numpy as np from librosa.filters import mel 
as librosa_mel_fn -from basics.base_vocoder import BaseVocoder, register_vocoder +from basics.base_vocoder import BaseVocoder +from src.vocoders.vocoder_utils import register_vocoder from utils.hparams import hparams class DotDict(dict): diff --git a/src/vocoders/nsf_hifigan.py b/src/vocoders/nsf_hifigan.py index 22cd0babf..61b101f51 100644 --- a/src/vocoders/nsf_hifigan.py +++ b/src/vocoders/nsf_hifigan.py @@ -4,7 +4,8 @@ from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT -from basics.base_vocoder import BaseVocoder, register_vocoder +from basics.base_vocoder import BaseVocoder +from src.vocoders.vocoder_utils import register_vocoder from utils.hparams import hparams diff --git a/src/vocoders/vocoder_utils.py b/src/vocoders/vocoder_utils.py index db5d5ca17..143c699de 100644 --- a/src/vocoders/vocoder_utils.py +++ b/src/vocoders/vocoder_utils.py @@ -1,15 +1,20 @@ -import librosa +import importlib -from utils.hparams import hparams -import numpy as np +from basics.base_vocoder import VOCODERS -def denoise(wav, v=0.1): - spec = librosa.stft(y=wav, n_fft=hparams['fft_size'], hop_length=hparams['hop_size'], - win_length=hparams['win_size'], pad_mode='constant') - spec_m = np.abs(spec) - spec_m = np.clip(spec_m - v, a_min=0, a_max=None) - spec_a = np.angle(spec) +def register_vocoder(cls): + VOCODERS[cls.__name__.lower()] = cls + VOCODERS[cls.__name__] = cls + return cls - return librosa.istft(spec_m * np.exp(1j * spec_a), hop_length=hparams['hop_size'], - win_length=hparams['win_size']) + +def get_vocoder_cls(hparams): + if hparams['vocoder'] in VOCODERS: + return VOCODERS[hparams['vocoder']] + else: + vocoder_cls = hparams['vocoder'] + pkg = ".".join(vocoder_cls.split(".")[:-1]) + cls_name = vocoder_cls.split(".")[-1] + vocoder_cls = getattr(importlib.import_module(pkg), cls_name) + return vocoder_cls From 32198757c6b25dd0cf0db114d51136e0609b916a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:13:04 +0800 Subject: [PATCH 012/475] Refactor vocoder register logic --- augmentation/spec_stretch.py | 2 +- basics/base_svs_infer.py | 2 +- basics/base_vocoder.py | 3 --- preprocessing/opencpop.py | 2 +- src/vocoders/vocoder_utils.py | 3 ++- 5 files changed, 5 insertions(+), 7 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index f1d0884a9..fa915dcf5 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -6,7 +6,7 @@ from basics.base_augmentation import BaseAugmentation from data_gen.data_gen_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import LengthRegulator -from basics.base_vocoder import VOCODERS +from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import hparams from utils.pitch_utils import f0_to_coarse diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index deaa06fee..2efed4a06 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -5,7 +5,7 @@ import torch from pypinyin import lazy_pinyin -from basics.base_vocoder import VOCODERS +from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import set_hparams, hparams from utils.phoneme_utils import build_g2p_dictionary, build_phoneme_list from utils.text_encoder import TokenTextEncoder diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index 3d136c7cd..1c898b8d3 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -1,6 +1,3 @@ -VOCODERS = {} - - class BaseVocoder: def spec2wav(self, mel, **kwargs): 
""" diff --git a/preprocessing/opencpop.py b/preprocessing/opencpop.py index 0e1fac00f..412126cae 100644 --- a/preprocessing/opencpop.py +++ b/preprocessing/opencpop.py @@ -11,7 +11,7 @@ import utils from basics.base_binarizer import BinarizationError from data_gen.data_gen_utils import get_pitch_parselmouth -from basics.base_vocoder import VOCODERS +from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import hparams from utils.phoneme_utils import get_all_vowels diff --git a/src/vocoders/vocoder_utils.py b/src/vocoders/vocoder_utils.py index 143c699de..a77e37ed1 100644 --- a/src/vocoders/vocoder_utils.py +++ b/src/vocoders/vocoder_utils.py @@ -1,6 +1,7 @@ import importlib -from basics.base_vocoder import VOCODERS + +VOCODERS = {} def register_vocoder(cls): From 3d97aa704f0324f7fbc47685b218272962da9cdf Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:14:39 +0800 Subject: [PATCH 013/475] Remove `hifigan` imports --- src/vocoders/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/vocoders/__init__.py b/src/vocoders/__init__.py index 36c2bc2e8..8c39a7364 100644 --- a/src/vocoders/__init__.py +++ b/src/vocoders/__init__.py @@ -1,3 +1,2 @@ -from src.vocoders import nsf_hifigan -from src.vocoders import hifigan from src.vocoders import ddsp +from src.vocoders import nsf_hifigan From 04563d7e1af8c79fed74bf99b44304c80addfb0d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 01:20:57 +0800 Subject: [PATCH 014/475] Fix infinite recursion --- utils/phoneme_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index b2e62102c..bec97a2cf 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -28,7 +28,7 @@ def _build_dict_and_list(): def _initialize_consonants_and_vowels(): # Currently we only support two-part consonant-vowel phoneme systems. 
- for _ph_list in build_g2p_dictionary().values(): + for _ph_list in _g2p_dictionary.values(): _ph_count = len(_ph_list) if _ph_count == 0 or _ph_list[0] in ['AP', 'SP']: continue From 34a2fcc3e639b210ada70de7feeae139942d0231 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 13:37:41 +0800 Subject: [PATCH 015/475] Remove hifigan, parallel_wavegan and pe --- modules/fastspeech/pe.py | 150 ----- modules/hifigan/hifigan.py | 365 ------------ modules/hifigan/mel_utils.py | 80 --- modules/parallel_wavegan/__init__.py | 0 modules/parallel_wavegan/layers/__init__.py | 5 - .../parallel_wavegan/layers/causal_conv.py | 56 -- modules/parallel_wavegan/layers/pqmf.py | 129 ----- .../parallel_wavegan/layers/residual_block.py | 129 ----- .../parallel_wavegan/layers/residual_stack.py | 75 --- modules/parallel_wavegan/layers/tf_layers.py | 129 ----- modules/parallel_wavegan/layers/upsample.py | 183 ------ modules/parallel_wavegan/losses/__init__.py | 1 - modules/parallel_wavegan/losses/stft_loss.py | 153 ----- modules/parallel_wavegan/models/__init__.py | 2 - modules/parallel_wavegan/models/melgan.py | 427 -------------- .../models/parallel_wavegan.py | 434 -------------- modules/parallel_wavegan/models/source.py | 538 ------------------ .../parallel_wavegan/optimizers/__init__.py | 2 - modules/parallel_wavegan/optimizers/radam.py | 91 --- modules/parallel_wavegan/stft_loss.py | 100 ---- modules/parallel_wavegan/utils/__init__.py | 1 - modules/parallel_wavegan/utils/utils.py | 169 ------ test_crepe.py | 67 --- 23 files changed, 3286 deletions(-) delete mode 100644 modules/fastspeech/pe.py delete mode 100644 modules/hifigan/hifigan.py delete mode 100644 modules/hifigan/mel_utils.py delete mode 100644 modules/parallel_wavegan/__init__.py delete mode 100644 modules/parallel_wavegan/layers/__init__.py delete mode 100644 modules/parallel_wavegan/layers/causal_conv.py delete mode 100644 modules/parallel_wavegan/layers/pqmf.py delete mode 100644 modules/parallel_wavegan/layers/residual_block.py delete mode 100644 modules/parallel_wavegan/layers/residual_stack.py delete mode 100644 modules/parallel_wavegan/layers/tf_layers.py delete mode 100644 modules/parallel_wavegan/layers/upsample.py delete mode 100644 modules/parallel_wavegan/losses/__init__.py delete mode 100644 modules/parallel_wavegan/losses/stft_loss.py delete mode 100644 modules/parallel_wavegan/models/__init__.py delete mode 100644 modules/parallel_wavegan/models/melgan.py delete mode 100644 modules/parallel_wavegan/models/parallel_wavegan.py delete mode 100644 modules/parallel_wavegan/models/source.py delete mode 100644 modules/parallel_wavegan/optimizers/__init__.py delete mode 100644 modules/parallel_wavegan/optimizers/radam.py delete mode 100644 modules/parallel_wavegan/stft_loss.py delete mode 100644 modules/parallel_wavegan/utils/__init__.py delete mode 100644 modules/parallel_wavegan/utils/utils.py delete mode 100644 test_crepe.py diff --git a/modules/fastspeech/pe.py b/modules/fastspeech/pe.py deleted file mode 100644 index b04ce5eb5..000000000 --- a/modules/fastspeech/pe.py +++ /dev/null @@ -1,150 +0,0 @@ -from modules.commons.common_layers import * -from utils.hparams import hparams -from modules.fastspeech.tts_modules import PitchPredictor -from utils.pitch_utils import denorm_f0 - - -class Prenet(nn.Module): - def __init__(self, in_dim=80, out_dim=256, kernel=5, n_layers=3, strides=None): - super(Prenet, self).__init__() - padding = kernel // 2 - self.layers = [] - self.strides = strides if strides is not None else [1] * 
n_layers - for l in range(n_layers): - self.layers.append(nn.Sequential( - nn.Conv1d(in_dim, out_dim, kernel_size=kernel, padding=padding, stride=self.strides[l]), - nn.ReLU(), - nn.BatchNorm1d(out_dim) - )) - in_dim = out_dim - self.layers = nn.ModuleList(self.layers) - self.out_proj = nn.Linear(out_dim, out_dim) - - def forward(self, x): - """ - - :param x: [B, T, 80] - :return: [L, B, T, H], [B, T, H] - """ - # padding_mask = x.abs().sum(-1).eq(0).data # [B, T] - padding_mask = x.abs().sum(-1).eq(0).detach() - nonpadding_mask_TB = 1 - padding_mask.float()[:, None, :] # [B, 1, T] - x = x.transpose(1, 2) - hiddens = [] - for i, l in enumerate(self.layers): - nonpadding_mask_TB = nonpadding_mask_TB[:, :, ::self.strides[i]] - x = l(x) * nonpadding_mask_TB - hiddens.append(x) - hiddens = torch.stack(hiddens, 0) # [L, B, H, T] - hiddens = hiddens.transpose(2, 3) # [L, B, T, H] - x = self.out_proj(x.transpose(1, 2)) # [B, T, H] - x = x * nonpadding_mask_TB.transpose(1, 2) - return hiddens, x - - -class ConvBlock(nn.Module): - def __init__(self, idim=80, n_chans=256, kernel_size=3, stride=1, norm='gn', dropout=0): - super().__init__() - self.conv = ConvNorm(idim, n_chans, kernel_size, stride=stride) - self.norm = norm - if self.norm == 'bn': - self.norm = nn.BatchNorm1d(n_chans) - elif self.norm == 'in': - self.norm = nn.InstanceNorm1d(n_chans, affine=True) - elif self.norm == 'gn': - self.norm = nn.GroupNorm(n_chans // 16, n_chans) - elif self.norm == 'ln': - self.norm = LayerNorm(n_chans // 16, n_chans) - elif self.norm == 'wn': - self.conv = torch.nn.utils.weight_norm(self.conv.conv) - self.dropout = nn.Dropout(dropout) - self.relu = nn.ReLU() - - def forward(self, x): - """ - - :param x: [B, C, T] - :return: [B, C, T] - """ - x = self.conv(x) - if not isinstance(self.norm, str): - if self.norm == 'none': - pass - elif self.norm == 'ln': - x = self.norm(x.transpose(1, 2)).transpose(1, 2) - else: - x = self.norm(x) - x = self.relu(x) - x = self.dropout(x) - return x - - -class ConvStacks(nn.Module): - def __init__(self, idim=80, n_layers=5, n_chans=256, odim=32, kernel_size=5, norm='gn', - dropout=0, strides=None, res=True): - super().__init__() - self.conv = torch.nn.ModuleList() - self.kernel_size = kernel_size - self.res = res - self.in_proj = Linear(idim, n_chans) - if strides is None: - strides = [1] * n_layers - else: - assert len(strides) == n_layers - for idx in range(n_layers): - self.conv.append(ConvBlock( - n_chans, n_chans, kernel_size, stride=strides[idx], norm=norm, dropout=dropout)) - self.out_proj = Linear(n_chans, odim) - - def forward(self, x, return_hiddens=False): - """ - - :param x: [B, T, H] - :return: [B, T, H] - """ - x = self.in_proj(x) - x = x.transpose(1, -1) # (B, idim, Tmax) - hiddens = [] - for f in self.conv: - x_ = f(x) - x = x + x_ if self.res else x_ # (B, C, Tmax) - hiddens.append(x) - x = x.transpose(1, -1) - x = self.out_proj(x) # (B, Tmax, H) - if return_hiddens: - hiddens = torch.stack(hiddens, 1) # [B, L, C, T] - return x, hiddens - return x - - -class PitchExtractor(nn.Module): - def __init__(self, n_mel_bins=80, conv_layers=2): - super().__init__() - self.hidden_size = hparams['hidden_size'] - self.predictor_hidden = hparams['predictor_hidden'] if hparams['predictor_hidden'] > 0 else self.hidden_size - self.conv_layers = conv_layers - - self.mel_prenet = Prenet(n_mel_bins, self.hidden_size, strides=[1, 1, 1]) - if self.conv_layers > 0: - self.mel_encoder = ConvStacks( - idim=self.hidden_size, n_chans=self.hidden_size, odim=self.hidden_size, 
n_layers=self.conv_layers) - self.pitch_predictor = PitchPredictor( - self.hidden_size, n_chans=self.predictor_hidden, - n_layers=5, dropout_rate=0.1, odim=2, - padding=hparams['ffn_padding'], kernel_size=hparams['predictor_kernel']) - - def forward(self, mel_input=None): - ret = {} - mel_hidden = self.mel_prenet(mel_input)[1] - if self.conv_layers > 0: - mel_hidden = self.mel_encoder(mel_hidden) - - ret['pitch_pred'] = pitch_pred = self.pitch_predictor(mel_hidden) - - pitch_padding = mel_input.abs().sum(-1) == 0 - use_uv = hparams['pitch_type'] == 'frame' and hparams['use_uv'] - - ret['f0_denorm_pred'] = denorm_f0( - pitch_pred[:, :, 0], (pitch_pred[:, :, 1] > 0) if use_uv else None, - hparams, pitch_padding=pitch_padding) - return ret \ No newline at end of file diff --git a/modules/hifigan/hifigan.py b/modules/hifigan/hifigan.py deleted file mode 100644 index ae7e61f56..000000000 --- a/modules/hifigan/hifigan.py +++ /dev/null @@ -1,365 +0,0 @@ -import torch -import torch.nn.functional as F -import torch.nn as nn -from torch.nn import Conv1d, ConvTranspose1d, AvgPool1d, Conv2d -from torch.nn.utils import weight_norm, remove_weight_norm, spectral_norm - -from modules.parallel_wavegan.layers import UpsampleNetwork, ConvInUpsampleNetwork -from modules.parallel_wavegan.models.source import SourceModuleHnNSF -import numpy as np - -LRELU_SLOPE = 0.1 - - -def init_weights(m, mean=0.0, std=0.01): - classname = m.__class__.__name__ - if classname.find("Conv") != -1: - m.weight.data.normal_(mean, std) - - -def apply_weight_norm(m): - classname = m.__class__.__name__ - if classname.find("Conv") != -1: - weight_norm(m) - - -def get_padding(kernel_size, dilation=1): - return int((kernel_size * dilation - dilation) / 2) - - -class ResBlock1(torch.nn.Module): - def __init__(self, h, channels, kernel_size=3, dilation=(1, 3, 5)): - super(ResBlock1, self).__init__() - self.h = h - self.convs1 = nn.ModuleList([ - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], - padding=get_padding(kernel_size, dilation[0]))), - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], - padding=get_padding(kernel_size, dilation[1]))), - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], - padding=get_padding(kernel_size, dilation[2]))) - ]) - self.convs1.apply(init_weights) - - self.convs2 = nn.ModuleList([ - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=1, - padding=get_padding(kernel_size, 1))), - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=1, - padding=get_padding(kernel_size, 1))), - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=1, - padding=get_padding(kernel_size, 1))) - ]) - self.convs2.apply(init_weights) - - def forward(self, x): - for c1, c2 in zip(self.convs1, self.convs2): - xt = F.leaky_relu(x, LRELU_SLOPE) - xt = c1(xt) - xt = F.leaky_relu(xt, LRELU_SLOPE) - xt = c2(xt) - x = xt + x - return x - - def remove_weight_norm(self): - for l in self.convs1: - remove_weight_norm(l) - for l in self.convs2: - remove_weight_norm(l) - - -class ResBlock2(torch.nn.Module): - def __init__(self, h, channels, kernel_size=3, dilation=(1, 3)): - super(ResBlock2, self).__init__() - self.h = h - self.convs = nn.ModuleList([ - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], - padding=get_padding(kernel_size, dilation[0]))), - weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], - padding=get_padding(kernel_size, dilation[1]))) - ]) - 
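-        # lighter counterpart of ResBlock1: one dilated conv per branch, without
-        # the second dilation-1 pass; weights start from N(0.0, 0.01) via init_weights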
self.convs.apply(init_weights) - - def forward(self, x): - for c in self.convs: - xt = F.leaky_relu(x, LRELU_SLOPE) - xt = c(xt) - x = xt + x - return x - - def remove_weight_norm(self): - for l in self.convs: - remove_weight_norm(l) - - -class Conv1d1x1(Conv1d): - """1x1 Conv1d with customized initialization.""" - - def __init__(self, in_channels, out_channels, bias): - """Initialize 1x1 Conv1d module.""" - super(Conv1d1x1, self).__init__(in_channels, out_channels, - kernel_size=1, padding=0, - dilation=1, bias=bias) - - -class HifiGanGenerator(torch.nn.Module): - def __init__(self, h, c_out=1): - super(HifiGanGenerator, self).__init__() - self.h = h - self.num_kernels = len(h['resblock_kernel_sizes']) - self.num_upsamples = len(h['upsample_rates']) - - if h['use_pitch_embed']: - self.harmonic_num = 8 - self.f0_upsamp = torch.nn.Upsample(scale_factor=np.prod(h['upsample_rates'])) - self.m_source = SourceModuleHnNSF( - sampling_rate=h['audio_sample_rate'], - harmonic_num=self.harmonic_num) - self.noise_convs = nn.ModuleList() - self.conv_pre = weight_norm(Conv1d(80, h['upsample_initial_channel'], 7, 1, padding=3)) - resblock = ResBlock1 if h['resblock'] == '1' else ResBlock2 - - self.ups = nn.ModuleList() - for i, (u, k) in enumerate(zip(h['upsample_rates'], h['upsample_kernel_sizes'])): - c_cur = h['upsample_initial_channel'] // (2 ** (i + 1)) - self.ups.append(weight_norm( - ConvTranspose1d(c_cur * 2, c_cur, k, u, padding=(k - u) // 2))) - if h['use_pitch_embed']: - if i + 1 < len(h['upsample_rates']): - stride_f0 = np.prod(h['upsample_rates'][i + 1:]) - self.noise_convs.append(Conv1d( - 1, c_cur, kernel_size=stride_f0 * 2, stride=stride_f0, padding=stride_f0 // 2)) - else: - self.noise_convs.append(Conv1d(1, c_cur, kernel_size=1)) - - self.resblocks = nn.ModuleList() - for i in range(len(self.ups)): - ch = h['upsample_initial_channel'] // (2 ** (i + 1)) - for j, (k, d) in enumerate(zip(h['resblock_kernel_sizes'], h['resblock_dilation_sizes'])): - self.resblocks.append(resblock(h, ch, k, d)) - - self.conv_post = weight_norm(Conv1d(ch, c_out, 7, 1, padding=3)) - self.ups.apply(init_weights) - self.conv_post.apply(init_weights) - - def forward(self, x, f0=None): - if f0 is not None: - # harmonic-source signal, noise-source signal, uv flag - f0 = self.f0_upsamp(f0[:, None]).transpose(1, 2) - har_source, noi_source, uv = self.m_source(f0) - har_source = har_source.transpose(1, 2) - - x = self.conv_pre(x) - for i in range(self.num_upsamples): - x = F.leaky_relu(x, LRELU_SLOPE) - x = self.ups[i](x) - if f0 is not None: - x_source = self.noise_convs[i](har_source) - x = x + x_source - xs = None - for j in range(self.num_kernels): - if xs is None: - xs = self.resblocks[i * self.num_kernels + j](x) - else: - xs += self.resblocks[i * self.num_kernels + j](x) - x = xs / self.num_kernels - x = F.leaky_relu(x) - x = self.conv_post(x) - x = torch.tanh(x) - - return x - - def remove_weight_norm(self): - print('Removing weight norm...') - for l in self.ups: - remove_weight_norm(l) - for l in self.resblocks: - l.remove_weight_norm() - remove_weight_norm(self.conv_pre) - remove_weight_norm(self.conv_post) - - -class DiscriminatorP(torch.nn.Module): - def __init__(self, period, kernel_size=5, stride=3, use_spectral_norm=False, use_cond=False, c_in=1): - super(DiscriminatorP, self).__init__() - self.use_cond = use_cond - if use_cond: - from utils.hparams import hparams - t = hparams['hop_size'] - self.cond_net = torch.nn.ConvTranspose1d(80, 1, t * 2, stride=t, padding=t // 2) - c_in = 2 - - self.period = 
period - norm_f = weight_norm if use_spectral_norm == False else spectral_norm - self.convs = nn.ModuleList([ - norm_f(Conv2d(c_in, 32, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(32, 128, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(128, 512, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(512, 1024, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(1024, 1024, (kernel_size, 1), 1, padding=(2, 0))), - ]) - self.conv_post = norm_f(Conv2d(1024, 1, (3, 1), 1, padding=(1, 0))) - - def forward(self, x, mel): - fmap = [] - if self.use_cond: - x_mel = self.cond_net(mel) - x = torch.cat([x_mel, x], 1) - # 1d to 2d - b, c, t = x.shape - if t % self.period != 0: # pad first - n_pad = self.period - (t % self.period) - x = F.pad(x, (0, n_pad), "reflect") - t = t + n_pad - x = x.view(b, c, t // self.period, self.period) - - for l in self.convs: - x = l(x) - x = F.leaky_relu(x, LRELU_SLOPE) - fmap.append(x) - x = self.conv_post(x) - fmap.append(x) - x = torch.flatten(x, 1, -1) - - return x, fmap - - -class MultiPeriodDiscriminator(torch.nn.Module): - def __init__(self, use_cond=False, c_in=1): - super(MultiPeriodDiscriminator, self).__init__() - self.discriminators = nn.ModuleList([ - DiscriminatorP(2, use_cond=use_cond, c_in=c_in), - DiscriminatorP(3, use_cond=use_cond, c_in=c_in), - DiscriminatorP(5, use_cond=use_cond, c_in=c_in), - DiscriminatorP(7, use_cond=use_cond, c_in=c_in), - DiscriminatorP(11, use_cond=use_cond, c_in=c_in), - ]) - - def forward(self, y, y_hat, mel=None): - y_d_rs = [] - y_d_gs = [] - fmap_rs = [] - fmap_gs = [] - for i, d in enumerate(self.discriminators): - y_d_r, fmap_r = d(y, mel) - y_d_g, fmap_g = d(y_hat, mel) - y_d_rs.append(y_d_r) - fmap_rs.append(fmap_r) - y_d_gs.append(y_d_g) - fmap_gs.append(fmap_g) - - return y_d_rs, y_d_gs, fmap_rs, fmap_gs - - -class DiscriminatorS(torch.nn.Module): - def __init__(self, use_spectral_norm=False, use_cond=False, upsample_rates=None, c_in=1): - super(DiscriminatorS, self).__init__() - self.use_cond = use_cond - if use_cond: - t = np.prod(upsample_rates) - self.cond_net = torch.nn.ConvTranspose1d(80, 1, t * 2, stride=t, padding=t // 2) - c_in = 2 - norm_f = weight_norm if use_spectral_norm == False else spectral_norm - self.convs = nn.ModuleList([ - norm_f(Conv1d(c_in, 128, 15, 1, padding=7)), - norm_f(Conv1d(128, 128, 41, 2, groups=4, padding=20)), - norm_f(Conv1d(128, 256, 41, 2, groups=16, padding=20)), - norm_f(Conv1d(256, 512, 41, 4, groups=16, padding=20)), - norm_f(Conv1d(512, 1024, 41, 4, groups=16, padding=20)), - norm_f(Conv1d(1024, 1024, 41, 1, groups=16, padding=20)), - norm_f(Conv1d(1024, 1024, 5, 1, padding=2)), - ]) - self.conv_post = norm_f(Conv1d(1024, 1, 3, 1, padding=1)) - - def forward(self, x, mel): - if self.use_cond: - x_mel = self.cond_net(mel) - x = torch.cat([x_mel, x], 1) - fmap = [] - for l in self.convs: - x = l(x) - x = F.leaky_relu(x, LRELU_SLOPE) - fmap.append(x) - x = self.conv_post(x) - fmap.append(x) - x = torch.flatten(x, 1, -1) - - return x, fmap - - -class MultiScaleDiscriminator(torch.nn.Module): - def __init__(self, use_cond=False, c_in=1): - super(MultiScaleDiscriminator, self).__init__() - from utils.hparams import hparams - self.discriminators = nn.ModuleList([ - DiscriminatorS(use_spectral_norm=True, use_cond=use_cond, - upsample_rates=[4, 4, hparams['hop_size'] // 16], - c_in=c_in), - DiscriminatorS(use_cond=use_cond, - upsample_rates=[4, 4, 
hparams['hop_size'] // 32], - c_in=c_in), - DiscriminatorS(use_cond=use_cond, - upsample_rates=[4, 4, hparams['hop_size'] // 64], - c_in=c_in), - ]) - self.meanpools = nn.ModuleList([ - AvgPool1d(4, 2, padding=1), - AvgPool1d(4, 2, padding=1) - ]) - - def forward(self, y, y_hat, mel=None): - y_d_rs = [] - y_d_gs = [] - fmap_rs = [] - fmap_gs = [] - for i, d in enumerate(self.discriminators): - if i != 0: - y = self.meanpools[i - 1](y) - y_hat = self.meanpools[i - 1](y_hat) - y_d_r, fmap_r = d(y, mel) - y_d_g, fmap_g = d(y_hat, mel) - y_d_rs.append(y_d_r) - fmap_rs.append(fmap_r) - y_d_gs.append(y_d_g) - fmap_gs.append(fmap_g) - - return y_d_rs, y_d_gs, fmap_rs, fmap_gs - - -def feature_loss(fmap_r, fmap_g): - loss = 0 - for dr, dg in zip(fmap_r, fmap_g): - for rl, gl in zip(dr, dg): - loss += torch.mean(torch.abs(rl - gl)) - - return loss * 2 - - -def discriminator_loss(disc_real_outputs, disc_generated_outputs): - r_losses = 0 - g_losses = 0 - for dr, dg in zip(disc_real_outputs, disc_generated_outputs): - r_loss = torch.mean((1 - dr) ** 2) - g_loss = torch.mean(dg ** 2) - r_losses += r_loss - g_losses += g_loss - r_losses = r_losses / len(disc_real_outputs) - g_losses = g_losses / len(disc_real_outputs) - return r_losses, g_losses - - -def cond_discriminator_loss(outputs): - loss = 0 - for dg in outputs: - g_loss = torch.mean(dg ** 2) - loss += g_loss - loss = loss / len(outputs) - return loss - - -def generator_loss(disc_outputs): - loss = 0 - for dg in disc_outputs: - l = torch.mean((1 - dg) ** 2) - loss += l - loss = loss / len(disc_outputs) - return loss diff --git a/modules/hifigan/mel_utils.py b/modules/hifigan/mel_utils.py deleted file mode 100644 index 06e0f7d4d..000000000 --- a/modules/hifigan/mel_utils.py +++ /dev/null @@ -1,80 +0,0 @@ -import numpy as np -import torch -import torch.utils.data -from librosa.filters import mel as librosa_mel_fn -from scipy.io.wavfile import read - -MAX_WAV_VALUE = 32768.0 - - -def load_wav(full_path): - sampling_rate, data = read(full_path) - return data, sampling_rate - - -def dynamic_range_compression(x, C=1, clip_val=1e-5): - return np.log(np.clip(x, a_min=clip_val, a_max=None) * C) - - -def dynamic_range_decompression(x, C=1): - return np.exp(x) / C - - -def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): - return torch.log(torch.clamp(x, min=clip_val) * C) - - -def dynamic_range_decompression_torch(x, C=1): - return torch.exp(x) / C - - -def spectral_normalize_torch(magnitudes): - output = dynamic_range_compression_torch(magnitudes) - return output - - -def spectral_de_normalize_torch(magnitudes): - output = dynamic_range_decompression_torch(magnitudes) - return output - - -mel_basis = {} -hann_window = {} - - -def mel_spectrogram(y, hparams, center=False, complex=False): - # hop_size: 512 # For 22050Hz, 275 ~= 12.5 ms (0.0125 * sample_rate) - # win_size: 2048 # For 22050Hz, 1100 ~= 50 ms (If None, win_size: fft_size) (0.05 * sample_rate) - # fmin: 55 # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) - # fmax: 10000 # To be increased/reduced depending on data. 
- # fft_size: 2048 # Extra window size is filled with 0 paddings to match this parameter - # n_fft, num_mels, sampling_rate, hop_size, win_size, fmin, fmax, - n_fft = hparams['fft_size'] - num_mels = hparams['audio_num_mel_bins'] - sampling_rate = hparams['audio_sample_rate'] - hop_size = hparams['hop_size'] - win_size = hparams['win_size'] - fmin = hparams['fmin'] - fmax = hparams['fmax'] - y = y.clamp(min=-1., max=1.) - global mel_basis, hann_window - if fmax not in mel_basis: - mel = librosa_mel_fn(sampling_rate, n_fft, num_mels, fmin, fmax) - mel_basis[str(fmax) + '_' + str(y.device)] = torch.from_numpy(mel).float().to(y.device) - hann_window[str(y.device)] = torch.hann_window(win_size).to(y.device) - - y = torch.nn.functional.pad(y.unsqueeze(1), (int((n_fft - hop_size) / 2), int((n_fft - hop_size) / 2)), - mode='reflect') - y = y.squeeze(1) - - spec = torch.stft(y, n_fft, hop_length=hop_size, win_length=win_size, window=hann_window[str(y.device)], - center=center, pad_mode='reflect', normalized=False, onesided=True) - - if not complex: - spec = torch.sqrt(spec.pow(2).sum(-1) + (1e-9)) - spec = torch.matmul(mel_basis[str(fmax) + '_' + str(y.device)], spec) - spec = spectral_normalize_torch(spec) - else: - B, C, T, _ = spec.shape - spec = spec.transpose(1, 2) # [B, T, n_fft, 2] - return spec diff --git a/modules/parallel_wavegan/__init__.py b/modules/parallel_wavegan/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/modules/parallel_wavegan/layers/__init__.py b/modules/parallel_wavegan/layers/__init__.py deleted file mode 100644 index e477f5111..000000000 --- a/modules/parallel_wavegan/layers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .causal_conv import * # NOQA -from .pqmf import * # NOQA -from .residual_block import * # NOQA -from modules.parallel_wavegan.layers.residual_stack import * # NOQA -from .upsample import * # NOQA diff --git a/modules/parallel_wavegan/layers/causal_conv.py b/modules/parallel_wavegan/layers/causal_conv.py deleted file mode 100644 index fca77daf6..000000000 --- a/modules/parallel_wavegan/layers/causal_conv.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""Causal convolusion layer modules.""" - - -import torch - - -class CausalConv1d(torch.nn.Module): - """CausalConv1d module with customized initialization.""" - - def __init__(self, in_channels, out_channels, kernel_size, - dilation=1, bias=True, pad="ConstantPad1d", pad_params={"value": 0.0}): - """Initialize CausalConv1d module.""" - super(CausalConv1d, self).__init__() - self.pad = getattr(torch.nn, pad)((kernel_size - 1) * dilation, **pad_params) - self.conv = torch.nn.Conv1d(in_channels, out_channels, kernel_size, - dilation=dilation, bias=bias) - - def forward(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input tensor (B, in_channels, T). - - Returns: - Tensor: Output tensor (B, out_channels, T). - - """ - return self.conv(self.pad(x))[:, :, :x.size(2)] - - -class CausalConvTranspose1d(torch.nn.Module): - """CausalConvTranspose1d module with customized initialization.""" - - def __init__(self, in_channels, out_channels, kernel_size, stride, bias=True): - """Initialize CausalConvTranspose1d module.""" - super(CausalConvTranspose1d, self).__init__() - self.deconv = torch.nn.ConvTranspose1d( - in_channels, out_channels, kernel_size, stride, bias=bias) - self.stride = stride - - def forward(self, x): - """Calculate forward propagation. 
- - Args: - x (Tensor): Input tensor (B, in_channels, T_in). - - Returns: - Tensor: Output tensor (B, out_channels, T_out). - - """ - return self.deconv(x)[:, :, :-self.stride] diff --git a/modules/parallel_wavegan/layers/pqmf.py b/modules/parallel_wavegan/layers/pqmf.py deleted file mode 100644 index ac21074fd..000000000 --- a/modules/parallel_wavegan/layers/pqmf.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""Pseudo QMF modules.""" - -import numpy as np -import torch -import torch.nn.functional as F - -from scipy.signal import kaiser - - -def design_prototype_filter(taps=62, cutoff_ratio=0.15, beta=9.0): - """Design prototype filter for PQMF. - - This method is based on `A Kaiser window approach for the design of prototype - filters of cosine modulated filterbanks`_. - - Args: - taps (int): The number of filter taps. - cutoff_ratio (float): Cut-off frequency ratio. - beta (float): Beta coefficient for kaiser window. - - Returns: - ndarray: Impluse response of prototype filter (taps + 1,). - - .. _`A Kaiser window approach for the design of prototype filters of cosine modulated filterbanks`: - https://ieeexplore.ieee.org/abstract/document/681427 - - """ - # check the arguments are valid - assert taps % 2 == 0, "The number of taps mush be even number." - assert 0.0 < cutoff_ratio < 1.0, "Cutoff ratio must be > 0.0 and < 1.0." - - # make initial filter - omega_c = np.pi * cutoff_ratio - with np.errstate(invalid='ignore'): - h_i = np.sin(omega_c * (np.arange(taps + 1) - 0.5 * taps)) \ - / (np.pi * (np.arange(taps + 1) - 0.5 * taps)) - h_i[taps // 2] = np.cos(0) * cutoff_ratio # fix nan due to indeterminate form - - # apply kaiser window - w = kaiser(taps + 1, beta) - h = h_i * w - - return h - - -class PQMF(torch.nn.Module): - """PQMF module. - - This module is based on `Near-perfect-reconstruction pseudo-QMF banks`_. - - .. _`Near-perfect-reconstruction pseudo-QMF banks`: - https://ieeexplore.ieee.org/document/258122 - - """ - - def __init__(self, subbands=4, taps=62, cutoff_ratio=0.15, beta=9.0): - """Initilize PQMF module. - - Args: - subbands (int): The number of subbands. - taps (int): The number of filter taps. - cutoff_ratio (float): Cut-off frequency ratio. - beta (float): Beta coefficient for kaiser window. 
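-
-        Note (for reference; this summary is read off the construction code
-        below, not part of the cited papers): for K subbands and a prototype
-        filter h(n) of length taps + 1, the analysis filters are cosine
-        modulations of the prototype,
-
-            h_k(n) = 2 * h(n) * cos((2k + 1) * (pi / (2K))
-                                    * (n - (taps - 1) / 2) + (-1)**k * pi / 4),
-
-        for k = 0, ..., K - 1, and the synthesis filters use the same
-        expression with the sign of the (-1)**k * pi / 4 phase term flipped.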
- - """ - super(PQMF, self).__init__() - - # define filter coefficient - h_proto = design_prototype_filter(taps, cutoff_ratio, beta) - h_analysis = np.zeros((subbands, len(h_proto))) - h_synthesis = np.zeros((subbands, len(h_proto))) - for k in range(subbands): - h_analysis[k] = 2 * h_proto * np.cos( - (2 * k + 1) * (np.pi / (2 * subbands)) * - (np.arange(taps + 1) - ((taps - 1) / 2)) + - (-1) ** k * np.pi / 4) - h_synthesis[k] = 2 * h_proto * np.cos( - (2 * k + 1) * (np.pi / (2 * subbands)) * - (np.arange(taps + 1) - ((taps - 1) / 2)) - - (-1) ** k * np.pi / 4) - - # convert to tensor - analysis_filter = torch.from_numpy(h_analysis).float().unsqueeze(1) - synthesis_filter = torch.from_numpy(h_synthesis).float().unsqueeze(0) - - # register coefficients as beffer - self.register_buffer("analysis_filter", analysis_filter) - self.register_buffer("synthesis_filter", synthesis_filter) - - # filter for downsampling & upsampling - updown_filter = torch.zeros((subbands, subbands, subbands)).float() - for k in range(subbands): - updown_filter[k, k, 0] = 1.0 - self.register_buffer("updown_filter", updown_filter) - self.subbands = subbands - - # keep padding info - self.pad_fn = torch.nn.ConstantPad1d(taps // 2, 0.0) - - def analysis(self, x): - """Analysis with PQMF. - - Args: - x (Tensor): Input tensor (B, 1, T). - - Returns: - Tensor: Output tensor (B, subbands, T // subbands). - - """ - x = F.conv1d(self.pad_fn(x), self.analysis_filter) - return F.conv1d(x, self.updown_filter, stride=self.subbands) - - def synthesis(self, x): - """Synthesis with PQMF. - - Args: - x (Tensor): Input tensor (B, subbands, T // subbands). - - Returns: - Tensor: Output tensor (B, 1, T). - - """ - x = F.conv_transpose1d(x, self.updown_filter * self.subbands, stride=self.subbands) - return F.conv1d(self.pad_fn(x), self.synthesis_filter) diff --git a/modules/parallel_wavegan/layers/residual_block.py b/modules/parallel_wavegan/layers/residual_block.py deleted file mode 100644 index 7a267a86c..000000000 --- a/modules/parallel_wavegan/layers/residual_block.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Residual block module in WaveNet. - -This code is modified from https://github.com/r9y9/wavenet_vocoder. - -""" - -import math - -import torch -import torch.nn.functional as F - - -class Conv1d(torch.nn.Conv1d): - """Conv1d module with customized initialization.""" - - def __init__(self, *args, **kwargs): - """Initialize Conv1d module.""" - super(Conv1d, self).__init__(*args, **kwargs) - - def reset_parameters(self): - """Reset parameters.""" - torch.nn.init.kaiming_normal_(self.weight, nonlinearity="relu") - if self.bias is not None: - torch.nn.init.constant_(self.bias, 0.0) - - -class Conv1d1x1(Conv1d): - """1x1 Conv1d with customized initialization.""" - - def __init__(self, in_channels, out_channels, bias): - """Initialize 1x1 Conv1d module.""" - super(Conv1d1x1, self).__init__(in_channels, out_channels, - kernel_size=1, padding=0, - dilation=1, bias=bias) - - -class ResidualBlock(torch.nn.Module): - """Residual block module in WaveNet.""" - - def __init__(self, - kernel_size=3, - residual_channels=64, - gate_channels=128, - skip_channels=64, - aux_channels=80, - dropout=0.0, - dilation=1, - bias=True, - use_causal_conv=False - ): - """Initialize ResidualBlock module. - - Args: - kernel_size (int): Kernel size of dilation convolution layer. - residual_channels (int): Number of channels for residual connection. - skip_channels (int): Number of channels for skip connection. 
- aux_channels (int): Local conditioning channels i.e. auxiliary input dimension. - dropout (float): Dropout probability. - dilation (int): Dilation factor. - bias (bool): Whether to add bias parameter in convolution layers. - use_causal_conv (bool): Whether to use use_causal_conv or non-use_causal_conv convolution. - - """ - super(ResidualBlock, self).__init__() - self.dropout = dropout - # no future time stamps available - if use_causal_conv: - padding = (kernel_size - 1) * dilation - else: - assert (kernel_size - 1) % 2 == 0, "Not support even number kernel size." - padding = (kernel_size - 1) // 2 * dilation - self.use_causal_conv = use_causal_conv - - # dilation conv - self.conv = Conv1d(residual_channels, gate_channels, kernel_size, - padding=padding, dilation=dilation, bias=bias) - - # local conditioning - if aux_channels > 0: - self.conv1x1_aux = Conv1d1x1(aux_channels, gate_channels, bias=False) - else: - self.conv1x1_aux = None - - # conv output is split into two groups - gate_out_channels = gate_channels // 2 - self.conv1x1_out = Conv1d1x1(gate_out_channels, residual_channels, bias=bias) - self.conv1x1_skip = Conv1d1x1(gate_out_channels, skip_channels, bias=bias) - - def forward(self, x, c): - """Calculate forward propagation. - - Args: - x (Tensor): Input tensor (B, residual_channels, T). - c (Tensor): Local conditioning auxiliary tensor (B, aux_channels, T). - - Returns: - Tensor: Output tensor for residual connection (B, residual_channels, T). - Tensor: Output tensor for skip connection (B, skip_channels, T). - - """ - residual = x - x = F.dropout(x, p=self.dropout, training=self.training) - x = self.conv(x) - - # remove future time steps if use_causal_conv conv - x = x[:, :, :residual.size(-1)] if self.use_causal_conv else x - - # split into two part for gated activation - splitdim = 1 - xa, xb = x.split(x.size(splitdim) // 2, dim=splitdim) - - # local conditioning - if c is not None: - assert self.conv1x1_aux is not None - c = self.conv1x1_aux(c) - ca, cb = c.split(c.size(splitdim) // 2, dim=splitdim) - xa, xb = xa + ca, xb + cb - - x = torch.tanh(xa) * torch.sigmoid(xb) - - # for skip connection - s = self.conv1x1_skip(x) - - # for residual connection - x = (self.conv1x1_out(x) + residual) * math.sqrt(0.5) - - return x, s diff --git a/modules/parallel_wavegan/layers/residual_stack.py b/modules/parallel_wavegan/layers/residual_stack.py deleted file mode 100644 index 6e07c8803..000000000 --- a/modules/parallel_wavegan/layers/residual_stack.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""Residual stack module in MelGAN.""" - -import torch - -from . import CausalConv1d - - -class ResidualStack(torch.nn.Module): - """Residual stack module introduced in MelGAN.""" - - def __init__(self, - kernel_size=3, - channels=32, - dilation=1, - bias=True, - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - pad="ReflectionPad1d", - pad_params={}, - use_causal_conv=False, - ): - """Initialize ResidualStack module. - - Args: - kernel_size (int): Kernel size of dilation convolution layer. - channels (int): Number of channels of convolution layers. - dilation (int): Dilation factor. - bias (bool): Whether to add bias parameter in convolution layers. - nonlinear_activation (str): Activation function module name. - nonlinear_activation_params (dict): Hyperparameters for activation function. 
-            pad (str): Padding function module name before dilated convolution layer.
-            pad_params (dict): Hyperparameters for padding function.
-            use_causal_conv (bool): Whether to use causal convolution.
-
-        """
-        super(ResidualStack, self).__init__()
-
-        # define residual stack part
-        if not use_causal_conv:
-            assert (kernel_size - 1) % 2 == 0, "Not support even number kernel size."
-            self.stack = torch.nn.Sequential(
-                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
-                getattr(torch.nn, pad)((kernel_size - 1) // 2 * dilation, **pad_params),
-                torch.nn.Conv1d(channels, channels, kernel_size, dilation=dilation, bias=bias),
-                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
-                torch.nn.Conv1d(channels, channels, 1, bias=bias),
-            )
-        else:
-            self.stack = torch.nn.Sequential(
-                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
-                CausalConv1d(channels, channels, kernel_size, dilation=dilation,
-                             bias=bias, pad=pad, pad_params=pad_params),
-                getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params),
-                torch.nn.Conv1d(channels, channels, 1, bias=bias),
-            )
-
-        # define extra layer for skip connection
-        self.skip_layer = torch.nn.Conv1d(channels, channels, 1, bias=bias)
-
-    def forward(self, c):
-        """Calculate forward propagation.
-
-        Args:
-            c (Tensor): Input tensor (B, channels, T).
-
-        Returns:
-            Tensor: Output tensor (B, channels, T).
-
-        """
-        return self.stack(c) + self.skip_layer(c)
diff --git a/modules/parallel_wavegan/layers/tf_layers.py b/modules/parallel_wavegan/layers/tf_layers.py
deleted file mode 100644
index c0f46bd75..000000000
--- a/modules/parallel_wavegan/layers/tf_layers.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- coding: utf-8 -*-

-# Copyright 2020 MINH ANH (@dathudeptrai)
-# MIT License (https://opensource.org/licenses/MIT)
-
-"""Tensorflow Layer modules compatible with pytorch."""
-
-import tensorflow as tf
-
-
-class TFReflectionPad1d(tf.keras.layers.Layer):
-    """Tensorflow ReflectionPad1d module."""
-
-    def __init__(self, padding_size):
-        """Initialize TFReflectionPad1d module.
-
-        Args:
-            padding_size (int): Padding size.
-
-        """
-        super(TFReflectionPad1d, self).__init__()
-        self.padding_size = padding_size
-
-    @tf.function
-    def call(self, x):
-        """Calculate forward propagation.
-
-        Args:
-            x (Tensor): Input tensor (B, T, 1, C).
-
-        Returns:
-            Tensor: Padded tensor (B, T + 2 * padding_size, 1, C).
-
-        """
-        return tf.pad(x, [[0, 0], [self.padding_size, self.padding_size], [0, 0], [0, 0]], "REFLECT")
-
-
-class TFConvTranspose1d(tf.keras.layers.Layer):
-    """Tensorflow ConvTranspose1d module."""
-
-    def __init__(self, channels, kernel_size, stride, padding):
-        """Initialize TFConvTranspose1d module.
-
-        Args:
-            channels (int): Number of channels.
-            kernel_size (int): Kernel size.
-            stride (int): Stride width.
-            padding (str): Padding type ("same" or "valid").
-
-        """
-        super(TFConvTranspose1d, self).__init__()
-        self.conv1d_transpose = tf.keras.layers.Conv2DTranspose(
-            filters=channels,
-            kernel_size=(kernel_size, 1),
-            strides=(stride, 1),
-            padding=padding,
-        )
-
-    @tf.function
-    def call(self, x):
-        """Calculate forward propagation.
-
-        Args:
-            x (Tensor): Input tensor (B, T, 1, C).
-
-        Returns:
-            Tensor: Output tensor (B, T', 1, C').
- - """ - x = self.conv1d_transpose(x) - return x - - -class TFResidualStack(tf.keras.layers.Layer): - """Tensorflow ResidualStack module.""" - - def __init__(self, - kernel_size, - channels, - dilation, - bias, - nonlinear_activation, - nonlinear_activation_params, - padding, - ): - """Initialize TFResidualStack module. - - Args: - kernel_size (int): Kernel size. - channles (int): Number of channels. - dilation (int): Dilation ine. - bias (bool): Whether to add bias parameter in convolution layers. - nonlinear_activation (str): Activation function module name. - nonlinear_activation_params (dict): Hyperparameters for activation function. - padding (str): Padding type ("same" or "valid"). - - """ - super(TFResidualStack, self).__init__() - self.block = [ - getattr(tf.keras.layers, nonlinear_activation)(**nonlinear_activation_params), - TFReflectionPad1d(dilation), - tf.keras.layers.Conv2D( - filters=channels, - kernel_size=(kernel_size, 1), - dilation_rate=(dilation, 1), - use_bias=bias, - padding="valid", - ), - getattr(tf.keras.layers, nonlinear_activation)(**nonlinear_activation_params), - tf.keras.layers.Conv2D(filters=channels, kernel_size=1, use_bias=bias) - ] - self.shortcut = tf.keras.layers.Conv2D(filters=channels, kernel_size=1, use_bias=bias) - - @tf.function - def call(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input tensor (B, T, 1, C). - - Returns: - Tensor: Output tensor (B, T, 1, C). - - """ - _x = tf.identity(x) - for i, layer in enumerate(self.block): - _x = layer(_x) - shortcut = self.shortcut(x) - return shortcut + _x diff --git a/modules/parallel_wavegan/layers/upsample.py b/modules/parallel_wavegan/layers/upsample.py deleted file mode 100644 index 18c6397c4..000000000 --- a/modules/parallel_wavegan/layers/upsample.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Upsampling module. - -This code is modified from https://github.com/r9y9/wavenet_vocoder. - -""" - -import numpy as np -import torch -import torch.nn.functional as F - -from . import Conv1d - - -class Stretch2d(torch.nn.Module): - """Stretch2d module.""" - - def __init__(self, x_scale, y_scale, mode="nearest"): - """Initialize Stretch2d module. - - Args: - x_scale (int): X scaling factor (Time axis in spectrogram). - y_scale (int): Y scaling factor (Frequency axis in spectrogram). - mode (str): Interpolation mode. - - """ - super(Stretch2d, self).__init__() - self.x_scale = x_scale - self.y_scale = y_scale - self.mode = mode - - def forward(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input tensor (B, C, F, T). - - Returns: - Tensor: Interpolated tensor (B, C, F * y_scale, T * x_scale), - - """ - return F.interpolate( - x, scale_factor=(self.y_scale, self.x_scale), mode=self.mode) - - -class Conv2d(torch.nn.Conv2d): - """Conv2d module with customized initialization.""" - - def __init__(self, *args, **kwargs): - """Initialize Conv2d module.""" - super(Conv2d, self).__init__(*args, **kwargs) - - def reset_parameters(self): - """Reset parameters.""" - self.weight.data.fill_(1. / np.prod(self.kernel_size)) - if self.bias is not None: - torch.nn.init.constant_(self.bias, 0.0) - - -class UpsampleNetwork(torch.nn.Module): - """Upsampling network module.""" - - def __init__(self, - upsample_scales, - nonlinear_activation=None, - nonlinear_activation_params={}, - interpolate_mode="nearest", - freq_axis_kernel_size=1, - use_causal_conv=False, - ): - """Initialize upsampling network module. - - Args: - upsample_scales (list): List of upsampling scales. 
- nonlinear_activation (str): Activation function name. - nonlinear_activation_params (dict): Arguments for specified activation function. - interpolate_mode (str): Interpolation mode. - freq_axis_kernel_size (int): Kernel size in the direction of frequency axis. - - """ - super(UpsampleNetwork, self).__init__() - self.use_causal_conv = use_causal_conv - self.up_layers = torch.nn.ModuleList() - for scale in upsample_scales: - # interpolation layer - stretch = Stretch2d(scale, 1, interpolate_mode) - self.up_layers += [stretch] - - # conv layer - assert (freq_axis_kernel_size - 1) % 2 == 0, "Not support even number freq axis kernel size." - freq_axis_padding = (freq_axis_kernel_size - 1) // 2 - kernel_size = (freq_axis_kernel_size, scale * 2 + 1) - if use_causal_conv: - padding = (freq_axis_padding, scale * 2) - else: - padding = (freq_axis_padding, scale) - conv = Conv2d(1, 1, kernel_size=kernel_size, padding=padding, bias=False) - self.up_layers += [conv] - - # nonlinear - if nonlinear_activation is not None: - nonlinear = getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params) - self.up_layers += [nonlinear] - - def forward(self, c): - """Calculate forward propagation. - - Args: - c : Input tensor (B, C, T). - - Returns: - Tensor: Upsampled tensor (B, C, T'), where T' = T * prod(upsample_scales). - - """ - c = c.unsqueeze(1) # (B, 1, C, T) - for f in self.up_layers: - if self.use_causal_conv and isinstance(f, Conv2d): - c = f(c)[..., :c.size(-1)] - else: - c = f(c) - return c.squeeze(1) # (B, C, T') - - -class ConvInUpsampleNetwork(torch.nn.Module): - """Convolution + upsampling network module.""" - - def __init__(self, - upsample_scales, - nonlinear_activation=None, - nonlinear_activation_params={}, - interpolate_mode="nearest", - freq_axis_kernel_size=1, - aux_channels=80, - aux_context_window=0, - use_causal_conv=False - ): - """Initialize convolution + upsampling network module. - - Args: - upsample_scales (list): List of upsampling scales. - nonlinear_activation (str): Activation function name. - nonlinear_activation_params (dict): Arguments for specified activation function. - mode (str): Interpolation mode. - freq_axis_kernel_size (int): Kernel size in the direction of frequency axis. - aux_channels (int): Number of channels of pre-convolutional layer. - aux_context_window (int): Context window size of the pre-convolutional layer. - use_causal_conv (bool): Whether to use causal structure. - - """ - super(ConvInUpsampleNetwork, self).__init__() - self.aux_context_window = aux_context_window - self.use_causal_conv = use_causal_conv and aux_context_window > 0 - # To capture wide-context information in conditional features - kernel_size = aux_context_window + 1 if use_causal_conv else 2 * aux_context_window + 1 - # NOTE(kan-bayashi): Here do not use padding because the input is already padded - self.conv_in = Conv1d(aux_channels, aux_channels, kernel_size=kernel_size, bias=False) - self.upsample = UpsampleNetwork( - upsample_scales=upsample_scales, - nonlinear_activation=nonlinear_activation, - nonlinear_activation_params=nonlinear_activation_params, - interpolate_mode=interpolate_mode, - freq_axis_kernel_size=freq_axis_kernel_size, - use_causal_conv=use_causal_conv, - ) - - def forward(self, c): - """Calculate forward propagation. - - Args: - c : Input tensor (B, C, T'). - - Returns: - Tensor: Upsampled tensor (B, C, T), - where T = (T' - aux_context_window * 2) * prod(upsample_scales). - - Note: - The length of inputs considers the context window size. 
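-
-            For example (illustrative numbers, not defaults): with
-            aux_context_window=2 and upsample_scales=[4, 4, 4, 4]
-            (prod = 256), an input of T' = 104 frames yields
-            T = (104 - 2 * 2) * 256 = 25600 samples.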
- - """ - c_ = self.conv_in(c) - c = c_[:, :, :-self.aux_context_window] if self.use_causal_conv else c_ - return self.upsample(c) diff --git a/modules/parallel_wavegan/losses/__init__.py b/modules/parallel_wavegan/losses/__init__.py deleted file mode 100644 index b03080a90..000000000 --- a/modules/parallel_wavegan/losses/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .stft_loss import * # NOQA diff --git a/modules/parallel_wavegan/losses/stft_loss.py b/modules/parallel_wavegan/losses/stft_loss.py deleted file mode 100644 index 74d2aa21a..000000000 --- a/modules/parallel_wavegan/losses/stft_loss.py +++ /dev/null @@ -1,153 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""STFT-based Loss modules.""" - -import torch -import torch.nn.functional as F - - -def stft(x, fft_size, hop_size, win_length, window): - """Perform STFT and convert to magnitude spectrogram. - - Args: - x (Tensor): Input signal tensor (B, T). - fft_size (int): FFT size. - hop_size (int): Hop size. - win_length (int): Window length. - window (str): Window function type. - - Returns: - Tensor: Magnitude spectrogram (B, #frames, fft_size // 2 + 1). - - """ - x_stft = torch.stft(x, fft_size, hop_size, win_length, window) - real = x_stft[..., 0] - imag = x_stft[..., 1] - - # NOTE(kan-bayashi): clamp is needed to avoid nan or inf - return torch.sqrt(torch.clamp(real ** 2 + imag ** 2, min=1e-7)).transpose(2, 1) - - -class SpectralConvergengeLoss(torch.nn.Module): - """Spectral convergence loss module.""" - - def __init__(self): - """Initilize spectral convergence loss module.""" - super(SpectralConvergengeLoss, self).__init__() - - def forward(self, x_mag, y_mag): - """Calculate forward propagation. - - Args: - x_mag (Tensor): Magnitude spectrogram of predicted signal (B, #frames, #freq_bins). - y_mag (Tensor): Magnitude spectrogram of groundtruth signal (B, #frames, #freq_bins). - - Returns: - Tensor: Spectral convergence loss value. - - """ - return torch.norm(y_mag - x_mag, p="fro") / torch.norm(y_mag, p="fro") - - -class LogSTFTMagnitudeLoss(torch.nn.Module): - """Log STFT magnitude loss module.""" - - def __init__(self): - """Initilize los STFT magnitude loss module.""" - super(LogSTFTMagnitudeLoss, self).__init__() - - def forward(self, x_mag, y_mag): - """Calculate forward propagation. - - Args: - x_mag (Tensor): Magnitude spectrogram of predicted signal (B, #frames, #freq_bins). - y_mag (Tensor): Magnitude spectrogram of groundtruth signal (B, #frames, #freq_bins). - - Returns: - Tensor: Log STFT magnitude loss value. - - """ - return F.l1_loss(torch.log(y_mag), torch.log(x_mag)) - - -class STFTLoss(torch.nn.Module): - """STFT loss module.""" - - def __init__(self, fft_size=1024, shift_size=120, win_length=600, window="hann_window"): - """Initialize STFT loss module.""" - super(STFTLoss, self).__init__() - self.fft_size = fft_size - self.shift_size = shift_size - self.win_length = win_length - self.window = getattr(torch, window)(win_length) - self.spectral_convergenge_loss = SpectralConvergengeLoss() - self.log_stft_magnitude_loss = LogSTFTMagnitudeLoss() - - def forward(self, x, y): - """Calculate forward propagation. - - Args: - x (Tensor): Predicted signal (B, T). - y (Tensor): Groundtruth signal (B, T). - - Returns: - Tensor: Spectral convergence loss value. - Tensor: Log STFT magnitude loss value. 
- - """ - x_mag = stft(x, self.fft_size, self.shift_size, self.win_length, self.window) - y_mag = stft(y, self.fft_size, self.shift_size, self.win_length, self.window) - sc_loss = self.spectral_convergenge_loss(x_mag, y_mag) - mag_loss = self.log_stft_magnitude_loss(x_mag, y_mag) - - return sc_loss, mag_loss - - -class MultiResolutionSTFTLoss(torch.nn.Module): - """Multi resolution STFT loss module.""" - - def __init__(self, - fft_sizes=[1024, 2048, 512], - hop_sizes=[120, 240, 50], - win_lengths=[600, 1200, 240], - window="hann_window"): - """Initialize Multi resolution STFT loss module. - - Args: - fft_sizes (list): List of FFT sizes. - hop_sizes (list): List of hop sizes. - win_lengths (list): List of window lengths. - window (str): Window function type. - - """ - super(MultiResolutionSTFTLoss, self).__init__() - assert len(fft_sizes) == len(hop_sizes) == len(win_lengths) - self.stft_losses = torch.nn.ModuleList() - for fs, ss, wl in zip(fft_sizes, hop_sizes, win_lengths): - self.stft_losses += [STFTLoss(fs, ss, wl, window)] - - def forward(self, x, y): - """Calculate forward propagation. - - Args: - x (Tensor): Predicted signal (B, T). - y (Tensor): Groundtruth signal (B, T). - - Returns: - Tensor: Multi resolution spectral convergence loss value. - Tensor: Multi resolution log STFT magnitude loss value. - - """ - sc_loss = 0.0 - mag_loss = 0.0 - for f in self.stft_losses: - sc_l, mag_l = f(x, y) - sc_loss += sc_l - mag_loss += mag_l - sc_loss /= len(self.stft_losses) - mag_loss /= len(self.stft_losses) - - return sc_loss, mag_loss diff --git a/modules/parallel_wavegan/models/__init__.py b/modules/parallel_wavegan/models/__init__.py deleted file mode 100644 index 4803ba6b2..000000000 --- a/modules/parallel_wavegan/models/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .melgan import * # NOQA -from .parallel_wavegan import * # NOQA diff --git a/modules/parallel_wavegan/models/melgan.py b/modules/parallel_wavegan/models/melgan.py deleted file mode 100644 index e021ae481..000000000 --- a/modules/parallel_wavegan/models/melgan.py +++ /dev/null @@ -1,427 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""MelGAN Modules.""" - -import logging - -import numpy as np -import torch - -from modules.parallel_wavegan.layers import CausalConv1d -from modules.parallel_wavegan.layers import CausalConvTranspose1d -from modules.parallel_wavegan.layers import ResidualStack - - -class MelGANGenerator(torch.nn.Module): - """MelGAN generator module.""" - - def __init__(self, - in_channels=80, - out_channels=1, - kernel_size=7, - channels=512, - bias=True, - upsample_scales=[8, 8, 2, 2], - stack_kernel_size=3, - stacks=3, - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - pad="ReflectionPad1d", - pad_params={}, - use_final_nonlinear_activation=True, - use_weight_norm=True, - use_causal_conv=False, - ): - """Initialize MelGANGenerator module. - - Args: - in_channels (int): Number of input channels. - out_channels (int): Number of output channels. - kernel_size (int): Kernel size of initial and final conv layer. - channels (int): Initial number of channels for conv layer. - bias (bool): Whether to add bias parameter in convolution layers. - upsample_scales (list): List of upsampling scales. - stack_kernel_size (int): Kernel size of dilated conv layers in residual stack. - stacks (int): Number of stacks in a single residual stack. - nonlinear_activation (str): Activation function module name. 
- nonlinear_activation_params (dict): Hyperparameters for activation function. - pad (str): Padding function module name before dilated convolution layer. - pad_params (dict): Hyperparameters for padding function. - use_final_nonlinear_activation (torch.nn.Module): Activation function for the final layer. - use_weight_norm (bool): Whether to use weight norm. - If set to true, it will be applied to all of the conv layers. - use_causal_conv (bool): Whether to use causal convolution. - - """ - super(MelGANGenerator, self).__init__() - - # check hyper parameters is valid - assert channels >= np.prod(upsample_scales) - assert channels % (2 ** len(upsample_scales)) == 0 - if not use_causal_conv: - assert (kernel_size - 1) % 2 == 0, "Not support even number kernel size." - - # add initial layer - layers = [] - if not use_causal_conv: - layers += [ - getattr(torch.nn, pad)((kernel_size - 1) // 2, **pad_params), - torch.nn.Conv1d(in_channels, channels, kernel_size, bias=bias), - ] - else: - layers += [ - CausalConv1d(in_channels, channels, kernel_size, - bias=bias, pad=pad, pad_params=pad_params), - ] - - for i, upsample_scale in enumerate(upsample_scales): - # add upsampling layer - layers += [getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params)] - if not use_causal_conv: - layers += [ - torch.nn.ConvTranspose1d( - channels // (2 ** i), - channels // (2 ** (i + 1)), - upsample_scale * 2, - stride=upsample_scale, - padding=upsample_scale // 2 + upsample_scale % 2, - output_padding=upsample_scale % 2, - bias=bias, - ) - ] - else: - layers += [ - CausalConvTranspose1d( - channels // (2 ** i), - channels // (2 ** (i + 1)), - upsample_scale * 2, - stride=upsample_scale, - bias=bias, - ) - ] - - # add residual stack - for j in range(stacks): - layers += [ - ResidualStack( - kernel_size=stack_kernel_size, - channels=channels // (2 ** (i + 1)), - dilation=stack_kernel_size ** j, - bias=bias, - nonlinear_activation=nonlinear_activation, - nonlinear_activation_params=nonlinear_activation_params, - pad=pad, - pad_params=pad_params, - use_causal_conv=use_causal_conv, - ) - ] - - # add final layer - layers += [getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params)] - if not use_causal_conv: - layers += [ - getattr(torch.nn, pad)((kernel_size - 1) // 2, **pad_params), - torch.nn.Conv1d(channels // (2 ** (i + 1)), out_channels, kernel_size, bias=bias), - ] - else: - layers += [ - CausalConv1d(channels // (2 ** (i + 1)), out_channels, kernel_size, - bias=bias, pad=pad, pad_params=pad_params), - ] - if use_final_nonlinear_activation: - layers += [torch.nn.Tanh()] - - # define the model as a single function - self.melgan = torch.nn.Sequential(*layers) - - # apply weight norm - if use_weight_norm: - self.apply_weight_norm() - - # reset parameters - self.reset_parameters() - - def forward(self, c): - """Calculate forward propagation. - - Args: - c (Tensor): Input tensor (B, channels, T). - - Returns: - Tensor: Output tensor (B, 1, T ** prod(upsample_scales)). 
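-
-            For example (illustrative): with the default
-            upsample_scales=[8, 8, 2, 2], prod(upsample_scales) = 256,
-            so T input mel frames produce a waveform of 256 * T samples.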
- - """ - return self.melgan(c) - - def remove_weight_norm(self): - """Remove weight normalization module from all of the layers.""" - def _remove_weight_norm(m): - try: - logging.debug(f"Weight norm is removed from {m}.") - torch.nn.utils.remove_weight_norm(m) - except ValueError: # this module didn't have weight norm - return - - self.apply(_remove_weight_norm) - - def apply_weight_norm(self): - """Apply weight normalization module from all of the layers.""" - def _apply_weight_norm(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.ConvTranspose1d): - torch.nn.utils.weight_norm(m) - logging.debug(f"Weight norm is applied to {m}.") - - self.apply(_apply_weight_norm) - - def reset_parameters(self): - """Reset parameters. - - This initialization follows official implementation manner. - https://github.com/descriptinc/melgan-neurips/blob/master/spec2wav/modules.py - - """ - def _reset_parameters(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.ConvTranspose1d): - m.weight.data.normal_(0.0, 0.02) - logging.debug(f"Reset parameters in {m}.") - - self.apply(_reset_parameters) - - -class MelGANDiscriminator(torch.nn.Module): - """MelGAN discriminator module.""" - - def __init__(self, - in_channels=1, - out_channels=1, - kernel_sizes=[5, 3], - channels=16, - max_downsample_channels=1024, - bias=True, - downsample_scales=[4, 4, 4, 4], - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - pad="ReflectionPad1d", - pad_params={}, - ): - """Initilize MelGAN discriminator module. - - Args: - in_channels (int): Number of input channels. - out_channels (int): Number of output channels. - kernel_sizes (list): List of two kernel sizes. The prod will be used for the first conv layer, - and the first and the second kernel sizes will be used for the last two layers. - For example if kernel_sizes = [5, 3], the first layer kernel size will be 5 * 3 = 15, - the last two layers' kernel size will be 5 and 3, respectively. - channels (int): Initial number of channels for conv layer. - max_downsample_channels (int): Maximum number of channels for downsampling layers. - bias (bool): Whether to add bias parameter in convolution layers. - downsample_scales (list): List of downsampling scales. - nonlinear_activation (str): Activation function module name. - nonlinear_activation_params (dict): Hyperparameters for activation function. - pad (str): Padding function module name before dilated convolution layer. - pad_params (dict): Hyperparameters for padding function. 
- - """ - super(MelGANDiscriminator, self).__init__() - self.layers = torch.nn.ModuleList() - - # check kernel size is valid - assert len(kernel_sizes) == 2 - assert kernel_sizes[0] % 2 == 1 - assert kernel_sizes[1] % 2 == 1 - - # add first layer - self.layers += [ - torch.nn.Sequential( - getattr(torch.nn, pad)((np.prod(kernel_sizes) - 1) // 2, **pad_params), - torch.nn.Conv1d(in_channels, channels, np.prod(kernel_sizes), bias=bias), - getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params), - ) - ] - - # add downsample layers - in_chs = channels - for downsample_scale in downsample_scales: - out_chs = min(in_chs * downsample_scale, max_downsample_channels) - self.layers += [ - torch.nn.Sequential( - torch.nn.Conv1d( - in_chs, out_chs, - kernel_size=downsample_scale * 10 + 1, - stride=downsample_scale, - padding=downsample_scale * 5, - groups=in_chs // 4, - bias=bias, - ), - getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params), - ) - ] - in_chs = out_chs - - # add final layers - out_chs = min(in_chs * 2, max_downsample_channels) - self.layers += [ - torch.nn.Sequential( - torch.nn.Conv1d( - in_chs, out_chs, kernel_sizes[0], - padding=(kernel_sizes[0] - 1) // 2, - bias=bias, - ), - getattr(torch.nn, nonlinear_activation)(**nonlinear_activation_params), - ) - ] - self.layers += [ - torch.nn.Conv1d( - out_chs, out_channels, kernel_sizes[1], - padding=(kernel_sizes[1] - 1) // 2, - bias=bias, - ), - ] - - def forward(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input noise signal (B, 1, T). - - Returns: - List: List of output tensors of each layer. - - """ - outs = [] - for f in self.layers: - x = f(x) - outs += [x] - - return outs - - -class MelGANMultiScaleDiscriminator(torch.nn.Module): - """MelGAN multi-scale discriminator module.""" - - def __init__(self, - in_channels=1, - out_channels=1, - scales=3, - downsample_pooling="AvgPool1d", - # follow the official implementation setting - downsample_pooling_params={ - "kernel_size": 4, - "stride": 2, - "padding": 1, - "count_include_pad": False, - }, - kernel_sizes=[5, 3], - channels=16, - max_downsample_channels=1024, - bias=True, - downsample_scales=[4, 4, 4, 4], - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - pad="ReflectionPad1d", - pad_params={}, - use_weight_norm=True, - ): - """Initilize MelGAN multi-scale discriminator module. - - Args: - in_channels (int): Number of input channels. - out_channels (int): Number of output channels. - downsample_pooling (str): Pooling module name for downsampling of the inputs. - downsample_pooling_params (dict): Parameters for the above pooling module. - kernel_sizes (list): List of two kernel sizes. The sum will be used for the first conv layer, - and the first and the second kernel sizes will be used for the last two layers. - channels (int): Initial number of channels for conv layer. - max_downsample_channels (int): Maximum number of channels for downsampling layers. - bias (bool): Whether to add bias parameter in convolution layers. - downsample_scales (list): List of downsampling scales. - nonlinear_activation (str): Activation function module name. - nonlinear_activation_params (dict): Hyperparameters for activation function. - pad (str): Padding function module name before dilated convolution layer. - pad_params (dict): Hyperparameters for padding function. - use_causal_conv (bool): Whether to use causal convolution. 
- - """ - super(MelGANMultiScaleDiscriminator, self).__init__() - self.discriminators = torch.nn.ModuleList() - - # add discriminators - for _ in range(scales): - self.discriminators += [ - MelGANDiscriminator( - in_channels=in_channels, - out_channels=out_channels, - kernel_sizes=kernel_sizes, - channels=channels, - max_downsample_channels=max_downsample_channels, - bias=bias, - downsample_scales=downsample_scales, - nonlinear_activation=nonlinear_activation, - nonlinear_activation_params=nonlinear_activation_params, - pad=pad, - pad_params=pad_params, - ) - ] - self.pooling = getattr(torch.nn, downsample_pooling)(**downsample_pooling_params) - - # apply weight norm - if use_weight_norm: - self.apply_weight_norm() - - # reset parameters - self.reset_parameters() - - def forward(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input noise signal (B, 1, T). - - Returns: - List: List of list of each discriminator outputs, which consists of each layer output tensors. - - """ - outs = [] - for f in self.discriminators: - outs += [f(x)] - x = self.pooling(x) - - return outs - - def remove_weight_norm(self): - """Remove weight normalization module from all of the layers.""" - def _remove_weight_norm(m): - try: - logging.debug(f"Weight norm is removed from {m}.") - torch.nn.utils.remove_weight_norm(m) - except ValueError: # this module didn't have weight norm - return - - self.apply(_remove_weight_norm) - - def apply_weight_norm(self): - """Apply weight normalization module from all of the layers.""" - def _apply_weight_norm(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.ConvTranspose1d): - torch.nn.utils.weight_norm(m) - logging.debug(f"Weight norm is applied to {m}.") - - self.apply(_apply_weight_norm) - - def reset_parameters(self): - """Reset parameters. - - This initialization follows official implementation manner. - https://github.com/descriptinc/melgan-neurips/blob/master/spec2wav/modules.py - - """ - def _reset_parameters(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.ConvTranspose1d): - m.weight.data.normal_(0.0, 0.02) - logging.debug(f"Reset parameters in {m}.") - - self.apply(_reset_parameters) diff --git a/modules/parallel_wavegan/models/parallel_wavegan.py b/modules/parallel_wavegan/models/parallel_wavegan.py deleted file mode 100644 index c63b59f67..000000000 --- a/modules/parallel_wavegan/models/parallel_wavegan.py +++ /dev/null @@ -1,434 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""Parallel WaveGAN Modules.""" - -import logging -import math - -import torch -from torch import nn - -from modules.parallel_wavegan.layers import Conv1d -from modules.parallel_wavegan.layers import Conv1d1x1 -from modules.parallel_wavegan.layers import ResidualBlock -from modules.parallel_wavegan.layers import upsample -from modules.parallel_wavegan import models - - -class ParallelWaveGANGenerator(torch.nn.Module): - """Parallel WaveGAN Generator module.""" - - def __init__(self, - in_channels=1, - out_channels=1, - kernel_size=3, - layers=30, - stacks=3, - residual_channels=64, - gate_channels=128, - skip_channels=64, - aux_channels=80, - aux_context_window=2, - dropout=0.0, - bias=True, - use_weight_norm=True, - use_causal_conv=False, - upsample_conditional_features=True, - upsample_net="ConvInUpsampleNetwork", - upsample_params={"upsample_scales": [4, 4, 4, 4]}, - use_pitch_embed=False, - ): - """Initialize Parallel WaveGAN Generator module. 
- - Args: - in_channels (int): Number of input channels. - out_channels (int): Number of output channels. - kernel_size (int): Kernel size of dilated convolution. - layers (int): Number of residual block layers. - stacks (int): Number of stacks i.e., dilation cycles. - residual_channels (int): Number of channels in residual conv. - gate_channels (int): Number of channels in gated conv. - skip_channels (int): Number of channels in skip conv. - aux_channels (int): Number of channels for auxiliary feature conv. - aux_context_window (int): Context window size for auxiliary feature. - dropout (float): Dropout rate. 0.0 means no dropout applied. - bias (bool): Whether to use bias parameter in conv layer. - use_weight_norm (bool): Whether to use weight norm. - If set to true, it will be applied to all of the conv layers. - use_causal_conv (bool): Whether to use causal structure. - upsample_conditional_features (bool): Whether to use upsampling network. - upsample_net (str): Upsampling network architecture. - upsample_params (dict): Upsampling network parameters. - - """ - super(ParallelWaveGANGenerator, self).__init__() - self.in_channels = in_channels - self.out_channels = out_channels - self.aux_channels = aux_channels - self.layers = layers - self.stacks = stacks - self.kernel_size = kernel_size - - # check the number of layers and stacks - assert layers % stacks == 0 - layers_per_stack = layers // stacks - - # define first convolution - self.first_conv = Conv1d1x1(in_channels, residual_channels, bias=True) - - # define conv + upsampling network - if upsample_conditional_features: - upsample_params.update({ - "use_causal_conv": use_causal_conv, - }) - if upsample_net == "MelGANGenerator": - assert aux_context_window == 0 - upsample_params.update({ - "use_weight_norm": False, # not to apply twice - "use_final_nonlinear_activation": False, - }) - self.upsample_net = getattr(models, upsample_net)(**upsample_params) - else: - if upsample_net == "ConvInUpsampleNetwork": - upsample_params.update({ - "aux_channels": aux_channels, - "aux_context_window": aux_context_window, - }) - self.upsample_net = getattr(upsample, upsample_net)(**upsample_params) - else: - self.upsample_net = None - - # define residual blocks - self.conv_layers = torch.nn.ModuleList() - for layer in range(layers): - dilation = 2 ** (layer % layers_per_stack) - conv = ResidualBlock( - kernel_size=kernel_size, - residual_channels=residual_channels, - gate_channels=gate_channels, - skip_channels=skip_channels, - aux_channels=aux_channels, - dilation=dilation, - dropout=dropout, - bias=bias, - use_causal_conv=use_causal_conv, - ) - self.conv_layers += [conv] - - # define output layers - self.last_conv_layers = torch.nn.ModuleList([ - torch.nn.ReLU(inplace=True), - Conv1d1x1(skip_channels, skip_channels, bias=True), - torch.nn.ReLU(inplace=True), - Conv1d1x1(skip_channels, out_channels, bias=True), - ]) - - self.use_pitch_embed = use_pitch_embed - if use_pitch_embed: - self.pitch_embed = nn.Embedding(300, aux_channels, 0) - self.c_proj = nn.Linear(2 * aux_channels, aux_channels) - - # apply weight norm - if use_weight_norm: - self.apply_weight_norm() - - def forward(self, x, c=None, pitch=None, **kwargs): - """Calculate forward propagation. - - Args: - x (Tensor): Input noise signal (B, C_in, T). - c (Tensor): Local conditioning auxiliary features (B, C ,T'). - pitch (Tensor): Local conditioning pitch (B, T'). 
- - Returns: - Tensor: Output tensor (B, C_out, T) - - """ - # perform upsampling - if c is not None and self.upsample_net is not None: - if self.use_pitch_embed: - p = self.pitch_embed(pitch) - c = self.c_proj(torch.cat([c.transpose(1, 2), p], -1)).transpose(1, 2) - c = self.upsample_net(c) - assert c.size(-1) == x.size(-1), (c.size(-1), x.size(-1)) - - # encode to hidden representation - x = self.first_conv(x) - skips = 0 - for f in self.conv_layers: - x, h = f(x, c) - skips += h - skips *= math.sqrt(1.0 / len(self.conv_layers)) - - # apply final layers - x = skips - for f in self.last_conv_layers: - x = f(x) - - return x - - def remove_weight_norm(self): - """Remove weight normalization module from all of the layers.""" - def _remove_weight_norm(m): - try: - logging.debug(f"Weight norm is removed from {m}.") - torch.nn.utils.remove_weight_norm(m) - except ValueError: # this module didn't have weight norm - return - - self.apply(_remove_weight_norm) - - def apply_weight_norm(self): - """Apply weight normalization module from all of the layers.""" - def _apply_weight_norm(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.Conv2d): - torch.nn.utils.weight_norm(m) - logging.debug(f"Weight norm is applied to {m}.") - - self.apply(_apply_weight_norm) - - @staticmethod - def _get_receptive_field_size(layers, stacks, kernel_size, - dilation=lambda x: 2 ** x): - assert layers % stacks == 0 - layers_per_cycle = layers // stacks - dilations = [dilation(i % layers_per_cycle) for i in range(layers)] - return (kernel_size - 1) * sum(dilations) + 1 - - @property - def receptive_field_size(self): - """Return receptive field size.""" - return self._get_receptive_field_size(self.layers, self.stacks, self.kernel_size) - - -class ParallelWaveGANDiscriminator(torch.nn.Module): - """Parallel WaveGAN Discriminator module.""" - - def __init__(self, - in_channels=1, - out_channels=1, - kernel_size=3, - layers=10, - conv_channels=64, - dilation_factor=1, - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - bias=True, - use_weight_norm=True, - ): - """Initialize Parallel WaveGAN Discriminator module. - - Args: - in_channels (int): Number of input channels. - out_channels (int): Number of output channels. - kernel_size (int): Number of output channels. - layers (int): Number of conv layers. - conv_channels (int): Number of chnn layers. - dilation_factor (int): Dilation factor. For example, if dilation_factor = 2, - the dilation will be 2, 4, 8, ..., and so on. - nonlinear_activation (str): Nonlinear function after each conv. - nonlinear_activation_params (dict): Nonlinear function parameters - bias (bool): Whether to use bias parameter in conv. - use_weight_norm (bool) Whether to use weight norm. - If set to true, it will be applied to all of the conv layers. - - """ - super(ParallelWaveGANDiscriminator, self).__init__() - assert (kernel_size - 1) % 2 == 0, "Not support even number kernel size." - assert dilation_factor > 0, "Dilation factor must be > 0." 
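-
-        # NOTE (illustrative, derived from the construction below): with
-        # dilation_factor == 1 the dilation grows linearly with depth
-        # (1, 1, 2, 3, ...); with dilation_factor == d > 1 it grows
-        # exponentially (1, d, d**2, ...). For the defaults (kernel_size=3,
-        # layers=10, dilation_factor=1) the dilated layers use dilations
-        # [1, 1, 2, 3, 4, 5, 6, 7, 8] and the final layer uses dilation 1,
-        # giving a receptive field of 1 + 2 * (37 + 1) = 77 samples.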
- self.conv_layers = torch.nn.ModuleList() - conv_in_channels = in_channels - for i in range(layers - 1): - if i == 0: - dilation = 1 - else: - dilation = i if dilation_factor == 1 else dilation_factor ** i - conv_in_channels = conv_channels - padding = (kernel_size - 1) // 2 * dilation - conv_layer = [ - Conv1d(conv_in_channels, conv_channels, - kernel_size=kernel_size, padding=padding, - dilation=dilation, bias=bias), - getattr(torch.nn, nonlinear_activation)(inplace=True, **nonlinear_activation_params) - ] - self.conv_layers += conv_layer - padding = (kernel_size - 1) // 2 - last_conv_layer = Conv1d( - conv_in_channels, out_channels, - kernel_size=kernel_size, padding=padding, bias=bias) - self.conv_layers += [last_conv_layer] - - # apply weight norm - if use_weight_norm: - self.apply_weight_norm() - - def forward(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input noise signal (B, 1, T). - - Returns: - Tensor: Output tensor (B, 1, T) - - """ - for f in self.conv_layers: - x = f(x) - return x - - def apply_weight_norm(self): - """Apply weight normalization module from all of the layers.""" - def _apply_weight_norm(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.Conv2d): - torch.nn.utils.weight_norm(m) - logging.debug(f"Weight norm is applied to {m}.") - - self.apply(_apply_weight_norm) - - def remove_weight_norm(self): - """Remove weight normalization module from all of the layers.""" - def _remove_weight_norm(m): - try: - logging.debug(f"Weight norm is removed from {m}.") - torch.nn.utils.remove_weight_norm(m) - except ValueError: # this module didn't have weight norm - return - - self.apply(_remove_weight_norm) - - -class ResidualParallelWaveGANDiscriminator(torch.nn.Module): - """Parallel WaveGAN Discriminator module.""" - - def __init__(self, - in_channels=1, - out_channels=1, - kernel_size=3, - layers=30, - stacks=3, - residual_channels=64, - gate_channels=128, - skip_channels=64, - dropout=0.0, - bias=True, - use_weight_norm=True, - use_causal_conv=False, - nonlinear_activation="LeakyReLU", - nonlinear_activation_params={"negative_slope": 0.2}, - ): - """Initialize Parallel WaveGAN Discriminator module. - - Args: - in_channels (int): Number of input channels. - out_channels (int): Number of output channels. - kernel_size (int): Kernel size of dilated convolution. - layers (int): Number of residual block layers. - stacks (int): Number of stacks i.e., dilation cycles. - residual_channels (int): Number of channels in residual conv. - gate_channels (int): Number of channels in gated conv. - skip_channels (int): Number of channels in skip conv. - dropout (float): Dropout rate. 0.0 means no dropout applied. - bias (bool): Whether to use bias parameter in conv. - use_weight_norm (bool): Whether to use weight norm. - If set to true, it will be applied to all of the conv layers. - use_causal_conv (bool): Whether to use causal structure. - nonlinear_activation_params (dict): Nonlinear function parameters - - """ - super(ResidualParallelWaveGANDiscriminator, self).__init__() - assert (kernel_size - 1) % 2 == 0, "Not support even number kernel size." 
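-
-        # NOTE (illustrative summary of the construction below): this
-        # discriminator reuses the WaveNet-style ResidualBlock with
-        # aux_channels=-1, i.e. without local conditioning; in forward()
-        # the skip outputs of all blocks are summed and scaled by
-        # sqrt(1 / num_blocks) before the final 1x1 convolutions.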
- - self.in_channels = in_channels - self.out_channels = out_channels - self.layers = layers - self.stacks = stacks - self.kernel_size = kernel_size - - # check the number of layers and stacks - assert layers % stacks == 0 - layers_per_stack = layers // stacks - - # define first convolution - self.first_conv = torch.nn.Sequential( - Conv1d1x1(in_channels, residual_channels, bias=True), - getattr(torch.nn, nonlinear_activation)( - inplace=True, **nonlinear_activation_params), - ) - - # define residual blocks - self.conv_layers = torch.nn.ModuleList() - for layer in range(layers): - dilation = 2 ** (layer % layers_per_stack) - conv = ResidualBlock( - kernel_size=kernel_size, - residual_channels=residual_channels, - gate_channels=gate_channels, - skip_channels=skip_channels, - aux_channels=-1, - dilation=dilation, - dropout=dropout, - bias=bias, - use_causal_conv=use_causal_conv, - ) - self.conv_layers += [conv] - - # define output layers - self.last_conv_layers = torch.nn.ModuleList([ - getattr(torch.nn, nonlinear_activation)( - inplace=True, **nonlinear_activation_params), - Conv1d1x1(skip_channels, skip_channels, bias=True), - getattr(torch.nn, nonlinear_activation)( - inplace=True, **nonlinear_activation_params), - Conv1d1x1(skip_channels, out_channels, bias=True), - ]) - - # apply weight norm - if use_weight_norm: - self.apply_weight_norm() - - def forward(self, x): - """Calculate forward propagation. - - Args: - x (Tensor): Input noise signal (B, 1, T). - - Returns: - Tensor: Output tensor (B, 1, T) - - """ - x = self.first_conv(x) - - skips = 0 - for f in self.conv_layers: - x, h = f(x, None) - skips += h - skips *= math.sqrt(1.0 / len(self.conv_layers)) - - # apply final layers - x = skips - for f in self.last_conv_layers: - x = f(x) - return x - - def apply_weight_norm(self): - """Apply weight normalization module from all of the layers.""" - def _apply_weight_norm(m): - if isinstance(m, torch.nn.Conv1d) or isinstance(m, torch.nn.Conv2d): - torch.nn.utils.weight_norm(m) - logging.debug(f"Weight norm is applied to {m}.") - - self.apply(_apply_weight_norm) - - def remove_weight_norm(self): - """Remove weight normalization module from all of the layers.""" - def _remove_weight_norm(m): - try: - logging.debug(f"Weight norm is removed from {m}.") - torch.nn.utils.remove_weight_norm(m) - except ValueError: # this module didn't have weight norm - return - - self.apply(_remove_weight_norm) diff --git a/modules/parallel_wavegan/models/source.py b/modules/parallel_wavegan/models/source.py deleted file mode 100644 index f2a006e53..000000000 --- a/modules/parallel_wavegan/models/source.py +++ /dev/null @@ -1,538 +0,0 @@ -import torch -import numpy as np -import sys -import torch.nn.functional as torch_nn_func - - -class SineGen(torch.nn.Module): - """ Definition of sine generator - SineGen(samp_rate, harmonic_num = 0, - sine_amp = 0.1, noise_std = 0.003, - voiced_threshold = 0, - flag_for_pulse=False) - - samp_rate: sampling rate in Hz - harmonic_num: number of harmonic overtones (default 0) - sine_amp: amplitude of sine-wavefrom (default 0.1) - noise_std: std of Gaussian noise (default 0.003) - voiced_thoreshold: F0 threshold for U/V classification (default 0) - flag_for_pulse: this SinGen is used inside PulseGen (default False) - - Note: when flag_for_pulse is True, the first time step of a voiced - segment is always sin(np.pi) or cos(0) - """ - - def __init__(self, samp_rate, harmonic_num=0, - sine_amp=0.1, noise_std=0.003, - voiced_threshold=0, - flag_for_pulse=False): - super(SineGen, 
self).__init__() - self.sine_amp = sine_amp - self.noise_std = noise_std - self.harmonic_num = harmonic_num - self.dim = self.harmonic_num + 1 - self.sampling_rate = samp_rate - self.voiced_threshold = voiced_threshold - self.flag_for_pulse = flag_for_pulse - - def _f02uv(self, f0): - # generate uv signal - uv = torch.ones_like(f0) - uv = uv * (f0 > self.voiced_threshold) - return uv - - def _f02sine(self, f0_values): - """ f0_values: (batchsize, length, dim) - where dim indicates fundamental tone and overtones - """ - # convert to F0 in rad. The interger part n can be ignored - # because 2 * np.pi * n doesn't affect phase - rad_values = (f0_values / self.sampling_rate) % 1 - - # initial phase noise (no noise for fundamental component) - rand_ini = torch.rand(f0_values.shape[0], f0_values.shape[2], \ - device=f0_values.device) - rand_ini[:, 0] = 0 - rad_values[:, 0, :] = rad_values[:, 0, :] + rand_ini - - # instantanouse phase sine[t] = sin(2*pi \sum_i=1 ^{t} rad) - if not self.flag_for_pulse: - # for normal case - - # To prevent torch.cumsum numerical overflow, - # it is necessary to add -1 whenever \sum_k=1^n rad_value_k > 1. - # Buffer tmp_over_one_idx indicates the time step to add -1. - # This will not change F0 of sine because (x-1) * 2*pi = x * 2*pi - tmp_over_one = torch.cumsum(rad_values, 1) % 1 - tmp_over_one_idx = (tmp_over_one[:, 1:, :] - - tmp_over_one[:, :-1, :]) < 0 - cumsum_shift = torch.zeros_like(rad_values) - cumsum_shift[:, 1:, :] = tmp_over_one_idx * -1.0 - - sines = torch.sin(torch.cumsum(rad_values + cumsum_shift, dim=1) - * 2 * np.pi) - else: - # If necessary, make sure that the first time step of every - # voiced segments is sin(pi) or cos(0) - # This is used for pulse-train generation - - # identify the last time step in unvoiced segments - uv = self._f02uv(f0_values) - uv_1 = torch.roll(uv, shifts=-1, dims=1) - uv_1[:, -1, :] = 1 - u_loc = (uv < 1) * (uv_1 > 0) - - # get the instantanouse phase - tmp_cumsum = torch.cumsum(rad_values, dim=1) - # different batch needs to be processed differently - for idx in range(f0_values.shape[0]): - temp_sum = tmp_cumsum[idx, u_loc[idx, :, 0], :] - temp_sum[1:, :] = temp_sum[1:, :] - temp_sum[0:-1, :] - # stores the accumulation of i.phase within - # each voiced segments - tmp_cumsum[idx, :, :] = 0 - tmp_cumsum[idx, u_loc[idx, :, 0], :] = temp_sum - - # rad_values - tmp_cumsum: remove the accumulation of i.phase - # within the previous voiced segment. - i_phase = torch.cumsum(rad_values - tmp_cumsum, dim=1) - - # get the sines - sines = torch.cos(i_phase * 2 * np.pi) - return sines - - def forward(self, f0): - """ sine_tensor, uv = forward(f0) - input F0: tensor(batchsize=1, length, dim=1) - f0 for unvoiced steps should be 0 - output sine_tensor: tensor(batchsize=1, length, dim) - output uv: tensor(batchsize=1, length, 1) - """ - with torch.no_grad(): - f0_buf = torch.zeros(f0.shape[0], f0.shape[1], self.dim, - device=f0.device) - # fundamental component - f0_buf[:, :, 0] = f0[:, :, 0] - for idx in np.arange(self.harmonic_num): - # idx + 2: the (idx+1)-th overtone, (idx+2)-th harmonic - f0_buf[:, :, idx + 1] = f0_buf[:, :, 0] * (idx + 2) - - # generate sine waveforms - sine_waves = self._f02sine(f0_buf) * self.sine_amp - - # generate uv signal - # uv = torch.ones(f0.shape) - # uv = uv * (f0 > self.voiced_threshold) - uv = self._f02uv(f0) - - # noise: for unvoiced should be similar to sine_amp - # std = self.sine_amp/3 -> max value ~ self.sine_amp - # . 
for voiced regions is self.noise_std - noise_amp = uv * self.noise_std + (1 - uv) * self.sine_amp / 3 - noise = noise_amp * torch.randn_like(sine_waves) - - # first: set the unvoiced part to 0 by uv - # then: additive noise - sine_waves = sine_waves * uv + noise - return sine_waves, uv, noise - - -class PulseGen(torch.nn.Module): - """ Definition of Pulse train generator - - There are many ways to implement pulse generator. - Here, PulseGen is based on SinGen. For a perfect - """ - def __init__(self, samp_rate, pulse_amp = 0.1, - noise_std = 0.003, voiced_threshold = 0): - super(PulseGen, self).__init__() - self.pulse_amp = pulse_amp - self.sampling_rate = samp_rate - self.voiced_threshold = voiced_threshold - self.noise_std = noise_std - self.l_sinegen = SineGen(self.sampling_rate, harmonic_num=0, \ - sine_amp=self.pulse_amp, noise_std=0, \ - voiced_threshold=self.voiced_threshold, \ - flag_for_pulse=True) - - def forward(self, f0): - """ Pulse train generator - pulse_train, uv = forward(f0) - input F0: tensor(batchsize=1, length, dim=1) - f0 for unvoiced steps should be 0 - output pulse_train: tensor(batchsize=1, length, dim) - output uv: tensor(batchsize=1, length, 1) - - Note: self.l_sine doesn't make sure that the initial phase of - a voiced segment is np.pi, the first pulse in a voiced segment - may not be at the first time step within a voiced segment - """ - with torch.no_grad(): - sine_wav, uv, noise = self.l_sinegen(f0) - - # sine without additive noise - pure_sine = sine_wav - noise - - # step t corresponds to a pulse if - # sine[t] > sine[t+1] & sine[t] > sine[t-1] - # & sine[t-1], sine[t+1], and sine[t] are voiced - # or - # sine[t] is voiced, sine[t-1] is unvoiced - # we use torch.roll to simulate sine[t+1] and sine[t-1] - sine_1 = torch.roll(pure_sine, shifts=1, dims=1) - uv_1 = torch.roll(uv, shifts=1, dims=1) - uv_1[:, 0, :] = 0 - sine_2 = torch.roll(pure_sine, shifts=-1, dims=1) - uv_2 = torch.roll(uv, shifts=-1, dims=1) - uv_2[:, -1, :] = 0 - - loc = (pure_sine > sine_1) * (pure_sine > sine_2) \ - * (uv_1 > 0) * (uv_2 > 0) * (uv > 0) \ - + (uv_1 < 1) * (uv > 0) - - # pulse train without noise - pulse_train = pure_sine * loc - - # additive noise to pulse train - # note that noise from sinegen is zero in voiced regions - pulse_noise = torch.randn_like(pure_sine) * self.noise_std - - # with additive noise on pulse, and unvoiced regions - pulse_train += pulse_noise * loc + pulse_noise * (1 - uv) - return pulse_train, sine_wav, uv, pulse_noise - - -class SignalsConv1d(torch.nn.Module): - """ Filtering input signal with time invariant filter - Note: FIRFilter conducted filtering given fixed FIR weight - SignalsConv1d convolves two signals - Note: this is based on torch.nn.functional.conv1d - - """ - - def __init__(self): - super(SignalsConv1d, self).__init__() - - def forward(self, signal, system_ir): - """ output = forward(signal, system_ir) - - signal: (batchsize, length1, dim) - system_ir: (length2, dim) - - output: (batchsize, length1, dim) - """ - if signal.shape[-1] != system_ir.shape[-1]: - print("Error: SignalsConv1d expects shape:") - print("signal (batchsize, length1, dim)") - print("system_id (batchsize, length2, dim)") - print("But received signal: {:s}".format(str(signal.shape))) - print(" system_ir: {:s}".format(str(system_ir.shape))) - sys.exit(1) - padding_length = system_ir.shape[0] - 1 - groups = signal.shape[-1] - - # pad signal on the left - signal_pad = torch_nn_func.pad(signal.permute(0, 2, 1), \ - (padding_length, 0)) - # prepare system impulse response 
as (dim, 1, length2) - # also flip the impulse response - ir = torch.flip(system_ir.unsqueeze(1).permute(2, 1, 0), \ - dims=[2]) - # convolute - output = torch_nn_func.conv1d(signal_pad, ir, groups=groups) - return output.permute(0, 2, 1) - - -class CyclicNoiseGen_v1(torch.nn.Module): - """ CyclicnoiseGen_v1 - Cyclic noise with a single parameter of beta. - Pytorch v1 implementation assumes f_t is also fixed - """ - - def __init__(self, samp_rate, - noise_std=0.003, voiced_threshold=0): - super(CyclicNoiseGen_v1, self).__init__() - self.samp_rate = samp_rate - self.noise_std = noise_std - self.voiced_threshold = voiced_threshold - - self.l_pulse = PulseGen(samp_rate, pulse_amp=1.0, - noise_std=noise_std, - voiced_threshold=voiced_threshold) - self.l_conv = SignalsConv1d() - - def noise_decay(self, beta, f0mean): - """ decayed_noise = noise_decay(beta, f0mean) - decayed_noise = n[t]exp(-t * f_mean / beta / samp_rate) - - beta: (dim=1) or (batchsize=1, 1, dim=1) - f0mean (batchsize=1, 1, dim=1) - - decayed_noise (batchsize=1, length, dim=1) - """ - with torch.no_grad(): - # exp(-1.0 n / T) < 0.01 => n > -log(0.01)*T = 4.60*T - # truncate the noise when decayed by -40 dB - length = 4.6 * self.samp_rate / f0mean - length = length.int() - time_idx = torch.arange(0, length, device=beta.device) - time_idx = time_idx.unsqueeze(0).unsqueeze(2) - time_idx = time_idx.repeat(beta.shape[0], 1, beta.shape[2]) - - noise = torch.randn(time_idx.shape, device=beta.device) - - # due to Pytorch implementation, use f0_mean as the f0 factor - decay = torch.exp(-time_idx * f0mean / beta / self.samp_rate) - return noise * self.noise_std * decay - - def forward(self, f0s, beta): - """ Producde cyclic-noise - """ - # pulse train - pulse_train, sine_wav, uv, noise = self.l_pulse(f0s) - pure_pulse = pulse_train - noise - - # decayed_noise (length, dim=1) - if (uv < 1).all(): - # all unvoiced - cyc_noise = torch.zeros_like(sine_wav) - else: - f0mean = f0s[uv > 0].mean() - - decayed_noise = self.noise_decay(beta, f0mean)[0, :, :] - # convolute - cyc_noise = self.l_conv(pure_pulse, decayed_noise) - - # add noise in invoiced segments - cyc_noise = cyc_noise + noise * (1.0 - uv) - return cyc_noise, pulse_train, sine_wav, uv, noise - - -class SineGen(torch.nn.Module): - """ Definition of sine generator - SineGen(samp_rate, harmonic_num = 0, - sine_amp = 0.1, noise_std = 0.003, - voiced_threshold = 0, - flag_for_pulse=False) - - samp_rate: sampling rate in Hz - harmonic_num: number of harmonic overtones (default 0) - sine_amp: amplitude of sine-wavefrom (default 0.1) - noise_std: std of Gaussian noise (default 0.003) - voiced_thoreshold: F0 threshold for U/V classification (default 0) - flag_for_pulse: this SinGen is used inside PulseGen (default False) - - Note: when flag_for_pulse is True, the first time step of a voiced - segment is always sin(np.pi) or cos(0) - """ - - def __init__(self, samp_rate, harmonic_num=0, - sine_amp=0.1, noise_std=0.003, - voiced_threshold=0, - flag_for_pulse=False): - super(SineGen, self).__init__() - self.sine_amp = sine_amp - self.noise_std = noise_std - self.harmonic_num = harmonic_num - self.dim = self.harmonic_num + 1 - self.sampling_rate = samp_rate - self.voiced_threshold = voiced_threshold - self.flag_for_pulse = flag_for_pulse - - def _f02uv(self, f0): - # generate uv signal - uv = torch.ones_like(f0) - uv = uv * (f0 > self.voiced_threshold) - return uv - - def _f02sine(self, f0_values): - """ f0_values: (batchsize, length, dim) - where dim indicates fundamental tone and overtones - 
""" - # convert to F0 in rad. The interger part n can be ignored - # because 2 * np.pi * n doesn't affect phase - rad_values = (f0_values / self.sampling_rate) % 1 - - # initial phase noise (no noise for fundamental component) - rand_ini = torch.rand(f0_values.shape[0], f0_values.shape[2], \ - device=f0_values.device) - rand_ini[:, 0] = 0 - rad_values[:, 0, :] = rad_values[:, 0, :] + rand_ini - - # instantanouse phase sine[t] = sin(2*pi \sum_i=1 ^{t} rad) - if not self.flag_for_pulse: - # for normal case - - # To prevent torch.cumsum numerical overflow, - # it is necessary to add -1 whenever \sum_k=1^n rad_value_k > 1. - # Buffer tmp_over_one_idx indicates the time step to add -1. - # This will not change F0 of sine because (x-1) * 2*pi = x * 2*pi - tmp_over_one = torch.cumsum(rad_values, 1) % 1 - tmp_over_one_idx = (tmp_over_one[:, 1:, :] - - tmp_over_one[:, :-1, :]) < 0 - cumsum_shift = torch.zeros_like(rad_values) - cumsum_shift[:, 1:, :] = tmp_over_one_idx * -1.0 - - sines = torch.sin(torch.cumsum(rad_values + cumsum_shift, dim=1) - * 2 * np.pi) - else: - # If necessary, make sure that the first time step of every - # voiced segments is sin(pi) or cos(0) - # This is used for pulse-train generation - - # identify the last time step in unvoiced segments - uv = self._f02uv(f0_values) - uv_1 = torch.roll(uv, shifts=-1, dims=1) - uv_1[:, -1, :] = 1 - u_loc = (uv < 1) * (uv_1 > 0) - - # get the instantanouse phase - tmp_cumsum = torch.cumsum(rad_values, dim=1) - # different batch needs to be processed differently - for idx in range(f0_values.shape[0]): - temp_sum = tmp_cumsum[idx, u_loc[idx, :, 0], :] - temp_sum[1:, :] = temp_sum[1:, :] - temp_sum[0:-1, :] - # stores the accumulation of i.phase within - # each voiced segments - tmp_cumsum[idx, :, :] = 0 - tmp_cumsum[idx, u_loc[idx, :, 0], :] = temp_sum - - # rad_values - tmp_cumsum: remove the accumulation of i.phase - # within the previous voiced segment. - i_phase = torch.cumsum(rad_values - tmp_cumsum, dim=1) - - # get the sines - sines = torch.cos(i_phase * 2 * np.pi) - return sines - - def forward(self, f0): - """ sine_tensor, uv = forward(f0) - input F0: tensor(batchsize=1, length, dim=1) - f0 for unvoiced steps should be 0 - output sine_tensor: tensor(batchsize=1, length, dim) - output uv: tensor(batchsize=1, length, 1) - """ - with torch.no_grad(): - f0_buf = torch.zeros(f0.shape[0], f0.shape[1], self.dim, \ - device=f0.device) - # fundamental component - f0_buf[:, :, 0] = f0[:, :, 0] - for idx in np.arange(self.harmonic_num): - # idx + 2: the (idx+1)-th overtone, (idx+2)-th harmonic - f0_buf[:, :, idx + 1] = f0_buf[:, :, 0] * (idx + 2) - - # generate sine waveforms - sine_waves = self._f02sine(f0_buf) * self.sine_amp - - # generate uv signal - # uv = torch.ones(f0.shape) - # uv = uv * (f0 > self.voiced_threshold) - uv = self._f02uv(f0) - - # noise: for unvoiced should be similar to sine_amp - # std = self.sine_amp/3 -> max value ~ self.sine_amp - # . 
for voiced regions is self.noise_std
-        noise_amp = uv * self.noise_std + (1 - uv) * self.sine_amp / 3
-        noise = noise_amp * torch.randn_like(sine_waves)
-
-        # first: set the unvoiced part to 0 by uv
-        # then: additive noise
-        sine_waves = sine_waves * uv + noise
-        return sine_waves, uv, noise
-
-
-class SourceModuleCycNoise_v1(torch.nn.Module):
-    """ SourceModuleCycNoise_v1
-    SourceModule(sampling_rate, noise_std=0.003, voiced_threshod=0)
-    sampling_rate: sampling_rate in Hz
-
-    noise_std: std of Gaussian noise (default: 0.003)
-    voiced_threshold: threshold to set U/V given F0 (default: 0)
-
-    cyc, noise, uv = SourceModuleCycNoise_v1(F0_upsampled, beta)
-    F0_upsampled (batchsize, length, 1)
-    beta (1)
-    cyc (batchsize, length, 1)
-    noise (batchsize, length, 1)
-    uv (batchsize, length, 1)
-    """
-
-    def __init__(self, sampling_rate, noise_std=0.003, voiced_threshod=0):
-        super(SourceModuleCycNoise_v1, self).__init__()
-        self.sampling_rate = sampling_rate
-        self.noise_std = noise_std
-        self.l_cyc_gen = CyclicNoiseGen_v1(sampling_rate, noise_std,
-                                           voiced_threshod)
-
-    def forward(self, f0_upsamped, beta):
-        """
-        cyc, noise, uv = SourceModuleCycNoise_v1(F0, beta)
-        F0_upsampled (batchsize, length, 1)
-        beta (1)
-        cyc (batchsize, length, 1)
-        noise (batchsize, length, 1)
-        uv (batchsize, length, 1)
-        """
-        # source for harmonic branch
-        cyc, pulse, sine, uv, add_noi = self.l_cyc_gen(f0_upsamped, beta)
-
-        # source for noise branch, in the same shape as uv
-        noise = torch.randn_like(uv) * self.noise_std / 3
-        return cyc, noise, uv
-
-
-class SourceModuleHnNSF(torch.nn.Module):
-    """ SourceModule for hn-nsf
-    SourceModule(sampling_rate, harmonic_num=0, sine_amp=0.1,
-                 add_noise_std=0.003, voiced_threshod=0)
-    sampling_rate: sampling_rate in Hz
-    harmonic_num: number of harmonics above F0 (default: 0)
-    sine_amp: amplitude of sine source signal (default: 0.1)
-    add_noise_std: std of additive Gaussian noise (default: 0.003)
-        note that amplitude of noise in unvoiced is decided
-        by sine_amp
-    voiced_threshold: threshold to set U/V given F0 (default: 0)
-
-    Sine_source, noise_source = SourceModuleHnNSF(F0_sampled)
-    F0_sampled (batchsize, length, 1)
-    Sine_source (batchsize, length, 1)
-    noise_source (batchsize, length, 1)
-    uv (batchsize, length, 1)
-    """
-
-    def __init__(self, sampling_rate, harmonic_num=0, sine_amp=0.1,
-                 add_noise_std=0.003, voiced_threshod=0):
-        super(SourceModuleHnNSF, self).__init__()
-
-        self.sine_amp = sine_amp
-        self.noise_std = add_noise_std
-
-        # to produce sine waveforms
-        self.l_sin_gen = SineGen(sampling_rate, harmonic_num,
-                                 sine_amp, add_noise_std, voiced_threshod)
-
-        # to merge source harmonics into a single excitation
-        self.l_linear = torch.nn.Linear(harmonic_num + 1, 1)
-        self.l_tanh = torch.nn.Tanh()
-
-    def forward(self, x):
-        """
-        Sine_source, noise_source = SourceModuleHnNSF(F0_sampled)
-        F0_sampled (batchsize, length, 1)
-        Sine_source (batchsize, length, 1)
-        noise_source (batchsize, length, 1)
-        """
-        # source for harmonic branch
-        sine_wavs, uv, _ = self.l_sin_gen(x)
-        sine_merge = self.l_tanh(self.l_linear(sine_wavs))
-
-        # source for noise branch, in the same shape as uv
-        noise = torch.randn_like(uv) * self.sine_amp / 3
-        return sine_merge, noise, uv
-
-
-if __name__ == '__main__':
-    source = SourceModuleCycNoise_v1(24000)
-    x = torch.randn(16, 25600, 1)
-
-
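For reference, the SourceModuleHnNSF deleted above is the harmonic-plus-noise excitation used by NSF-style vocoders: SineGen renders the fundamental plus `harmonic_num` overtones from an upsampled F0 track, a 1x1 linear layer with tanh merges them into a single excitation channel, and a Gaussian branch supplies the unvoiced excitation. A minimal sketch of how the module was driven, assuming the pre-deletion import path and made-up F0 values:

import torch

# Import path as it existed before this commit removed the file.
from modules.parallel_wavegan.models.source import SourceModuleHnNSF

# 8 harmonics above the fundamental, merged into one excitation channel.
source = SourceModuleHnNSF(sampling_rate=24000, harmonic_num=8)

# F0 must already be upsampled to waveform resolution: (B, T, 1),
# with zeros on unvoiced samples.
f0 = torch.zeros(1, 24000, 1)
f0[:, 8000:16000, 0] = 220.0  # a voiced segment at 220 Hz

sine_merge, noise, uv = source(f0)
print(sine_merge.shape, noise.shape, uv.shape)  # each is (1, 24000, 1)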
diff --git a/modules/parallel_wavegan/optimizers/__init__.py b/modules/parallel_wavegan/optimizers/__init__.py
deleted file mode 100644
index a0e0c5932..000000000
--- a/modules/parallel_wavegan/optimizers/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from torch.optim import *  # NOQA
-from .radam import *  # NOQA
diff --git a/modules/parallel_wavegan/optimizers/radam.py b/modules/parallel_wavegan/optimizers/radam.py
deleted file mode 100644
index e805d7e34..000000000
--- a/modules/parallel_wavegan/optimizers/radam.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""RAdam optimizer.
-
-This code is derived from https://github.com/LiyuanLucasLiu/RAdam.
-"""
-
-import math
-import torch
-
-from torch.optim.optimizer import Optimizer
-
-
-class RAdam(Optimizer):
-    """Rectified Adam optimizer."""
-
-    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0):
-        """Initialize RAdam optimizer."""
-        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
-        self.buffer = [[None, None, None] for ind in range(10)]
-        super(RAdam, self).__init__(params, defaults)
-
-    def __setstate__(self, state):
-        """Set state."""
-        super(RAdam, self).__setstate__(state)
-
-    def step(self, closure=None):
-        """Run one step."""
-        loss = None
-        if closure is not None:
-            loss = closure()
-
-        for group in self.param_groups:
-
-            for p in group['params']:
-                if p.grad is None:
-                    continue
-                grad = p.grad.data.float()
-                if grad.is_sparse:
-                    raise RuntimeError('RAdam does not support sparse gradients')
-
-                p_data_fp32 = p.data.float()
-
-                state = self.state[p]
-
-                if len(state) == 0:
-                    state['step'] = 0
-                    state['exp_avg'] = torch.zeros_like(p_data_fp32)
-                    state['exp_avg_sq'] = torch.zeros_like(p_data_fp32)
-                else:
-                    state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32)
-                    state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32)
-
-                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
-                beta1, beta2 = group['betas']
-
-                exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
-                exp_avg.mul_(beta1).add_(1 - beta1, grad)
-
-                state['step'] += 1
-                buffered = self.buffer[int(state['step'] % 10)]
-                if state['step'] == buffered[0]:
-                    N_sma, step_size = buffered[1], buffered[2]
-                else:
-                    buffered[0] = state['step']
-                    beta2_t = beta2 ** state['step']
-                    N_sma_max = 2 / (1 - beta2) - 1
-                    N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)
-                    buffered[1] = N_sma
-
-                    # more conservative since it's an approximated value
-                    if N_sma >= 5:
-                        step_size = math.sqrt(
-                            (1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (N_sma_max - 2)) / (1 - beta1 ** state['step'])  # NOQA
-                    else:
-                        step_size = 1.0 / (1 - beta1 ** state['step'])
-                    buffered[2] = step_size
-
-                if group['weight_decay'] != 0:
-                    p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32)
-
-                # more conservative since it's an approximated value
-                if N_sma >= 5:
-                    denom = exp_avg_sq.sqrt().add_(group['eps'])
-                    p_data_fp32.addcdiv_(-step_size * group['lr'], exp_avg, denom)
-                else:
-                    p_data_fp32.add_(-step_size * group['lr'], exp_avg)
-
-                p.data.copy_(p_data_fp32)
-
-        return loss
diff --git a/modules/parallel_wavegan/stft_loss.py b/modules/parallel_wavegan/stft_loss.py
deleted file mode 100644
index 229e6c777..000000000
--- a/modules/parallel_wavegan/stft_loss.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2019 Tomoki Hayashi
-#  MIT License (https://opensource.org/licenses/MIT)
-
-"""STFT-based Loss modules."""
-import librosa
-import torch
-
-from modules.parallel_wavegan.losses import LogSTFTMagnitudeLoss, SpectralConvergengeLoss, stft
-
-
-class STFTLoss(torch.nn.Module):
-    """STFT loss module."""
-
-    def 
__init__(self, fft_size=1024, shift_size=120, win_length=600, window="hann_window", - use_mel_loss=False): - """Initialize STFT loss module.""" - super(STFTLoss, self).__init__() - self.fft_size = fft_size - self.shift_size = shift_size - self.win_length = win_length - self.window = getattr(torch, window)(win_length) - self.spectral_convergenge_loss = SpectralConvergengeLoss() - self.log_stft_magnitude_loss = LogSTFTMagnitudeLoss() - self.use_mel_loss = use_mel_loss - self.mel_basis = None - - def forward(self, x, y): - """Calculate forward propagation. - - Args: - x (Tensor): Predicted signal (B, T). - y (Tensor): Groundtruth signal (B, T). - - Returns: - Tensor: Spectral convergence loss value. - Tensor: Log STFT magnitude loss value. - - """ - x_mag = stft(x, self.fft_size, self.shift_size, self.win_length, self.window) - y_mag = stft(y, self.fft_size, self.shift_size, self.win_length, self.window) - if self.use_mel_loss: - if self.mel_basis is None: - self.mel_basis = torch.from_numpy(librosa.filters.mel(22050, self.fft_size, 80)).cuda().T - x_mag = x_mag @ self.mel_basis - y_mag = y_mag @ self.mel_basis - - sc_loss = self.spectral_convergenge_loss(x_mag, y_mag) - mag_loss = self.log_stft_magnitude_loss(x_mag, y_mag) - - return sc_loss, mag_loss - - -class MultiResolutionSTFTLoss(torch.nn.Module): - """Multi resolution STFT loss module.""" - - def __init__(self, - fft_sizes=[1024, 2048, 512], - hop_sizes=[120, 240, 50], - win_lengths=[600, 1200, 240], - window="hann_window", - use_mel_loss=False): - """Initialize Multi resolution STFT loss module. - - Args: - fft_sizes (list): List of FFT sizes. - hop_sizes (list): List of hop sizes. - win_lengths (list): List of window lengths. - window (str): Window function type. - - """ - super(MultiResolutionSTFTLoss, self).__init__() - assert len(fft_sizes) == len(hop_sizes) == len(win_lengths) - self.stft_losses = torch.nn.ModuleList() - for fs, ss, wl in zip(fft_sizes, hop_sizes, win_lengths): - self.stft_losses += [STFTLoss(fs, ss, wl, window, use_mel_loss)] - - def forward(self, x, y): - """Calculate forward propagation. - - Args: - x (Tensor): Predicted signal (B, T). - y (Tensor): Groundtruth signal (B, T). - - Returns: - Tensor: Multi resolution spectral convergence loss value. - Tensor: Multi resolution log STFT magnitude loss value. - - """ - sc_loss = 0.0 - mag_loss = 0.0 - for f in self.stft_losses: - sc_l, mag_l = f(x, y) - sc_loss += sc_l - mag_loss += mag_l - sc_loss /= len(self.stft_losses) - mag_loss /= len(self.stft_losses) - - return sc_loss, mag_loss diff --git a/modules/parallel_wavegan/utils/__init__.py b/modules/parallel_wavegan/utils/__init__.py deleted file mode 100644 index e8fa95a02..000000000 --- a/modules/parallel_wavegan/utils/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .utils import * # NOQA diff --git a/modules/parallel_wavegan/utils/utils.py b/modules/parallel_wavegan/utils/utils.py deleted file mode 100644 index 6a30e8037..000000000 --- a/modules/parallel_wavegan/utils/utils.py +++ /dev/null @@ -1,169 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 Tomoki Hayashi -# MIT License (https://opensource.org/licenses/MIT) - -"""Utility functions.""" - -import fnmatch -import logging -import os -import sys - -import h5py -import numpy as np - - -def find_files(root_dir, query="*.wav", include_root_dir=True): - """Find files recursively. - - Args: - root_dir (str): Root root_dir to find. - query (str): Query to find. - include_root_dir (bool): If False, root_dir name is not included. 
- - Returns: - list: List of found filenames. - - """ - files = [] - for root, dirnames, filenames in os.walk(root_dir, followlinks=True): - for filename in fnmatch.filter(filenames, query): - files.append(os.path.join(root, filename)) - if not include_root_dir: - files = [file_.replace(root_dir + "/", "") for file_ in files] - - return files - - -def read_hdf5(hdf5_name, hdf5_path): - """Read hdf5 dataset. - - Args: - hdf5_name (str): Filename of hdf5 file. - hdf5_path (str): Dataset name in hdf5 file. - - Return: - any: Dataset values. - - """ - if not os.path.exists(hdf5_name): - logging.error(f"There is no such a hdf5 file ({hdf5_name}).") - sys.exit(1) - - hdf5_file = h5py.File(hdf5_name, "r") - - if hdf5_path not in hdf5_file: - logging.error(f"There is no such a data in hdf5 file. ({hdf5_path})") - sys.exit(1) - - hdf5_data = hdf5_file[hdf5_path][()] - hdf5_file.close() - - return hdf5_data - - -def write_hdf5(hdf5_name, hdf5_path, write_data, is_overwrite=True): - """Write dataset to hdf5. - - Args: - hdf5_name (str): Hdf5 dataset filename. - hdf5_path (str): Dataset path in hdf5. - write_data (ndarray): Data to write. - is_overwrite (bool): Whether to overwrite dataset. - - """ - # convert to numpy array - write_data = np.array(write_data) - - # check folder existence - folder_name, _ = os.path.split(hdf5_name) - if not os.path.exists(folder_name) and len(folder_name) != 0: - os.makedirs(folder_name) - - # check hdf5 existence - if os.path.exists(hdf5_name): - # if already exists, open with r+ mode - hdf5_file = h5py.File(hdf5_name, "r+") - # check dataset existence - if hdf5_path in hdf5_file: - if is_overwrite: - logging.warning("Dataset in hdf5 file already exists. " - "recreate dataset in hdf5.") - hdf5_file.__delitem__(hdf5_path) - else: - logging.error("Dataset in hdf5 file already exists. " - "if you want to overwrite, please set is_overwrite = True.") - hdf5_file.close() - sys.exit(1) - else: - # if not exists, open with w mode - hdf5_file = h5py.File(hdf5_name, "w") - - # write data to hdf5 - hdf5_file.create_dataset(hdf5_path, data=write_data) - hdf5_file.flush() - hdf5_file.close() - - -class HDF5ScpLoader(object): - """Loader class for a fests.scp file of hdf5 file. - - Examples: - key1 /some/path/a.h5:feats - key2 /some/path/b.h5:feats - key3 /some/path/c.h5:feats - key4 /some/path/d.h5:feats - ... - >>> loader = HDF5ScpLoader("hdf5.scp") - >>> array = loader["key1"] - - key1 /some/path/a.h5 - key2 /some/path/b.h5 - key3 /some/path/c.h5 - key4 /some/path/d.h5 - ... - >>> loader = HDF5ScpLoader("hdf5.scp", "feats") - >>> array = loader["key1"] - - """ - - def __init__(self, feats_scp, default_hdf5_path="feats"): - """Initialize HDF5 scp loader. - - Args: - feats_scp (str): Kaldi-style feats.scp file with hdf5 format. - default_hdf5_path (str): Path in hdf5 file. If the scp contain the info, not used. 
-
-        """
-        self.default_hdf5_path = default_hdf5_path
-        with open(feats_scp, encoding='utf-8') as f:
-            lines = [line.replace("\n", "") for line in f.readlines()]
-        self.data = {}
-        for line in lines:
-            key, value = line.split()
-            self.data[key] = value
-
-    def get_path(self, key):
-        """Get hdf5 file path for a given key."""
-        return self.data[key]
-
-    def __getitem__(self, key):
-        """Get ndarray for a given key."""
-        p = self.data[key]
-        if ":" in p:
-            return read_hdf5(*p.split(":"))
-        else:
-            return read_hdf5(p, self.default_hdf5_path)
-
-    def __len__(self):
-        """Return the length of the scp file."""
-        return len(self.data)
-
-    def __iter__(self):
-        """Return the iterator of the scp file."""
-        return iter(self.data)
-
-    def keys(self):
-        """Return the keys of the scp file."""
-        return self.data.keys()
diff --git a/test_crepe.py b/test_crepe.py
deleted file mode 100644
index 2a8844682..000000000
--- a/test_crepe.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from utils.hparams import set_hparams, hparams
-import torch
-from src.vocoders.hifigan import HifiGAN
-import torchcrepe
-import sys
-import resampy
-import numpy as np
-from utils.audio import save_wav
-sys.argv = [
-    'inference/ds_e2e.py',
-    '--config',
-    'configs/midi/e2e/opencpop/ds100_adj_rel.yaml',
-    '--exp_name',
-    '0909'
-]
-
-def get_pitch(wav_data, mel, hparams, threshold=0.3):
-    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-    # CREPE only supports a 16 kHz sample rate, so the audio must be resampled
-    wav16k = resampy.resample(wav_data, hparams['audio_sample_rate'], 16000)
-    wav16k_torch = torch.FloatTensor(wav16k).unsqueeze(0).to(device)
-
-    # frequency range
-    f0_min = 50
-    f0_max = 800
-
-    # after resampling, analyze f0 with hop size 80, i.e. one frame every 5 ms
-    f0, pd = torchcrepe.predict(wav16k_torch, 16000, 80, f0_min, f0_max, pad=True, model='full', batch_size=1024, device=device, return_periodicity=True)
-
-    # filter, drop silence, and set the U/V threshold; see the original repo's README
-    pd = torchcrepe.filter.median(pd, 3)
-    pd = torchcrepe.threshold.Silence(-60.)(pd, wav16k_torch, 16000, 80)
-    f0 = torchcrepe.threshold.At(threshold)(f0, pd)
-    f0 = torchcrepe.filter.mean(f0, 3)
-
-    # convert NaN frequencies (the unvoiced parts) to 0
-    f0 = torch.where(torch.isnan(f0), torch.full_like(f0, 0), f0)
-
-    '''
-    np.savetxt('问棋-crepe.csv',np.array([0.005*np.arange(len(f0[0])),f0[0].cpu().numpy()]).transpose(),delimiter=',')
-    '''
-
-    # drop zero frequencies and interpolate linearly
-    nzindex = torch.nonzero(f0[0]).squeeze()
-    f0 = torch.index_select(f0[0], dim=0, index=nzindex).cpu().numpy()
-    time_org = 0.005 * nzindex.cpu().numpy()
-    time_frame = np.arange(len(mel)) * hparams['hop_size'] / hparams['audio_sample_rate']
-    f0 = np.interp(time_frame, time_org, f0, left=f0[0], right=f0[-1])
-    return f0
-
-set_hparams()
-device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-vocoder = HifiGAN()
-wav, mel = vocoder.wav2spec("infer_out/example_out.wav")
-f0 = get_pitch(wav, mel, hparams, threshold=0.05)
-
-with torch.no_grad():
-    c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(device)
-    f0_cp = torch.FloatTensor(f0[None, :]).to(device)
-    wav_out = vocoder.model(c, f0_cp).view(-1).cpu().numpy()
-    save_wav(wav_out, 'infer_out/test-crepe.wav', hparams['audio_sample_rate'])
-
-
-
-
-

From f2ca10be2c018f569acf246d5769f9dc2ce7dbed Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 9 Mar 2023 13:38:25 +0800
Subject: [PATCH 016/475] Clean up `FastSpeechXXX` classes, remove MIDI-A/B

---
 inference/ds_cascade.py           |   3 +-
 inference/ds_e2e.py               | 196 ------------------------
 main.py                           |  13 +-
 modules/diffsinger_midi/fs2.py    | 112 --------------
 modules/fastspeech/fs2.py         | 165 ---------------------
 modules/fastspeech/tts_modules.py | 
126 +++------------- modules/naive_frontend/encoder.py | 26 ++-- onnx/export/export_acoustic.py | 8 +- onnx/export/export_rhythmizer.py | 238 ------------------------------ src/acoustic_task.py | 10 +- src/diff/diffusion.py | 34 ++--- src/diff/net.py | 1 + 12 files changed, 58 insertions(+), 874 deletions(-) delete mode 100644 inference/ds_e2e.py delete mode 100644 modules/diffsinger_midi/fs2.py delete mode 100644 modules/fastspeech/fs2.py delete mode 100644 onnx/export/export_rhythmizer.py diff --git a/inference/ds_cascade.py b/inference/ds_cascade.py index 94b34d9d7..da55b00fd 100644 --- a/inference/ds_cascade.py +++ b/inference/ds_cascade.py @@ -3,7 +3,6 @@ from utils import load_ckpt from utils.hparams import hparams from src.diff.diffusion import GaussianDiffusion -from src.diffsinger_task import DIFF_DECODERS from modules.fastspeech.tts_modules import LengthRegulator import librosa import numpy as np @@ -13,7 +12,7 @@ class DiffSingerCascadeInfer(BaseSVSInfer): def build_model(self, ckpt_steps=None): model = GaussianDiffusion( phone_encoder=self.ph_encoder, - out_dims=hparams['audio_num_mel_bins'], denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), + out_dims=hparams['audio_num_mel_bins'], timesteps=hparams['timesteps'], K_step=hparams['K_step'], loss_type=hparams['diff_loss_type'], diff --git a/inference/ds_e2e.py b/inference/ds_e2e.py deleted file mode 100644 index 5ffe92c6e..000000000 --- a/inference/ds_e2e.py +++ /dev/null @@ -1,196 +0,0 @@ -import torch -# from inference.tts.fs import FastSpeechInfer -# from modules.tts.fs2_orig import FastSpeech2Orig -from basics.base_svs_infer import BaseSVSInfer -from utils import load_ckpt -from utils.hparams import hparams -from src.diff.diffusion import GaussianDiffusion -from src.diffsinger_task import DIFF_DECODERS -from modules.fastspeech.pe import PitchExtractor -import utils -from modules.fastspeech.tts_modules import LengthRegulator -import librosa -import numpy as np - - -class DiffSingerE2EInfer(BaseSVSInfer): - def build_model(self, ckpt_steps=None): - model = GaussianDiffusion( - phone_encoder=self.ph_encoder, - out_dims=hparams['audio_num_mel_bins'], denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), - timesteps=hparams['timesteps'], - K_step=hparams['K_step'], - loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], - ) - - model.eval() - load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps) - - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - self.pe = PitchExtractor().to(self.device) - utils.load_ckpt(self.pe, hparams['pe_ckpt'], 'model', strict=True) - self.pe.eval() - return model - - def preprocess_word_level_input(self, inp): - return super().preprocess_word_level_input(inp) - - def preprocess_phoneme_level_input(self, inp): - ph_seq = inp['ph_seq'] - note_lst = inp['note_seq'].split() - midi_dur_lst = inp['note_dur_seq'].split() - is_slur = np.array(inp['is_slur_seq'].split(),'float') - ph_dur = None - if inp['ph_dur'] is not None: - ph_dur = np.array(inp['ph_dur'].split(),'float') - print(len(note_lst), len(ph_seq.split()), len(midi_dur_lst), len(ph_dur)) - if len(note_lst) == len(ph_seq.split()) == len(midi_dur_lst) == len(ph_dur): - print('Pass word-notes check.') - else: - print('The number of words does\'t match the number of notes\' windows. 
', - 'You should split the note(s) for each word by | mark.') - return None - else: - print('Automatic phone duration mode') - print(len(note_lst), len(ph_seq.split()), len(midi_dur_lst)) - if len(note_lst) == len(ph_seq.split()) == len(midi_dur_lst): - print('Pass word-notes check.') - else: - print('The number of words does\'t match the number of notes\' windows. ', - 'You should split the note(s) for each word by | mark.') - return None - return ph_seq, note_lst, midi_dur_lst, is_slur, ph_dur - - def preprocess_input(self, inp, input_type='word'): - """ - - :param inp: {'text': str, 'item_name': (str, optional), 'spk_name': (str, optional)} - :return: - """ - - item_name = inp.get('item_name', '') - spk_name = inp.get('spk_name', 'opencpop') - - # single spk - spk_id = 0 - - # get ph seq, note lst, midi dur lst, is slur lst. - if input_type == 'word': - ret = self.preprocess_word_level_input(inp) - elif input_type == 'phoneme': # like transcriptions.txt in Opencpop dataset. - ret = self.preprocess_phoneme_level_input(inp) - else: - print('Invalid input type.') - return None - - if ret: - if input_type == 'word': - ph_seq, note_lst, midi_dur_lst, is_slur = ret - else: - ph_seq, note_lst, midi_dur_lst, is_slur, ph_dur = ret - else: - print('==========> Preprocess_word_level or phone_level input wrong.') - return None - - # convert note lst to midi id; convert note dur lst to midi duration - try: - midis = [librosa.note_to_midi(x.split("/")[0]) if x != 'rest' else 0 - for x in note_lst] - midi_dur_lst = [float(x) for x in midi_dur_lst] - except Exception as e: - print(e) - print('Invalid Input Type.') - return None - - ph_token = self.ph_encoder.encode(ph_seq) - item = {'item_name': item_name, 'text': inp['text'], 'ph': ph_seq, 'spk_id': spk_id, - 'ph_token': ph_token, 'pitch_midi': np.asarray(midis), 'midi_dur': np.asarray(midi_dur_lst), - 'is_slur': np.asarray(is_slur), 'ph_dur': None} - item['ph_len'] = len(item['ph_token']) - if input_type == 'phoneme' : - item['ph_dur'] = ph_dur - return item - - def input_to_batch(self, item): - item_names = [item['item_name']] - text = [item['text']] - ph = [item['ph']] - txt_tokens = torch.LongTensor(item['ph_token'])[None, :].to(self.device) - txt_lengths = torch.LongTensor([txt_tokens.shape[1]]).to(self.device) - spk_ids = torch.LongTensor(item['spk_id'])[None, :].to(self.device) - - pitch_midi = torch.LongTensor(item['pitch_midi'])[None, :hparams['max_frames']].to(self.device) - midi_dur = torch.FloatTensor(item['midi_dur'])[None, :hparams['max_frames']].to(self.device) - is_slur = torch.LongTensor(item['is_slur'])[None, :hparams['max_frames']].to(self.device) - mel2ph = None - if item['ph_dur'] is not None: - ph_acc=np.around(np.add.accumulate(item['ph_dur'])*hparams['audio_sample_rate']/hparams['hop_size']+0.5).astype('int') - ph_dur=np.diff(ph_acc,prepend=0) - ph_dur = torch.LongTensor(ph_dur)[None, :hparams['max_frames']].to(self.device) - lr=LengthRegulator() - mel2ph=lr(ph_dur,txt_tokens==0).detach() - - batch = { - 'item_name': item_names, - 'text': text, - 'ph': ph, - 'txt_tokens': txt_tokens, - 'txt_lengths': txt_lengths, - 'spk_ids': spk_ids, - 'pitch_midi': pitch_midi, - 'midi_dur': midi_dur, - 'is_slur': is_slur, - 'mel2ph': mel2ph - } - return batch - - def forward_model(self, inp, return_mel=False): - sample = self.input_to_batch(inp) - txt_tokens = sample['txt_tokens'] # [B, T_t] - spk_id = sample.get('spk_ids') - with torch.no_grad(): - output = self.model(txt_tokens, spk_id=spk_id, ref_mels=None, infer=True, - 
pitch_midi=sample['pitch_midi'], midi_dur=sample['midi_dur'], - is_slur=sample['is_slur'],mel2ph=sample['mel2ph']) - mel_out = output['mel_out'] # [B, T,80] - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - f0_pred = self.pe(mel_out)['f0_denorm_pred'] # pe predict from Pred mel - else: - f0_pred = output['f0_denorm'] - if return_mel: - return mel_out.cpu(), f0_pred.cpu() - wav_out = self.run_vocoder(mel_out, f0=f0_pred) - wav_out = wav_out.cpu().numpy() - return wav_out[0] - - -if __name__ == '__main__': - inp1 = { - 'text': 'SP一闪一闪亮晶晶SP满天都是小星星', - 'notes': 'rest|C4|C4|G4|G4|A4|A4|G4|rest|F4|F4|E4|E4|D4|D4|C4', - 'notes_duration': '1|0.5|0.5|0.5|0.5|0.5|0.5|0.75|0.25|0.5|0.5|0.5|0.5|0.5|0.5|0.75', - 'input_type': 'word' # Automatic phone duration mode - } # user input: Chinese characters - inp2 = { - 'text': 'SP 好 一 朵 美 丽 地 茉 莉 花 SP 好 一 朵 美 丽 地 茉 莉 花 SP 芬 芳 美 丽 满 枝 芽 SP 又 香 又 白 人 人 夸 SP 让 我 来 将 你 摘 下 SP 送 给 别 人 家 SP 茉 莉 花 呀 茉 莉 花 SP', - 'ph_seq': 'SP h ao y i d uo m ei ei l i d i m o l i i h ua SP h ao y i d uo m ei ei l i d i m o l i i h ua SP f en f ang m ei l i i m an zh i y a SP y ou x iang iang y ou b ai ai r en r en en k ua SP r ang ang w o o l ai j iang n i zh ai ai x ia SP s ong g ei ei b ie ie r en en j ia SP m o l i h ua y a m o o l i i h ua SP', - 'note_seq': 'rest E4 E4 E4 E4 G4 G4 A4 A4 C5 C5 C5 A4 A4 G4 G4 G4 G4 A4 G4 G4 rest E4 E4 E4 E4 G4 G4 A4 A4 C5 C5 C5 A4 A4 G4 G4 G4 G4 A4 G4 G4 rest G4 G4 G4 G4 G4 G4 E4 E4 G4 A4 A4 A4 A4 G4 G4 rest E4 E4 D4 D4 E4 G4 G4 E4 E4 D4 C4 C4 C4 C4 D4 C4 C4 rest E4 E4 D4 C4 C4 E4 D4 D4 E4 E4 G4 G4 A4 A4 C5 G4 G4 rest D4 D4 E4 E4 G4 C4 C4 D4 C4 C4 A3 G3 G3 rest A3 A3 C4 C4 D4 D4 E4 E4 C4 C4 D4 C4 C4 A3 G3 G3 rest', - 'note_dur_seq': '1 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 1.058824 1.058824 0.352941 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 1.058824 1.058824 0.352941 0.7058824 0.7058824 0.7058824 0.7058824 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 0.7058824 0.7058824 0.7058824 0.7058824 1.058824 1.058824 0.352941 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 1.058824 1.058824 0.352941 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 1.058824 1.058824 0.3529412 0.3529412 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 1.058824 1.058824 0.352941 0.7058824 0.7058824 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 1.058824 1.058824 0.352941 0.7058824 0.7058824 0.7058824 0.7058824 1.058824 1.058824 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 0.3529412 1.058824 1.058824 1', - 'is_slur_seq': '0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0', - 'ph_dur': None, # Automatic phone duration mode - 'input_type': 'phoneme' - } # input like Opencpop dataset. 
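Between the two presets: inp2 above uses the automatic phone-duration mode (ph_dur is None), while inp3 below supplies explicit per-phoneme durations in seconds. In the manual mode, input_to_batch above rounds the cumulative durations to whole hop-size frames and expands them with LengthRegulator into a frame-to-phoneme map. A self-contained sketch of that conversion; the 44100 Hz sample rate, 512-sample hop, and duration values are illustrative assumptions, not taken from this patch:

import numpy as np
import torch

from modules.fastspeech.tts_modules import LengthRegulator

ph_dur = np.array([0.30, 0.12, 0.25])  # hypothetical seconds per phoneme
sample_rate, hop_size = 44100, 512     # assumed config values

# Same rounding as input_to_batch: accumulate, quantize to frames, diff back.
ph_acc = np.around(np.add.accumulate(ph_dur) * sample_rate / hop_size + 0.5).astype('int')
ph_frames = torch.LongTensor(np.diff(ph_acc, prepend=0))[None]  # (1, T_txt)

txt_tokens = torch.LongTensor([[5, 7, 9]])  # nonzero ids = real phonemes
mel2ph = LengthRegulator()(ph_frames, txt_tokens == 0)  # (1, T_mel); 1-based phoneme index per frame
print(mel2ph)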
- inp3 = { - 'text': 'SP 还 记 得 那 场 音 乐 会 的 烟 火 SP 还 记 得 那 个 凉 凉 的 深 秋 SP 还 记 得 人 潮 把 你 推 向 了 我 SP 游 乐 园 拥 挤 的 正 是 时 候 SP 一 个 夜 晚 坚 持 不 睡 的 等 候 SP 一 起 泡 温 泉 奢 侈 的 享 受 SP 有 一 次 日 记 里 愚 蠢 的 困 惑 SP 因 为 你 的 微 笑 幻 化 成 风 SP 你 大 大 的 勇 敢 保 护 着 我 SP 我 小 小 的 关 怀 喋 喋 不 休 SP 感 谢 我 们 一 起 走 了 那 么 久 SP 又 再 一 次 回 到 凉 凉 深 秋 SP 给 你 我 的 手 SP 像 温 柔 野 兽 SP 把 自 由 交 给 草 原 的 辽 阔 SP 我 们 小 手 拉 大 手 SP 一 起 郊 游 SP 今 天 别 想 太 多 SP 你 是 我 的 梦 SP 像 北 方 的 风 SP 吹 着 南 方 暖 洋 洋 的 哀 愁 SP 我 们 小 手 拉 大 手 SP 今 天 加 油 SP 向 昨 天 挥 挥 手 SP', - 'ph_seq': 'SP h ai j i d e n a ch ang y in y ve h ui d e y an h uo uo SP h ai j i d e n a g e l iang l iang d e sh en en q iu iu SP h ai j i d e r en ch ao b a n i t ui x iang l e w o o SP y ou l e y van y ong j i d e zh eng sh i sh i h ou ou SP y i g e y e w an j ian ch i b u sh ui d e d eng h ou ou SP y i q i p ao w en q van sh e ch i d e x iang iang sh ou ou SP y ou y i c i r i j i l i y v ch un d e k un h uo uo SP y in w ei n i d e w ei x iao h uan h ua ch eng f eng eng SP n i d a d a d e y ong g an b ao h u zh e w o o SP w o x iao x iao d e g uan h uai d ie d ie b u x iu iu SP g an x ie w o m en y i q i z ou l e n a m e j iu iu SP y ou z ai y i c i h ui d ao ao l iang l iang sh en q iu iu SP g ei n i w o d e sh ou SP x iang w en r ou y e sh ou SP b a z i y ou j iao g ei c ao y van d e l iao iao k uo uo uo SP w o m en x iao sh ou l a d a sh ou SP y i q i j iao iao y ou SP j in t ian b ie x iang t ai d uo uo SP n i sh i w o d e m eng SP x iang b ei f ang d e f eng SP ch ui zh e n an f ang n uan y ang y ang d e ai ai ch ou ou ou SP w o m en x iao sh ou l a d a sh ou SP j in t ian j ia ia y ou SP x iang z uo t ian h ui h ui ui sh ou ou ou SP', - 'note_seq': 'rest G3 G3 G3 G3 A3 A3 C4 C4 D4 D4 E4 E4 A4 A4 G4 G4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 D4 D4 E4 E4 E4 D4 rest D4 D4 E4 E4 D4 D4 C#4 C#4 D4 D4 G4 G4 B3 B3 D4 D4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 A3 A3 A3 A3 G3 rest G3 G3 G3 G3 A3 A3 C4 C4 D4 D4 E4 E4 A4 A4 G4 G4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 D4 D4 E4 E4 E4 D4 rest D4 D4 E4 E4 D4 D4 C#4 C#4 D4 D4 G4 G4 B3 B3 D4 D4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 D4 D4 D4 D4 C4 rest E4 E4 F4 F4 E4 E4 D4 D4 E4 E4 F4 F4 E4 E4 D4 D4 E4 E4 E4 E4 F4 rest F4 F4 G4 G4 F4 F4 G4 G4 F4 F4 E4 E4 D4 D4 C4 C4 D4 D4 D4 D4 E4 rest E4 E4 E4 E4 D4 D4 C#4 C#4 E4 E4 E4 E4 D4 D4 D4 D4 D4 D4 C#4 C#4 C#4 C#4 D4 rest D4 D4 D4 D4 E4 E4 F#4 F#4 D4 D4 G4 G4 A4 G4 G4 G4 G4 F#4 F#4 F#4 F#4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest G4 G4 A4 A4 G4 G4 A4 A4 B4 B4 C5 C5 E4 E4 E4 E4 G4 G4 A4 A4 A4 G4 G4 rest C4 C4 D4 D4 C4 C4 F4 F4 E4 E4 D4 D4 C4 C4 rest F4 F4 E4 E4 D4 D4 C4 C4 C4 rest C4 C4 D4 D4 A3 A3 C4 C4 E4 E4 E4 E4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest G4 G4 A4 A4 G4 G4 A4 A4 B4 B4 C5 C5 E4 E4 E4 E4 G4 A4 A4 A4 G4 G4 rest C4 C4 D4 D4 C4 C4 F4 F4 E4 E4 D4 D4 C4 C4 rest F4 F4 E4 E4 D4 D4 C4 C4 C4 rest C4 C4 D4 D4 A3 A3 C4 C4 C4 C4 D4 D4 D4 C4 C4 rest', - 'note_dur_seq': '8.076923 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.3028846 0.389423 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1298077 0.1298077 0.3317308 
0.2307692 0.2307692 0.2884615 0.403846 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2740385 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.2307692 0.2307692 0.2740385 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2884615 0.403846 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1153846 0.1153846 0.3461539 0.2740385 0.2740385 0.2307692 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2740385 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.2596154 0.2596154 0.3173077 0.346154 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1442308 0.1442308 0.4182692 0.360577 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1586538 0.1586538 0.3894231 0.375 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.1586538 0.1586538 0.4615385 0.302885 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.1298077 0.1298077 0.3317308 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1442308 0.1442308 0.3461539 0.432692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5480769 0.5480769 0.375 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5480769 0.5480769 0.375 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1153846 0.1153846 0.3461539 0.2740385 0.2740385 0.4182692 0.375 0.317308 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.375 0.375 0.317308 0.2307692 0.2307692 0.4615385 0.4615385 0.2740385 0.2740385 0.1875 0.2307692 0.2307692 0.230769 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1442308 0.1442308 0.4326923 0.346154 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5913461 0.5913461 0.331731 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5913461 0.5913461 0.331731 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1298077 0.3317308 
0.2884615 0.2884615 0.4038461 0.3028846 0.389423 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.3894231 0.3894231 0.302885 0.2307692 0.2307692 0.4615385 0.4615385 0.2740385 0.2740385 0.1875 0.1730769 0.1730769 0.288462 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1298077 0.1298077 0.3317308 0.2163462 0.2163462 0.4759615 0.3894231 1', - 'is_slur_seq': '0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0', - 'ph_dur': '7.911923 0.165 0.12718 0.103589 0.185769 0.045 0.155769 0.075 0.13782 0.092949 0.185769 0.045 0.416538 0.045 0.124423 0.106346 0.416538 0.045 0.185769 0.045 0.155768 0.075001 0.230769 0.302885 0.314423 0.075 0.107052 0.123717 0.185769 0.045 0.167052 0.063717 0.17077 0.059999 0.170769 0.06 0.401666 0.059873 0.185769 0.045 0.290193 0.171346 0.129808 0.188012 0.143719 0.230769 0.288462 0.343847 0.059999 0.131732 0.099037 0.185769 0.045 0.185769 0.045 0.1475 0.083269 0.185769 0.045 0.371538 0.09 0.142179 0.088591 0.311539 0.15 0.155768 0.075001 0.177501 0.053268 0.230769 0.274038 0.393268 0.025002 0.187821 0.042948 0.185769 0.045 0.185769 0.045 0.150062 0.080708 0.185384 0.045385 0.356537 0.105001 0.119102 0.111668 0.311537 0.150002 0.356539 0.105 0.230769 0.274038 0.373269 0.045 0.102946 0.127823 0.185769 0.045 0.185769 0.045 0.185771 0.044998 0.108523 0.122246 0.401537 0.060001 0.111154 0.119616 0.416538 0.045 0.172755 0.058014 0.15577 0.074999 0.230769 0.288462 0.358846 0.045 0.111602 0.119167 0.15577 0.074999 0.162692 0.068077 0.15577 0.074999 0.131024 0.099745 0.311539 0.15 0.185769 0.045 0.317692 0.143847 0.115385 0.196152 0.150002 0.274038 0.230769 0.373271 0.044998 0.124934 0.105835 0.118781 0.111988 0.185771 0.044998 0.155768 0.075001 0.127177 0.103592 0.41654 0.044998 0.127563 0.103207 0.41654 0.044998 0.129546 0.101223 0.14532 0.085449 0.230769 0.274038 0.393268 0.025002 0.17686 0.053909 0.170768 0.060001 0.185771 0.044998 0.185767 0.045002 0.114741 0.116028 0.356539 0.105 0.150062 0.080708 0.301085 0.160454 0.290259 0.17128 0.259615 0.317308 0.300961 0.045193 0.15673 0.074039 0.203528 0.027241 0.197818 0.032951 0.169616 0.061153 0.151668 0.079102 0.41654 0.044998 0.132499 0.09827 0.356535 0.105003 0.385771 0.075768 0.144231 0.418269 0.317951 0.042625 0.103847 0.126923 0.154811 0.075958 0.185767 0.045002 0.170772 0.059998 0.127372 0.103397 0.416536 0.045002 0.139617 0.091152 0.386538 0.075001 0.312758 0.148781 0.158654 0.389423 0.314999 0.060001 0.116088 0.114681 0.185767 0.045002 0.155768 0.075001 0.185771 0.044998 0.087241 0.143528 0.34532 0.116219 0.182818 0.047951 0.356539 0.105 0.155768 0.075001 0.154998 0.075771 0.158654 
0.461538 0.257883 0.045002 0.128524 0.102245 0.202945 0.027824 0.097816 0.132954 0.155772 0.074997 0.168716 0.062054 0.129808 0.271729 0.060001 0.164615 0.066154 0.323973 0.137566 0.308973 0.152565 0.144231 0.346154 0.372691 0.060001 0.185771 0.044998 0.185767 0.045002 0.185771 0.044998 0.109038 0.121731 0.548077 0.240002 0.134998 0.185767 0.045002 0.185771 0.044998 0.185767 0.045002 0.109233 0.121536 0.548077 0.330002 0.044998 0.116408 0.114361 0.185767 0.045002 0.170577 0.060192 0.185771 0.044998 0.111991 0.118778 0.41654 0.044998 0.185767 0.045002 0.386538 0.075001 0.115385 0.245194 0.10096 0.274038 0.418269 0.375 0.281472 0.035835 0.170768 0.060001 0.15301 0.077759 0.10942 0.121349 0.386538 0.075001 0.127177 0.103592 0.313074 0.148464 0.375 0.272302 0.045006 0.101024 0.129745 0.356543 0.104996 0.274038 0.10878 0.07872 0.230769 0.140773 0.089996 0.129741 0.101028 0.41654 0.044998 0.114749 0.11602 0.326536 0.135002 0.385962 0.075577 0.144231 0.432692 0.301156 0.044998 0.1114 0.11937 0.185771 0.044998 0.185771 0.044998 0.119479 0.11129 0.591346 0.196736 0.134995 0.185771 0.044998 0.155765 0.075005 0.185771 0.044998 0.11967 0.111099 0.591346 0.226735 0.104996 0.144809 0.08596 0.155765 0.075005 0.155765 0.075005 0.170772 0.059998 0.151668 0.079102 0.41654 0.044998 0.185771 0.044998 0.401533 0.189813 0.211423 0.120308 0.288462 0.403846 0.302885 0.344417 0.045006 0.170764 0.060005 0.132129 0.09864 0.118671 0.112099 0.401533 0.060005 0.146937 0.083832 0.323286 0.138252 0.389423 0.197889 0.104996 0.155772 0.074997 0.358343 0.103195 0.274038 0.142502 0.044998 0.173077 0.153459 0.135002 0.124492 0.106277 0.341543 0.119995 0.121677 0.109093 0.29225 0.169289 0.129808 0.173413 0.158318 0.216346 0.475962 0.389423 0.05', - 'input_type': 'phoneme' # Manual phone duration mode - } # input like Opencpop dataset. - DiffSingerE2EInfer.example_run(inp3) - - -# python inference/ds_e2e.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name 0228_opencpop_ds100_rel diff --git a/main.py b/main.py index 8c2ee5c2b..ce7cb9795 100644 --- a/main.py +++ b/main.py @@ -10,7 +10,6 @@ from utils.infer_utils import cross_fade, trans_key from inference.ds_cascade import DiffSingerCascadeInfer -from inference.ds_e2e import DiffSingerE2EInfer from utils.audio import save_wav from utils.hparams import set_hparams, hparams from utils.slur_utils import merge_slurs @@ -86,6 +85,7 @@ if not args.title: name += key_suffix print(f'音调基于原音频{key_suffix}') +params = params[:1] if args.gender is not None: assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' @@ -100,16 +100,7 @@ infer_ins = None if len(params) > 0: - if hparams['use_pitch_embed']: - infer_ins = DiffSingerCascadeInfer(hparams, load_vocoder=not args.mel, ckpt_steps=args.ckpt) - else: - warnings.warn( - message='SVS MIDI-B version (implicit pitch prediction) is deprecated. 
'
-            'Please select or train a model of MIDI-A version (controllable pitch prediction).',
-            category=DeprecationWarning
-        )
-        warnings.filterwarnings(action='default')
-        infer_ins = DiffSingerE2EInfer(hparams, load_vocoder=not args.mel, ckpt_steps=args.ckpt)
+    infer_ins = DiffSingerCascadeInfer(hparams, load_vocoder=not args.mel, ckpt_steps=args.ckpt)
 
 spk_mix = parse_commandline_spk_mix(args.spk) if hparams['use_spk_id'] and args.spk is not None else None
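The deleted modules/diffsinger_midi/fs2.py below is the MIDI-A frontend being retired here: it conditioned the FastSpeech encoder by simply adding MIDI-pitch, note-duration, and slur embeddings onto the scaled phoneme embeddings before the FFT blocks. A toy sketch of that additive conditioning, with made-up dimensions (the hidden ** 0.5 factor mirrors embed_scale in FastspeechEncoder, assumed to be sqrt of the hidden size):

import torch
import torch.nn as nn

hidden, vocab = 256, 60  # toy sizes for illustration
embed_tokens = nn.Embedding(vocab, hidden, padding_idx=0)
midi_embed = nn.Embedding(300, hidden, padding_idx=0)  # MIDI pitch id per phoneme
midi_dur_layer = nn.Linear(1, hidden)                  # note duration (seconds) per phoneme
is_slur_embed = nn.Embedding(2, hidden)                # slur flag per phoneme

txt_tokens = torch.randint(1, vocab, (1, 8))
pitch_midi = torch.randint(40, 90, (1, 8))
midi_dur = torch.rand(1, 8)
is_slur = torch.randint(0, 2, (1, 8))

# The deleted encoder summed all four streams before its transformer blocks:
x = hidden ** 0.5 * embed_tokens(txt_tokens) \
    + midi_embed(pitch_midi) \
    + midi_dur_layer(midi_dur[:, :, None]) \
    + is_slur_embed(is_slur)
print(x.shape)  # torch.Size([1, 8, 256])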
run decoder (skipped for diffusion) - ''' - midi_embedding, midi_dur_embedding, slur_embedding = Batch2Loss.insert1( - kwargs['pitch_midi'], kwargs.get('midi_dur', None), kwargs.get('is_slur', None), - self.midi_embed, self.midi_dur_layer, self.is_slur_embed - ) - - encoder_out = Batch2Loss.module1(self.encoder, txt_tokens, midi_embedding, midi_dur_embedding, slur_embedding) # [B, T, C] - - src_nonpadding = (txt_tokens > 0).float()[:, :, None] - var_embed, spk_embed, spk_embed_dur, spk_embed_f0, dur_inp = Batch2Loss.insert2( - encoder_out, spk_embed_id, spk_embed_dur_id, spk_embed_f0_id, src_nonpadding, - self.spk_embed_proj if hasattr(self, 'spk_embed_proj') else None - ) - - ret = {} - mel2ph = Batch2Loss.module2( - self.dur_predictor, self.length_regulator, - dur_inp, mel2ph, txt_tokens, self.vowel_tokens, ret, midi_dur=kwargs['midi_dur']*hparams['audio_sample_rate']/hparams['hop_size'] - ) - - tgt_nonpadding = (mel2ph > 0).float()[:, :, None] - decoder_inp, pitch_inp, pitch_inp_ph = Batch2Loss.insert3( - encoder_out, mel2ph, var_embed, spk_embed_f0, src_nonpadding, tgt_nonpadding - ) - - pitch_embedding, energy_embedding = Batch2Loss.module3( - getattr(self, 'pitch_predictor', None), getattr(self, 'pitch_embed', None), - getattr(self, 'energy_predictor', None), getattr(self, 'energy_embed', None), - pitch_inp, pitch_inp_ph, f0, uv, energy, mel2ph, (not infer), ret - ) - - decoder_inp = Batch2Loss.insert4( - decoder_inp, pitch_embedding, energy_embedding, spk_embed, ret, tgt_nonpadding - ) - - if skip_decoder: - return ret - ret['mel_out'] = self.run_decoder(decoder_inp, tgt_nonpadding, ret, infer=infer, **kwargs) - - return ret \ No newline at end of file diff --git a/modules/fastspeech/fs2.py b/modules/fastspeech/fs2.py deleted file mode 100644 index eff1c6040..000000000 --- a/modules/fastspeech/fs2.py +++ /dev/null @@ -1,165 +0,0 @@ -from modules.commons.common_layers import * -from modules.commons.common_layers import Embedding -from modules.fastspeech.tts_modules import FastspeechDecoder, DurationPredictor, LengthRegulator, PitchPredictor, \ - FastspeechEncoder -from utils.cwt import cwt2f0 -from utils.hparams import hparams -from utils.pitch_utils import norm_f0 - -FS_ENCODERS = { - 'fft': lambda hp, embed_tokens, d: FastspeechEncoder( - embed_tokens, hp['hidden_size'], hp['enc_layers'], hp['enc_ffn_kernel_size'], - num_heads=hp['num_heads']), -} - -FS_DECODERS = { - 'fft': lambda hp: FastspeechDecoder( - hp['hidden_size'], hp['dec_layers'], hp['dec_ffn_kernel_size'], hp['num_heads']), -} - - -class FastSpeech2(nn.Module): - def __init__(self, dictionary, out_dims=None): - super().__init__() - self.dictionary = dictionary - self.padding_idx = dictionary.pad() - self.enc_layers = hparams['enc_layers'] - self.dec_layers = hparams['dec_layers'] - self.hidden_size = hparams['hidden_size'] - self.encoder_embed_tokens = self.build_embedding(self.dictionary, self.hidden_size) - self.encoder = FS_ENCODERS[hparams['encoder_type']](hparams, self.encoder_embed_tokens, self.dictionary) - self.decoder = FS_DECODERS[hparams['decoder_type']](hparams) - self.out_dims = out_dims - if out_dims is None: - self.out_dims = hparams['audio_num_mel_bins'] - self.mel_out = Linear(self.hidden_size, self.out_dims, bias=True) - - if hparams['use_spk_id']: - self.spk_embed_proj = Embedding(hparams['num_spk'] + 1, self.hidden_size) - if hparams['use_split_spk_id']: - self.spk_embed_f0 = Embedding(hparams['num_spk'] + 1, self.hidden_size) - self.spk_embed_dur = Embedding(hparams['num_spk'] + 1, 
self.hidden_size) - elif hparams['use_spk_embed']: - self.spk_embed_proj = Linear(256, self.hidden_size, bias=True) - predictor_hidden = hparams['predictor_hidden'] if hparams['predictor_hidden'] > 0 else self.hidden_size - self.dur_predictor = DurationPredictor( - self.hidden_size, - n_chans=predictor_hidden, - n_layers=hparams['dur_predictor_layers'], - dropout_rate=hparams['predictor_dropout'], padding=hparams['ffn_padding'], - kernel_size=hparams['dur_predictor_kernel']) - self.length_regulator = LengthRegulator() - if hparams['use_pitch_embed']: - self.pitch_embed = Embedding(300, self.hidden_size, self.padding_idx) - if hparams['pitch_type'] == 'cwt': - h = hparams['cwt_hidden_size'] - cwt_out_dims = 10 - if hparams['use_uv']: - cwt_out_dims = cwt_out_dims + 1 - self.cwt_predictor = nn.Sequential( - nn.Linear(self.hidden_size, h), - PitchPredictor( - h, - n_chans=predictor_hidden, - n_layers=hparams['predictor_layers'], - dropout_rate=hparams['predictor_dropout'], odim=cwt_out_dims, - padding=hparams['ffn_padding'], kernel_size=hparams['predictor_kernel'])) - self.cwt_stats_layers = nn.Sequential( - nn.Linear(self.hidden_size, h), nn.ReLU(), - nn.Linear(h, h), nn.ReLU(), nn.Linear(h, 2) - ) - else: - self.pitch_predictor = PitchPredictor( - self.hidden_size, - n_chans=predictor_hidden, - n_layers=hparams['predictor_layers'], - dropout_rate=hparams['predictor_dropout'], - odim=2 if hparams['pitch_type'] == 'frame' else 1, - padding=hparams['ffn_padding'], kernel_size=hparams['predictor_kernel']) - - def build_embedding(self, dictionary, embed_dim): - num_embeddings = len(dictionary) - emb = Embedding(num_embeddings, embed_dim, self.padding_idx) - return emb - - def forward(self, txt_tokens, mel2ph=None, spk_embed=None, - ref_mels=None, f0=None, uv=None, energy=None, skip_decoder=False, - spk_embed_dur_id=None, spk_embed_f0_id=None, infer=False, **kwargs): - ret = {} - encoder_out = self.encoder(txt_tokens) # [B, T, C] - src_nonpadding = (txt_tokens > 0).float()[:, :, None] - - # add ref style embed - # Not implemented - # variance encoder - var_embed = 0 - - # encoder_out_dur denotes encoder outputs for duration predictor - # in speech adaptation, duration predictor use old speaker embedding - if hparams['use_spk_embed']: - spk_embed_dur = spk_embed_f0 = spk_embed = self.spk_embed_proj(spk_embed)[:, None, :] - elif hparams['use_spk_id']: - spk_embed_id = spk_embed - if spk_embed_dur_id is None: - spk_embed_dur_id = spk_embed_id - if spk_embed_f0_id is None: - spk_embed_f0_id = spk_embed_id - spk_embed = self.spk_embed_proj(spk_embed_id)[:, None, :] - spk_embed_dur = spk_embed_f0 = spk_embed - if hparams['use_split_spk_id']: - spk_embed_dur = self.spk_embed_dur(spk_embed_dur_id)[:, None, :] - spk_embed_f0 = self.spk_embed_f0(spk_embed_f0_id)[:, None, :] - else: - spk_embed_dur = spk_embed_f0 = spk_embed = 0 - - # add dur - dur_inp = (encoder_out + var_embed + spk_embed_dur) * src_nonpadding - - mel2ph = self.add_dur(dur_inp, mel2ph, txt_tokens, ret) - - decoder_inp = F.pad(encoder_out, [0, 0, 1, 0]) - - mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]]) - decoder_inp_origin = decoder_inp = torch.gather(decoder_inp, 1, mel2ph_) # [B, T, H] - - tgt_nonpadding = (mel2ph > 0).float()[:, :, None] - - # add pitch and energy embed - pitch_inp = (decoder_inp_origin + var_embed + spk_embed_f0) * tgt_nonpadding - if hparams['use_pitch_embed']: - pitch_inp_ph = (encoder_out + var_embed + spk_embed_f0) * src_nonpadding - decoder_inp = decoder_inp + self.add_pitch(pitch_inp, f0, uv, 
mel2ph, ret, encoder_out=pitch_inp_ph) - if hparams['use_energy_embed']: - decoder_inp = decoder_inp + self.add_energy(pitch_inp, energy, ret) - - ret['decoder_inp'] = decoder_inp = (decoder_inp + spk_embed) * tgt_nonpadding - - if skip_decoder: - return ret - ret['mel_out'] = self.run_decoder(decoder_inp, tgt_nonpadding, ret, infer=infer, **kwargs) - - return ret - - def run_decoder(self, decoder_inp, tgt_nonpadding, ret, infer, **kwargs): - x = decoder_inp # [B, T, H] - x = self.decoder(x) - x = self.mel_out(x) - return x * tgt_nonpadding - - def cwt2f0_norm(self, cwt_spec, mean, std, mel2ph): - f0 = cwt2f0(cwt_spec, mean, std, hparams['cwt_scales']) - f0 = torch.cat( - [f0] + [f0[:, -1:]] * (mel2ph.shape[1] - f0.shape[1]), 1) - f0_norm = norm_f0(f0, None, hparams) - return f0_norm - - def out2mel(self, out): - return out - - @staticmethod - def mel_norm(x): - return (x + 5.5) / (6.3 / 2) - 1 - - @staticmethod - def mel_denorm(x): - return (x + 1) * (6.3 / 2) - 5.5 diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 3cb9dd3a8..3e2d9fccb 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -216,52 +216,6 @@ def forward(self, dur, mel2ph): return stretch * (mel2ph > 0) -class PitchPredictor(torch.nn.Module): - def __init__(self, idim, n_layers=5, n_chans=384, odim=2, kernel_size=5, - dropout_rate=0.1, padding='SAME'): - """Initilize pitch predictor module. - Args: - idim (int): Input dimension. - n_layers (int, optional): Number of convolutional layers. - n_chans (int, optional): Number of channels of convolutional layers. - kernel_size (int, optional): Kernel size of convolutional layers. - dropout_rate (float, optional): Dropout rate. - """ - super(PitchPredictor, self).__init__() - self.conv = torch.nn.ModuleList() - self.kernel_size = kernel_size - self.padding = padding - for idx in range(n_layers): - in_chans = idim if idx == 0 else n_chans - self.conv += [torch.nn.Sequential( - torch.nn.ConstantPad1d(((kernel_size - 1) // 2, (kernel_size - 1) // 2) - if padding == 'SAME' - else (kernel_size - 1, 0), 0), - torch.nn.Conv1d(in_chans, n_chans, kernel_size, stride=1, padding=0), - torch.nn.ReLU(), - LayerNorm(n_chans, dim=1), - torch.nn.Dropout(dropout_rate) - )] - self.linear = torch.nn.Linear(n_chans, odim) - self.embed_positions = SinusoidalPositionalEmbedding(idim, 0, init_size=4096) - self.pos_embed_alpha = nn.Parameter(torch.Tensor([1])) - - def forward(self, xs): - """ - - :param xs: [B, T, H] - :return: [B, T, H] - """ - positions = self.pos_embed_alpha * self.embed_positions(xs[..., 0]) - xs = xs + positions - xs = xs.transpose(1, -1) # (B, idim, Tmax) - for f in self.conv: - xs = f(xs) # (B, C, Tmax) - # NOTE: calculate in log domain - xs = self.linear(xs.transpose(1, -1)) # (B, Tmax, H) - return xs - - def mel2ph_to_dur(mel2ph, T_txt, max_dur=None): B, _ = mel2ph.shape dur = mel2ph.new_zeros(B, T_txt + 1).scatter_add(1, mel2ph, torch.ones_like(mel2ph)) @@ -271,10 +225,13 @@ def mel2ph_to_dur(mel2ph, T_txt, max_dur=None): return dur -class FFTBlocks(nn.Module): - def __init__(self, hidden_size, num_layers, ffn_kernel_size=9, dropout=None, num_heads=2, - use_pos_embed=True, use_last_norm=True, norm='ln', use_pos_embed_alpha=True): +class FastSpeech2Encoder(nn.Module): + def __init__(self, embed_tokens, hidden_size, num_layers, ffn_kernel_size=9, dropout=None, num_heads=2, + use_pos_embed=False, use_last_norm=True, norm='ln', use_pos_embed_alpha=True): super().__init__() + hidden_size = 
hparams['hidden_size'] if hidden_size is None else hidden_size + kernel_size = hparams['enc_ffn_kernel_size'] if ffn_kernel_size is None else ffn_kernel_size + num_layers = hparams['dec_layers'] if num_layers is None else num_layers self.num_layers = num_layers embed_dim = self.hidden_size = hidden_size self.dropout = dropout if dropout is not None else hparams['dropout'] @@ -291,7 +248,7 @@ def __init__(self, hidden_size, num_layers, ffn_kernel_size=9, dropout=None, num self.layers = nn.ModuleList([]) self.layers.extend([ TransformerEncoderLayer(self.hidden_size, self.dropout, - kernel_size=ffn_kernel_size, num_heads=num_heads) + kernel_size=kernel_size, num_heads=num_heads) for _ in range(self.num_layers) ]) if self.use_last_norm: @@ -302,13 +259,22 @@ def __init__(self, hidden_size, num_layers, ffn_kernel_size=9, dropout=None, num else: self.layer_norm = None - def forward(self, x, padding_mask=None, attn_mask=None, return_hiddens=False): + self.embed_tokens = embed_tokens + self.embed_scale = math.sqrt(hidden_size) + self.padding_idx = 0 + if hparams.get('rel_pos') is not None and hparams['rel_pos']: + self.embed_positions = RelPositionalEncoding(hidden_size, dropout_rate=0.0) + else: + self.embed_positions = SinusoidalPositionalEmbedding( + hidden_size, self.padding_idx, init_size=DEFAULT_MAX_TARGET_POSITIONS, + ) + + def _forward(self, x, padding_mask=None, attn_mask=None, return_hiddens=False): """ :param x: [B, T, C] :param padding_mask: [B, T] :return: [B, T, C] or [L, B, T, C] """ - # padding_mask = x.abs().sum(-1).eq(0).data if padding_mask is None else padding_mask padding_mask = x.abs().sum(-1).eq(0).detach() if padding_mask is None else padding_mask nonpadding_mask_TB = 1 - padding_mask.transpose(0, 1).float()[:, :, None] # [T, B, 1] if self.use_pos_embed: @@ -329,59 +295,3 @@ def forward(self, x, padding_mask=None, attn_mask=None, return_hiddens=False): else: x = x.transpose(0, 1) # [B, T, C] return x - - -class FastspeechEncoder(FFTBlocks): - ''' - compared to FFTBlocks: - - input is [B, T], not [B, T, C] - - supports "relative" positional encoding - ''' - def __init__(self, embed_tokens, hidden_size=None, num_layers=None, kernel_size=None, num_heads=2): - hidden_size = hparams['hidden_size'] if hidden_size is None else hidden_size - kernel_size = hparams['enc_ffn_kernel_size'] if kernel_size is None else kernel_size - num_layers = hparams['dec_layers'] if num_layers is None else num_layers - super().__init__(hidden_size, num_layers, kernel_size, num_heads=num_heads, - use_pos_embed=False) # use_pos_embed_alpha for compatibility - self.embed_tokens = embed_tokens - self.embed_scale = math.sqrt(hidden_size) - self.padding_idx = 0 - if hparams.get('rel_pos') is not None and hparams['rel_pos']: - self.embed_positions = RelPositionalEncoding(hidden_size, dropout_rate=0.0) - else: - self.embed_positions = SinusoidalPositionalEmbedding( - hidden_size, self.padding_idx, init_size=DEFAULT_MAX_TARGET_POSITIONS, - ) - - def forward(self, txt_tokens): - """ - - :param txt_tokens: [B, T] - :return: { - 'encoder_out': [T x B x C] - } - """ - # encoder_padding_mask = txt_tokens.eq(self.padding_idx).data - encoder_padding_mask = txt_tokens.eq(self.padding_idx) - x = self.forward_embedding(txt_tokens) # [B, T, H] - x = super(FastspeechEncoder, self).forward(x, encoder_padding_mask) - return x - - def forward_embedding(self, txt_tokens): - # embed tokens and positions - x = self.embed_scale * self.embed_tokens(txt_tokens) - if hparams['use_pos_embed']: - positions = 
self.embed_positions(txt_tokens) - x = x + positions - x = F.dropout(x, p=self.dropout, training=self.training) - return x - - -class FastspeechDecoder(FFTBlocks): - def __init__(self, hidden_size=None, num_layers=None, kernel_size=None, num_heads=None): - num_heads = hparams['num_heads'] if num_heads is None else num_heads - hidden_size = hparams['hidden_size'] if hidden_size is None else hidden_size - kernel_size = hparams['dec_ffn_kernel_size'] if kernel_size is None else kernel_size - num_layers = hparams['dec_layers'] if num_layers is None else num_layers - super().__init__(hidden_size, num_layers, kernel_size, num_heads=num_heads) - diff --git a/modules/naive_frontend/encoder.py b/modules/naive_frontend/encoder.py index e9c1b89d7..8093718ea 100644 --- a/modules/naive_frontend/encoder.py +++ b/modules/naive_frontend/encoder.py @@ -1,12 +1,14 @@ -from utils.hparams import hparams -from utils.pitch_utils import f0_to_coarse, denorm_f0, norm_f0 import torch import torch.nn as nn from torch.nn import functional as F + from modules.commons.common_layers import Embedding, Linear -from modules.fastspeech.tts_modules import FastspeechEncoder, mel2ph_to_dur +from modules.fastspeech.tts_modules import FastSpeech2Encoder, mel2ph_to_dur +from utils.hparams import hparams +from utils.pitch_utils import f0_to_coarse, denorm_f0 + -class Encoder(FastspeechEncoder): +class FastSpeech2Acoustic2Encoder(FastSpeech2Encoder): def forward_embedding(self, txt_tokens, dur_embed): # embed tokens and positions x = self.embed_scale * self.embed_tokens(txt_tokens) @@ -23,21 +25,25 @@ def forward_embedding(self, txt_tokens, dur_embed): def forward(self, txt_tokens, dur_embed): """ :param txt_tokens: [B, T] + :param dur_embed: [B, T, H] :return: { - 'encoder_out': [T x B x C] + 'encoder_out': [T x B x H] } """ encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach() x = self.forward_embedding(txt_tokens, dur_embed) # [B, T, H] - x = super(FastspeechEncoder, self).forward(x, encoder_padding_mask) + x = super()._forward(x, encoder_padding_mask) return x -class ParameterEncoder(nn.Module): +class FastSpeech2Acoustic(nn.Module): def __init__(self, dictionary): super().__init__() self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], dictionary.pad()) self.dur_embed = Linear(1, hparams['hidden_size']) - self.encoder = Encoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) + self.encoder = FastSpeech2Acoustic2Encoder( + self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], + ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] + ) self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': @@ -56,9 +62,7 @@ def __init__(self, dictionary): if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) - def forward(self, txt_tokens, mel2ph=None, spk_embed_id=None, - ref_mels=None, f0=None, uv=None, energy=None, skip_decoder=False, - spk_embed_dur_id=None, spk_embed_f0_id=None, infer=False, is_slur=None, **kwarg): + def forward(self, txt_tokens, mel2ph=None, f0=None, uv=None, spk_embed_id=None, infer=False, **kwarg): B, T = txt_tokens.shape dur = mel2ph_to_dur(mel2ph, T).float() dur_embed = self.dur_embed(dur[:, :, None]) diff --git a/onnx/export/export_acoustic.py b/onnx/export/export_acoustic.py index 1d9ca13c7..c83536696 100644 --- a/onnx/export/export_acoustic.py +++ 
b/onnx/export/export_acoustic.py @@ -23,7 +23,7 @@ from torch.nn import Linear, Embedding from modules.commons.common_layers import Mish -from modules.naive_frontend.encoder import Encoder +from modules.naive_frontend.encoder import FastSpeech2Acoustic2Encoder from src.diff.diffusion import beta_schedule from src.diff.net import AttrDict from utils import load_ckpt @@ -71,8 +71,8 @@ def __init__(self, dictionary): self.lr = LengthRegulator() self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], dictionary.pad()) self.dur_embed = Linear(1, hparams['hidden_size']) - self.encoder = Encoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], - hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) + self.encoder = FastSpeech2Acoustic2Encoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], + hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': @@ -1129,7 +1129,7 @@ def export(fs2_path, diff_path, ckpt_steps=None, diffusion = torch.jit.script(diffusion) condition = torch.rand((1, n_frames, hparams['hidden_size']), device=device) speedup = torch.tensor(10, dtype=torch.long, device=device) - dummy = diffusion.forward(condition, speedup) + dummy = diffusion.forward(condition, speedup, ) torch.onnx.export( diffusion, diff --git a/onnx/export/export_rhythmizer.py b/onnx/export/export_rhythmizer.py deleted file mode 100644 index f2e051fa9..000000000 --- a/onnx/export/export_rhythmizer.py +++ /dev/null @@ -1,238 +0,0 @@ -import os -import sys - -root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -os.environ['PYTHONPATH'] = f'"{root_dir}"' -sys.path.insert(0, root_dir) - -import argparse - -import onnx -import onnxsim -import torch -import torch.nn as nn -from torch.nn import Linear, Embedding - -from utils import load_ckpt -from utils.hparams import set_hparams -from utils.phoneme_utils import build_phoneme_list -from modules.commons.common_layers import Embedding -from modules.diffsinger_midi.fs2 import FS_ENCODERS -from modules.fastspeech.fs2 import FastSpeech2 -from modules.fastspeech.tts_modules import LayerNorm -from utils.hparams import hparams -from utils.text_encoder import TokenTextEncoder - - -class DurationPredictor(nn.Module): - def __init__(self, idim, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0.1, offset=1.0, padding='SAME'): - super(DurationPredictor, self).__init__() - self.offset = offset - self.conv = nn.ModuleList() - self.kernel_size = kernel_size - self.padding = padding - for idx in range(n_layers): - in_chans = idim if idx == 0 else n_chans - self.conv += [nn.Sequential( - nn.ConstantPad1d(((kernel_size - 1) // 2, (kernel_size - 1) // 2) - if padding == 'SAME' - else (kernel_size - 1, 0), 0), - nn.Conv1d(in_chans, n_chans, kernel_size, stride=1, padding=0), - nn.ReLU(), - LayerNorm(n_chans, dim=1), - nn.Dropout(dropout_rate) - )] - if hparams['dur_loss'] in ['mse', 'huber']: - odims = 1 - elif hparams['dur_loss'] == 'mog': - odims = 15 - elif hparams['dur_loss'] == 'crf': - odims = 32 - from torchcrf import CRF - self.crf = CRF(odims, batch_first=True) - else: - raise NotImplementedError() - self.linear = nn.Linear(n_chans, odims) - - def out2dur(self, xs): - if hparams['dur_loss'] in ['mse']: - # NOTE: calculate in log domain - xs = xs.squeeze(-1) # (B, Tmax) - dur = xs.exp() - self.offset - # dur = torch.clamp(torch.round(xs.exp() - self.offset), min=0).long() # avoid 
negative value - elif hparams['dur_loss'] == 'mog': - raise NotImplementedError() - elif hparams['dur_loss'] == 'crf': - dur = torch.LongTensor(self.crf.decode(xs)).cuda() - else: - raise NotImplementedError() - return dur - - def forward(self, xs, x_masks): - xs = xs.transpose(1, -1) # (B, idim, Tmax) - conv_masks = x_masks[:, None, :] - for f in self.conv: - xs = f(xs) # (B, C, Tmax) - xs = xs * conv_masks - xs = self.linear(xs.transpose(1, -1)) # [B, T, C] - xs = xs * x_masks[:, :, None] # (B, T, C) - return self.out2dur(xs) - - -class FastSpeech2MIDI(FastSpeech2): - def __init__(self, dictionary, out_dims=None): - super().__init__(dictionary, out_dims) - del self.encoder - - self.encoder = FS_ENCODERS[hparams['encoder_type']](hparams, self.encoder_embed_tokens, self.dictionary) - self.midi_embed = Embedding(300, self.hidden_size, self.padding_idx) - self.midi_dur_layer = Linear(1, self.hidden_size) - self.is_slur_embed = Embedding(2, self.hidden_size) - - del self.dur_predictor - predictor_hidden = hparams['predictor_hidden'] if hparams['predictor_hidden'] > 0 else self.hidden_size - self.dur_predictor = DurationPredictor( - self.hidden_size, - n_chans=predictor_hidden, - n_layers=hparams['dur_predictor_layers'], - dropout_rate=hparams['predictor_dropout'], padding=hparams['ffn_padding'], - kernel_size=hparams['dur_predictor_kernel']) - - def forward(self, txt_tokens, mel2ph=None, spk_embed_id=None, - ref_mels=None, f0=None, uv=None, energy=None, skip_decoder=False, - spk_embed_dur_id=None, spk_embed_f0_id=None, infer=False, **kwargs): - midi_embedding = self.midi_embed(kwargs['midi']) - midi_dur_embedding = self.midi_dur_layer(kwargs['midi_dur'][:, :, None]) - slur_embedding = self.is_slur_embed(kwargs['is_slur'].long()) - - encoder_out = self.encoder(txt_tokens, midi_embedding, midi_dur_embedding, slur_embedding) - - src_nonpadding = (txt_tokens > 0).float() - dur_inp = encoder_out * src_nonpadding[:, :, None] - dur = self.dur_predictor(dur_inp, src_nonpadding) - dur = dur * (hparams['hop_size'] / hparams['audio_sample_rate']) - - return dur - - -class ModuleWrapper(nn.Module): - def __init__(self, model, name='model'): - super().__init__() - self.wrapped_name = name - setattr(self, name, model) - - def forward(self, *args, **kwargs): - return getattr(self, self.wrapped_name)(*args, **kwargs) - - -class FastSpeech2Wrapper(nn.Module): - def __init__(self, model): - super().__init__() - self.model = ModuleWrapper(model, name='fs2') - - def forward(self, tokens, midi, midi_dur, is_slur): - return self.model(tokens, midi=midi, midi_dur=midi_dur, is_slur=is_slur) - - -def build_fs2_model(device): - model = FastSpeech2MIDI( - dictionary=TokenTextEncoder(vocab_list=build_phoneme_list()) - ) - model.eval() - load_ckpt(model, hparams['work_dir'], 'model.fs2', strict=True) - model.to(device) - return model - - -def export(fs2_path): - set_hparams(print_hparams=False) - if not hparams.get('use_midi', True): - raise NotImplementedError('Only checkpoints of MIDI mode are supported.') - device = 'cuda' if torch.cuda.is_available() else 'cpu' - fs2 = FastSpeech2Wrapper( - model=build_fs2_model(device) - ) - - with torch.no_grad(): - tokens = torch.tensor([[3]], dtype=torch.long, device=device) - midi = torch.tensor([[69]], dtype=torch.long, device=device) - midi_dur = torch.tensor([[1.]], dtype=torch.float32, device=device) - is_slur = torch.tensor([[False]], dtype=torch.bool, device=device) - print('Exporting FastSpeech2...') - torch.onnx.export( - fs2, - ( - tokens, - midi, - midi_dur, - is_slur 
- ), - fs2_path, - input_names=[ - 'tokens', - 'midi', - 'midi_dur', - 'is_slur' - ], - output_names=[ - 'ph_dur' - ], - dynamic_axes={ - 'tokens': { - 1: 'n_tokens' - }, - 'midi': { - 1: 'n_tokens' - }, - 'midi_dur': { - 1: 'n_tokens' - }, - 'is_slur': { - 1: 'n_tokens' - } - }, - opset_version=11 - ) - model = onnx.load(fs2_path) - in_dims = model.graph.input[0].type.tensor_type.shape.dim - out_dims = model.graph.output[0].type.tensor_type.shape.dim - out_dims.remove(out_dims[0]) - out_dims.insert(0, in_dims[0]) - out_dims.remove(out_dims[1]) - out_dims.insert(1, in_dims[1]) - model, check = onnxsim.simplify(model, include_subgraph=True) - assert check, 'Simplified ONNX model could not be validated' - onnx.save(model, fs2_path) - print('PyTorch ONNX export finished.') - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Export DiffSinger acoustic model to ONNX') - parser.add_argument('--exp', type=str, required=True, help='Experiment to export') - parser.add_argument('--target', required=False, type=str, help='Path of the target ONNX model') - args = parser.parse_args() - - cwd = os.getcwd() - if args.target: - target = os.path.join(cwd, args.target) - else: - target = None - os.chdir(root_dir) - exp = args.exp - sys.argv = [ - 'inference/ds_cascade.py', - '--config', - f'checkpoints/{exp}/config.yaml', - '--exp_name', - exp - ] - - fs2_model_path = f'onnx/assets/{exp}.rhythmizer.onnx' if not target else target - export(fs2_path=fs2_model_path) - - os.chdir(cwd) - if args.target: - log_path = os.path.abspath(args.target) - else: - log_path = fs2_model_path - print(f'| export \'model.fs2\' to \'{log_path}\'.') diff --git a/src/acoustic_task.py b/src/acoustic_task.py index b8bcdb8f5..2c00d6482 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -21,10 +21,9 @@ import utils from basics.base_dataset import BaseDataset from basics.base_task import BaseTask +from basics.base_vocoder import BaseVocoder from data_gen.data_gen_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import mel2ph_to_dur -from basics.base_vocoder import BaseVocoder -from .vocoders.vocoder_utils import get_vocoder_cls from utils import audio from utils.cwt import get_lf0_cwt from utils.hparams import hparams @@ -36,11 +35,8 @@ from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder from .diff.diffusion import GaussianDiffusion -from .diff.net import DiffNet +from .vocoders.vocoder_utils import get_vocoder_cls -DIFF_DECODERS = { - 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), -} matplotlib.use('Agg') @@ -192,7 +188,7 @@ def build_model(self): mel_bins = hparams['audio_num_mel_bins'] self.model = GaussianDiffusion( phone_encoder=self.phone_encoder, - out_dims=mel_bins, denoise_fn=DIFF_DECODERS[hparams['diff_decoder_type']](hparams), + out_dims=mel_bins, timesteps=hparams['timesteps'], K_step=hparams['K_step'], loss_type=hparams['diff_loss_type'], diff --git a/src/diff/diffusion.py b/src/diff/diffusion.py index 86715d644..71066cc5d 100644 --- a/src/diff/diffusion.py +++ b/src/diff/diffusion.py @@ -1,21 +1,22 @@ -import math -import random from collections import deque from functools import partial from inspect import isfunction -from pathlib import Path + import numpy as np import torch import torch.nn.functional as F from torch import nn from tqdm import tqdm -from einops import rearrange -from modules.fastspeech.fs2 import FastSpeech2 -from modules.diffsinger_midi.fs2 import FastSpeech2MIDI -from 
modules.naive_frontend.encoder import ParameterEncoder -from utils.hparams import hparams +from modules.naive_frontend.encoder import FastSpeech2Acoustic +from src.diff.net import DiffNet from training.diffsinger import Batch2Loss +from utils.hparams import hparams + + +DIFF_DECODERS = { + 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), +} def exists(x): @@ -28,8 +29,6 @@ def default(val, d): return d() if isfunction(d) else d -# gaussian diffusion trainer class - def extract(a, t, x_shape): b, *_ = t.shape out = a.gather(-1, t) @@ -70,16 +69,12 @@ def cosine_beta_schedule(timesteps, s=0.008): class GaussianDiffusion(nn.Module): - def __init__(self, phone_encoder, out_dims, denoise_fn, - timesteps=1000, K_step=1000, loss_type=hparams.get('diff_loss_type', 'l1'), betas=None, spec_min=None, + def __init__(self, phone_encoder, out_dims, timesteps=1000, K_step=1000, + loss_type=hparams.get('diff_loss_type', 'l1'), betas=None, spec_min=None, spec_max=None): super().__init__() - self.denoise_fn = denoise_fn - if hparams.get('use_midi') is not None and hparams['use_midi']: - self.fs2 = FastSpeech2MIDI(phone_encoder, out_dims) - else: - #self.fs2 = FastSpeech2(phone_encoder, out_dims) - self.fs2 = ParameterEncoder(phone_encoder) + self.denoise_fn = DIFF_DECODERS[hparams['diff_decoder_type']](hparams) + self.fs2 = FastSpeech2Acoustic(phone_encoder) self.mel_bins = out_dims if exists(betas): @@ -235,8 +230,7 @@ def forward(self, txt_tokens, mel2ph=None, spk_embed=None, ''' conditioning diffusion, use fastspeech2 encoder output as the condition ''' - ret = self.fs2(txt_tokens, mel2ph, spk_embed, ref_mels, f0, uv, energy, - skip_decoder=True, infer=infer, **kwargs) + ret = self.fs2(txt_tokens, mel2ph=mel2ph, f0=f0, uv=uv, spk_embed_id=spk_embed, infer=infer, **kwargs) cond = ret['decoder_inp'].transpose(1, 2) b, *_, device = *txt_tokens.shape, txt_tokens.device diff --git a/src/diff/net.py b/src/diff/net.py index e12aee75d..13f2efaad 100644 --- a/src/diff/net.py +++ b/src/diff/net.py @@ -86,6 +86,7 @@ def forward(self, x, conditioner, diffusion_step): return (x + residual) / sqrt(2.0), skip + class DiffNet(nn.Module): def __init__(self, in_dims=80): super().__init__() From b5219eb03f920147c32c1157a8f1ae760a7fb7c2 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 13:45:03 +0800 Subject: [PATCH 017/475] Remove useless CLI arguments --- main.py | 34 ++-------------------------------- 1 file changed, 2 insertions(+), 32 deletions(-) diff --git a/main.py b/main.py index ce7cb9795..4163e49ac 100644 --- a/main.py +++ b/main.py @@ -3,15 +3,14 @@ import json import os import sys -import warnings import numpy as np import torch -from utils.infer_utils import cross_fade, trans_key from inference.ds_cascade import DiffSingerCascadeInfer from utils.audio import save_wav from utils.hparams import set_hparams, hparams +from utils.infer_utils import cross_fade, trans_key from utils.slur_utils import merge_slurs from utils.spk_utils import parse_commandline_spk_mix @@ -32,21 +31,10 @@ parser.add_argument('--seed', type=int, required=False, help='Random seed of the inference') parser.add_argument('--speedup', type=int, required=False, default=0, help='PNDM speed-up ratio') parser.add_argument('--pitch', action='store_true', required=False, default=None, help='Enable manual pitch mode') -parser.add_argument('--forced_automatic_pitch_mode', action='store_true', required=False, default=False) parser.add_argument('--mel', action='store_true', required=False, default=False, help='Save intermediate mel 
format instead of waveform') args = parser.parse_args() -# Deprecation for --pitch -warnings.filterwarnings(action='default') -if args.pitch is not None: - warnings.warn( - message='The argument \'--pitch\' is deprecated and will be removed in the future. ' - 'The program now automatically detects which mode to use.', - category=DeprecationWarning, - ) - warnings.filterwarnings(action='default') - name = os.path.basename(args.proj).split('.')[0] if not args.title else args.title exp = args.exp if not os.path.exists(f'{root_dir}/checkpoints/{exp}'): @@ -65,7 +53,7 @@ out = os.path.dirname(os.path.abspath(args.proj)) sys.argv = [ - f'{root_dir}/inference/ds_e2e.py' if not args.pitch else f'{root_dir}/inference/ds_cascade.py', + f'{root_dir}/inference/ds_cascade.py', '--exp_name', exp, '--infer' @@ -130,24 +118,6 @@ def infer_once(path: str, save_mel=False): current_length = 0 for i, param in enumerate(params): - # Ban automatic pitch mode by default - param_have_f0 = 'f0_seq' in param and param['f0_seq'] - if hparams['use_pitch_embed'] and not param_have_f0: - if not args.forced_automatic_pitch_mode: - assert param_have_f0, 'You are using automatic pitch mode which may not produce satisfactory ' \ - 'results. When you see this message, it is very likely that you forgot to ' \ - 'freeze the f0 sequence into the input file, and this error is to inform ' \ - 'you that a double-check should be applied. If you do want to test out the ' \ - 'automatic pitch mode, please force it on manually.' - warnings.warn( - message='You are using forced automatic pitch mode. As this mode is only for testing purpose, ' - 'please note that you must know clearly what you are doing, and be aware that the result ' - 'may not be satisfactory.', - category=UserWarning - ) - warnings.filterwarnings(action='default') - param['f0_seq'] = None - if 'seed' in param: print(f'| set seed: {param["seed"] & 0xffff_ffff}') torch.manual_seed(param["seed"] & 0xffff_ffff) From 45c1ad0988bdbc9309efa2a56ca678a120338a3d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 17:59:19 +0800 Subject: [PATCH 018/475] Renames and clean-ups --- main.py | 1 - modules/fastspeech/tts_modules.py | 3 +-- modules/naive_frontend/encoder.py | 6 +++--- onnx/export/export_acoustic.py | 10 +++++----- 4 files changed, 9 insertions(+), 11 deletions(-) diff --git a/main.py b/main.py index 4163e49ac..8bd0c9a2f 100644 --- a/main.py +++ b/main.py @@ -30,7 +30,6 @@ parser.add_argument('--gender', type=float, required=False, help='Formant shifting (gender control)') parser.add_argument('--seed', type=int, required=False, help='Random seed of the inference') parser.add_argument('--speedup', type=int, required=False, default=0, help='PNDM speed-up ratio') -parser.add_argument('--pitch', action='store_true', required=False, default=None, help='Enable manual pitch mode') parser.add_argument('--mel', action='store_true', required=False, default=False, help='Save intermediate mel format instead of waveform') args = parser.parse_args() diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 3e2d9fccb..4c2d792e3 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -1,12 +1,11 @@ -import logging import math import torch import torch.nn as nn from torch.nn import functional as F +from modules.commons.common_layers import SinusoidalPositionalEmbedding, EncSALayer, BatchNorm1dTBC from modules.commons.espnet_positional_embedding import RelPositionalEncoding -from modules.commons.common_layers 
import SinusoidalPositionalEmbedding, Linear, EncSALayer, DecSALayer, BatchNorm1dTBC from utils.hparams import hparams DEFAULT_MAX_SOURCE_POSITIONS = 2000 diff --git a/modules/naive_frontend/encoder.py b/modules/naive_frontend/encoder.py index 8093718ea..b176a8da5 100644 --- a/modules/naive_frontend/encoder.py +++ b/modules/naive_frontend/encoder.py @@ -8,13 +8,13 @@ from utils.pitch_utils import f0_to_coarse, denorm_f0 -class FastSpeech2Acoustic2Encoder(FastSpeech2Encoder): +class FastSpeech2AcousticEncoder(FastSpeech2Encoder): def forward_embedding(self, txt_tokens, dur_embed): # embed tokens and positions x = self.embed_scale * self.embed_tokens(txt_tokens) x = x + dur_embed if hparams['use_pos_embed']: - if hparams.get('rel_pos') is not None and hparams['rel_pos']: + if hparams.get('rel_pos', False): x = self.embed_positions(x) else: positions = self.embed_positions(txt_tokens) @@ -40,7 +40,7 @@ def __init__(self, dictionary): super().__init__() self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], dictionary.pad()) self.dur_embed = Linear(1, hparams['hidden_size']) - self.encoder = FastSpeech2Acoustic2Encoder( + self.encoder = FastSpeech2AcousticEncoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] ) diff --git a/onnx/export/export_acoustic.py b/onnx/export/export_acoustic.py index c83536696..a0d87f41e 100644 --- a/onnx/export/export_acoustic.py +++ b/onnx/export/export_acoustic.py @@ -23,7 +23,7 @@ from torch.nn import Linear, Embedding from modules.commons.common_layers import Mish -from modules.naive_frontend.encoder import FastSpeech2Acoustic2Encoder +from modules.naive_frontend.encoder import FastSpeech2AcousticEncoder from src.diff.diffusion import beta_schedule from src.diff.net import AttrDict from utils import load_ckpt @@ -65,14 +65,14 @@ def forward(self, dur): return mel2ph -class FastSpeech2MIDILess(nn.Module): +class FastSpeech2Acoustic(nn.Module): def __init__(self, dictionary): super().__init__() self.lr = LengthRegulator() self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], dictionary.pad()) self.dur_embed = Linear(1, hparams['hidden_size']) - self.encoder = FastSpeech2Acoustic2Encoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], - hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) + self.encoder = FastSpeech2AcousticEncoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], + hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': @@ -455,7 +455,7 @@ def forward(self, condition, speedup): def build_fs2_model(device, ckpt_steps=None): - model = FastSpeech2MIDILess( + model = FastSpeech2Acoustic( dictionary=TokenTextEncoder(vocab_list=build_phoneme_list()) ) model.eval() From f8d79bb2935c180eb6e0de0a1bf3834aac380910 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 20:40:56 +0800 Subject: [PATCH 019/475] Remove debug code --- main.py | 1 - 1 file changed, 1 deletion(-) diff --git a/main.py b/main.py index 8bd0c9a2f..ded190d19 100644 --- a/main.py +++ b/main.py @@ -72,7 +72,6 @@ if not args.title: name += key_suffix print(f'音调基于原音频{key_suffix}') -params = params[:1] if args.gender is not None: assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' 
-params = params[:1]
 
 if args.gender is not None:
     assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].'
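A side note on the `rel_pos` guard rewritten in PATCH 018 above: the old form
`hparams.get('rel_pos') is not None and hparams['rel_pos']` and the new form
`hparams.get('rel_pos', False)` agree for any dict-style config. A minimal
standalone sketch, assuming only that `hparams` behaves like a plain Python
dict (the sample values below are illustrative, not taken from a real config):

    # Old and new guards are truth-equivalent, including the missing-key
    # and explicit-None cases.
    for hparams in ({}, {'rel_pos': True}, {'rel_pos': False}, {'rel_pos': None}):
        old = hparams.get('rel_pos') is not None and hparams['rel_pos']
        new = hparams.get('rel_pos', False)
        assert bool(old) == bool(new)

Since the guard is only ever evaluated for truthiness, this clean-up does not
change which positional-encoding branch is taken.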
From c7f2485a3d110c46b225dd7a7c8e7c82ced4f5d1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 20:48:33 +0800 Subject: [PATCH 020/475] Clean up config files --- README.md | 4 +- .../{acoustic/nomidi.yaml => acoustic.yaml} | 18 +- configs/{basics => }/base.yaml | 48 ++-- configs/basics/fs2.yaml | 81 ------- configs/obsolete/cascade.yaml | 115 --------- configs/obsolete/e2e.yaml | 136 ----------- inference/vocoder/val_nsf_hifigan.py | 2 +- modules/fastspeech/tts_modules.py | 2 +- onnx/export/export_nsf_hifigan.py | 2 +- src/acoustic_task.py | 5 - training/diffsinger.py | 219 +----------------- 11 files changed, 27 insertions(+), 605 deletions(-) rename configs/{acoustic/nomidi.yaml => acoustic.yaml} (83%) rename configs/{basics => }/base.yaml (79%) delete mode 100644 configs/basics/fs2.yaml delete mode 100644 configs/obsolete/cascade.yaml delete mode 100644 configs/obsolete/e2e.yaml diff --git a/README.md b/README.md index 3afb21616..a386d5080 100644 --- a/README.md +++ b/README.md @@ -43,14 +43,14 @@ The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop ```sh export PYTHONPATH=. -CUDA_VISIBLE_DEVICES=0 python data_gen/binarize.py --config configs/acoustic/nomidi.yaml +CUDA_VISIBLE_DEVICES=0 python data_gen/binarize.py --config configs/acoustic.yaml ``` ### Training The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop/) dataset. ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/acoustic/nomidi.yaml --exp_name $MY_DS_EXP_NAME --reset +CUDA_VISIBLE_DEVICES=0 python run.py --config configs/acoustic.yaml --exp_name $MY_DS_EXP_NAME --reset ``` ### Inference diff --git a/configs/acoustic/nomidi.yaml b/configs/acoustic.yaml similarity index 83% rename from configs/acoustic/nomidi.yaml rename to configs/acoustic.yaml index b53356909..e726ce5ae 100644 --- a/configs/acoustic/nomidi.yaml +++ b/configs/acoustic.yaml @@ -1,5 +1,5 @@ base_config: - - configs/basics/fs2.yaml + - configs/base.yaml task_cls: src.acoustic_task.AcousticTask datasets: [ @@ -13,8 +13,6 @@ test_prefixes: [ '2093', '2100', ] -test_num: 0 -valid_num: 0 vocoder: NsfHifiGAN vocoder_ckpt: checkpoints/nsf_hifigan/model @@ -46,21 +44,16 @@ binarization_args: # scale: 2.0 raw_data_dir: 'data/opencpop/raw' -processed_data_dir: '' binary_data_dir: 'data/opencpop/binary' binarizer_cls: data_gen.acoustic.AcousticBinarizer g2p_dictionary: dictionaries/opencpop-extension.txt -pitch_extractor: 'parselmouth' pitch_type: frame -content_cond_steps: [ ] # [ 0, 10000 ] -spk_cond_steps: [ ] # [ 0, 10000 ] spec_min: [-5] spec_max: [0] keep_bins: 128 mel_loss: "ssim:0.5|l1:0.5" mel_vmin: -6. #-6. 
mel_vmax: 1.5 -wav2spec_eps: 1e-6 save_f0: true #pe_enable: true @@ -75,18 +68,10 @@ use_speed_embed: false use_gt_f0: false # for midi exp use_gt_dur: false # for further midi exp f0_embed_type: continuous -lambda_f0: 0.0 -lambda_uv: 0.0 -lambda_energy: 0.0 -lambda_ph_dur: 0.0 -lambda_sent_dur: 0.0 -lambda_word_dur: 0.0 -predictor_grad: 0.0 K_step: 1000 timesteps: 1000 max_beta: 0.02 -predictor_layers: 5 rel_pos: true gaussian_start: true pndm_speedup: 10 @@ -99,7 +84,6 @@ diff_loss_type: l2 schedule_type: 'linear' # train and eval -gen_tgt_spk_id: -1 num_sanity_val_steps: 1 lr: 0.0004 decay_steps: 50000 diff --git a/configs/basics/base.yaml b/configs/base.yaml similarity index 79% rename from configs/basics/base.yaml rename to configs/base.yaml index 7b3fcc979..16617f2f8 100644 --- a/configs/basics/base.yaml +++ b/configs/base.yaml @@ -15,18 +15,8 @@ save_codes: ############# sort_by_len: true raw_data_dir: '' -processed_data_dir: '' binary_data_dir: '' -dict_dir: '' -pre_align_cls: '' binarizer_cls: basics.base_binarizer.BaseBinarizer -pre_align_args: - use_tone: true # for ZH - forced_align: mfa - use_sox: false - txt_processor: en - allow_no_txt: false - denoise: false binarization_args: shuffle: false with_txt: true @@ -36,12 +26,9 @@ binarization_args: with_f0: true with_f0cwt: true -loud_norm: false endless_ds: true reset_phone_dict: true -test_num: 100 -valid_num: 100 max_frames: 1550 max_input_tokens: 1550 audio_num_mel_bins: 80 @@ -60,30 +47,33 @@ ds_workers: 4 ######### # model ######### +hidden_size: 256 dropout: 0.1 +use_pos_embed: true enc_layers: 4 -dec_layers: 4 -hidden_size: 384 num_heads: 2 -prenet_dropout: 0.5 -prenet_hidden_size: 256 -stop_token_weight: 5.0 enc_ffn_kernel_size: 9 -dec_ffn_kernel_size: 9 ffn_act: gelu ffn_padding: 'SAME' - +use_pitch_embed: true +pitch_type: ph # frame|ph|cwt +use_uv: true +pitch_norm: log +use_energy_embed: false +use_spk_id: false +use_spk_embed: false ########### # optimization ########### lr: 2.0 -warmup_updates: 8000 +warmup_updates: 2000 optimizer_adam_beta1: 0.9 optimizer_adam_beta2: 0.98 weight_decay: 0 clip_grad_norm: 1 - +mel_loss: l1:0.5|ssim:0.5 # l1|l2|gdl|ssim or l1:0.5|ssim:0.5 +dur_loss: mse # huber|mol ########### # train and eval @@ -95,23 +85,25 @@ num_ckpt_keep: 3 accumulate_grad_batches: 1 log_interval: 100 num_sanity_val_steps: 5 # steps of validation at the beginning -check_val_every_n_epoch: 10 val_check_interval: 2000 -max_epochs: 1000 -max_updates: 160000 +max_updates: 120000 test_input_dir: '' -max_tokens: 30000 +max_tokens: 32000 max_sentences: 100000 max_eval_sentences: 1 max_eval_tokens: 60000 train_set_name: 'train' valid_set_name: 'valid' test_set_name: 'test' -vocoder: pwg +vocoder: '' vocoder_ckpt: '' profile_infer: false out_wav_norm: false save_gt: false save_f0: false gen_dir_name: '' -use_denoise: false +num_valid_plots: 5 +num_test_samples: 0 +test_ids: [] +use_gt_dur: false +use_gt_f0: false diff --git a/configs/basics/fs2.yaml b/configs/basics/fs2.yaml deleted file mode 100644 index 23cb51307..000000000 --- a/configs/basics/fs2.yaml +++ /dev/null @@ -1,81 +0,0 @@ -base_config: - - configs/basics/base.yaml -task_cls: tts.tasks.fs2.FastSpeech2Task - -# model -hidden_size: 256 -dropout: 0.1 -encoder_type: fft # fft|tacotron|tacotron2|conformer -encoder_K: 8 # for tacotron encoder -decoder_type: fft # fft|rnn|conv|conformer -use_pos_embed: true - -# duration -predictor_hidden: -1 -predictor_kernel: 5 -predictor_layers: 2 -dur_predictor_kernel: 3 -dur_predictor_layers: 2 -predictor_dropout: 0.5 - -# 
pitch and energy -use_pitch_embed: true -pitch_type: ph # frame|ph|cwt -use_uv: true -cwt_hidden_size: 128 -cwt_layers: 2 -cwt_loss: l1 -cwt_add_f0_loss: false -cwt_std_scale: 0.8 - -pitch_ar: false -#pitch_embed_type: 0q -pitch_loss: 'l1' # l1|l2|ssim -pitch_norm: log -use_energy_embed: false - -# reference encoder and speaker embedding -use_spk_id: false -use_split_spk_id: false -use_spk_embed: false -use_var_enc: false -lambda_commit: 0.25 -ref_norm_layer: bn -pitch_enc_hidden_stride_kernel: - - 0,2,5 # conv_hidden_size, conv_stride, conv_kernel_size. conv_hidden_size=0: use hidden_size - - 0,2,5 - - 0,2,5 -dur_enc_hidden_stride_kernel: - - 0,2,3 # conv_hidden_size, conv_stride, conv_kernel_size. conv_hidden_size=0: use hidden_size - - 0,2,3 - - 0,1,3 - - -# mel -mel_loss: l1:0.5|ssim:0.5 # l1|l2|gdl|ssim or l1:0.5|ssim:0.5 - -# loss lambda -lambda_f0: 1.0 -lambda_uv: 1.0 -lambda_energy: 0.1 -lambda_ph_dur: 1.0 -lambda_sent_dur: 1.0 -lambda_word_dur: 1.0 -predictor_grad: 0.1 - -# train and eval -pretrain_fs_ckpt: '' -warmup_updates: 2000 -max_tokens: 32000 -max_sentences: 100000 -max_eval_sentences: 1 -max_updates: 120000 -num_valid_plots: 5 -num_test_samples: 0 -test_ids: [] -use_gt_dur: false -use_gt_f0: false - -# exp -dur_loss: mse # huber|mol -norm_type: gn \ No newline at end of file diff --git a/configs/obsolete/cascade.yaml b/configs/obsolete/cascade.yaml deleted file mode 100644 index 3ba2e6d83..000000000 --- a/configs/obsolete/cascade.yaml +++ /dev/null @@ -1,115 +0,0 @@ -base_config: - - configs/basics/fs2.yaml - -task_cls: src.diffsinger_task.DiffSingerMIDITask -datasets: [ - 'opencpop', -] -num_spk: 1 -test_prefixes: [ - '2044', - '2086', - '2092', - '2093', - '2100', -] - -test_num: 0 -valid_num: 0 - -pre_align_cls: data_gen.pre_align.SingingPreAlign -pre_align_args: - txt_processor: zh_g2pM - use_tone: false # for ZH - forced_align: mfa - use_sox: true -max_frames: 8000 - -audio_sample_rate: 44100 -audio_num_mel_bins: 128 -hop_size: 512 # Hop size. -fft_size: 2048 # FFT size. -win_size: 2048 # FFT size. -fmin: 40 -fmax: 16000 -min_level_db: -120 - -vocoder: NsfHifiGAN -vocoder_ckpt: checkpoints/nsf_hifigan/model -use_nsf: true - -binarizer_cls: data_gen.acoustic.AcousticBinarizer -binarization_args: - with_wav: false - with_spk_embed: false - with_align: true -raw_data_dir: 'data/opencpop/raw' -processed_data_dir: '' -binary_data_dir: 'data/opencpop/binary' -g2p_dictionary: dictionaries/opencpop-extension.txt -mel_vmin: -6. #-6. 
-mel_vmax: 1.5 -wav2spec_eps: 1e-6 -save_f0: true - -pitch_extractor: 'parselmouth' -pitch_type: frame -# config for experiments - -content_cond_steps: [ ] # [ 0, 10000 ] -spk_cond_steps: [ ] # [ 0, 10000 ] -spec_min: [-5] -spec_max: [0] -keep_bins: 128 -mel_loss: "ssim:0.5|l1:0.5" -use_spk_id: false - -#switch_midi2f0_step: 174000 -use_midi: true # for midi exp -use_spk_embed: false -use_gt_f0: false # for midi exp -use_gt_dur: false # for further midi exp -use_uv: false -lambda_f0: 1.0 -lambda_uv: 0.0 -lambda_energy: 0.0 -lambda_ph_dur: 0.3 -lambda_sent_dur: 3.0 -lambda_word_dur: 1.0 -predictor_grad: 0.1 -#pe_enable: true -#pe_ckpt: 'checkpoints/0102_xiaoma_pe' -pitch_loss: l2 -diff_loss_type: l2 - -fs2_ckpt: '' # - -K_step: 1000 -timesteps: 1000 -max_beta: 0.02 -lr: 0.0005 -decay_steps: 50000 -max_tokens: 80000 -predictor_layers: 5 -dilation_cycle_length: 4 # * -rel_pos: true -dur_predictor_layers: 5 # * -gaussian_start: true -pndm_speedup: 10 - -hidden_size: 256 -diff_decoder_type: 'wavenet' -schedule_type: 'linear' - -residual_layers: 20 -residual_channels: 512 - -# train and eval -gen_tgt_spk_id: -1 -num_sanity_val_steps: 1 -max_sentences: 48 -val_check_interval: 2000 -num_valid_plots: 10 -max_updates: 320000 -permanent_ckpt_start: 120000 -permanent_ckpt_interval: 40000 diff --git a/configs/obsolete/e2e.yaml b/configs/obsolete/e2e.yaml deleted file mode 100644 index 3fc2b3982..000000000 --- a/configs/obsolete/e2e.yaml +++ /dev/null @@ -1,136 +0,0 @@ -base_config: - - configs/basics/fs2.yaml - -task_cls: src.diffsinger_task.DiffSingerMIDITask -datasets: [ - 'opencpop', -] -num_spk: 1 -test_prefixes: [ - '2044', - '2086', - '2092', - '2093', - '2100', -] - -test_num: 0 -valid_num: 0 - -pre_align_cls: data_gen.pre_align.SingingPreAlign -pre_align_args: - txt_processor: zh_g2pM - use_tone: false # for ZH - forced_align: mfa - use_sox: true -max_frames: 8000 - -audio_sample_rate: 24000 -hop_size: 128 # Hop size. -fft_size: 512 # FFT size. -win_size: 512 # FFT size. -fmin: 30 -fmax: 12000 -min_level_db: -120 - -vocoder: src.vocoders.hifigan.HifiGAN -vocoder_ckpt: checkpoints/0109_hifigan_bigpopcs_hop128 -use_nsf: true - -binarizer_cls: data_gen.opencpop.OpencpopBinarizer -binarization_args: - with_wav: true - with_spk_embed: false - with_align: true -raw_data_dir: 'data/raw/opencpop/segments' -processed_data_dir: 'xxx' -binary_data_dir: 'data/binary/opencpop-midi-dp' -save_f0: true - -pitch_extractor: 'parselmouth' -pitch_type: frame -# config for experiments -use_spk_embed: false -num_valid_plots: 10 -lr: 0.001 - -content_cond_steps: [ ] # [ 0, 10000 ] -spk_cond_steps: [ ] # [ 0, 10000 ] - -#switch_midi2f0_step: 174000 -use_midi: true # for midi exp -use_gt_dur: false # for further midi exp -use_spk_id: false -use_pitch_embed: false - -lambda_ph_dur: 1.0 -lambda_sent_dur: 1.0 -lambda_word_dur: 1.0 -predictor_grad: 0.1 -dur_predictor_layers: 5 # * -mel_vmin: -6. #-6. -mel_vmax: 1.5 -wav2spec_eps: 1e-6 -spec_min: [-6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., - -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., - -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., - -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., - -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., - -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., -6., - -6., -6., -6., -6., -6., -6., -6., -6.] 
-spec_max: [-7.9453e-01, -8.1116e-01, -6.1631e-01, -3.0679e-01, -1.3863e-01, - -5.0652e-02, -1.1563e-01, -1.0679e-01, -9.1068e-02, -6.2174e-02, - -7.5302e-02, -7.2217e-02, -6.3815e-02, -7.3299e-02, 7.3610e-03, - -7.2508e-02, -5.0234e-02, -1.6534e-01, -2.6928e-01, -2.0782e-01, - -2.0823e-01, -1.1702e-01, -7.0128e-02, -6.5868e-02, -1.2675e-02, - 1.5121e-03, -8.9902e-02, -2.1392e-01, -2.3789e-01, -2.8922e-01, - -3.0405e-01, -2.3029e-01, -2.2088e-01, -2.1542e-01, -2.9367e-01, - -3.0137e-01, -3.8281e-01, -4.3590e-01, -2.8681e-01, -4.6855e-01, - -5.7485e-01, -4.7022e-01, -5.4266e-01, -4.4848e-01, -6.4120e-01, - -6.8700e-01, -6.4860e-01, -7.6436e-01, -4.9971e-01, -7.1068e-01, - -6.9724e-01, -6.1487e-01, -5.5843e-01, -6.9773e-01, -5.7502e-01, - -7.0919e-01, -8.2431e-01, -8.4213e-01, -9.0431e-01, -8.2840e-01, - -7.7945e-01, -8.2758e-01, -8.7699e-01, -1.0532e+00, -1.0766e+00, - -1.1198e+00, -1.0185e+00, -9.8983e-01, -1.0001e+00, -1.0756e+00, - -1.0024e+00, -1.0304e+00, -1.0579e+00, -1.0188e+00, -1.0500e+00, - -1.0842e+00, -1.0923e+00, -1.1223e+00, -1.2381e+00, -1.6467e+00] - -fs2_ckpt: '' # -#num_valid_plots: 0 - -# for diffusion schedule -timesteps: 1000 -K_step: 1000 -max_beta: 0.02 -max_tokens: 36000 -max_updates: 320000 -gaussian_start: True -pndm_speedup: 40 - -diff_loss_type: l1 -diff_decoder_type: 'wavenet' -schedule_type: 'linear' - -mel_loss: "ssim:0.5|l1:0.5" -hidden_size: 256 -residual_layers: 20 -residual_channels: 256 -decay_steps: 50000 -keep_bins: 80 - -use_gt_f0: false # for midi exp - -lambda_f0: 0. -lambda_uv: 0. -lambda_energy: 0.0 -dilation_cycle_length: 4 # * -rel_pos: true -predictor_layers: 5 -pe_enable: true -pe_ckpt: 'checkpoints/0102_xiaoma_pe' - -# train and eval -# max_updates: 200000 -gen_tgt_spk_id: -1 -max_sentences: 48 -num_sanity_val_steps: 1 diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/vocoder/val_nsf_hifigan.py index ef35ccb7d..75fc97dd0 100644 --- a/inference/vocoder/val_nsf_hifigan.py +++ b/inference/vocoder/val_nsf_hifigan.py @@ -15,7 +15,7 @@ sys.argv = [ 'inference/svs/ds_cascade.py', '--config', - 'configs/acoustic/nomidi.yaml', + 'configs/acoustic.yaml', ] diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 4c2d792e3..b842953f4 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -230,7 +230,7 @@ def __init__(self, embed_tokens, hidden_size, num_layers, ffn_kernel_size=9, dro super().__init__() hidden_size = hparams['hidden_size'] if hidden_size is None else hidden_size kernel_size = hparams['enc_ffn_kernel_size'] if ffn_kernel_size is None else ffn_kernel_size - num_layers = hparams['dec_layers'] if num_layers is None else num_layers + num_layers = hparams['enc_layers'] if num_layers is None else num_layers self.num_layers = num_layers embed_dim = self.hidden_size = hidden_size self.dropout = dropout if dropout is not None else hparams['dropout'] diff --git a/onnx/export/export_nsf_hifigan.py b/onnx/export/export_nsf_hifigan.py index d80fd48bc..8957e4dba 100644 --- a/onnx/export/export_nsf_hifigan.py +++ b/onnx/export/export_nsf_hifigan.py @@ -311,7 +311,7 @@ def export(model_path): sys.argv = [ 'inference/ds_cascade.py', '--config', - 'configs/acoustic/nomidi.yaml', + 'configs/acoustic.yaml', ] path = 'onnx/assets/nsf_hifigan2.onnx' export(path) diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 2c00d6482..a331686b6 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -475,11 +475,6 @@ def after_infer(self, predictions): fig = plt.figure() 
plt.plot(f0_pred_, label=r'$f0_P$') plt.plot(f0_gt_, label=r'$f0_G$') - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - # f0_midi = prediction.get("f0_midi") - # f0_midi = f0_midi[mel_gt_mask] - # plt.plot(f0_midi, label=r'$f0_M$') - pass plt.legend() plt.tight_layout() plt.savefig(f'{gen_dir}/plot/[F0][{item_name}]{text}.png', format='png') diff --git a/training/diffsinger.py b/training/diffsinger.py index a3c1151d0..59565b0c2 100644 --- a/training/diffsinger.py +++ b/training/diffsinger.py @@ -1,224 +1,7 @@ -from utils.hparams import hparams import torch -from torch.nn import functional as F -from utils.pitch_utils import f0_to_coarse, denorm_f0, norm_f0 -class Batch2Loss: - ''' - pipeline: batch -> insert1 -> module1 -> insert2 -> module2 -> insert3 -> module3 -> insert4 -> module4 -> loss - ''' - - @staticmethod - def insert1(pitch_midi, midi_dur, is_slur, # variables - midi_embed, midi_dur_layer, is_slur_embed): # modules - ''' - add embeddings for midi, midi_dur, slur - ''' - midi_embedding = midi_embed(pitch_midi) - midi_dur_embedding, slur_embedding = 0, 0 - if midi_dur is not None: - midi_dur_embedding = midi_dur_layer(midi_dur[:, :, None]) # [B, T, 1] -> [B, T, H] - if is_slur is not None: - slur_embedding = is_slur_embed(is_slur) - return midi_embedding, midi_dur_embedding, slur_embedding - - @staticmethod - def module1(fs2_encoder, # modules - txt_tokens, midi_embedding, midi_dur_embedding, slur_embedding): # variables - ''' - get *encoder_out* == fs2_encoder(*txt_tokens*, some embeddings) - ''' - encoder_out = fs2_encoder(txt_tokens, midi_embedding, midi_dur_embedding, slur_embedding) - return encoder_out - - @staticmethod - def insert2(encoder_out, spk_embed_id, spk_embed_dur_id, spk_embed_f0_id, src_nonpadding, # variables - spk_embed_proj): # modules - ''' - 1. add embeddings for spk, spk_dur, spk_f0 - 2. get *dur_inp* ~= *encoder_out* + *spk_embed_dur* - ''' - # add ref style embed - # Not implemented - # variance encoder - var_embed = 0 - - # encoder_out_dur denotes encoder outputs for duration predictor - # in speech adaptation, duration predictor use old speaker embedding - if hparams['use_spk_embed']: - spk_embed_dur = spk_embed_f0 = spk_embed = spk_embed_proj(spk_embed_id)[:, None, :] - elif hparams['use_spk_id']: - if spk_embed_dur_id is None: - spk_embed_dur_id = spk_embed_id - if spk_embed_f0_id is None: - spk_embed_f0_id = spk_embed_id - spk_embed = spk_embed_proj(spk_embed_id)[:, None, :] - spk_embed_dur = spk_embed_f0 = spk_embed - if hparams['use_split_spk_id']: - spk_embed_dur = spk_embed_dur(spk_embed_dur_id)[:, None, :] - spk_embed_f0 = spk_embed_f0(spk_embed_f0_id)[:, None, :] - else: - spk_embed_dur = spk_embed_f0 = spk_embed = 0 - - # add dur - dur_inp = (encoder_out + var_embed + spk_embed_dur) * src_nonpadding - return var_embed, spk_embed, spk_embed_dur, spk_embed_f0, dur_inp - - @staticmethod - def module2(dur_predictor, length_regulator, # modules - dur_input, mel2ph, txt_tokens, all_vowel_tokens, ret, midi_dur=None): # variables - ''' - 1. get *dur* ~= dur_predictor(*dur_inp*) - 2. 
(mel2ph is None): get *mel2ph* ~= length_regulater(*dur*) - ''' - src_padding = (txt_tokens == 0) - dur_input = dur_input.detach() + hparams['predictor_grad'] * (dur_input - dur_input.detach()) - - if mel2ph is None: - dur, xs = dur_predictor.inference(dur_input, src_padding) - ret['dur'] = xs - dur = xs.squeeze(-1).exp() - 1.0 - for i in range(len(dur)): - for j in range(len(dur[i])): - if txt_tokens[i,j] in all_vowel_tokens: - if j < len(dur[i])-1 and txt_tokens[i,j+1] not in all_vowel_tokens: - dur[i,j] = midi_dur[i,j] - dur[i,j+1] - if dur[i,j] < 0: - dur[i,j] = 0 - dur[i,j+1] = midi_dur[i,j] - else: - dur[i,j]=midi_dur[i,j] - dur[:,0] = dur[:,0] + 0.5 - dur_acc = F.pad(torch.round(torch.cumsum(dur, axis=1)), (1,0)) - dur = torch.clamp(dur_acc[:,1:]-dur_acc[:,:-1], min=0).long() - ret['dur_choice'] = dur - mel2ph = length_regulator(dur, src_padding).detach() - else: - ret['dur'] = dur_predictor(dur_input, src_padding) - ret['mel2ph'] = mel2ph - - return mel2ph - - @staticmethod - def insert3(encoder_out, mel2ph, var_embed, spk_embed_f0, src_nonpadding, tgt_nonpadding): # variables - ''' - 1. get *decoder_inp* ~= gather *encoder_out* according to *mel2ph* - 2. get *pitch_inp* ~= *decoder_inp* + *spk_embed_f0* - 3. get *pitch_inp_ph* ~= *encoder_out* + *spk_embed_f0* - ''' - decoder_inp = F.pad(encoder_out, [0, 0, 1, 0]) - mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]]) - decoder_inp = decoder_inp_origin = torch.gather(decoder_inp, 1, mel2ph_) # [B, T, H] - - pitch_inp = (decoder_inp_origin + var_embed + spk_embed_f0) * tgt_nonpadding - pitch_inp_ph = (encoder_out + var_embed + spk_embed_f0) * src_nonpadding - return decoder_inp, pitch_inp, pitch_inp_ph - - @staticmethod - def module3(pitch_predictor, pitch_embed, energy_predictor, energy_embed, # modules - pitch_inp, pitch_inp_ph, f0, uv, energy, mel2ph, is_training, ret): # variables - ''' - 1. get *ret['pitch_pred']*, *ret['energy_pred']* ~= pitch_predictor(*pitch_inp*), energy_predictor(*pitch_inp*) - 2. get *pitch_embedding* ~= pitch_embed(f0_to_coarse(denorm_f0(*f0* or *pitch_pred*)) - 3. get *energy_embedding* ~= energy_embed(energy_to_coarse(*energy* or *energy_pred*)) - ''' - def add_pitch(decoder_inp, f0, uv, mel2ph, ret, encoder_out=None): - if hparams['pitch_type'] == 'ph': - pitch_pred_inp = encoder_out.detach() + hparams['predictor_grad'] * (encoder_out - encoder_out.detach()) - pitch_padding = (encoder_out.sum().abs() == 0) - ret['pitch_pred'] = pitch_pred = pitch_predictor(pitch_pred_inp) - if f0 is None: - f0 = pitch_pred[:, :, 0] - ret['f0_denorm'] = f0_denorm = denorm_f0(f0, None, hparams, pitch_padding=pitch_padding) - pitch = f0_to_coarse(f0_denorm) # start from 0 [B, T_txt] - pitch = F.pad(pitch, [1, 0]) - pitch = torch.gather(pitch, 1, mel2ph) # [B, T_mel] - pitch_embedding = pitch_embed(pitch) - return pitch_embedding - - decoder_inp = decoder_inp.detach() + hparams['predictor_grad'] * (decoder_inp - decoder_inp.detach()) - - pitch_padding = (mel2ph == 0) - - if hparams['pitch_type'] == 'cwt': - # NOTE: this part of script is *isolated* from other scripts, which means - # it may not be compatible with the current version. 
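# Illustration only, not part of the patch: a self-contained sketch of the
# mel2ph gather used in insert3 above, which expands phoneme-level encoder
# output to frame level. All tensor values here are toy data.
import torch
import torch.nn.functional as F

encoder_out = torch.randn(1, 3, 4)           # [B, T_txt, H]: 3 phonemes, H = 4
mel2ph = torch.tensor([[1, 1, 2, 2, 2, 3]])  # [B, T_mel]: 1-based phoneme index per frame, 0 = padding
padded = F.pad(encoder_out, [0, 0, 1, 0])    # prepend a zero vector so index 0 selects padding
index = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]])
decoder_inp = torch.gather(padded, 1, index)  # [B, T_mel, H]: one phoneme vector per mel frame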
- pass - # pitch_padding = None - # ret['cwt'] = cwt_out = self.cwt_predictor(decoder_inp) - # stats_out = self.cwt_stats_layers(encoder_out[:, 0, :]) # [B, 2] - # mean = ret['f0_mean'] = stats_out[:, 0] - # std = ret['f0_std'] = stats_out[:, 1] - # cwt_spec = cwt_out[:, :, :10] - # if f0 is None: - # std = std * hparams['cwt_std_scale'] - # f0 = self.cwt2f0_norm(cwt_spec, mean, std, mel2ph) - # if hparams['use_uv']: - # assert cwt_out.shape[-1] == 11 - # uv = cwt_out[:, :, -1] > 0 - elif hparams['pitch_ar']: - ret['pitch_pred'] = pitch_pred = pitch_predictor(decoder_inp, f0 if is_training else None) - if f0 is None: - f0 = pitch_pred[:, :, 0] - else: - ret['pitch_pred'] = pitch_pred = pitch_predictor(decoder_inp) - if f0 is None: - f0 = pitch_pred[:, :, 0] - if hparams['use_uv'] and uv is None: - uv = pitch_pred[:, :, 1] > 0 - ret['f0_denorm'] = f0_denorm = denorm_f0(f0, uv, hparams, pitch_padding=pitch_padding) - if pitch_padding is not None: - f0[pitch_padding] = 0 - - pitch = f0_to_coarse(f0_denorm) # start from 0 - pitch_embedding = pitch_embed(pitch) - return pitch_embedding - - def add_energy(decoder_inp, energy, ret): - decoder_inp = decoder_inp.detach() + hparams['predictor_grad'] * (decoder_inp - decoder_inp.detach()) - ret['energy_pred'] = energy_pred = energy_predictor(decoder_inp)[:, :, 0] - if energy is None: - energy = energy_pred - energy = torch.clamp(energy * 256 // 4, max=255).long() # energy_to_coarse - energy_embedding = energy_embed(energy) - return energy_embedding - - # add pitch and energy embed - nframes = mel2ph.size(1) - - pitch_embedding = 0 - if hparams['use_pitch_embed']: - if f0 is not None: - delta_l = nframes - f0.size(1) - if delta_l > 0: - f0 = torch.cat((f0,torch.FloatTensor([[x[-1]] * delta_l for x in f0]).to(f0.device)),1) - f0 = f0[:,:nframes] - if uv is not None: - delta_l = nframes - uv.size(1) - if delta_l > 0: - uv = torch.cat((uv,torch.FloatTensor([[x[-1]] * delta_l for x in uv]).to(uv.device)),1) - uv = uv[:,:nframes] - pitch_embedding = add_pitch(pitch_inp, f0, uv, mel2ph, ret, encoder_out=pitch_inp_ph) - - energy_embedding = 0 - if hparams['use_energy_embed']: - if energy is not None: - delta_l = nframes - energy.size(1) - if delta_l > 0: - energy = torch.cat((energy,torch.FloatTensor([[x[-1]] * delta_l for x in energy]).to(energy.device)),1) - energy = energy[:,:nframes] - energy_embedding = add_energy(pitch_inp, energy, ret) - - return pitch_embedding, energy_embedding - - @staticmethod - def insert4(decoder_inp, pitch_embedding, energy_embedding, spk_embed, ret, tgt_nonpadding): - ''' - *decoder_inp* ~= *decoder_inp* + embeddings for spk, pitch, energy - ''' - ret['decoder_inp'] = decoder_inp = (decoder_inp + pitch_embedding + energy_embedding + spk_embed) * tgt_nonpadding - return decoder_inp +class Batch2Loss: @staticmethod def module4(diff_main_loss, # modules norm_spec, decoder_inp_t, ret, K_step, batch_size, device): # variables From 2eca861bd436cb4b72b4bfb030282252610b2dc0 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 20:51:17 +0800 Subject: [PATCH 021/475] Move encoder code --- main.py | 1 + .../encoder.py => fastspeech/acoustic_encoder.py} | 0 onnx/export/export_acoustic.py | 2 +- src/diff/diffusion.py | 2 +- 4 files changed, 3 insertions(+), 2 deletions(-) rename modules/{naive_frontend/encoder.py => fastspeech/acoustic_encoder.py} (100%) diff --git a/main.py b/main.py index ded190d19..8bd0c9a2f 100644 --- a/main.py +++ b/main.py @@ -72,6 +72,7 @@ if not args.title: name += key_suffix print(f'音调基于原音频{key_suffix}') 
+params = params[:1] if args.gender is not None: assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' diff --git a/modules/naive_frontend/encoder.py b/modules/fastspeech/acoustic_encoder.py similarity index 100% rename from modules/naive_frontend/encoder.py rename to modules/fastspeech/acoustic_encoder.py diff --git a/onnx/export/export_acoustic.py b/onnx/export/export_acoustic.py index a0d87f41e..4670d6813 100644 --- a/onnx/export/export_acoustic.py +++ b/onnx/export/export_acoustic.py @@ -23,7 +23,7 @@ from torch.nn import Linear, Embedding from modules.commons.common_layers import Mish -from modules.naive_frontend.encoder import FastSpeech2AcousticEncoder +from modules.fastspeech.encoder import FastSpeech2AcousticEncoder from src.diff.diffusion import beta_schedule from src.diff.net import AttrDict from utils import load_ckpt diff --git a/src/diff/diffusion.py b/src/diff/diffusion.py index 71066cc5d..cdbf264d2 100644 --- a/src/diff/diffusion.py +++ b/src/diff/diffusion.py @@ -8,7 +8,7 @@ from torch import nn from tqdm import tqdm -from modules.naive_frontend.encoder import FastSpeech2Acoustic +from modules.fastspeech.encoder import FastSpeech2Acoustic from src.diff.net import DiffNet from training.diffsinger import Batch2Loss from utils.hparams import hparams From 304d45c8ec1d8406f3ecba52609517d35cb2a6d5 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 20:55:48 +0800 Subject: [PATCH 022/475] Fix ModuleNotFoundError --- onnx/export/export_acoustic.py | 2 +- src/diff/diffusion.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/onnx/export/export_acoustic.py b/onnx/export/export_acoustic.py index 4670d6813..18b69da13 100644 --- a/onnx/export/export_acoustic.py +++ b/onnx/export/export_acoustic.py @@ -23,7 +23,7 @@ from torch.nn import Linear, Embedding from modules.commons.common_layers import Mish -from modules.fastspeech.encoder import FastSpeech2AcousticEncoder +from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder from src.diff.diffusion import beta_schedule from src.diff.net import AttrDict from utils import load_ckpt diff --git a/src/diff/diffusion.py b/src/diff/diffusion.py index cdbf264d2..ab160ca9f 100644 --- a/src/diff/diffusion.py +++ b/src/diff/diffusion.py @@ -8,7 +8,7 @@ from torch import nn from tqdm import tqdm -from modules.fastspeech.encoder import FastSpeech2Acoustic +from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from src.diff.net import DiffNet from training.diffsinger import Batch2Loss from utils.hparams import hparams From 73bb629024db9ab6a0319ea1ef5876270f968280 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 9 Mar 2023 21:59:37 +0800 Subject: [PATCH 023/475] Remove useless code in binarizer --- basics/base_binarizer.py | 49 ++++----------------------------------- data_gen/acoustic.py | 3 --- preprocessing/opencpop.py | 16 ++++++------- 3 files changed, 12 insertions(+), 56 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 5eb956dd7..e89822eaf 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -1,6 +1,7 @@ import copy -import shutil import os +import shutil + os.environ["OMP_NUM_THREADS"] = "1" from utils.multiprocess_utils import chunked_multiprocess_run @@ -8,7 +9,7 @@ import json from resemblyzer import VoiceEncoder from tqdm import tqdm -from data_gen.data_gen_utils import get_mel2ph, get_pitch_parselmouth, build_phone_encoder +from data_gen.data_gen_utils import get_pitch_parselmouth, build_phone_encoder from 
utils.hparams import set_hparams, hparams from utils.phoneme_utils import build_phoneme_list import numpy as np @@ -47,11 +48,7 @@ def __init__(self, data_dir=None, item_attributes=None): if data_dir is None: data_dir = hparams['raw_data_dir'] - if 'speakers' not in hparams: - speakers = hparams['datasets'] - hparams['speakers'] = hparams['datasets'] - else: - speakers = hparams['speakers'] + speakers = hparams['speakers'] assert isinstance(speakers, list), 'Speakers must be a list' assert len(speakers) == len(set(speakers)), 'Speakers cannot contain duplicate names' @@ -343,44 +340,6 @@ def process_item(self, item_name, meta_data, binarization_args): from preprocessing.opencpop import File2Batch return File2Batch.temporary_dict2processed_input(item_name, meta_data, self.phone_encoder, binarization_args) - def get_align(self, meta_data, mel, phone_encoded, res): - raise NotImplementedError - - def get_align_from_textgrid(self, meta_data, mel, phone_encoded, res): - ''' - NOTE: this part of script is *isolated* from other scripts, which means - it may not be compatible with the current version. - ''' - return - tg_fn, ph = meta_data['tg_fn'], meta_data['ph'] - if tg_fn is not None and os.path.exists(tg_fn): - mel2ph, dur = get_mel2ph(tg_fn, ph, mel, hparams) - else: - raise BinarizationError(f"Align not found") - if mel2ph.max() - 1 >= len(phone_encoded): - raise BinarizationError( - f"Align does not match: mel2ph.max() - 1: {mel2ph.max() - 1}, len(phone_encoded): {len(phone_encoded)}") - res['mel2ph'] = mel2ph - res['dur'] = dur - - def get_f0cwt(self, f0, res): - ''' - NOTE: this part of script is *isolated* from other scripts, which means - it may not be compatible with the current version. - ''' - return - from utils.cwt import get_cont_lf0, get_lf0_cwt - uv, cont_lf0_lpf = get_cont_lf0(f0) - logf0s_mean_org, logf0s_std_org = np.mean(cont_lf0_lpf), np.std(cont_lf0_lpf) - cont_lf0_lpf_norm = (cont_lf0_lpf - logf0s_mean_org) / logf0s_std_org - Wavelet_lf0, scales = get_lf0_cwt(cont_lf0_lpf_norm) - if np.any(np.isnan(Wavelet_lf0)): - raise BinarizationError("NaN CWT") - res['cwt_spec'] = Wavelet_lf0 - res['cwt_scales'] = scales - res['f0_mean'] = logf0s_mean_org - res['f0_std'] = logf0s_std_org - if __name__ == "__main__": set_hparams() diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 6bc909fc0..33b2a078b 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -47,9 +47,6 @@ def valid_item_names(self): def test_item_names(self): return self._test_item_names - def get_align(self, meta_data, mel, phone_encoded, res): - raise NotImplementedError() - def split_train_test_set(self, item_names): item_names = set(deepcopy(item_names)) prefixes = set([str(pr) for pr in hparams['test_prefixes']]) diff --git a/preprocessing/opencpop.py b/preprocessing/opencpop.py index 412126cae..acb552ef2 100644 --- a/preprocessing/opencpop.py +++ b/preprocessing/opencpop.py @@ -19,15 +19,15 @@ class File2Batch: - ''' + """ pipeline: file -> temporary_dict -> processed_input -> batch - ''' + """ @staticmethod def file2temporary_dict(raw_data_dir, ds_id): - ''' + """ read from file, store data in temporary dicts - ''' + """ # meta_midi = json.load(open(os.path.join(raw_data_dir, 'meta.json'))) # [list of dict] utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), encoding='utf-8').readlines() @@ -57,9 +57,9 @@ def file2temporary_dict(raw_data_dir, ds_id): @staticmethod def temporary_dict2processed_input(item_name, temp_dict, encoder, binarization_args): - ''' + """ process 
data in temporary_dicts - ''' + """ def get_pitch(wav, mel): # get ground truth f0 by self.get_pitch_algorithm @@ -127,12 +127,12 @@ def get_align(meta_data, mel, phone_encoded, hop_size=hparams['hop_size'], @staticmethod def processed_input2batch(samples): - ''' + """ Args: samples: one batch of processed_input NOTE: the batch size is controlled by hparams['max_sentences'] - ''' + """ if len(samples) == 0: return {} id = torch.LongTensor([s['id'] for s in samples]) From 3cff7bf815dd07b3488426dacd178807316a8c99 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 10:31:00 +0800 Subject: [PATCH 024/475] Remove useless keys --- configs/acoustic.yaml | 2 -- configs/base.yaml | 1 - 2 files changed, 3 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index e726ce5ae..0ce459f0e 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -56,8 +56,6 @@ mel_vmin: -6. #-6. mel_vmax: 1.5 save_f0: true -#pe_enable: true -#pe_ckpt: 'checkpoints/0102_xiaoma_pe' max_frames: 8000 use_uv: false use_midi: false diff --git a/configs/base.yaml b/configs/base.yaml index 16617f2f8..d43daa3cc 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -24,7 +24,6 @@ binarization_args: with_align: true with_spk_embed: true with_f0: true - with_f0cwt: true endless_ds: true reset_phone_dict: true From c5577881c185275f8d16f3b03d7248ca3936b33f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 21:46:38 +0800 Subject: [PATCH 025/475] Clean up `XXXTextEncoder` classes --- basics/base_svs_infer.py | 2 +- data_gen/data_gen_utils.py | 2 +- src/acoustic_task.py | 3 +- utils/text_encoder.py | 272 ++++--------------------------------- 4 files changed, 31 insertions(+), 248 deletions(-) diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 2efed4a06..07b67b732 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -39,7 +39,7 @@ def __init__(self, hparams, device=None, load_model=True, load_vocoder=True, ckp if load_model: phone_list = build_phoneme_list() - self.ph_encoder = TokenTextEncoder(vocab_list=phone_list, replace_oov=',') + self.ph_encoder = TokenTextEncoder(vocab_list=phone_list) self.pinyin2phs = build_g2p_dictionary() if hparams['use_spk_id']: with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: diff --git a/data_gen/data_gen_utils.py b/data_gen/data_gen_utils.py index 13a2de946..6637c74a8 100644 --- a/data_gen/data_gen_utils.py +++ b/data_gen/data_gen_utils.py @@ -324,7 +324,7 @@ def get_mel2ph(tg_fn, ph, mel, hparams): def build_phone_encoder(phone_list): - return TokenTextEncoder(vocab_list=phone_list, replace_oov=',') + return TokenTextEncoder(vocab_list=phone_list) def is_sil_phoneme(p): diff --git a/src/acoustic_task.py b/src/acoustic_task.py index a331686b6..471c387c3 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -176,13 +176,12 @@ def __init__(self): lbd = 1.0 self.loss_and_lambda[l] = lbd print("| Mel losses:", self.loss_and_lambda) - self.sil_ph = self.phone_encoder.sil_phonemes() self.logged_gt_wav = set() @staticmethod def build_phone_encoder(): phone_list = build_phoneme_list() - return TokenTextEncoder(vocab_list=phone_list, replace_oov=',') + return TokenTextEncoder(vocab_list=phone_list) def build_model(self): mel_bins = hparams['audio_num_mel_bins'] diff --git a/utils/text_encoder.py b/utils/text_encoder.py index 017da40ae..e10710f62 100644 --- a/utils/text_encoder.py +++ b/utils/text_encoder.py @@ -1,28 +1,8 @@ -import re -import six -from six.moves import 
range  # pylint: disable=redefined-builtin
+import numpy as np
 
-PAD = "<pad>"
-EOS = "<EOS>"
-UNK = "<UNK>"
-SEG = "|"
-RESERVED_TOKENS = [PAD, EOS, UNK]
-NUM_RESERVED_TOKENS = len(RESERVED_TOKENS)
-PAD_ID = RESERVED_TOKENS.index(PAD)  # Normally 0
-EOS_ID = RESERVED_TOKENS.index(EOS)  # Normally 1
-UNK_ID = RESERVED_TOKENS.index(UNK)  # Normally 2
-
-if six.PY2:
-    RESERVED_TOKENS_BYTES = RESERVED_TOKENS
-else:
-    RESERVED_TOKENS_BYTES = [bytes(PAD, "ascii"), bytes(EOS, "ascii")]
+from hparams import hparams
 
-# Regular expression for unescaping token strings.
-# '\u' is converted to '_'
-# '\\' is converted to '\'
-# '\213;' is converted to unichr(213)
-_UNESCAPE_REGEX = re.compile(r"\\u|\\\\|\\([0-9]+);")
-_ESCAPE_CHARS = set(u"\\_u;0123456789")
+PAD = "<PAD>"
 
 
 def strip_ids(ids, ids_to_strip):
@@ -33,237 +13,44 @@ def strip_ids(ids, ids_to_strip):
     return ids
 
 
-class TextEncoder(object):
-    """Base class for converting from ints to/from human readable strings."""
-
-    def __init__(self, num_reserved_ids=NUM_RESERVED_TOKENS):
-        self._num_reserved_ids = num_reserved_ids
-
-    @property
-    def num_reserved_ids(self):
-        return self._num_reserved_ids
-
-    def encode(self, s):
-        """Transform a human-readable string into a sequence of int ids.
-
-        The ids should be in the range [num_reserved_ids, vocab_size). Ids [0,
-        num_reserved_ids) are reserved.
-
-        EOS is not appended.
-
-        Args:
-            s: human-readable string to be converted.
-
-        Returns:
-            ids: list of integers
-        """
-        return [int(w) + self._num_reserved_ids for w in s.split()]
-
-    def decode(self, ids, strip_extraneous=False):
-        """Transform a sequence of int ids into a human-readable string.
-
-        EOS is not expected in ids.
-
-        Args:
-            ids: list of integers to be converted.
-            strip_extraneous: bool, whether to strip off extraneous tokens
-                (EOS and PAD).
-
-        Returns:
-            s: human-readable string.
-        """
-        if strip_extraneous:
-            ids = strip_ids(ids, list(range(self._num_reserved_ids or 0)))
-        return " ".join(self.decode_list(ids))
-
-    def decode_list(self, ids):
-        """Transform a sequence of int ids into a their string versions.
-
-        This method supports transforming individual input/output ids to their
-        string versions so that sequence to/from text conversions can be visualized
-        in a human readable format.
-
-        Args:
-            ids: list of integers to be converted.
-
-        Returns:
-            strs: list of human-readable string.
-        """
-        decoded_ids = []
-        for id_ in ids:
-            if 0 <= id_ < self._num_reserved_ids:
-                decoded_ids.append(RESERVED_TOKENS[int(id_)])
-            else:
-                decoded_ids.append(id_ - self._num_reserved_ids)
-        return [str(d) for d in decoded_ids]
-
-    @property
-    def vocab_size(self):
-        raise NotImplementedError()
-
-
-class ByteTextEncoder(TextEncoder):
-    """Encodes each byte to an id. For 8-bit strings only."""
-
-    def encode(self, s):
-        numres = self._num_reserved_ids
-        if six.PY2:
-            if isinstance(s, unicode):
-                s = s.encode("utf-8")
-            return [ord(c) + numres for c in s]
-        # Python3: explicitly convert to UTF-8
-        return [c + numres for c in s.encode("utf-8")]
-
-    def decode(self, ids, strip_extraneous=False):
-        if strip_extraneous:
-            ids = strip_ids(ids, list(range(self._num_reserved_ids or 0)))
-        numres = self._num_reserved_ids
-        decoded_ids = []
-        int2byte = six.int2byte
-        for id_ in ids:
-            if 0 <= id_ < numres:
-                decoded_ids.append(RESERVED_TOKENS_BYTES[int(id_)])
-            else:
-                decoded_ids.append(int2byte(id_ - numres))
-        if six.PY2:
-            return "".join(decoded_ids)
-        # Python3: join byte arrays and then decode string
-        return b"".join(decoded_ids).decode("utf-8", "replace")
-
-    def decode_list(self, ids):
-        numres = self._num_reserved_ids
-        decoded_ids = []
-        int2byte = six.int2byte
-        for id_ in ids:
-            if 0 <= id_ < numres:
-                decoded_ids.append(RESERVED_TOKENS_BYTES[int(id_)])
-            else:
-                decoded_ids.append(int2byte(id_ - numres))
-        # Python3: join byte arrays and then decode string
-        return decoded_ids
-
-    @property
-    def vocab_size(self):
-        return 2**8 + self._num_reserved_ids
-
-
-class ByteTextEncoderWithEos(ByteTextEncoder):
-    """Encodes each byte to an id and appends the EOS token."""
-
-    def encode(self, s):
-        return super(ByteTextEncoderWithEos, self).encode(s) + [EOS_ID]
-
-
-class TokenTextEncoder(TextEncoder):
+class TokenTextEncoder:
     """Encoder based on a user-supplied vocabulary (file or list)."""
 
-    def __init__(self,
-                 reverse=False,
-                 vocab_list=None,
-                 replace_oov=None,
-                 num_reserved_ids=NUM_RESERVED_TOKENS):
+    def __init__(self, vocab_list):
         """Initialize from a file or list, one token per line.
 
         Handling of reserved tokens works as follows:
         - When initializing from a list, we add reserved tokens to the vocab.
-        - When initializing from a file, we do not add reserved tokens to the vocab.
-        - When saving vocab files, we save reserved tokens to the file.
 
         Args:
-            reverse: Boolean indicating if tokens should be reversed during encoding
-                and decoding.
-            vocab_list: If not None, a list of elements of the vocabulary. If this is
-                not None, then vocab_filename should be None.
-            replace_oov: If not None, every out-of-vocabulary token seen when
-                encoding will be replaced by this string (which must be in vocab).
-            num_reserved_ids: Number of IDs to save for reserved tokens like <EOS>.
+            vocab_list: If not None, a list of elements of the vocabulary. 
""" - super(TokenTextEncoder, self).__init__(num_reserved_ids=num_reserved_ids) - self._reverse = reverse - self._replace_oov = replace_oov - assert vocab_list is not None - self._init_vocab_from_list(vocab_list) - self.pad_index = self._token_to_id[PAD] - self.eos_index = self._token_to_id[EOS] - self.unk_index = self._token_to_id[UNK] - self.seg_index = self._token_to_id[SEG] if SEG in self._token_to_id else self.eos_index - - def encode(self, s): - """Converts a space-separated string of tokens to a list of ids.""" - sentence = s - tokens = sentence.strip().split() - ret = [self._token_to_id[tok] for tok in tokens] - return ret[::-1] if self._reverse else ret - - def decode(self, ids, strip_eos=False, strip_padding=False): - if strip_padding and self.pad() in list(ids): - pad_pos = list(ids).index(self.pad()) - ids = ids[:pad_pos] - if strip_eos and self.eos() in list(ids): - eos_pos = list(ids).index(self.eos()) - ids = ids[:eos_pos] - return " ".join(self.decode_list(ids)) - - def decode_list(self, ids): - seq = reversed(ids) if self._reverse else ids - return [self._safe_id_to_token(i) for i in seq] + self.num_reserved_ids = hparams.get('num_pad_tokens', 3) + assert self.num_reserved_ids > 0, 'num_pad_tokens must be positive' + self.vocab_list = sorted(vocab_list) + + def encode(self, sentence): + """Converts a space-separated string of phones to a list of ids.""" + phones = sentence.strip().split() + return [self.vocab_list.index(ph) + self.num_reserved_ids if ph != PAD else 0 for ph in phones] + + def decode(self, ids, strip_padding=False): + if strip_padding: + ids = np.trim_zeros(ids) + ids = list(ids) + return ' '.join([ + self.vocab_list[_id - self.num_reserved_ids] if _id >= self.num_reserved_ids else PAD + for _id in ids + ]) @property def vocab_size(self): - return len(self._id_to_token) + return len(self.vocab_list) + self.num_reserved_ids def __len__(self): return self.vocab_size - def _safe_id_to_token(self, idx): - return self._id_to_token.get(idx, "ID_%d" % idx) - - def _init_vocab_from_list(self, vocab_list): - """Initialize tokens from a list of tokens. - - It is ok if reserved tokens appear in the vocab list. They will be - removed. The set of tokens in vocab_list should be unique. - - Args: - vocab_list: A list of tokens. - """ - def token_gen(): - for token in vocab_list: - if token not in RESERVED_TOKENS: - yield token - - self._init_vocab(token_gen()) - - def _init_vocab(self, token_generator, add_reserved_tokens=True): - """Initialize vocabulary with tokens from token_generator.""" - - self._id_to_token = {} - non_reserved_start_index = 0 - - if add_reserved_tokens: - self._id_to_token.update(enumerate(RESERVED_TOKENS)) - non_reserved_start_index = len(RESERVED_TOKENS) - - self._id_to_token.update( - enumerate(token_generator, start=non_reserved_start_index)) - - # _token_to_id is the reverse of _id_to_token - self._token_to_id = dict((v, k) - for k, v in six.iteritems(self._id_to_token)) - - def pad(self): - return self.pad_index - - def eos(self): - return self.eos_index - - def unk(self): - return self.unk_index - - def seg(self): - return self.seg_index - - def store_to_file(self, filename, encoding:str="UTF-8"): + def store_to_file(self, filename): """Write vocab file to disk. Vocab files have one token per line. The file ends in a newline. Reserved @@ -272,9 +59,6 @@ def store_to_file(self, filename, encoding:str="UTF-8"): Args: filename: Full path of the file to store the vocab to. 
""" - with open(filename, "w", encoding = encoding) as f: - for i in range(len(self._id_to_token)): - f.write(self._id_to_token[i] + "\n") - - def sil_phonemes(self): - return [p for p in self._id_to_token.values() if not p[0].isalpha()] + with open(filename, 'w', encoding='utf8') as f: + [print(PAD, file=f) for _ in range(self.num_reserved_ids)] + [print(tok, file=f) for tok in self.vocab_list] From b0bccb47de57ecf8c551c81e986efece03b0e65c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 21:59:56 +0800 Subject: [PATCH 026/475] Fix ModuleNotFoundError --- utils/text_encoder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/text_encoder.py b/utils/text_encoder.py index e10710f62..b50d2ee9e 100644 --- a/utils/text_encoder.py +++ b/utils/text_encoder.py @@ -1,6 +1,6 @@ import numpy as np -from hparams import hparams +from utils.hparams import hparams PAD = "" From c290ffd63df764aaa89901d9e35d1adf6c83611f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 22:02:39 +0800 Subject: [PATCH 027/475] Remove useless attributes --- src/acoustic_task.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 471c387c3..d8bfcaf20 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -157,9 +157,6 @@ def __init__(self): self.dataset_cls = AcousticDataset self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() self.phone_encoder = self.build_phone_encoder() - self.padding_idx = self.phone_encoder.pad() - self.eos_idx = self.phone_encoder.eos() - self.seg_idx = self.phone_encoder.seg() self.saving_result_pool = None self.saving_results_futures = None self.stats = {} From ec807c305e4004a1be97ef9a90e6f14ac37b2a39 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 22:10:44 +0800 Subject: [PATCH 028/475] Fix AttributeError --- modules/fastspeech/acoustic_encoder.py | 5 +++-- onnx/export/export_acoustic.py | 6 +++--- utils/text_encoder.py | 3 +++ 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index b176a8da5..7f115a4c4 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -6,6 +6,7 @@ from modules.fastspeech.tts_modules import FastSpeech2Encoder, mel2ph_to_dur from utils.hparams import hparams from utils.pitch_utils import f0_to_coarse, denorm_f0 +from utils.text_encoder import PAD as TOKEN_PAD class FastSpeech2AcousticEncoder(FastSpeech2Encoder): @@ -38,7 +39,7 @@ def forward(self, txt_tokens, dur_embed): class FastSpeech2Acoustic(nn.Module): def __init__(self, dictionary): super().__init__() - self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], dictionary.pad()) + self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], TOKEN_PAD) self.dur_embed = Linear(1, hparams['hidden_size']) self.encoder = FastSpeech2AcousticEncoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], @@ -47,7 +48,7 @@ def __init__(self, dictionary): self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': - self.pitch_embed = Embedding(300, hparams['hidden_size'], dictionary.pad()) + self.pitch_embed = Embedding(300, hparams['hidden_size'], TOKEN_PAD) elif self.f0_embed_type == 'continuous': self.pitch_embed = Linear(1, hparams['hidden_size']) else: diff --git a/onnx/export/export_acoustic.py b/onnx/export/export_acoustic.py index 18b69da13..37291baa1 100644 --- 
a/onnx/export/export_acoustic.py +++ b/onnx/export/export_acoustic.py @@ -30,7 +30,7 @@ from utils.hparams import hparams, set_hparams from utils.phoneme_utils import build_phoneme_list from utils.spk_utils import parse_commandline_spk_mix -from utils.text_encoder import TokenTextEncoder +from utils.text_encoder import TokenTextEncoder, PAD as TOKEN_PAD f0_bin = 256 @@ -69,14 +69,14 @@ class FastSpeech2Acoustic(nn.Module): def __init__(self, dictionary): super().__init__() self.lr = LengthRegulator() - self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], dictionary.pad()) + self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], TOKEN_PAD) self.dur_embed = Linear(1, hparams['hidden_size']) self.encoder = FastSpeech2AcousticEncoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': - self.pitch_embed = Embedding(300, hparams['hidden_size'], dictionary.pad()) + self.pitch_embed = Embedding(300, hparams['hidden_size'], TOKEN_PAD) elif self.f0_embed_type == 'continuous': self.pitch_embed = Linear(1, hparams['hidden_size']) else: diff --git a/utils/text_encoder.py b/utils/text_encoder.py index b50d2ee9e..56b0152ed 100644 --- a/utils/text_encoder.py +++ b/utils/text_encoder.py @@ -43,6 +43,9 @@ def decode(self, ids, strip_padding=False): for _id in ids ]) + def pad(self): + pass + @property def vocab_size(self): return len(self.vocab_list) + self.num_reserved_ids From 3ed9f86d7b285de9399ec4f67f1080e2e0ff01ae Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 22:16:17 +0800 Subject: [PATCH 029/475] Fix data type error --- modules/fastspeech/acoustic_encoder.py | 6 +++--- onnx/export/export_acoustic.py | 6 +++--- utils/text_encoder.py | 5 +++-- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 7f115a4c4..5159e0b43 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -6,7 +6,7 @@ from modules.fastspeech.tts_modules import FastSpeech2Encoder, mel2ph_to_dur from utils.hparams import hparams from utils.pitch_utils import f0_to_coarse, denorm_f0 -from utils.text_encoder import PAD as TOKEN_PAD +from utils.text_encoder import PAD_INDEX class FastSpeech2AcousticEncoder(FastSpeech2Encoder): @@ -39,7 +39,7 @@ def forward(self, txt_tokens, dur_embed): class FastSpeech2Acoustic(nn.Module): def __init__(self, dictionary): super().__init__() - self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], TOKEN_PAD) + self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], PAD_INDEX) self.dur_embed = Linear(1, hparams['hidden_size']) self.encoder = FastSpeech2AcousticEncoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], @@ -48,7 +48,7 @@ def __init__(self, dictionary): self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': - self.pitch_embed = Embedding(300, hparams['hidden_size'], TOKEN_PAD) + self.pitch_embed = Embedding(300, hparams['hidden_size'], PAD_INDEX) elif self.f0_embed_type == 'continuous': self.pitch_embed = Linear(1, hparams['hidden_size']) else: diff --git a/onnx/export/export_acoustic.py b/onnx/export/export_acoustic.py index 37291baa1..2fd1f3910 100644 --- a/onnx/export/export_acoustic.py +++ b/onnx/export/export_acoustic.py @@ -30,7 +30,7 
@@ from utils.hparams import hparams, set_hparams from utils.phoneme_utils import build_phoneme_list from utils.spk_utils import parse_commandline_spk_mix -from utils.text_encoder import TokenTextEncoder, PAD as TOKEN_PAD +from utils.text_encoder import TokenTextEncoder, PAD_INDEX f0_bin = 256 @@ -69,14 +69,14 @@ class FastSpeech2Acoustic(nn.Module): def __init__(self, dictionary): super().__init__() self.lr = LengthRegulator() - self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], TOKEN_PAD) + self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], PAD_INDEX) self.dur_embed = Linear(1, hparams['hidden_size']) self.encoder = FastSpeech2AcousticEncoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') if self.f0_embed_type == 'discrete': - self.pitch_embed = Embedding(300, hparams['hidden_size'], TOKEN_PAD) + self.pitch_embed = Embedding(300, hparams['hidden_size'], PAD_INDEX) elif self.f0_embed_type == 'continuous': self.pitch_embed = Linear(1, hparams['hidden_size']) else: diff --git a/utils/text_encoder.py b/utils/text_encoder.py index 56b0152ed..5147ec057 100644 --- a/utils/text_encoder.py +++ b/utils/text_encoder.py @@ -2,7 +2,8 @@ from utils.hparams import hparams -PAD = "" +PAD = '' +PAD_INDEX = 0 def strip_ids(ids, ids_to_strip): @@ -32,7 +33,7 @@ def __init__(self, vocab_list): def encode(self, sentence): """Converts a space-separated string of phones to a list of ids.""" phones = sentence.strip().split() - return [self.vocab_list.index(ph) + self.num_reserved_ids if ph != PAD else 0 for ph in phones] + return [self.vocab_list.index(ph) + self.num_reserved_ids if ph != PAD else PAD_INDEX for ph in phones] def decode(self, ids, strip_padding=False): if strip_padding: From df2ff04bbd447875ee10c228e004959b196a1fc8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 22:25:02 +0800 Subject: [PATCH 030/475] Fix KeyError --- basics/base_binarizer.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index e89822eaf..a60bfb324 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -58,8 +58,7 @@ def __init__(self, data_dir=None, item_attributes=None): self.binarization_args = hparams['binarization_args'] self.augmentation_args = hparams.get('augmentation_args', {}) - self.pre_align_args = hparams['pre_align_args'] - + self.items = {} # every item in self.items has some attributes self.item_attributes = item_attributes From 7c1797f2a50df0eb5fdb987a2f57e904ba0ae550 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 10 Mar 2023 23:28:56 +0800 Subject: [PATCH 031/475] Remove key --- configs/acoustic.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 0ce459f0e..bf22671ac 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -2,9 +2,6 @@ base_config: - configs/base.yaml task_cls: src.acoustic_task.AcousticTask -datasets: [ - 'opencpop', -] num_spk: 1 test_prefixes: [ '2044', From 4073772808ed813a75fdc6a913c4d8b980935617 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 11 Mar 2023 10:26:08 +0800 Subject: [PATCH 032/475] Clean up binarizers --- basics/base_binarizer.py | 300 ++++++---------------------- configs/acoustic.yaml | 2 + data_gen/acoustic.py | 397 +++++++++++++++++++++++++++++-------- data_gen/data_gen_utils.py | 309 +---------------------------- 
preprocessing/opencpop.py | 110 ---------- 5 files changed, 375 insertions(+), 743 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index a60bfb324..8ff7c2209 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -1,28 +1,20 @@ -import copy +import json +import logging import os -import shutil - -os.environ["OMP_NUM_THREADS"] = "1" - -from utils.multiprocess_utils import chunked_multiprocess_run import random -import json -from resemblyzer import VoiceEncoder -from tqdm import tqdm -from data_gen.data_gen_utils import get_pitch_parselmouth, build_phone_encoder +from copy import deepcopy + +from data_gen.data_gen_utils import get_pitch_parselmouth from utils.hparams import set_hparams, hparams from utils.phoneme_utils import build_phoneme_list -import numpy as np -from utils.indexed_datasets import IndexedDatasetBuilder +from utils.text_encoder import TokenTextEncoder class BinarizationError(Exception): pass -BASE_ITEM_ATTRIBUTES = ['txt', 'ph', 'wav_fn', 'tg_fn', 'spk_id'] - class BaseBinarizer: - ''' + """ Base class for data processing. 1. *process* and *process_data_split*: process entire data, generate the train-test split (support parallel processing); @@ -41,10 +33,8 @@ class BaseBinarizer: how to split the dataset; 3. load_ph_set: the phoneme set. - ''' - def __init__(self, data_dir=None, item_attributes=None): - if item_attributes is None: - item_attributes = BASE_ITEM_ATTRIBUTES + """ + def __init__(self, data_dir=None): if data_dir is None: data_dir = hparams['raw_data_dir'] @@ -59,18 +49,16 @@ def __init__(self, data_dir=None, item_attributes=None): self.binarization_args = hparams['binarization_args'] self.augmentation_args = hparams.get('augmentation_args', {}) + self.spk_map = None self.items = {} - # every item in self.items has some attributes - self.item_attributes = item_attributes + self.phone_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) # load each dataset for ds_id, data_dir in enumerate(self.raw_data_dirs): self.load_meta_data(data_dir, ds_id) - if ds_id == 0: - # check program correctness - assert all([attr in self.item_attributes for attr in list(self.items.values())[0].keys()]) self.item_names = sorted(list(self.items.keys())) - + self._train_item_names, self._test_item_names = self.split_train_test_set() + if self.binarization_args['shuffle']: random.seed(hparams['seed']) random.shuffle(self.item_names) @@ -79,46 +67,55 @@ def __init__(self, data_dir=None, item_attributes=None): self.get_pitch_algorithm = get_pitch_parselmouth def load_meta_data(self, raw_data_dir, ds_id): - raise NotImplementedError - - def split_train_test_set(self, item_names): - raise NotImplementedError + raise NotImplementedError() + + def split_train_test_set(self): + item_names = set(deepcopy(self.item_names)) + prefixes = set([str(pr) for pr in hparams['test_prefixes']]) + test_item_names = set() + # Add prefixes that specified speaker index and matches exactly item name to test set + for prefix in deepcopy(prefixes): + if prefix in item_names: + test_item_names.add(prefix) + prefixes.remove(prefix) + # Add prefixes that exactly matches item name without speaker id to test set + for prefix in deepcopy(prefixes): + for name in item_names: + if name.split(':')[-1] == prefix: + test_item_names.add(name) + prefixes.remove(prefix) + # Add names with one of the remaining prefixes to test set + for prefix in deepcopy(prefixes): + for name in item_names: + if name.startswith(prefix): + test_item_names.add(name) + 
prefixes.remove(prefix) + for prefix in prefixes: + for name in item_names: + if name.split(':')[-1].startswith(prefix): + test_item_names.add(name) + test_item_names = sorted(list(test_item_names)) + train_item_names = [x for x in item_names if x not in set(test_item_names)] + logging.info("train {}".format(len(train_item_names))) + logging.info("test {}".format(len(test_item_names))) + return train_item_names, test_item_names @property def train_item_names(self): - raise NotImplementedError + return self._train_item_names @property def valid_item_names(self): - raise NotImplementedError + return self._test_item_names @property def test_item_names(self): - raise NotImplementedError + return self._test_item_names def build_spk_map(self): spk_map = {x: i for i, x in enumerate(hparams['speakers'])} assert len(spk_map) <= hparams['num_spk'], 'Actual number of speakers should be smaller than num_spk!' - return spk_map - - def item_name2spk_id(self, item_name): - return self.spk_map[self.items[item_name]['spk_id']] - - def _phone_encoder(self): - ph_set = [] - # Just for ensuring the transcriptions match the dictionary. - # May need refactoring in the future. - dict_fn = os.path.join(hparams['binary_data_dir'], 'dictionary.txt') - if hparams['reset_phone_dict'] or not os.path.exists(dict_fn): - self.load_ph_set(ph_set) # For singing, do checking and return the correct results. - ph_set = sorted(set(ph_set)) - shutil.copy(hparams['g2p_dictionary'], dict_fn) - else: - ph_set = build_phoneme_list() - return build_phone_encoder(ph_set) - - def load_ph_set(self, ph_set): - raise NotImplementedError + self.spk_map = spk_map def meta_data_iterator(self, prefix): if prefix == 'valid': @@ -133,211 +130,26 @@ def meta_data_iterator(self, prefix): def process(self): os.makedirs(hparams['binary_data_dir'], exist_ok=True) - self.spk_map = self.build_spk_map() + self.build_spk_map() print("| spk_map: ", self.spk_map) spk_map_fn = f"{hparams['binary_data_dir']}/spk_map.json" json.dump(self.spk_map, open(spk_map_fn, 'w', encoding='utf-8')) + self.check_coverage() - self.phone_encoder = self._phone_encoder() - self.process_data_split('valid') - self.process_data_split('test') - self.process_data_split('train', apply_augmentation=len(self.augmentation_args) > 0) + def check_coverage(self): + raise NotImplementedError() def process_data_split(self, prefix, multiprocess=False, apply_augmentation=False): - data_dir = hparams['binary_data_dir'] - args = [] - builder = IndexedDatasetBuilder(f'{data_dir}/{prefix}') - lengths = [] - f0s = [] - total_sec = 0 - total_raw_sec = 0 - - if self.binarization_args['with_spk_embed']: - voice_encoder = VoiceEncoder().cuda() - - for item_name, meta_data in self.meta_data_iterator(prefix): - args.append([item_name, meta_data, self.binarization_args]) - - aug_map = self.arrange_data_augmentation(prefix) if apply_augmentation else {} - - def postprocess(item_): - nonlocal total_sec, total_raw_sec - if item_ is None: - return - item_['spk_embed'] = voice_encoder.embed_utterance(item_['wav']) \ - if self.binarization_args['with_spk_embed'] else None - if not self.binarization_args['with_wav'] and 'wav' in item_: - del item_['wav'] - builder.add_item(item_) - lengths.append(item_['len']) - total_sec += item_['sec'] - total_raw_sec += item_['sec'] - if item_.get('f0') is not None: - f0s.append(item_['f0']) - - for task in aug_map.get(item_['item_name'], []): - aug_item = task['func'](item_, **task['kwargs']) - builder.add_item(aug_item) - lengths.append(aug_item['len']) - total_sec += 
aug_item['sec'] - if aug_item.get('f0') is not None: - f0s.append(aug_item['f0']) - - if multiprocess: - # code for parallel processing - num_workers = int(os.getenv('N_PROC', hparams.get('ds_workers', os.cpu_count() // 3))) - for item in tqdm( - chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))) - ): - postprocess(item) - else: - # code for single cpu processing - for a in tqdm(args): - item = self.process_item(*a) - postprocess(item) - - builder.finalize() - np.save(f'{data_dir}/{prefix}_lengths.npy', lengths) - if len(f0s) > 0: - f0s = np.concatenate(f0s, 0) - f0s = f0s[f0s != 0] - np.save(f'{data_dir}/{prefix}_f0s_mean_std.npy', [np.mean(f0s).item(), np.std(f0s).item()]) - - if apply_augmentation: - print(f'| {prefix} total duration (before augmentation): {total_raw_sec:.2f}s') - print(f'| {prefix} total duration (after augmentation): {total_sec:.2f}s ({total_sec / total_raw_sec:.2f}x)') - else: - print(f'| {prefix} total duration: {total_raw_sec:.2f}s') + raise NotImplementedError() def arrange_data_augmentation(self, prefix): """ Code for all types of data augmentation should be added here. """ - aug_map = {} - aug_list = [] - all_item_names = [item_name for item_name, _ in self.meta_data_iterator(prefix)] - total_scale = 0 - if self.augmentation_args.get('random_pitch_shifting') is not None: - from augmentation.spec_stretch import SpectrogramStretchAugmentation - aug_args = self.augmentation_args['random_pitch_shifting'] - key_shift_min, key_shift_max = aug_args['range'] - assert hparams.get('use_key_shift_embed', False), \ - 'Random pitch shifting augmentation requires use_key_shift_embed == True.' - assert key_shift_min < 0 < key_shift_max, \ - 'Random pitch shifting augmentation must have a range where min < 0 < max.' - - aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args) - scale = aug_args['scale'] - aug_item_names = random.choices(all_item_names, k=int(scale * len(all_item_names))) - - for aug_item_name in aug_item_names: - rand = random.uniform(-1, 1) - if rand < 0: - key_shift = key_shift_min * abs(rand) - else: - key_shift = key_shift_max * rand - aug_task = { - 'name': aug_item_name, - 'func': aug_ins.process_item, - 'kwargs': {'key_shift': key_shift} - } - if aug_item_name in aug_map: - aug_map[aug_item_name].append(aug_task) - else: - aug_map[aug_item_name] = [aug_task] - aug_list.append(aug_task) - - total_scale += scale - - if self.augmentation_args.get('fixed_pitch_shifting') is not None: - from augmentation.spec_stretch import SpectrogramStretchAugmentation - aug_args = self.augmentation_args['fixed_pitch_shifting'] - targets = aug_args['targets'] - scale = aug_args['scale'] - assert self.augmentation_args.get('random_pitch_shifting') is None, \ - 'Fixed pitch shifting augmentation is not compatible with random pitch shifting.' - assert len(targets) == len(set(targets)), \ - 'Fixed pitch shifting augmentation requires having no duplicate targets.' - assert hparams['use_spk_id'], 'Fixed pitch shifting augmentation requires use_spk_id == True.' - assert hparams['num_spk'] >= (1 + len(targets)) * len(self.spk_map), \ - 'Fixed pitch shifting augmentation requires num_spk >= (1 + len(targets)) * len(speakers).' - assert scale < 1, 'Fixed pitch shifting augmentation requires scale < 1.' 
- - aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args) - for i, target in enumerate(targets): - aug_item_names = random.choices(all_item_names, k=int(scale * len(all_item_names))) - for aug_item_name in aug_item_names: - replace_spk_id = int(aug_item_name.split(':', maxsplit=1)[0]) + (i + 1) * len(self.spk_map) - aug_task = { - 'name': aug_item_name, - 'func': aug_ins.process_item, - 'kwargs': {'key_shift': target, 'replace_spk_id': replace_spk_id} - } - if aug_item_name in aug_map: - aug_map[aug_item_name].append(aug_task) - else: - aug_map[aug_item_name] = [aug_task] - aug_list.append(aug_task) - - total_scale += scale * len(targets) - - if self.augmentation_args.get('random_time_stretching') is not None: - from augmentation.spec_stretch import SpectrogramStretchAugmentation - aug_args = self.augmentation_args['random_time_stretching'] - speed_min, speed_max = aug_args['range'] - domain = aug_args['domain'] - assert hparams.get('use_speed_embed', False), \ - 'Random time stretching augmentation requires use_speed_embed == True.' - assert 0 < speed_min < 1 < speed_max, \ - 'Random time stretching augmentation must have a range where 0 < min < 1 < max.' - assert domain in ['log', 'linear'], 'domain must be \'log\' or \'linear\'.' - - aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args) - scale = aug_args['scale'] - k_from_raw = int(scale / (1 + total_scale) * len(all_item_names)) - k_from_aug = int(total_scale * scale / (1 + total_scale) * len(all_item_names)) - k_mutate = int(total_scale * scale / (1 + scale) * len(all_item_names)) - aug_types = [0] * k_from_raw + [1] * k_from_aug + [2] * k_mutate - aug_items = random.choices(all_item_names, k=k_from_raw) + random.choices(aug_list, k=k_from_aug + k_mutate) - - for aug_type, aug_item in zip(aug_types, aug_items): - if domain == 'log': - # Uniform distribution in log domain - speed = speed_min * (speed_max / speed_min) ** random.random() - else: - # Uniform distribution in linear domain - rand = random.uniform(-1, 1) - speed = 1 + (speed_max - 1) * rand if rand >= 0 else 1 + (1 - speed_min) * rand - if aug_type == 0: - aug_task = { - 'name': aug_item, - 'func': aug_ins.process_item, - 'kwargs': {'speed': speed} - } - if aug_item in aug_map: - aug_map[aug_item].append(aug_task) - else: - aug_map[aug_item] = [aug_task] - aug_list.append(aug_task) - elif aug_type == 1: - aug_task = copy.deepcopy(aug_item) - aug_item['kwargs']['speed'] = speed - if aug_item['name'] in aug_map: - aug_map[aug_item['name']].append(aug_task) - else: - aug_map[aug_item['name']] = [aug_task] - aug_list.append(aug_task) - elif aug_type == 2: - aug_item['kwargs']['speed'] = speed - - total_scale += scale - - return aug_map + raise NotImplementedError() def process_item(self, item_name, meta_data, binarization_args): - from preprocessing.opencpop import File2Batch - return File2Batch.temporary_dict2processed_input(item_name, meta_data, self.phone_encoder, binarization_args) + raise NotImplementedError() if __name__ == "__main__": diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index bf22671ac..783f34369 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -3,6 +3,8 @@ base_config: task_cls: src.acoustic_task.AcousticTask num_spk: 1 +speakers: + - opencpop test_prefixes: [ '2044', '2086', diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 33b2a078b..402008df2 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -13,92 +13,80 @@ is_slur: keep singing upon note changes """ -import logging 
+import os import os.path +import random +import traceback from copy import deepcopy import matplotlib.pyplot as plt +import numpy as np +from librosa import note_to_midi +from tqdm import tqdm -from basics.base_binarizer import BaseBinarizer, BASE_ITEM_ATTRIBUTES +from basics.base_binarizer import BaseBinarizer, BinarizationError +from data_gen.data_gen_utils import get_pitch_parselmouth +from preprocessing.opencpop import vowels +from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import hparams +from utils.indexed_datasets import IndexedDatasetBuilder +from utils.multiprocess_utils import chunked_multiprocess_run from utils.phoneme_utils import build_phoneme_list -ACOUSTIC_ITEM_ATTRIBUTES = BASE_ITEM_ATTRIBUTES + \ - ['f0_fn', 'pitch_midi', 'midi_dur', 'is_slur', 'ph_durs', 'word_boundary'] +os.environ["OMP_NUM_THREADS"] = "1" -class AcousticBinarizer(BaseBinarizer): - def __init__(self): - super().__init__(item_attributes=ACOUSTIC_ITEM_ATTRIBUTES) - self.item_names = sorted(list(self.items.keys())) - self._train_item_names, self._test_item_names = self.split_train_test_set(self.item_names) +class AcousticBinarizer(BaseBinarizer): def load_meta_data(self, raw_data_dir, ds_id): - from preprocessing.opencpop import File2Batch - self.items.update(File2Batch.file2temporary_dict(raw_data_dir, ds_id)) - @property - def train_item_names(self): - return self._train_item_names - - @property - def valid_item_names(self): - return self._test_item_names - - @property - def test_item_names(self): - return self._test_item_names - - def split_train_test_set(self, item_names): - item_names = set(deepcopy(item_names)) - prefixes = set([str(pr) for pr in hparams['test_prefixes']]) - test_item_names = set() - # Add prefixes that specified speaker index and matches exactly item name to test set - for prefix in deepcopy(prefixes): - if prefix in item_names: - test_item_names.add(prefix) - prefixes.remove(prefix) - # Add prefixes that exactly matches item name without speaker id to test set - for prefix in deepcopy(prefixes): - for name in item_names: - if name.split(':')[-1] == prefix: - test_item_names.add(name) - prefixes.remove(prefix) - # Add names with one of the remaining prefixes to test set - for prefix in deepcopy(prefixes): - for name in item_names: - if name.startswith(prefix): - test_item_names.add(name) - prefixes.remove(prefix) - for prefix in prefixes: - for name in item_names: - if name.split(':')[-1].startswith(prefix): - test_item_names.add(name) - test_item_names = sorted(list(test_item_names)) - train_item_names = [x for x in item_names if x not in set(test_item_names)] - logging.info("train {}".format(len(train_item_names))) - logging.info("test {}".format(len(test_item_names))) - return train_item_names, test_item_names - - def generate_summary(self, phone_set: set): - # Group by phonemes. 
+ utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), encoding='utf-8').readlines() + all_temp_dict = {} + for utterance_label in utterance_labels: + song_info = utterance_label.split('|') + item_name = song_info[0] + temp_dict = { + 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', + 'txt': song_info[1], + 'ph': song_info[2], + 'word_boundary': np.array([1 if x in vowels + ['AP', 'SP'] else 0 for x in song_info[2].split()]), + 'ph_durs': [float(x) for x in song_info[5].split()], + 'pitch_midi': np.array([note_to_midi(x.split("/")[0]) if x != 'rest' else 0 + for x in song_info[3].split()]), + 'midi_dur': np.array([float(x) for x in song_info[4].split()]), + 'is_slur': np.array([int(x) for x in song_info[6].split()]), + 'spk_id': ds_id + } + + assert temp_dict['pitch_midi'].shape == temp_dict['midi_dur'].shape == temp_dict['is_slur'].shape, \ + (temp_dict['pitch_midi'].shape, temp_dict['midi_dur'].shape, temp_dict['is_slur'].shape) + + all_temp_dict[f'{ds_id}:{item_name}'] = temp_dict + self.items.update(all_temp_dict) + + def process(self): + super().process() + self.process_data_split('valid') + self.process_data_split('test') + self.process_data_split('train', apply_augmentation=len(self.augmentation_args) > 0) + + def check_coverage(self): + # Group by phonemes in the dictionary. + ph_required = set(build_phoneme_list()) phoneme_map = {} - for ph in sorted(phone_set): + for ph in ph_required: phoneme_map[ph] = 0 - if hparams['use_midi']: - for item in self.items.values(): - for ph, slur in zip(item['ph'].split(), item['is_slur']): - if ph not in phone_set or slur == 1: - continue - phoneme_map[ph] += 1 - else: - for item in self.items.values(): - for ph in item['ph'].split(): - if ph not in phone_set: - continue - phoneme_map[ph] += 1 + ph_occurred = [] + # Load and count those phones that appear in the actual data + for item in self.items.values(): + ph_occurred += item['ph'].split(' ') + for ph in ph_occurred: + if ph not in ph_required: + continue + phoneme_map[ph] += 1 + ph_occurred = set(ph_occurred) print('===== Phoneme Distribution Summary =====') for i, key in enumerate(sorted(phoneme_map.keys())): - if i == len(phone_set) - 1: + if i == len(ph_required) - 1: end = '\n' elif i % 10 == 9: end = ',\n' @@ -107,12 +95,12 @@ def generate_summary(self, phone_set: set): print(f'\'{key}\': {phoneme_map[key]}', end=end) # Draw graph. 
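# Illustration only, not part of the patch: the transcriptions.txt line format
# parsed by load_meta_data above. Fields are '|'-separated; this sample line is
# made up, following the Opencpop field order assumed by the code:
#
#   2001|感受|g an sh ou|G#4/Ab4 G#4/Ab4 D#4/Eb4 D#4/Eb4|0.25 0.25 0.35 0.35|0.1 0.15 0.2 0.15|0 0 0 0
#
# song_info[0] item name, [1] text, [2] phonemes, [3] note names (note_to_midi,
# 'rest' maps to 0), [4] note durations, [5] phoneme durations, [6] slur flags;
# the assert above requires fields [3], [4] and [6] to have equal lengths.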
- plt.figure(figsize=(int(len(phone_set) * 0.8), 10)) + plt.figure(figsize=(int(len(ph_required) * 0.8), 10)) x = list(phoneme_map.keys()) values = list(phoneme_map.values()) plt.bar(x=x, height=values) plt.tick_params(labelsize=15) - plt.xlim(-1, len(phone_set)) + plt.xlim(-1, len(ph_required)) for a, b in zip(x, values): plt.text(a, b, b, ha='center', va='bottom', fontsize=15) plt.grid() @@ -124,18 +112,259 @@ def generate_summary(self, phone_set: set): bbox_inches='tight', pad_inches=0.25) print(f'| save summary to \'{filename}\'') - - def load_ph_set(self, ph_set): - # load those phones that appear in the actual data - for item in self.items.values(): - ph_set += item['ph'].split(' ') - # check unrecognizable or missing phones - actual_phone_set = set(ph_set) - required_phone_set = set(build_phoneme_list()) - self.generate_summary(required_phone_set) - if actual_phone_set != required_phone_set: - unrecognizable_phones = actual_phone_set.difference(required_phone_set) - missing_phones = required_phone_set.difference(actual_phone_set) + # Check unrecognizable or missing phonemes + if ph_occurred != ph_required: + unrecognizable_phones = ph_occurred.difference(ph_required) + missing_phones = ph_required.difference(ph_occurred) raise AssertionError('transcriptions and dictionary mismatch.\n' f' (+) {sorted(unrecognizable_phones)}\n' f' (-) {sorted(missing_phones)}') + + def process_data_split(self, prefix, multiprocess=False, apply_augmentation=False): + data_dir = hparams['binary_data_dir'] + args = [] + builder = IndexedDatasetBuilder(f'{data_dir}/{prefix}') + lengths = [] + f0s = [] + total_sec = 0 + total_raw_sec = 0 + + if self.binarization_args['with_spk_embed']: + from resemblyzer import VoiceEncoder + voice_encoder = VoiceEncoder().cuda() + + for item_name, meta_data in self.meta_data_iterator(prefix): + args.append([item_name, meta_data, self.binarization_args]) + + aug_map = self.arrange_data_augmentation(prefix) if apply_augmentation else {} + + def postprocess(item_): + nonlocal total_sec, total_raw_sec + if item_ is None: + return + item_['spk_embed'] = voice_encoder.embed_utterance(item_['wav']) \ + if self.binarization_args['with_spk_embed'] else None + if not self.binarization_args['with_wav'] and 'wav' in item_: + del item_['wav'] + builder.add_item(item_) + lengths.append(item_['len']) + total_sec += item_['sec'] + total_raw_sec += item_['sec'] + if item_.get('f0') is not None: + f0s.append(item_['f0']) + + for task in aug_map.get(item_['item_name'], []): + aug_item = task['func'](item_, **task['kwargs']) + builder.add_item(aug_item) + lengths.append(aug_item['len']) + total_sec += aug_item['sec'] + if aug_item.get('f0') is not None: + f0s.append(aug_item['f0']) + + if multiprocess: + # code for parallel processing + num_workers = int(os.getenv('N_PROC', hparams.get('ds_workers', os.cpu_count() // 3))) + for item in tqdm( + chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), + total=len(list(self.meta_data_iterator(prefix))) + ): + postprocess(item) + else: + # code for single cpu processing + for a in tqdm(args): + item = self.process_item(*a) + postprocess(item) + + builder.finalize() + np.save(f'{data_dir}/{prefix}_lengths.npy', lengths) + if len(f0s) > 0: + f0s = np.concatenate(f0s, 0) + f0s = f0s[f0s != 0] + np.save(f'{data_dir}/{prefix}_f0s_mean_std.npy', [np.mean(f0s).item(), np.std(f0s).item()]) + + if apply_augmentation: + print(f'| {prefix} total duration (before augmentation): {total_raw_sec:.2f}s') + print( + f'| {prefix} total duration 
(after augmentation): {total_sec:.2f}s ({total_sec / total_raw_sec:.2f}x)') + else: + print(f'| {prefix} total duration: {total_raw_sec:.2f}s') + + def arrange_data_augmentation(self, prefix): + aug_map = {} + aug_list = [] + all_item_names = [item_name for item_name, _ in self.meta_data_iterator(prefix)] + total_scale = 0 + if self.augmentation_args.get('random_pitch_shifting') is not None: + from augmentation.spec_stretch import SpectrogramStretchAugmentation + aug_args = self.augmentation_args['random_pitch_shifting'] + key_shift_min, key_shift_max = aug_args['range'] + assert hparams.get('use_key_shift_embed', False), \ + 'Random pitch shifting augmentation requires use_key_shift_embed == True.' + assert key_shift_min < 0 < key_shift_max, \ + 'Random pitch shifting augmentation must have a range where min < 0 < max.' + + aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args) + scale = aug_args['scale'] + aug_item_names = random.choices(all_item_names, k=int(scale * len(all_item_names))) + + for aug_item_name in aug_item_names: + rand = random.uniform(-1, 1) + if rand < 0: + key_shift = key_shift_min * abs(rand) + else: + key_shift = key_shift_max * rand + aug_task = { + 'name': aug_item_name, + 'func': aug_ins.process_item, + 'kwargs': {'key_shift': key_shift} + } + if aug_item_name in aug_map: + aug_map[aug_item_name].append(aug_task) + else: + aug_map[aug_item_name] = [aug_task] + aug_list.append(aug_task) + + total_scale += scale + + if self.augmentation_args.get('fixed_pitch_shifting') is not None: + from augmentation.spec_stretch import SpectrogramStretchAugmentation + aug_args = self.augmentation_args['fixed_pitch_shifting'] + targets = aug_args['targets'] + scale = aug_args['scale'] + assert self.augmentation_args.get('random_pitch_shifting') is None, \ + 'Fixed pitch shifting augmentation is not compatible with random pitch shifting.' + assert len(targets) == len(set(targets)), \ + 'Fixed pitch shifting augmentation requires having no duplicate targets.' + assert hparams['use_spk_id'], 'Fixed pitch shifting augmentation requires use_spk_id == True.' + assert hparams['num_spk'] >= (1 + len(targets)) * len(self.spk_map), \ + 'Fixed pitch shifting augmentation requires num_spk >= (1 + len(targets)) * len(speakers).' + assert scale < 1, 'Fixed pitch shifting augmentation requires scale < 1.' + + aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args) + for i, target in enumerate(targets): + aug_item_names = random.choices(all_item_names, k=int(scale * len(all_item_names))) + for aug_item_name in aug_item_names: + replace_spk_id = int(aug_item_name.split(':', maxsplit=1)[0]) + (i + 1) * len(self.spk_map) + aug_task = { + 'name': aug_item_name, + 'func': aug_ins.process_item, + 'kwargs': {'key_shift': target, 'replace_spk_id': replace_spk_id} + } + if aug_item_name in aug_map: + aug_map[aug_item_name].append(aug_task) + else: + aug_map[aug_item_name] = [aug_task] + aug_list.append(aug_task) + + total_scale += scale * len(targets) + + if self.augmentation_args.get('random_time_stretching') is not None: + from augmentation.spec_stretch import SpectrogramStretchAugmentation + aug_args = self.augmentation_args['random_time_stretching'] + speed_min, speed_max = aug_args['range'] + domain = aug_args['domain'] + assert hparams.get('use_speed_embed', False), \ + 'Random time stretching augmentation requires use_speed_embed == True.' + assert 0 < speed_min < 1 < speed_max, \ + 'Random time stretching augmentation must have a range where 0 < min < 1 < max.' 
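+            # Scheduling sketch (illustrative numbers, not from any shipped config): with
+            # total_scale = 0.5 accumulated by pitch shifting and scale = 0.75 here, the
+            # stretched copies are drawn from three sources -- raw items (k_from_raw),
+            # duplicates of existing pitch-shifted tasks (k_from_aug), and existing tasks
+            # stretched in place (k_mutate) -- so the new copies are spread over the raw
+            # and augmented populations in proportion. The 'domain' option selects a draw
+            # that is ratio-symmetric (log) or offset-symmetric around 1.0 (linear); e.g.
+            # with range [0.5, 2.0], speed_min * (speed_max / speed_min) ** r is exactly
+            # 1.0 at r = 0.5.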
+            assert domain in ['log', 'linear'], 'domain must be \'log\' or \'linear\'.'
+
+            aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args)
+            scale = aug_args['scale']
+            k_from_raw = int(scale / (1 + total_scale) * len(all_item_names))
+            k_from_aug = int(total_scale * scale / (1 + total_scale) * len(all_item_names))
+            k_mutate = int(total_scale * scale / (1 + scale) * len(all_item_names))
+            aug_types = [0] * k_from_raw + [1] * k_from_aug + [2] * k_mutate
+            aug_items = random.choices(all_item_names, k=k_from_raw) + random.choices(aug_list, k=k_from_aug + k_mutate)
+
+            for aug_type, aug_item in zip(aug_types, aug_items):
+                if domain == 'log':
+                    # Uniform distribution in log domain
+                    speed = speed_min * (speed_max / speed_min) ** random.random()
+                else:
+                    # Uniform distribution in linear domain
+                    rand = random.uniform(-1, 1)
+                    speed = 1 + (speed_max - 1) * rand if rand >= 0 else 1 + (1 - speed_min) * rand
+                if aug_type == 0:
+                    # Type 0: stretch a copy of a raw item.
+                    aug_task = {
+                        'name': aug_item,
+                        'func': aug_ins.process_item,
+                        'kwargs': {'speed': speed}
+                    }
+                    if aug_item in aug_map:
+                        aug_map[aug_item].append(aug_task)
+                    else:
+                        aug_map[aug_item] = [aug_task]
+                    aug_list.append(aug_task)
+                elif aug_type == 1:
+                    # Type 1: duplicate an existing augmentation task and stretch the copy.
+                    aug_task = deepcopy(aug_item)
+                    aug_task['kwargs']['speed'] = speed
+                    if aug_item['name'] in aug_map:
+                        aug_map[aug_item['name']].append(aug_task)
+                    else:
+                        aug_map[aug_item['name']] = [aug_task]
+                    aug_list.append(aug_task)
+                elif aug_type == 2:
+                    # Type 2: stretch an existing augmentation task in place.
+                    aug_item['kwargs']['speed'] = speed
+
+            total_scale += scale
+
+        return aug_map
+
+    def process_item(self, item_name, meta_data, binarization_args):
+        mel_path = f"{meta_data['wav_fn'][:-4]}_mel.npy"
+        if os.path.exists(mel_path):
+            wav = None
+            mel = np.load(mel_path)
+            print("load mel from npy")
+        else:
+            if hparams['vocoder'] in VOCODERS:
+                wav, mel = VOCODERS[hparams['vocoder']].wav2spec(meta_data['wav_fn'])
+            else:
+                wav, mel = VOCODERS[hparams['vocoder'].split('.')[-1]].wav2spec(meta_data['wav_fn'])
+        processed_input = {
+            'item_name': item_name, 'mel': mel, 'wav': wav,
+            'sec': len(mel) * hparams["hop_size"] / hparams["audio_sample_rate"], 'len': mel.shape[0]
+        }
+        processed_input = {**meta_data, **processed_input}  # merge two dicts
+        try:
+            if binarization_args['with_f0']:
+                # get ground truth f0 by self.get_pitch_algorithm
+                f0_path = f"{meta_data['wav_fn'][:-4]}_f0.npy"
+                if os.path.exists(f0_path):
+                    from utils.pitch_utils import f0_to_coarse
+                    processed_input['f0'] = np.load(f0_path)
+                    processed_input['pitch'] = f0_to_coarse(np.load(f0_path))
+                else:
+                    gt_f0, gt_pitch_coarse = get_pitch_parselmouth(wav, mel, hparams)
+                    if sum(gt_f0) == 0:
+                        raise BinarizationError("Empty **gt** f0")
+                    processed_input['f0'] = gt_f0
+                    processed_input['pitch'] = gt_pitch_coarse
+            if binarization_args['with_txt']:
+                try:
+                    processed_input['phone'] = self.phone_encoder.encode(meta_data['ph'])
+                except Exception:
+                    traceback.print_exc()
+                    raise BinarizationError('Empty phoneme')
+            if binarization_args['with_align']:
+                size = hparams['hop_size']
+                rate = hparams['audio_sample_rate']
+                mel2ph = np.zeros([mel.shape[0]], int)
+                startTime = 0
+                ph_durs = meta_data['ph_durs']
+                processed_input['ph_durs'] = np.asarray(ph_durs, dtype=np.float32)
+                for i_ph in range(len(ph_durs)):
+                    start_frame = int(startTime * rate / size + 0.5)
+                    end_frame = int((startTime + ph_durs[i_ph]) * rate / size + 0.5)
+                    mel2ph[start_frame:end_frame] = i_ph + 1
+                    startTime = startTime + ph_durs[i_ph]
+                processed_input['mel2ph'] = mel2ph
+            if hparams.get('use_key_shift_embed', False):
+                processed_input['key_shift'] = 
0. + if hparams.get('use_speed_embed', False): + processed_input['speed'] = 1. + except BinarizationError as e: + print(f"| Skip item ({e}). item_name: {item_name}, wav_fn: {meta_data['wav_fn']}") + return None + return processed_input diff --git a/data_gen/data_gen_utils.py b/data_gen/data_gen_utils.py index 6637c74a8..6cff925e2 100644 --- a/data_gen/data_gen_utils.py +++ b/data_gen/data_gen_utils.py @@ -3,148 +3,8 @@ warnings.filterwarnings("ignore") import parselmouth -import os -import torch -from skimage.transform import resize -from utils.text_encoder import TokenTextEncoder from utils.pitch_utils import f0_to_coarse -import struct -import webrtcvad -from scipy.ndimage.morphology import binary_dilation -import librosa import numpy as np -from utils import audio -import pyloudnorm as pyln -import re -import json -from collections import OrderedDict - -PUNCS = '!,.?;:' - -int16_max = (2 ** 15) - 1 - - -def trim_long_silences(path, sr=None, return_raw_wav=False, norm=True, vad_max_silence_length=12): - """ - Ensures that segments without voice in the waveform remain no longer than a - threshold determined by the VAD parameters in params.py. - :param wav: the raw waveform as a numpy array of floats - :param vad_max_silence_length: Maximum number of consecutive silent frames a segment can have. - :return: the same waveform with silences trimmed away (length <= original wav length) - """ - - ## Voice Activation Detection - # Window size of the VAD. Must be either 10, 20 or 30 milliseconds. - # This sets the granularity of the VAD. Should not need to be changed. - sampling_rate = 16000 - wav_raw, sr = librosa.core.load(path, sr=sr) - - if norm: - meter = pyln.Meter(sr) # create BS.1770 meter - loudness = meter.integrated_loudness(wav_raw) - wav_raw = pyln.normalize.loudness(wav_raw, loudness, -20.0) - if np.abs(wav_raw).max() > 1.0: - wav_raw = wav_raw / np.abs(wav_raw).max() - - wav = librosa.resample(wav_raw, sr, sampling_rate, res_type='kaiser_best') - - vad_window_length = 30 # In milliseconds - # Number of frames to average together when performing the moving average smoothing. - # The larger this value, the larger the VAD variations must be to not get smoothed out. 
- vad_moving_average_width = 8 - - # Compute the voice detection window size - samples_per_window = (vad_window_length * sampling_rate) // 1000 - - # Trim the end of the audio to have a multiple of the window size - wav = wav[:len(wav) - (len(wav) % samples_per_window)] - - # Convert the float waveform to 16-bit mono PCM - pcm_wave = struct.pack("%dh" % len(wav), *(np.round(wav * int16_max)).astype(np.int16)) - - # Perform voice activation detection - voice_flags = [] - vad = webrtcvad.Vad(mode=3) - for window_start in range(0, len(wav), samples_per_window): - window_end = window_start + samples_per_window - voice_flags.append(vad.is_speech(pcm_wave[window_start * 2:window_end * 2], - sample_rate=sampling_rate)) - voice_flags = np.array(voice_flags) - - # Smooth the voice detection with a moving average - def moving_average(array, width): - array_padded = np.concatenate((np.zeros((width - 1) // 2), array, np.zeros(width // 2))) - ret = np.cumsum(array_padded, dtype=float) - ret[width:] = ret[width:] - ret[:-width] - return ret[width - 1:] / width - - audio_mask = moving_average(voice_flags, vad_moving_average_width) - audio_mask = np.round(audio_mask).astype(np.bool) - - # Dilate the voiced regions - audio_mask = binary_dilation(audio_mask, np.ones(vad_max_silence_length + 1)) - audio_mask = np.repeat(audio_mask, samples_per_window) - audio_mask = resize(audio_mask, (len(wav_raw),)) > 0 - if return_raw_wav: - return wav_raw, audio_mask, sr - return wav_raw[audio_mask], audio_mask, sr - - -def process_utterance(wav_path, - fft_size=1024, - hop_size=256, - win_length=1024, - window="hann", - num_mels=80, - fmin=80, - fmax=7600, - eps=1e-6, - sample_rate=22050, - loud_norm=False, - min_level_db=-100, - return_linear=False, - trim_long_sil=False, vocoder='pwg'): - if isinstance(wav_path, str): - if trim_long_sil: - wav, _, _ = trim_long_silences(wav_path, sample_rate) - else: - wav, _ = librosa.core.load(wav_path, sr=sample_rate) - else: - wav = wav_path - - if loud_norm: - meter = pyln.Meter(sample_rate) # create BS.1770 meter - loudness = meter.integrated_loudness(wav) - wav = pyln.normalize.loudness(wav, loudness, -22.0) - if np.abs(wav).max() > 1: - wav = wav / np.abs(wav).max() - - # get amplitude spectrogram - x_stft = librosa.stft(wav, n_fft=fft_size, hop_length=hop_size, - win_length=win_length, window=window, pad_mode="constant") - spc = np.abs(x_stft) # (n_bins, T) - - # get mel basis - fmin = 0 if fmin == -1 else fmin - fmax = sample_rate / 2 if fmax == -1 else fmax - mel_basis = librosa.filters.mel(sample_rate, fft_size, num_mels, fmin, fmax) - mel = mel_basis @ spc - - if vocoder == 'pwg': - mel = np.log10(np.maximum(eps, mel)) # (n_mel_bins, T) - else: - assert False, f'"{vocoder}" is not in ["pwg"].' 
- - l_pad, r_pad = audio.librosa_pad_lr(wav, fft_size, hop_size, 1) - wav = np.pad(wav, (l_pad, r_pad), mode='constant', constant_values=0.0) - wav = wav[:mel.shape[1] * hop_size] - - if not return_linear: - return wav, mel - else: - spc = audio.amp_to_db(spc) - spc = audio.normalize(spc, {'min_level_db': min_level_db}) - return wav, mel, spc def get_pitch_parselmouth(wav_data, mel, hparams, speed=1): @@ -156,176 +16,15 @@ def get_pitch_parselmouth(wav_data, mel, hparams, speed=1): :return: """ hop_size = int(np.round(hparams['hop_size'] * speed)) - + time_step = hop_size / hparams['audio_sample_rate'] * 1000 f0_min = 65 f0_max = 800 - + f0 = parselmouth.Sound(wav_data, hparams['audio_sample_rate']).to_pitch_ac( time_step=time_step / 1000, voicing_threshold=0.6, pitch_floor=f0_min, pitch_ceiling=f0_max).selected_array['frequency'] - pad_size=(int(len(wav_data) // hop_size) - len(f0) + 1) // 2 - f0 = np.pad(f0,[[pad_size,len(mel) - len(f0) - pad_size]], mode='constant') + pad_size = (int(len(wav_data) // hop_size) - len(f0) + 1) // 2 + f0 = np.pad(f0, [[pad_size, len(mel) - len(f0) - pad_size]], mode='constant') pitch_coarse = f0_to_coarse(f0) return f0, pitch_coarse - - -def remove_empty_lines(text): - """remove empty lines""" - assert (len(text) > 0) - assert (isinstance(text, list)) - text = [t.strip() for t in text] - if "" in text: - text.remove("") - return text - - -class TextGrid(object): - def __init__(self, text): - text = remove_empty_lines(text) - self.text = text - self.line_count = 0 - self._get_type() - self._get_time_intval() - self._get_size() - self.tier_list = [] - self._get_item_list() - - def _extract_pattern(self, pattern, inc): - """ - Parameters - ---------- - pattern : regex to extract pattern - inc : increment of line count after extraction - Returns - ------- - group : extracted info - """ - try: - group = re.match(pattern, self.text[self.line_count]).group(1) - self.line_count += inc - except AttributeError: - raise ValueError("File format error at line %d:%s" % (self.line_count, self.text[self.line_count])) - return group - - def _get_type(self): - self.file_type = self._extract_pattern(r"File type = \"(.*)\"", 2) - - def _get_time_intval(self): - self.xmin = self._extract_pattern(r"xmin = (.*)", 1) - self.xmax = self._extract_pattern(r"xmax = (.*)", 2) - - def _get_size(self): - self.size = int(self._extract_pattern(r"size = (.*)", 2)) - - def _get_item_list(self): - """Only supports IntervalTier currently""" - for itemIdx in range(1, self.size + 1): - tier = OrderedDict() - item_list = [] - tier_idx = self._extract_pattern(r"item \[(.*)\]:", 1) - tier_class = self._extract_pattern(r"class = \"(.*)\"", 1) - if tier_class != "IntervalTier": - raise NotImplementedError("Only IntervalTier class is supported currently") - tier_name = self._extract_pattern(r"name = \"(.*)\"", 1) - tier_xmin = self._extract_pattern(r"xmin = (.*)", 1) - tier_xmax = self._extract_pattern(r"xmax = (.*)", 1) - tier_size = self._extract_pattern(r"intervals: size = (.*)", 1) - for i in range(int(tier_size)): - item = OrderedDict() - item["idx"] = self._extract_pattern(r"intervals \[(.*)\]", 1) - item["xmin"] = self._extract_pattern(r"xmin = (.*)", 1) - item["xmax"] = self._extract_pattern(r"xmax = (.*)", 1) - item["text"] = self._extract_pattern(r"text = \"(.*)\"", 1) - item_list.append(item) - tier["idx"] = tier_idx - tier["class"] = tier_class - tier["name"] = tier_name - tier["xmin"] = tier_xmin - tier["xmax"] = tier_xmax - tier["size"] = tier_size - tier["items"] = item_list - 
self.tier_list.append(tier) - - def toJson(self): - _json = OrderedDict() - _json["file_type"] = self.file_type - _json["xmin"] = self.xmin - _json["xmax"] = self.xmax - _json["size"] = self.size - _json["tiers"] = self.tier_list - return json.dumps(_json, ensure_ascii=False, indent=2) - - -def get_mel2ph(tg_fn, ph, mel, hparams): - ph_list = ph.split(" ") - with open(tg_fn, "r", encoding='utf-8') as f: - tg = f.readlines() - tg = remove_empty_lines(tg) - tg = TextGrid(tg) - tg = json.loads(tg.toJson()) - split = np.ones(len(ph_list) + 1, np.float) * -1 - tg_idx = 0 - ph_idx = 0 - tg_align = [x for x in tg['tiers'][-1]['items']] - tg_align_ = [] - for x in tg_align: - x['xmin'] = float(x['xmin']) - x['xmax'] = float(x['xmax']) - if x['text'] in ['sil', 'sp', '', 'SIL', 'PUNC']: - x['text'] = '' - if len(tg_align_) > 0 and tg_align_[-1]['text'] == '': - tg_align_[-1]['xmax'] = x['xmax'] - continue - tg_align_.append(x) - tg_align = tg_align_ - tg_len = len([x for x in tg_align if x['text'] != '']) - ph_len = len([x for x in ph_list if not is_sil_phoneme(x)]) - assert tg_len == ph_len, (tg_len, ph_len, tg_align, ph_list, tg_fn) - while tg_idx < len(tg_align) or ph_idx < len(ph_list): - if tg_idx == len(tg_align) and is_sil_phoneme(ph_list[ph_idx]): - split[ph_idx] = 1e8 - ph_idx += 1 - continue - x = tg_align[tg_idx] - if x['text'] == '' and ph_idx == len(ph_list): - tg_idx += 1 - continue - assert ph_idx < len(ph_list), (tg_len, ph_len, tg_align, ph_list, tg_fn) - ph = ph_list[ph_idx] - if x['text'] == '' and not is_sil_phoneme(ph): - assert False, (ph_list, tg_align) - if x['text'] != '' and is_sil_phoneme(ph): - ph_idx += 1 - else: - assert (x['text'] == '' and is_sil_phoneme(ph)) \ - or x['text'].lower() == ph.lower() \ - or x['text'].lower() == 'sil', (x['text'], ph) - split[ph_idx] = x['xmin'] - if ph_idx > 0 and split[ph_idx - 1] == -1 and is_sil_phoneme(ph_list[ph_idx - 1]): - split[ph_idx - 1] = split[ph_idx] - ph_idx += 1 - tg_idx += 1 - assert tg_idx == len(tg_align), (tg_idx, [x['text'] for x in tg_align]) - assert ph_idx >= len(ph_list) - 1, (ph_idx, ph_list, len(ph_list), [x['text'] for x in tg_align], tg_fn) - mel2ph = np.zeros([mel.shape[0]], np.int) - split[0] = 0 - split[-1] = 1e8 - for i in range(len(split) - 1): - assert split[i] != -1 and split[i] <= split[i + 1], (split[:-1],) - split = [int(s * hparams['audio_sample_rate'] / hparams['hop_size'] + 0.5) for s in split] - for ph_idx in range(len(ph_list)): - mel2ph[split[ph_idx]:split[ph_idx + 1]] = ph_idx + 1 - mel2ph_torch = torch.from_numpy(mel2ph) - T_t = len(ph_list) - dur = mel2ph_torch.new_zeros([T_t + 1]).scatter_add(0, mel2ph_torch, torch.ones_like(mel2ph_torch)) - dur = dur[1:].numpy() - return mel2ph, dur - - -def build_phone_encoder(phone_list): - return TokenTextEncoder(vocab_list=phone_list) - - -def is_sil_phoneme(p): - return not p[0].isalpha() diff --git a/preprocessing/opencpop.py b/preprocessing/opencpop.py index acb552ef2..5acb21d70 100644 --- a/preprocessing/opencpop.py +++ b/preprocessing/opencpop.py @@ -1,17 +1,10 @@ ''' file -> temporary_dict -> processed_input -> batch ''' -import os -import traceback -import numpy as np import torch -from librosa import note_to_midi import utils -from basics.base_binarizer import BinarizationError -from data_gen.data_gen_utils import get_pitch_parselmouth -from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import hparams from utils.phoneme_utils import get_all_vowels @@ -22,109 +15,6 @@ class File2Batch: """ pipeline: file -> temporary_dict -> 
processed_input -> batch """ - - @staticmethod - def file2temporary_dict(raw_data_dir, ds_id): - """ - read from file, store data in temporary dicts - """ - # meta_midi = json.load(open(os.path.join(raw_data_dir, 'meta.json'))) # [list of dict] - utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), encoding='utf-8').readlines() - - all_temp_dict = {} - for utterance_label in utterance_labels: - song_info = utterance_label.split('|') - item_name = song_info[0] - temp_dict = { - 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', - 'txt': song_info[1], - 'ph': song_info[2], - 'word_boundary': np.array([1 if x in vowels + ['AP', 'SP'] else 0 for x in song_info[2].split()]), - 'ph_durs': [float(x) for x in song_info[5].split()], - 'pitch_midi': np.array([note_to_midi(x.split("/")[0]) if x != 'rest' else 0 - for x in song_info[3].split()]), - 'midi_dur': np.array([float(x) for x in song_info[4].split()]), - 'is_slur': np.array([int(x) for x in song_info[6].split()]), - 'spk_id': ds_id - } - - assert temp_dict['pitch_midi'].shape == temp_dict['midi_dur'].shape == temp_dict['is_slur'].shape, \ - (temp_dict['pitch_midi'].shape, temp_dict['midi_dur'].shape, temp_dict['is_slur'].shape) - - all_temp_dict[f'{ds_id}:{item_name}'] = temp_dict - - return all_temp_dict - - @staticmethod - def temporary_dict2processed_input(item_name, temp_dict, encoder, binarization_args): - """ - process data in temporary_dicts - """ - - def get_pitch(wav, mel): - # get ground truth f0 by self.get_pitch_algorithm - f0_path = f"{temp_dict['wav_fn'][:-4]}_f0.npy" - if os.path.exists(f0_path): - from utils.pitch_utils import f0_to_coarse - processed_input['f0'] = np.load(f0_path) - processed_input['pitch'] = f0_to_coarse(np.load(f0_path)) - else: - gt_f0, gt_pitch_coarse = get_pitch_parselmouth(wav, mel, hparams) - if sum(gt_f0) == 0: - raise BinarizationError("Empty **gt** f0") - processed_input['f0'] = gt_f0 - processed_input['pitch'] = gt_pitch_coarse - - def get_align(meta_data, mel, phone_encoded, hop_size=hparams['hop_size'], - audio_sample_rate=hparams['audio_sample_rate']): - mel2ph = np.zeros([mel.shape[0]], int) - startTime = 0 - ph_durs = meta_data['ph_durs'] - processed_input['ph_durs'] = np.asarray(ph_durs, dtype=np.float32) - - for i_ph in range(len(ph_durs)): - start_frame = int(startTime * audio_sample_rate / hop_size + 0.5) - end_frame = int((startTime + ph_durs[i_ph]) * audio_sample_rate / hop_size + 0.5) - mel2ph[start_frame:end_frame] = i_ph + 1 - startTime = startTime + ph_durs[i_ph] - - processed_input['mel2ph'] = mel2ph - - mel_path = f"{temp_dict['wav_fn'][:-4]}_mel.npy" - if os.path.exists(mel_path): - wav = None - mel = np.load(mel_path) - print("load mel from npy") - else: - if hparams['vocoder'] in VOCODERS: - wav, mel = VOCODERS[hparams['vocoder']].wav2spec(temp_dict['wav_fn']) - else: - wav, mel = VOCODERS[hparams['vocoder'].split('.')[-1]].wav2spec(temp_dict['wav_fn']) - processed_input = { - 'item_name': item_name, 'mel': mel, 'wav': wav, - 'sec': len(mel) * hparams["hop_size"] / hparams["audio_sample_rate"], 'len': mel.shape[0] - } - processed_input = {**temp_dict, **processed_input} # merge two dicts - try: - if binarization_args['with_f0']: - get_pitch(wav, mel) - if binarization_args['with_txt']: - try: - phone_encoded = processed_input['phone'] = encoder.encode(temp_dict['ph']) - except: - traceback.print_exc() - raise BinarizationError(f"Empty phoneme") - if binarization_args['with_align']: - get_align(temp_dict, mel, phone_encoded) - if 
hparams.get('use_key_shift_embed', False): - processed_input['key_shift'] = 0. - if hparams.get('use_speed_embed', False): - processed_input['speed'] = 1. - except BinarizationError as e: - print(f"| Skip item ({e}). item_name: {item_name}, wav_fn: {temp_dict['wav_fn']}") - return None - return processed_input - @staticmethod def processed_input2batch(samples): """ From 366f371117581d969ec7c6e0eb1d97c12e8a9d52 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 11 Mar 2023 10:26:20 +0800 Subject: [PATCH 033/475] Remove cwt --- src/acoustic_task.py | 23 +------ utils/cwt.py | 146 ------------------------------------------- 2 files changed, 1 insertion(+), 168 deletions(-) delete mode 100644 utils/cwt.py diff --git a/src/acoustic_task.py b/src/acoustic_task.py index d8bfcaf20..c9645e087 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -8,14 +8,8 @@ import numpy as np import torch import torch.distributions -import torch.distributions -import torch.distributions -import torch.optim -import torch.optim import torch.optim import torch.utils.data -import torch.utils.data -import torch.utils.data from tqdm import tqdm import utils @@ -25,12 +19,10 @@ from data_gen.data_gen_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import mel2ph_to_dur from utils import audio -from utils.cwt import get_lf0_cwt from utils.hparams import hparams from utils.indexed_datasets import IndexedDataset from utils.phoneme_utils import build_phoneme_list -from utils.pitch_utils import denorm_f0 -from utils.pitch_utils import norm_interp_f0 +from utils.pitch_utils import denorm_f0, norm_interp_f0 from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder @@ -67,9 +59,6 @@ def __init__(self, prefix, shuffle=False): self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] self.sizes = [self.sizes[i] for i in self.avail_idxs] - if hparams['pitch_type'] == 'cwt': - _, hparams['cwt_scales'] = get_lf0_cwt(np.ones(10)) - def __getitem__(self, index): hparams = self.hparams item = self._get_item(index) @@ -102,16 +91,6 @@ def __getitem__(self, index): sample["spk_embed"] = torch.Tensor(item['spk_embed']) if self.hparams['use_spk_id']: sample["spk_id"] = item['spk_id'] - if self.hparams['pitch_type'] == 'cwt': - cwt_spec = torch.Tensor(item['cwt_spec'])[:max_frames] - f0_mean = item.get('f0_mean', item.get('cwt_mean')) - f0_std = item.get('f0_std', item.get('cwt_std')) - sample.update({"cwt_spec": cwt_spec, "f0_mean": f0_mean, "f0_std": f0_std}) - elif self.hparams['pitch_type'] == 'ph': - f0_phlevel_sum = torch.zeros_like(phone).float().scatter_add(0, mel2ph - 1, f0) - f0_phlevel_num = torch.zeros_like(phone).float().scatter_add( - 0, mel2ph - 1, torch.ones_like(f0)).clamp_min(1) - sample["f0_ph"] = f0_phlevel_sum / f0_phlevel_num item = self._get_item(index) sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] diff --git a/utils/cwt.py b/utils/cwt.py deleted file mode 100644 index 1a08461b9..000000000 --- a/utils/cwt.py +++ /dev/null @@ -1,146 +0,0 @@ -import librosa -import numpy as np -from pycwt import wavelet -from scipy.interpolate import interp1d - - -def load_wav(wav_file, sr): - wav, _ = librosa.load(wav_file, sr=sr, mono=True) - return wav - - -def convert_continuos_f0(f0): - '''CONVERT F0 TO CONTINUOUS F0 - Args: - f0 (ndarray): original f0 sequence with the shape (T) - Return: - 
(ndarray): continuous f0 with the shape (T) - ''' - # get uv information as binary - f0 = np.copy(f0) - uv = np.float32(f0 != 0) - - # get start and end of f0 - if (f0 == 0).all(): - print("| all of the f0 values are 0.") - return uv, f0 - start_f0 = f0[f0 != 0][0] - end_f0 = f0[f0 != 0][-1] - - # padding start and end of f0 sequence - start_idx = np.where(f0 == start_f0)[0][0] - end_idx = np.where(f0 == end_f0)[0][-1] - f0[:start_idx] = start_f0 - f0[end_idx:] = end_f0 - - # get non-zero frame index - nz_frames = np.where(f0 != 0)[0] - - # perform linear interpolation - f = interp1d(nz_frames, f0[nz_frames]) - cont_f0 = f(np.arange(0, f0.shape[0])) - - return uv, cont_f0 - - -def get_cont_lf0(f0, frame_period=5.0): - uv, cont_f0_lpf = convert_continuos_f0(f0) - # cont_f0_lpf = low_pass_filter(cont_f0_lpf, int(1.0 / (frame_period * 0.001)), cutoff=20) - cont_lf0_lpf = np.log(cont_f0_lpf) - return uv, cont_lf0_lpf - - -def get_lf0_cwt(lf0): - ''' - input: - signal of shape (N) - output: - Wavelet_lf0 of shape(10, N), scales of shape(10) - ''' - mother = wavelet.MexicanHat() - dt = 0.005 - dj = 1 - s0 = dt * 2 - J = 9 - - Wavelet_lf0, scales, _, _, _, _ = wavelet.cwt(np.squeeze(lf0), dt, dj, s0, J, mother) - # Wavelet.shape => (J + 1, len(lf0)) - Wavelet_lf0 = np.real(Wavelet_lf0).T - return Wavelet_lf0, scales - - -def norm_scale(Wavelet_lf0): - Wavelet_lf0_norm = np.zeros((Wavelet_lf0.shape[0], Wavelet_lf0.shape[1])) - mean = Wavelet_lf0.mean(0)[None, :] - std = Wavelet_lf0.std(0)[None, :] - Wavelet_lf0_norm = (Wavelet_lf0 - mean) / std - return Wavelet_lf0_norm, mean, std - - -def normalize_cwt_lf0(f0, mean, std): - uv, cont_lf0_lpf = get_cont_lf0(f0) - cont_lf0_norm = (cont_lf0_lpf - mean) / std - Wavelet_lf0, scales = get_lf0_cwt(cont_lf0_norm) - Wavelet_lf0_norm, _, _ = norm_scale(Wavelet_lf0) - - return Wavelet_lf0_norm - - -def get_lf0_cwt_norm(f0s, mean, std): - uvs = list() - cont_lf0_lpfs = list() - cont_lf0_lpf_norms = list() - Wavelet_lf0s = list() - Wavelet_lf0s_norm = list() - scaless = list() - - means = list() - stds = list() - for f0 in f0s: - uv, cont_lf0_lpf = get_cont_lf0(f0) - cont_lf0_lpf_norm = (cont_lf0_lpf - mean) / std - - Wavelet_lf0, scales = get_lf0_cwt(cont_lf0_lpf_norm) # [560,10] - Wavelet_lf0_norm, mean_scale, std_scale = norm_scale(Wavelet_lf0) # [560,10],[1,10],[1,10] - - Wavelet_lf0s_norm.append(Wavelet_lf0_norm) - uvs.append(uv) - cont_lf0_lpfs.append(cont_lf0_lpf) - cont_lf0_lpf_norms.append(cont_lf0_lpf_norm) - Wavelet_lf0s.append(Wavelet_lf0) - scaless.append(scales) - means.append(mean_scale) - stds.append(std_scale) - - return Wavelet_lf0s_norm, scaless, means, stds - - -def inverse_cwt_torch(Wavelet_lf0, scales): - import torch - b = ((torch.arange(0, len(scales)).float().to(Wavelet_lf0.device)[None, None, :] + 1 + 2.5) ** (-2.5)) - lf0_rec = Wavelet_lf0 * b - lf0_rec_sum = lf0_rec.sum(-1) - lf0_rec_sum = (lf0_rec_sum - lf0_rec_sum.mean(-1, keepdim=True)) / lf0_rec_sum.std(-1, keepdim=True) - return lf0_rec_sum - - -def inverse_cwt(Wavelet_lf0, scales): - b = ((np.arange(0, len(scales))[None, None, :] + 1 + 2.5) ** (-2.5)) - lf0_rec = Wavelet_lf0 * b - lf0_rec_sum = lf0_rec.sum(-1) - lf0_rec_sum = (lf0_rec_sum - lf0_rec_sum.mean(-1, keepdims=True)) / lf0_rec_sum.std(-1, keepdims=True) - return lf0_rec_sum - - -def cwt2f0(cwt_spec, mean, std, cwt_scales): - assert len(mean.shape) == 1 and len(std.shape) == 1 and len(cwt_spec.shape) == 3 - import torch - if isinstance(cwt_spec, torch.Tensor): - f0 = inverse_cwt_torch(cwt_spec, cwt_scales) - f0 = f0 * 
std[:, None] + mean[:, None] - f0 = f0.exp() # [B, T] - else: - f0 = inverse_cwt(cwt_spec, cwt_scales) - f0 = f0 * std[:, None] + mean[:, None] - f0 = np.exp(f0) # [B, T] - return f0 From 8509c85c097b0a50eb264a81c6172f667f7cd5ee Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 11 Mar 2023 23:53:41 +0800 Subject: [PATCH 034/475] Re-write binarizer and dataset structure --- augmentation/spec_stretch.py | 37 ++-- basics/base_binarizer.py | 4 - configs/acoustic.yaml | 8 +- configs/base.yaml | 8 - data_gen/acoustic.py | 228 ++++++++++----------- data_gen/data_gen_utils.py | 45 +++- inference/vocoder/val_nsf_hifigan.py | 2 +- modules/fastspeech/acoustic_encoder.py | 10 +- preprocessing/opencpop.py | 80 -------- src/acoustic_task.py | 272 +++++++++++-------------- src/diff/diffusion.py | 4 +- src/vocoders/ddsp.py | 2 +- utils/indexed_datasets.py | 31 ++- utils/pitch_utils.py | 53 +---- utils/text_encoder.py | 2 +- 15 files changed, 312 insertions(+), 474 deletions(-) delete mode 100644 preprocessing/opencpop.py diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index fa915dcf5..0a7656a7d 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -4,7 +4,7 @@ import torch from basics.base_augmentation import BaseAugmentation -from data_gen.data_gen_utils import get_pitch_parselmouth +from data_gen.data_gen_utils import get_pitch_parselmouth, get_mel2ph_torch from modules.fastspeech.tts_modules import LengthRegulator from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import hparams @@ -31,37 +31,28 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['wav_fn'], keyshift=key_shift, speed=speed ) - aug_item['mel'] = mel + aug_item['mel'] = torch.from_numpy(mel) if speed != 1. or hparams.get('use_speed_embed', False): - aug_item['len'] = len(mel) + aug_item['length'] = mel.shape[0] aug_item['speed'] = int(np.round(hparams['hop_size'] * speed)) / hparams['hop_size'] # real speed - aug_item['sec'] /= aug_item['speed'] - aug_item['ph_durs'] /= aug_item['speed'] - aug_item['mel2ph'] = self.get_mel2ph(aug_item['ph_durs'], aug_item['len']) - aug_item['f0'], aug_item['pitch'] = get_pitch_parselmouth(wav, mel, hparams, speed=speed) + aug_item['seconds'] /= aug_item['speed'] + aug_item['ph_dur'] /= aug_item['speed'] + aug_item['mel2ph'] = get_mel2ph_torch( + self.lr, aug_item['ph_dur'], aug_item['length'], hparams, device=self.device + ) + f0, f0_coarse, _ = get_pitch_parselmouth( + wav, aug_item['length'], hparams, speed=speed, interp_uv=item['interp_uv'] + ) + aug_item['f0'], aug_item['f0_coarse'] = \ + torch.from_numpy(f0), torch.from_numpy(f0_coarse) if key_shift != 0. 
or hparams.get('use_key_shift_embed', False): aug_item['key_shift'] = key_shift aug_item['f0'] *= 2 ** (key_shift / 12) - aug_item['pitch'] = f0_to_coarse(aug_item['f0']) + aug_item['f0_coarse'] = torch.from_numpy(f0_to_coarse(aug_item['f0'].numpy())) if replace_spk_id is not None: aug_item['spk_id'] = replace_spk_id return aug_item - - @torch.no_grad() - def get_mel2ph(self, durs, length): - ph_acc = np.around( - np.add.accumulate(durs) * hparams['audio_sample_rate'] / hparams['hop_size'] + 0.5 - ).astype('int') - ph_dur = np.diff(ph_acc, prepend=0) - ph_dur = torch.LongTensor(ph_dur)[None].to(self.device) - mel2ph = self.lr(ph_dur).cpu().numpy()[0] - num_frames = len(mel2ph) - if num_frames < length: - mel2ph = np.concatenate((mel2ph, np.full(length - num_frames, mel2ph[-1])), axis=0) - elif num_frames > length: - mel2ph = mel2ph[:length] - return mel2ph diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 8ff7c2209..76b0608d5 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -4,7 +4,6 @@ import random from copy import deepcopy -from data_gen.data_gen_utils import get_pitch_parselmouth from utils.hparams import set_hparams, hparams from utils.phoneme_utils import build_phoneme_list from utils.text_encoder import TokenTextEncoder @@ -62,9 +61,6 @@ def __init__(self, data_dir=None): if self.binarization_args['shuffle']: random.seed(hparams['seed']) random.shuffle(self.item_names) - - # set default get_pitch algorithm - self.get_pitch_algorithm = get_pitch_parselmouth def load_meta_data(self, raw_data_dir, ds_id): raise NotImplementedError() diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 783f34369..0464d330a 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -26,9 +26,10 @@ fmax: 16000 min_level_db: -120 binarization_args: - with_wav: false with_spk_embed: false with_align: true + with_uv: false + interp_uv: true shuffle: true #augmentation_args: # random_pitch_shifting: @@ -46,7 +47,6 @@ raw_data_dir: 'data/opencpop/raw' binary_data_dir: 'data/opencpop/binary' binarizer_cls: data_gen.acoustic.AcousticBinarizer g2p_dictionary: dictionaries/opencpop-extension.txt -pitch_type: frame spec_min: [-5] spec_max: [0] keep_bins: 128 @@ -56,15 +56,15 @@ mel_vmax: 1.5 save_f0: true max_frames: 8000 -use_uv: false use_midi: false use_spk_embed: false use_spk_id: false +use_pitch_embed: true +f0_embed_type: continuous use_key_shift_embed: false use_speed_embed: false use_gt_f0: false # for midi exp use_gt_dur: false # for further midi exp -f0_embed_type: continuous K_step: 1000 timesteps: 1000 diff --git a/configs/base.yaml b/configs/base.yaml index d43daa3cc..ce3340dbb 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -20,13 +20,11 @@ binarizer_cls: basics.base_binarizer.BaseBinarizer binarization_args: shuffle: false with_txt: true - with_wav: false with_align: true with_spk_embed: true with_f0: true endless_ds: true -reset_phone_dict: true max_frames: 1550 max_input_tokens: 1550 @@ -55,9 +53,6 @@ enc_ffn_kernel_size: 9 ffn_act: gelu ffn_padding: 'SAME' use_pitch_embed: true -pitch_type: ph # frame|ph|cwt -use_uv: true -pitch_norm: log use_energy_embed: false use_spk_id: false use_spk_embed: false @@ -86,14 +81,12 @@ log_interval: 100 num_sanity_val_steps: 5 # steps of validation at the beginning val_check_interval: 2000 max_updates: 120000 -test_input_dir: '' max_tokens: 32000 max_sentences: 100000 max_eval_sentences: 1 max_eval_tokens: 60000 train_set_name: 'train' valid_set_name: 'valid' -test_set_name: 'test' vocoder: 
'' vocoder_ckpt: '' profile_infer: false @@ -103,6 +96,5 @@ save_f0: false gen_dir_name: '' num_valid_plots: 5 num_test_samples: 0 -test_ids: [] use_gt_dur: false use_gt_f0: false diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 402008df2..29cd76a54 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -1,32 +1,25 @@ """ item: one piece of data item_name: data id - wavfn: wave file path - txt: lyrics - ph: phoneme - tgfn: text grid file path (unused) + wav_fn: wave file path spk: dataset name - wdb: word boundary - ph_durs: phoneme durations - midi: pitch as midi notes - midi_dur: midi duration - is_slur: keep singing upon note changes + ph_seq: phoneme sequence + ph_dur: phoneme durations """ - +import json import os import os.path import random -import traceback from copy import deepcopy import matplotlib.pyplot as plt import numpy as np -from librosa import note_to_midi +import torch from tqdm import tqdm from basics.base_binarizer import BaseBinarizer, BinarizationError -from data_gen.data_gen_utils import get_pitch_parselmouth -from preprocessing.opencpop import vowels +from data_gen.data_gen_utils import get_pitch_parselmouth, get_mel2ph_torch +from modules.fastspeech.tts_modules import LengthRegulator from src.vocoders.vocoder_utils import VOCODERS from utils.hparams import hparams from utils.indexed_datasets import IndexedDatasetBuilder @@ -34,38 +27,45 @@ from utils.phoneme_utils import build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" +ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'f0_coarse', 'uv', 'key_shift', 'speed'] class AcousticBinarizer(BaseBinarizer): + def __init__(self): + super().__init__() + self.lr = LengthRegulator() + def load_meta_data(self, raw_data_dir, ds_id): utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), encoding='utf-8').readlines() - all_temp_dict = {} + meta_data_dict = {} for utterance_label in utterance_labels: - song_info = utterance_label.split('|') - item_name = song_info[0] - temp_dict = { - 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', - 'txt': song_info[1], - 'ph': song_info[2], - 'word_boundary': np.array([1 if x in vowels + ['AP', 'SP'] else 0 for x in song_info[2].split()]), - 'ph_durs': [float(x) for x in song_info[5].split()], - 'pitch_midi': np.array([note_to_midi(x.split("/")[0]) if x != 'rest' else 0 - for x in song_info[3].split()]), - 'midi_dur': np.array([float(x) for x in song_info[4].split()]), - 'is_slur': np.array([int(x) for x in song_info[6].split()]), - 'spk_id': ds_id - } - - assert temp_dict['pitch_midi'].shape == temp_dict['midi_dur'].shape == temp_dict['is_slur'].shape, \ - (temp_dict['pitch_midi'].shape, temp_dict['midi_dur'].shape, temp_dict['is_slur'].shape) - - all_temp_dict[f'{ds_id}:{item_name}'] = temp_dict - self.items.update(all_temp_dict) + if self.binarization_args.get('label_format', 'grid') == 'json': + label_dict = json.loads(utterance_label) + item_name = label_dict['item_name'] + temp_dict = { + 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', + 'ph_seq': label_dict['ph_seq'].split(), + 'ph_dur': [float(x) for x in label_dict['ph_dur'].split()], + 'spk_id': ds_id + } + else: + song_info = utterance_label.split('|') + item_name = song_info[0] + temp_dict = { + 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', + 'ph_seq': song_info[2].split(), + 'ph_dur': [float(x) for x in song_info[5].split()], + 'spk_id': ds_id + } + assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \ + f'Lengths of ph_seq and ph_dur mismatch in 
\'{item_name}\'.' + meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + self.items.update(meta_data_dict) def process(self): super().process() self.process_data_split('valid') - self.process_data_split('test') + # self.process_data_split('test') self.process_data_split('train', apply_augmentation=len(self.augmentation_args) > 0) def check_coverage(self): @@ -76,8 +76,10 @@ def check_coverage(self): phoneme_map[ph] = 0 ph_occurred = [] # Load and count those phones that appear in the actual data - for item in self.items.values(): - ph_occurred += item['ph'].split(' ') + for item_name in self.items: + ph_occurred += self.items[item_name]['ph_seq'] + if len(ph_occurred) == 0: + raise BinarizationError(f'Empty tokens in {item_name}.') for ph in ph_occurred: if ph not in ph_required: continue @@ -116,50 +118,43 @@ def check_coverage(self): if ph_occurred != ph_required: unrecognizable_phones = ph_occurred.difference(ph_required) missing_phones = ph_required.difference(ph_occurred) - raise AssertionError('transcriptions and dictionary mismatch.\n' + raise BinarizationError('transcriptions and dictionary mismatch.\n' f' (+) {sorted(unrecognizable_phones)}\n' f' (-) {sorted(missing_phones)}') def process_data_split(self, prefix, multiprocess=False, apply_augmentation=False): data_dir = hparams['binary_data_dir'] args = [] - builder = IndexedDatasetBuilder(f'{data_dir}/{prefix}') + builder = IndexedDatasetBuilder(data_dir, name=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) lengths = [] - f0s = [] total_sec = 0 total_raw_sec = 0 - if self.binarization_args['with_spk_embed']: - from resemblyzer import VoiceEncoder - voice_encoder = VoiceEncoder().cuda() + # if self.binarization_args['with_spk_embed']: + # from resemblyzer import VoiceEncoder + # voice_encoder = VoiceEncoder().cuda() for item_name, meta_data in self.meta_data_iterator(prefix): args.append([item_name, meta_data, self.binarization_args]) aug_map = self.arrange_data_augmentation(prefix) if apply_augmentation else {} - def postprocess(item_): + def postprocess(_item): nonlocal total_sec, total_raw_sec - if item_ is None: + if _item is None: return - item_['spk_embed'] = voice_encoder.embed_utterance(item_['wav']) \ - if self.binarization_args['with_spk_embed'] else None - if not self.binarization_args['with_wav'] and 'wav' in item_: - del item_['wav'] - builder.add_item(item_) - lengths.append(item_['len']) - total_sec += item_['sec'] - total_raw_sec += item_['sec'] - if item_.get('f0') is not None: - f0s.append(item_['f0']) - - for task in aug_map.get(item_['item_name'], []): - aug_item = task['func'](item_, **task['kwargs']) + # item_['spk_embed'] = voice_encoder.embed_utterance(item_['wav']) \ + # if self.binarization_args['with_spk_embed'] else None + builder.add_item(_item) + lengths.append(_item['length']) + total_sec += _item['seconds'] + total_raw_sec += _item['seconds'] + + for task in aug_map.get(_item['name'], []): + aug_item = task['func'](_item, **task['kwargs']) builder.add_item(aug_item) - lengths.append(aug_item['len']) - total_sec += aug_item['sec'] - if aug_item.get('f0') is not None: - f0s.append(aug_item['f0']) + lengths.append(aug_item['length']) + total_sec += aug_item['seconds'] if multiprocess: # code for parallel processing @@ -176,11 +171,9 @@ def postprocess(item_): postprocess(item) builder.finalize() - np.save(f'{data_dir}/{prefix}_lengths.npy', lengths) - if len(f0s) > 0: - f0s = np.concatenate(f0s, 0) - f0s = f0s[f0s != 0] - np.save(f'{data_dir}/{prefix}_f0s_mean_std.npy', [np.mean(f0s).item(), 
np.std(f0s).item()]) + with open(os.path.join(data_dir, f'{prefix}.lengths'), 'wb') as f: + # noinspection PyTypeChecker + np.save(f, lengths) if apply_augmentation: print(f'| {prefix} total duration (before augmentation): {total_raw_sec:.2f}s') @@ -189,6 +182,47 @@ def postprocess(item_): else: print(f'| {prefix} total duration: {total_raw_sec:.2f}s') + def process_item(self, item_name, meta_data, binarization_args): + if hparams['vocoder'] in VOCODERS: + wav, mel = VOCODERS[hparams['vocoder']].wav2spec(meta_data['wav_fn']) + else: + wav, mel = VOCODERS[hparams['vocoder'].split('.')[-1]].wav2spec(meta_data['wav_fn']) + length = mel.shape[0] + seconds = length * hparams['hop_size'] / hparams['audio_sample_rate'] + processed_input = { + 'name': item_name, + 'wav_fn': meta_data['wav_fn'], + 'spk_id': meta_data['spk_id'], + 'seconds': seconds, + 'length': length, + 'mel': torch.from_numpy(mel), + 'tokens': torch.LongTensor(self.phone_encoder.encode(meta_data['ph_seq'])), + 'ph_dur': torch.FloatTensor(meta_data['ph_dur']), + 'interp_uv': self.binarization_args['interp_uv'], + } + + # get ground truth f0 + gt_f0, gt_f0_coarse, uv = get_pitch_parselmouth( + wav, length, hparams, interp_uv=self.binarization_args['interp_uv'] + ) + if uv.all(): # All unvoiced + raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') + processed_input['f0'] = torch.from_numpy(gt_f0) + processed_input['f0_coarse'] = torch.from_numpy(gt_f0_coarse) + if self.binarization_args['with_uv']: + processed_input['uv'] = torch.from_numpy(uv) + + # get ground truth dur + processed_input['mel2ph'] = get_mel2ph_torch(self.lr, processed_input['ph_dur'], length, hparams) + + if hparams.get('use_key_shift_embed', False): + processed_input['key_shift'] = 0. + + if hparams.get('use_speed_embed', False): + processed_input['speed'] = 1. 
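+        # A neutral-default sketch: 0 semitones of key shift and a 1.0x speed ratio mean
+        # "no augmentation"; copies produced by arrange_data_augmentation() below get
+        # these fields overwritten by SpectrogramStretchAugmentation.process_item.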
+ + return processed_input + def arrange_data_augmentation(self, prefix): aug_map = {} aug_list = [] @@ -310,61 +344,3 @@ def arrange_data_augmentation(self, prefix): total_scale += scale return aug_map - - def process_item(self, item_name, meta_data, binarization_args): - mel_path = f"{meta_data['wav_fn'][:-4]}_mel.npy" - if os.path.exists(mel_path): - wav = None - mel = np.load(mel_path) - print("load mel from npy") - else: - if hparams['vocoder'] in VOCODERS: - wav, mel = VOCODERS[hparams['vocoder']].wav2spec(meta_data['wav_fn']) - else: - wav, mel = VOCODERS[hparams['vocoder'].split('.')[-1]].wav2spec(meta_data['wav_fn']) - processed_input = { - 'item_name': item_name, 'mel': mel, 'wav': wav, - 'sec': len(mel) * hparams["hop_size"] / hparams["audio_sample_rate"], 'len': mel.shape[0] - } - processed_input = {**meta_data, **processed_input} # merge two dicts - try: - if binarization_args['with_f0']: - # get ground truth f0 by self.get_pitch_algorithm - f0_path = f"{meta_data['wav_fn'][:-4]}_f0.npy" - if os.path.exists(f0_path): - from utils.pitch_utils import f0_to_coarse - processed_input['f0'] = np.load(f0_path) - processed_input['pitch'] = f0_to_coarse(np.load(f0_path)) - else: - gt_f0, gt_pitch_coarse = get_pitch_parselmouth(wav, mel, hparams) - if sum(gt_f0) == 0: - raise BinarizationError("Empty **gt** f0") - processed_input['f0'] = gt_f0 - processed_input['pitch'] = gt_pitch_coarse - if binarization_args['with_txt']: - try: - processed_input['phone'] = self.phone_encoder.encode(meta_data['ph']) - except: - traceback.print_exc() - raise BinarizationError(f"Empty phoneme") - if binarization_args['with_align']: - size = hparams['hop_size'] - rate = hparams['audio_sample_rate'] - mel2ph = np.zeros([mel.shape[0]], int) - startTime = 0 - ph_durs = meta_data['ph_durs'] - processed_input['ph_durs'] = np.asarray(ph_durs, dtype=np.float32) - for i_ph in range(len(ph_durs)): - start_frame = int(startTime * rate / size + 0.5) - end_frame = int((startTime + ph_durs[i_ph]) * rate / size + 0.5) - mel2ph[start_frame:end_frame] = i_ph + 1 - startTime = startTime + ph_durs[i_ph] - processed_input['mel2ph'] = mel2ph - if hparams.get('use_key_shift_embed', False): - processed_input['key_shift'] = 0. - if hparams.get('use_speed_embed', False): - processed_input['speed'] = 1. - except BinarizationError as e: - print(f"| Skip item ({e}). 
item_name: {item_name}, wav_fn: {meta_data['wav_fn']}")
-            return None
-        return processed_input
diff --git a/data_gen/data_gen_utils.py b/data_gen/data_gen_utils.py
index 6cff925e2..9db41b6d5 100644
--- a/data_gen/data_gen_utils.py
+++ b/data_gen/data_gen_utils.py
@@ -1,30 +1,55 @@
 import warnings
 
+import torch
+
 warnings.filterwarnings("ignore")
 
 import parselmouth
-from utils.pitch_utils import f0_to_coarse
+from utils.pitch_utils import f0_to_coarse, interp_f0
 import numpy as np
 
 
-def get_pitch_parselmouth(wav_data, mel, hparams, speed=1):
+def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False):
     """
 
     :param wav_data: [T]
-    :param mel: [T, mel_bins]
+    :param length: Expected number of frames
     :param hparams:
-    :return:
+    :param speed: Change the speed
+    :param interp_uv: Interpolate unvoiced parts
+    :return: f0, f0_coarse, uv
     """
     hop_size = int(np.round(hparams['hop_size'] * speed))
-    time_step = hop_size / hparams['audio_sample_rate'] * 1000
+    time_step = hop_size / hparams['audio_sample_rate']
     f0_min = 65
     f0_max = 800
 
     f0 = parselmouth.Sound(wav_data, hparams['audio_sample_rate']).to_pitch_ac(
-        time_step=time_step / 1000, voicing_threshold=0.6,
+        time_step=time_step, voicing_threshold=0.6,
         pitch_floor=f0_min, pitch_ceiling=f0_max).selected_array['frequency']
-    pad_size = (int(len(wav_data) // hop_size) - len(f0) + 1) // 2
-    f0 = np.pad(f0, [[pad_size, len(mel) - len(f0) - pad_size]], mode='constant')
-    pitch_coarse = f0_to_coarse(f0)
-    return f0, pitch_coarse
+    len_f0 = f0.shape[0]
+    pad_size = (int(len(wav_data) // hop_size) - len_f0 + 1) // 2
+    f0 = np.pad(f0, [[pad_size, length - len_f0 - pad_size]], mode='constant')
+    uv = f0 == 0
+    if interp_uv:
+        f0, uv = interp_f0(f0, uv)
+    f0_coarse = f0_to_coarse(f0)
+    return f0, f0_coarse, uv
+
+
+@torch.no_grad()
+def get_mel2ph_torch(lr, durs, length, hparams, device='cpu'):
+    # Accumulate durations, round once at each phoneme boundary, then diff back
+    # so that rounding errors do not accumulate across phonemes.
+    ph_acc = torch.round(
+        torch.cumsum(
+            durs.to(device), dim=0
+        ) * hparams['audio_sample_rate'] / hparams['hop_size'] + 0.5
+    ).long()
+    ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(device))
+    mel2ph = lr(ph_dur[None])[0]
+    num_frames = mel2ph.shape[0]
+    if num_frames < length:
+        # torch.full() takes a size tuple; keep the padding on the same device as mel2ph.
+        pad = torch.full((length - num_frames,), mel2ph[-1].item(), dtype=mel2ph.dtype, device=device)
+        mel2ph = torch.cat((mel2ph, pad), dim=0)
+    elif num_frames > length:
+        mel2ph = mel2ph[:length]
+    return mel2ph
diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/vocoder/val_nsf_hifigan.py
index 75fc97dd0..ab8ef21be 100644
--- a/inference/vocoder/val_nsf_hifigan.py
+++ b/inference/vocoder/val_nsf_hifigan.py
@@ -61,7 +61,7 @@ def get_pitch(wav_data, mel, hparams, threshold=0.3):
         if not filename.endswith('.wav'):
             continue
         wav, mel = vocoder.wav2spec(os.path.join(in_path, filename))
-        f0, _ = get_pitch_parselmouth(wav, mel, hparams)
+        f0, _, _ = get_pitch_parselmouth(wav, len(mel), hparams)
         wav_out = vocoder.spec2wav(mel, f0=f0)
         save_wav(wav_out, os.path.join(out_path, filename), hparams['audio_sample_rate'])
 
diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py
index 5159e0b43..4b5c48eb1 100644
--- a/modules/fastspeech/acoustic_encoder.py
+++ b/modules/fastspeech/acoustic_encoder.py
@@ -63,7 +63,7 @@ def __init__(self, dictionary):
         if hparams['use_spk_id']:
             self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size'])
 
-    def forward(self, txt_tokens, mel2ph=None, f0=None, uv=None, spk_embed_id=None, infer=False, **kwarg):
+    def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=False, **kwarg):
         B, T = txt_tokens.shape
         dur = 
mel2ph_to_dur(mel2ph, T).float() dur_embed = self.dur_embed(dur[:, :, None]) @@ -77,15 +77,13 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, uv=None, spk_embed_id=None, delta_l = nframes - f0.size(1) if delta_l > 0: f0 = torch.cat((f0,torch.FloatTensor([[x[-1]] * delta_l for x in f0]).to(f0.device)),1) - f0 = f0[:,:nframes] + f0 = f0[:, :nframes] - pitch_padding = (mel2ph == 0) - f0_denorm = denorm_f0(f0, uv, hparams, pitch_padding=pitch_padding) if self.f0_embed_type == 'discrete': - pitch = f0_to_coarse(f0_denorm) + pitch = f0_to_coarse(f0) pitch_embed = self.pitch_embed(pitch) else: - f0_mel = (1 + f0_denorm / 700).log() + f0_mel = (1 + f0 / 700).log() pitch_embed = self.pitch_embed(f0_mel[:, :, None]) if hparams.get('use_key_shift_embed', False): diff --git a/preprocessing/opencpop.py b/preprocessing/opencpop.py deleted file mode 100644 index 5acb21d70..000000000 --- a/preprocessing/opencpop.py +++ /dev/null @@ -1,80 +0,0 @@ -''' - file -> temporary_dict -> processed_input -> batch -''' - -import torch - -import utils -from utils.hparams import hparams -from utils.phoneme_utils import get_all_vowels - -vowels = get_all_vowels() - - -class File2Batch: - """ - pipeline: file -> temporary_dict -> processed_input -> batch - """ - @staticmethod - def processed_input2batch(samples): - """ - Args: - samples: one batch of processed_input - NOTE: - the batch size is controlled by hparams['max_sentences'] - """ - if len(samples) == 0: - return {} - id = torch.LongTensor([s['id'] for s in samples]) - item_names = [s['item_name'] for s in samples] - text = [s['text'] for s in samples] - txt_tokens = utils.collate_1d([s['txt_token'] for s in samples], 0) - f0 = utils.collate_1d([s['f0'] for s in samples], 0.0) - pitch = utils.collate_1d([s['pitch'] for s in samples]) - uv = utils.collate_1d([s['uv'] for s in samples]) - mel2ph = utils.collate_1d([s['mel2ph'] for s in samples], 0.0) \ - if samples[0]['mel2ph'] is not None else None - mels = utils.collate_2d([s['mel'] for s in samples], 0.0) - txt_lengths = torch.LongTensor([s['txt_token'].numel() for s in samples]) - mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples]) - - batch = { - 'id': id, - 'item_name': item_names, - 'nsamples': len(samples), - 'text': text, - 'txt_tokens': txt_tokens, - 'txt_lengths': txt_lengths, - 'mels': mels, - 'mel_lengths': mel_lengths, - 'mel2ph': mel2ph, - 'pitch': pitch, - 'f0': f0, - 'uv': uv, - } - if hparams['use_energy_embed']: - batch['energy'] = utils.collate_1d([s['energy'] for s in samples], 0.0) - if hparams.get('use_key_shift_embed', False): - batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples]) - if hparams.get('use_speed_embed', False): - batch['speed'] = torch.FloatTensor([s['speed'] for s in samples]) - if hparams['use_spk_embed']: - spk_embed = torch.stack([s['spk_embed'] for s in samples]) - batch['spk_embed'] = spk_embed - if hparams['use_spk_id']: - spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) - batch['spk_ids'] = spk_ids - if hparams['pitch_type'] == 'cwt': - cwt_spec = utils.collate_2d([s['cwt_spec'] for s in samples]) - f0_mean = torch.Tensor([s['f0_mean'] for s in samples]) - f0_std = torch.Tensor([s['f0_std'] for s in samples]) - batch.update({'cwt_spec': cwt_spec, 'f0_mean': f0_mean, 'f0_std': f0_std}) - elif hparams['pitch_type'] == 'ph': - batch['f0'] = utils.collate_1d([s['f0_ph'] for s in samples]) - - batch['pitch_midi'] = utils.collate_1d([s['pitch_midi'] for s in samples], 0) - batch['midi_dur'] = utils.collate_1d([s['midi_dur'] 
for s in samples], 0) - batch['is_slur'] = utils.collate_1d([s['is_slur'] for s in samples], 0) - batch['word_boundary'] = utils.collate_1d([s['word_boundary'] for s in samples], 0) - - return batch diff --git a/src/acoustic_task.py b/src/acoustic_task.py index c9645e087..26d42f7b2 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -1,5 +1,3 @@ -import glob -import importlib import os from multiprocessing.pool import Pool @@ -22,7 +20,7 @@ from utils.hparams import hparams from utils.indexed_datasets import IndexedDataset from utils.phoneme_utils import build_phoneme_list -from utils.pitch_utils import denorm_f0, norm_interp_f0 +from utils.pitch_utils import denorm_f0 from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder @@ -37,98 +35,82 @@ def __init__(self, prefix, shuffle=False): super().__init__(shuffle) self.data_dir = hparams['binary_data_dir'] self.prefix = prefix - self.hparams = hparams self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') - self.indexed_ds = None - # self.name2spk_id={} - - # pitch stats - f0_stats_fn = f'{self.data_dir}/train_f0s_mean_std.npy' - if os.path.exists(f0_stats_fn): - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = np.load(f0_stats_fn) - hparams['f0_mean'] = float(hparams['f0_mean']) - hparams['f0_std'] = float(hparams['f0_std']) - else: - hparams['f0_mean'], hparams['f0_std'] = self.f0_mean, self.f0_std = None, None - - if prefix == 'test': - if hparams['test_input_dir'] != '': - self.indexed_ds, self.sizes = self.load_test_inputs(hparams['test_input_dir']) - else: - if hparams['num_test_samples'] > 0: - self.avail_idxs = list(range(hparams['num_test_samples'])) + hparams['test_ids'] - self.sizes = [self.sizes[i] for i in self.avail_idxs] + self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') def __getitem__(self, index): - hparams = self.hparams - item = self._get_item(index) - max_frames = hparams['max_frames'] - spec = torch.Tensor(item['mel'])[:max_frames] - # energy = (spec.exp() ** 2).sum(-1).sqrt() - mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None - f0, uv = norm_interp_f0(item["f0"][:max_frames], hparams) - phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) - pitch = torch.LongTensor(item.get("pitch"))[:max_frames] - sample = { - "id": index, - "item_name": item['item_name'], - "text": item['txt'], - "txt_token": phone, - "mel": spec, - "pitch": pitch, - "f0": f0, - "uv": uv, - "mel2ph": mel2ph, - "mel_nonpadding": spec.abs().sum(-1) > 0, - } - if self.hparams['use_energy_embed']: - sample['energy'] = item['energy'] - if self.hparams.get('use_key_shift_embed', False): - sample['key_shift'] = item['key_shift'] - if self.hparams.get('use_speed_embed', False): - sample['speed'] = item['speed'] - if self.hparams['use_spk_embed']: - sample["spk_embed"] = torch.Tensor(item['spk_embed']) - if self.hparams['use_spk_id']: - sample["spk_id"] = item['spk_id'] - item = self._get_item(index) - sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] - sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] - sample['is_slur'] = torch.LongTensor(item['is_slur'])[:hparams['max_frames']] - sample['word_boundary'] = torch.LongTensor(item['word_boundary'])[:hparams['max_frames']] + sample = item = self.indexed_ds[index] return sample + # max_frames = hparams['max_frames'] + # spec = torch.Tensor(item['mel'])[:max_frames] + # 
# energy = (spec.exp() ** 2).sum(-1).sqrt() + # mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None + # f0, uv = interp_f0(item['f0'][:max_frames]) + # phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) + # pitch = torch.LongTensor(item.get('pitch'))[:max_frames] + # sample = { + # 'id': index, + # 'item_name': item['item_name'], + # 'text': item['txt'], + # 'txt_token': phone, + # 'mel': spec, + # 'pitch': pitch, + # 'f0': f0, + # 'uv': uv, + # 'mel2ph': mel2ph, + # 'mel_nonpadding': spec.abs().sum(-1) > 0, + # } + # if hparams['use_energy_embed']: + # sample['energy'] = item['energy'] + # if hparams.get('use_key_shift_embed', False): + # sample['key_shift'] = item['key_shift'] + # if hparams.get('use_speed_embed', False): + # sample['speed'] = item['speed'] + # if hparams['use_spk_embed']: + # sample['spk_embed'] = torch.Tensor(item['spk_embed']) + # if hparams['use_spk_id']: + # sample['spk_id'] = item['spk_id'] + # sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] + # sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] + # sample['is_slur'] = torch.LongTensor(item['is_slur'])[:hparams['max_frames']] + # sample['word_boundary'] = torch.LongTensor(item['word_boundary'])[:hparams['max_frames']] + # return sample def collater(self, samples): - from preprocessing.opencpop import File2Batch - return File2Batch.processed_input2batch(samples) - - def _get_item(self, index): - if hasattr(self, 'avail_idxs') and self.avail_idxs is not None: - index = self.avail_idxs[index] - if self.indexed_ds is None: - self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') - return self.indexed_ds[index] - - def load_test_inputs(self, test_input_dir, spk_id=0): - inp_wav_paths = glob.glob(f'{test_input_dir}/*.wav') + glob.glob(f'{test_input_dir}/*.mp3') - sizes = [] - items = [] - - binarizer_cls = hparams.get("binarizer_cls", 'basics.base_binarizer.BaseBinarizer') - pkg = ".".join(binarizer_cls.split(".")[:-1]) - cls_name = binarizer_cls.split(".")[-1] - binarizer_cls = getattr(importlib.import_module(pkg), cls_name) - binarization_args = hparams['binarization_args'] - - for wav_fn in inp_wav_paths: - item_name = os.path.basename(wav_fn) - ph = txt = tg_fn = '' - wav_fn = wav_fn - encoder = None - item = binarizer_cls.process_item(item_name, ph, txt, tg_fn, wav_fn, spk_id, encoder, binarization_args) - items.append(item) - sizes.append(item['len']) - return items, sizes + if len(samples) == 0: + return {} + txt_lengths = torch.LongTensor([s['tokens'].numel() for s in samples]) + tokens = utils.collate_1d([s['tokens'] for s in samples], 0) + if hparams['f0_embed_type'] == 'continuous': + f0 = utils.collate_1d([s['f0'] for s in samples], 0.0) + else: + f0 = utils.collate_1d([s['f0_coarse'] for s in samples], 0) + mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples]) + mel2ph = utils.collate_1d([s['mel2ph'] for s in samples], 0) + mels = utils.collate_2d([s['mel'] for s in samples], 0.0) + batch = { + 'nsamples': len(samples), + 'txt_lengths': txt_lengths, + 'tokens': tokens, + 'mel_lengths': mel_lengths, + 'mel2ph': mel2ph, + 'mels': mels, + 'f0': f0, + } + if hparams['use_energy_embed']: + batch['energy'] = utils.collate_1d([s['energy'] for s in samples], 0.0) + if hparams.get('use_key_shift_embed', False): + batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples]) + if hparams.get('use_speed_embed', False): + batch['speed'] = 
torch.FloatTensor([s['speed'] for s in samples]) + if hparams['use_spk_embed']: + spk_embed = torch.stack([s['spk_embed'] for s in samples]) + batch['spk_embed'] = spk_embed + if hparams['use_spk_id']: + spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) + batch['spk_ids'] = spk_ids + return batch + class AcousticTask(BaseTask): def __init__(self): @@ -140,18 +122,18 @@ def __init__(self): self.saving_results_futures = None self.stats = {} self.mse_loss_fn = torch.nn.MSELoss() - mel_losses = hparams['mel_loss'].split("|") + mel_losses = hparams['mel_loss'].split('|') self.loss_and_lambda = {} for i, l in enumerate(mel_losses): if l == '': continue if ':' in l: - l, lbd = l.split(":") + l, lbd = l.split(':') lbd = float(lbd) else: lbd = 1.0 self.loss_and_lambda[l] = lbd - print("| Mel losses:", self.loss_and_lambda) + print('| Mel losses:', self.loss_and_lambda) self.logged_gt_wav = set() @staticmethod @@ -169,10 +151,6 @@ def build_model(self): loss_type=hparams['diff_loss_type'], spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], ) - if hparams.get('fs2_ckpt', '') != '': - utils.load_ckpt(self.model.fs2, hparams['fs2_ckpt'], 'model', strict=True) - for k, v in self.model.fs2.named_parameters(): - v.requires_grad = False if hparams['load_ckpt'] != '': self.load_ckpt(hparams['load_ckpt'], strict=True) utils.print_arch(self.model) @@ -202,9 +180,7 @@ def val_dataloader(self): @data_loader def test_dataloader(self): - test_dataset = self.dataset_cls(hparams['test_set_name'], shuffle=False) - return self.build_dataloader(test_dataset, False, self.max_eval_tokens, - self.max_eval_sentences, batch_by_size=False) + return self.val_dataloader() def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): if optimizer is None: @@ -214,34 +190,33 @@ def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): if self.scheduler is not None: self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) - def run_model(self, model, sample, return_output=False, infer=False): + def run_model(self, sample, return_output=False, infer=False): ''' steps: 1. run the full model, calc the main loss 2. 
calculate loss for dur_predictor, pitch_predictor, energy_predictor ''' - txt_tokens = sample['txt_tokens'] # [B, T_t] + txt_tokens = sample['tokens'] # [B, T_t] target = sample['mels'] # [B, T_s, 80] - mel2ph = sample['mel2ph'] # [B, T_s] + mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] - uv = sample['uv'] energy = sample.get('energy') key_shift = sample.get('key_shift') speed = sample.get('speed') if infer: if hparams['use_spk_id']: - spk_embed = model.fs2.spk_embed(sample['spk_ids'])[:, None, :] + spk_embed = self.model.fs2.spk_embed(sample['spk_ids'])[:, None, :] elif hparams['use_spk_embed']: spk_embed = sample['spk_embed'] else: spk_embed = None - output = model(txt_tokens, mel2ph=mel2ph, spk_mix_embed=spk_embed,ref_mels=target, - f0=f0, uv=uv, energy=energy, key_shift=key_shift, speed=speed, infer=infer) + output = self.model(txt_tokens, mel2ph=mel2ph, spk_mix_embed=spk_embed, ref_mels=target, + f0=f0, energy=energy, key_shift=key_shift, speed=speed, infer=infer) else: spk_embed = sample.get('spk_ids') if hparams['use_spk_id'] else sample.get('spk_embed') - output = model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, ref_mels=target, - f0=f0, uv=uv, energy=energy, key_shift=key_shift, speed=speed, infer=infer) + output = self.model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, ref_mels=target, + f0=f0, energy=energy, key_shift=key_shift, speed=speed, infer=infer) losses = {} if 'diff_loss' in output: @@ -252,7 +227,7 @@ def run_model(self, model, sample, return_output=False, infer=False): return losses, output def _training_step(self, sample, batch_idx, _): - log_outputs = self.run_model(self.model, sample) + log_outputs = self.run_model(sample) total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) log_outputs['batch_size'] = sample['txt_tokens'].size()[0] log_outputs['lr'] = self.scheduler.get_lr()[0] @@ -260,18 +235,16 @@ def _training_step(self, sample, batch_idx, _): def validation_step(self, sample, batch_idx): outputs = {} - txt_tokens = sample['txt_tokens'] # [B, T_t] + txt_tokens = sample['tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] energy = sample.get('energy') key_shift = sample.get('key_shift') speed = sample.get('speed') - # fs2_mel = sample['fs2_mels'] # spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') mel2ph = sample['mel2ph'] f0 = sample['f0'] - outputs['losses'], model_out = self.run_model(self.model, sample, return_output=True, infer=False) + outputs['losses'], model_out = self.run_model(sample, return_output=True, infer=False) outputs['total_loss'] = sum(outputs['losses'].values()) outputs['nsamples'] = sample['nsamples'] @@ -284,16 +257,15 @@ def validation_step(self, sample, batch_idx): else: spk_embed = None model_out = self.model( - txt_tokens, spk_mix_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=None, energy=energy, - key_shift=key_shift, speed=speed, ref_mels=None, pitch_midi=sample['pitch_midi'], - midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur'), infer=True + txt_tokens, spk_mix_embed=spk_embed, mel2ph=mel2ph, f0=f0, energy=energy, + key_shift=key_shift, speed=speed, ref_mels=None, infer=True ) if hparams.get('pe_enable') is not None and hparams['pe_enable']: gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel else: - gt_f0 = denorm_f0(sample['f0'], sample['uv'], hparams) + gt_f0 = denorm_f0(sample['f0'], sample['uv']) 
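Note on the `denorm_f0` call above: pitch normalization in this series is reduced to a plain `log2` / `2 ** x` pair with no `hparams` lookups, matching the `utils/pitch_utils.py` hunk later in this patch. A minimal self-contained sketch of the new interpolate-and-denormalize round trip, assuming unvoiced frames are marked by `f0 == 0` (toy values, not repository code):

    import numpy as np

    def interp_f0_sketch(f0):
        # norm_f0 is now a bare log2: no 'standard'/'log' branch, no hparams
        uv = f0 == 0
        with np.errstate(divide='ignore'):
            f0 = np.log2(f0)
        if uv.any() and not uv.all():
            # fill unvoiced gaps by linear interpolation in log2 space
            f0[uv] = np.interp(np.where(uv)[0], np.where(~uv)[0], f0[~uv])
        return 2 ** f0, uv  # denorm_f0 with uv=None: back to Hz, mask kept

    f0_hz = np.array([0.0, 220.0, 440.0, 0.0, 330.0])
    f0_filled, uv_mask = interp_f0_sketch(f0_hz)  # unvoiced gaps now filled
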
pred_f0 = gt_f0 self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') @@ -313,7 +285,6 @@ def _validation_end(self, outputs): all_losses_meter['total_loss'].update(output['total_loss'], n) return {k: round(v.avg, 4) for k, v in all_losses_meter.items()} - ############ # validation plots ############ @@ -324,10 +295,12 @@ def plot_wav(self, batch_idx, gt_mel, pred_mel, gt_f0=None, pred_f0=None): pred_f0 = pred_f0[0].cpu().numpy() if batch_idx not in self.logged_gt_wav: gt_wav = self.vocoder.spec2wav(gt_mel, f0=gt_f0) - self.logger.experiment.add_audio(f'gt_{batch_idx}', gt_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) + self.logger.experiment.add_audio(f'gt_{batch_idx}', gt_wav, sample_rate=hparams['audio_sample_rate'], + global_step=self.global_step) self.logged_gt_wav.add(batch_idx) pred_wav = self.vocoder.spec2wav(pred_mel, f0=pred_f0) - self.logger.experiment.add_audio(f'pred_{batch_idx}', pred_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) + self.logger.experiment.add_audio(f'pred_{batch_idx}', pred_wav, sample_rate=hparams['audio_sample_rate'], + global_step=self.global_step) def plot_mel(self, batch_idx, spec, spec_out, name=None): name = f'mel_{batch_idx}' if name is None else name @@ -346,32 +319,24 @@ def test_start(self): def test_step(self, sample, batch_idx): spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - txt_tokens = sample['txt_tokens'] - mel2ph, uv, f0 = None, None, None + txt_tokens = sample['tokens'] + mel2ph = sample['mel2ph'] + f0 = sample['f0'] ref_mels = None if hparams['profile_infer']: pass else: - if hparams['use_gt_dur']: - mel2ph = sample['mel2ph'] - if hparams['use_gt_f0']: - f0 = sample['f0'] - uv = sample['uv'] - print('Here using gt f0!!') - if hparams.get('use_midi') is not None and hparams['use_midi']: - outputs = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=uv, ref_mels=ref_mels, infer=True, - pitch_midi=sample['pitch_midi'], midi_dur=sample.get('midi_dur'), is_slur=sample.get('is_slur')) - else: - outputs = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, uv=uv, ref_mels=ref_mels, infer=True) + outputs = self.model( + txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, + ref_mels=ref_mels, infer=True + ) sample['outputs'] = self.model.out2mel(outputs['mel_out']) sample['mel2ph_pred'] = outputs['mel2ph'] if hparams.get('pe_enable') is not None and hparams['pe_enable']: sample['f0'] = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel sample['f0_pred'] = self.pe(sample['outputs'])['f0_denorm_pred'] # pe predict from Pred mel else: - sample['f0'] = denorm_f0(sample['f0'], sample['uv'], hparams) + sample['f0'] = denorm_f0(sample['f0'], sample['uv']) sample['f0_pred'] = outputs.get('f0_denorm') return self.after_infer(sample) @@ -393,28 +358,28 @@ def after_infer(self, predictions): prediction[k] = v.cpu().numpy() item_name = prediction.get('item_name') - text = prediction.get('text').replace(":", "%3A")[:80] + text = prediction.get('text').replace(':', '%3A')[:80] # remove paddings - mel_gt = prediction["mels"] + mel_gt = prediction['mels'] mel_gt_mask = np.abs(mel_gt).sum(-1) > 0 mel_gt = mel_gt[mel_gt_mask] - mel2ph_gt = prediction.get("mel2ph") + mel2ph_gt = prediction.get('mel2ph') mel2ph_gt = mel2ph_gt[mel_gt_mask] if mel2ph_gt is not None else None - 
mel_pred = prediction["outputs"] + mel_pred = prediction['outputs'] mel_pred_mask = np.abs(mel_pred).sum(-1) > 0 mel_pred = mel_pred[mel_pred_mask] mel_gt = np.clip(mel_gt, hparams['mel_vmin'], hparams['mel_vmax']) mel_pred = np.clip(mel_pred, hparams['mel_vmin'], hparams['mel_vmax']) - mel2ph_pred = prediction.get("mel2ph_pred") + mel2ph_pred = prediction.get('mel2ph_pred') if mel2ph_pred is not None: if len(mel2ph_pred) > len(mel_pred_mask): mel2ph_pred = mel2ph_pred[:len(mel_pred_mask)] mel2ph_pred = mel2ph_pred[mel_pred_mask] - f0_gt = prediction.get("f0") - f0_pred = prediction.get("f0_pred") + f0_gt = prediction.get('f0') + f0_pred = prediction.get('f0_pred') if f0_pred is not None: f0_gt = f0_gt[mel_gt_mask] if len(f0_pred) > len(mel_pred_mask): @@ -446,7 +411,7 @@ def after_infer(self, predictions): import matplotlib.pyplot as plt # f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams) f0_pred_ = f0_pred - f0_gt_, _ = get_pitch_parselmouth(wav_gt, mel_gt, hparams) + f0_gt_, _, _ = get_pitch_parselmouth(wav_gt, len(mel_gt), hparams) fig = plt.figure() plt.plot(f0_pred_, label=r'$f0_P$') plt.plot(f0_gt_, label=r'$f0_G$') @@ -456,7 +421,7 @@ def after_infer(self, predictions): plt.close(fig) t.set_description( - f"Pred_shape: {mel_pred.shape}, gt_shape: {mel_gt.shape}") + f'Pred_shape: {mel_pred.shape}, gt_shape: {mel_gt.shape}') else: if 'gen_wav_time' not in self.stats: self.stats['gen_wav_time'] = 0 @@ -466,7 +431,8 @@ def after_infer(self, predictions): return {} @staticmethod - def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, mel2ph=None, gt_f0=None, pred_f0=None): + def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, mel2ph=None, gt_f0=None, + pred_f0=None): item_name = item_name.replace('/', '-') base_fn = f'[{item_name}][{prefix}]' @@ -481,17 +447,11 @@ def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, me spec_vmax = hparams['mel_vmax'] heatmap = plt.pcolor(mel.T, vmin=spec_vmin, vmax=spec_vmax) fig.colorbar(heatmap) - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - gt_f0 = (gt_f0 - 100) / (800 - 100) * 80 * (gt_f0 > 0) - pred_f0 = (pred_f0 - 100) / (800 - 100) * 80 * (pred_f0 > 0) - plt.plot(pred_f0, c='white', linewidth=1, alpha=0.6) - plt.plot(gt_f0, c='red', linewidth=1, alpha=0.6) - else: - f0, _ = get_pitch_parselmouth(wav_out, mel, hparams) - f0 = (f0 - 100) / (800 - 100) * 80 * (f0 > 0) - plt.plot(f0, c='white', linewidth=1, alpha=0.6) + f0, _, _ = get_pitch_parselmouth(wav_out, len(mel), hparams) + f0 = (f0 - 100) / (800 - 100) * 80 * (f0 > 0) + plt.plot(f0, c='white', linewidth=1, alpha=0.6) if mel2ph is not None and str_phs is not None: - decoded_txt = str_phs.split(" ") + decoded_txt = str_phs.split(' ') dur = mel2ph_to_dur(torch.LongTensor(mel2ph)[None, :], len(decoded_txt))[0].numpy() dur = [0] + list(np.cumsum(dur)) for i in range(len(dur) - 1): diff --git a/src/diff/diffusion.py b/src/diff/diffusion.py index ab160ca9f..2e12ecd2d 100644 --- a/src/diff/diffusion.py +++ b/src/diff/diffusion.py @@ -226,11 +226,11 @@ def p_losses(self, x_start, t, cond, noise=None, nonpadding=None): return loss def forward(self, txt_tokens, mel2ph=None, spk_embed=None, - ref_mels=None, f0=None, uv=None, energy=None, infer=False, **kwargs): + ref_mels=None, f0=None, energy=None, infer=False, **kwargs): ''' conditioning diffusion, use fastspeech2 encoder output as the condition ''' - ret = self.fs2(txt_tokens, mel2ph=mel2ph, f0=f0, uv=uv, spk_embed_id=spk_embed, infer=infer, **kwargs) 
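Note on the `src/diff/diffusion.py` change below: the diffusion condition is built by transposing the FastSpeech2 encoder output from `[B, T, H]` to `[B, H, T]` via `ret['decoder_inp'].transpose(1, 2)`, and `uv` is dropped from the call chain since the unvoiced mask can be recovered from `f0` when needed. A shape-only sketch with random tensors and hypothetical sizes:

    import torch

    B, T, H = 2, 100, 256               # hypothetical batch/frames/hidden
    decoder_inp = torch.randn(B, T, H)  # stand-in for ret['decoder_inp']
    cond = decoder_inp.transpose(1, 2)  # [B, H, T], the diffusion condition
    assert cond.shape == (B, H, T)
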
+ ret = self.fs2(txt_tokens, mel2ph=mel2ph, f0=f0, spk_embed_id=spk_embed, infer=infer, **kwargs) cond = ret['decoder_inp'].transpose(1, 2) b, *_, device = *txt_tokens.shape, txt_tokens.device diff --git a/src/vocoders/ddsp.py b/src/vocoders/ddsp.py index e89b187e4..72a5d2da9 100644 --- a/src/vocoders/ddsp.py +++ b/src/vocoders/ddsp.py @@ -1,6 +1,7 @@ import os import librosa import torch +import torch.nn.functional as F import yaml import numpy as np from librosa.filters import mel as librosa_mel_fn @@ -194,5 +195,4 @@ def wav2spec(inp_path, keyshift=0, speed=1, device=None): mel_fmax=mel_fmax).to(device) mel = mel_extractor(x_t, keyshift=keyshift, speed=speed) - return x, mel.squeeze(0).cpu().numpy() diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index e15632be3..c82872a36 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,3 +1,4 @@ +import os.path import pickle from copy import deepcopy @@ -5,17 +6,17 @@ class IndexedDataset: - def __init__(self, path, num_cache=1): + def __init__(self, path, num_cache=0): super().__init__() self.path = path self.data_file = None - self.data_offsets = np.load(f"{path}.idx", allow_pickle=True).item()['offsets'] + self.data_offsets = np.load(f"{path}.idx") self.data_file = open(f"{path}.data", 'rb', buffering=-1) self.cache = [] self.num_cache = num_cache def check_index(self, i): - if i < 0 or i >= len(self.data_offsets) - 1: + if i < 0 or i >= len(self.data_offsets): raise IndexError('index out of range') def __del__(self): @@ -36,22 +37,32 @@ def __getitem__(self, i): return item def __len__(self): - return len(self.data_offsets) - 1 + return len(self.data_offsets) class IndexedDatasetBuilder: - def __init__(self, path): + def __init__(self, path, name, allowed_attr=None): self.path = path - self.out_file = open(f"{path}.data", 'wb') + self.name = name + self.out_file = open(os.path.join(path, f'{name}.data'), 'wb') self.byte_offsets = [0] + if allowed_attr is not None: + self.allowed_attr = set(allowed_attr) def add_item(self, item): + if self.allowed_attr is not None: + item = { + k: item.get(k) + for k in self.allowed_attr + } s = pickle.dumps(item) - bytes = self.out_file.write(s) - self.byte_offsets.append(self.byte_offsets[-1] + bytes) + n_bytes = self.out_file.write(s) + self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) def finalize(self): self.out_file.close() - np.save(open(f"{self.path}.idx", 'wb'), {'offsets': self.byte_offsets}) + with open(os.path.join(self.path, f'{self.name}.idx'), 'wb') as f: + # noinspection PyTypeChecker + np.save(f, self.byte_offsets[:-1]) if __name__ == "__main__": @@ -61,7 +72,7 @@ def finalize(self): size = 100 items = [{"a": np.random.normal(size=[10000, 10]), "b": np.random.normal(size=[10000, 10])} for i in range(size)] - builder = IndexedDatasetBuilder(ds_path) + builder = IndexedDatasetBuilder(ds_path, 'example') for i in tqdm(range(size)): builder.add_item(items[i]) builder.finalize() diff --git a/utils/pitch_utils.py b/utils/pitch_utils.py index f7fd166ab..9e3e804de 100644 --- a/utils/pitch_utils.py +++ b/utils/pitch_utils.py @@ -1,17 +1,6 @@ -######### -# world -########## -import librosa import numpy as np import torch -gamma = 0 -mcepInput = 3 # 0 for dB, 3 for magnitude -alpha = 0.45 -en_floor = 10 ** (-80 / 20) -FFT_SIZE = 2048 - - f0_bin = 256 f0_max = 1100.0 f0_min = 50.0 @@ -31,45 +20,25 @@ def f0_to_coarse(f0): return f0_coarse -def norm_f0(f0, uv, hparams): - is_torch = isinstance(f0, torch.Tensor) - if hparams['pitch_norm'] == 'standard': - f0 
= (f0 - hparams['f0_mean']) / hparams['f0_std'] - if hparams['pitch_norm'] == 'log': - f0 = torch.log2(f0) if is_torch else np.log2(f0) - if uv is not None and hparams['use_uv']: - f0[uv > 0] = 0 +def norm_f0(f0): + f0 = np.log2(f0) return f0 -def norm_interp_f0(f0, hparams): - is_torch = isinstance(f0, torch.Tensor) - if is_torch: - device = f0.device - f0 = f0.data.cpu().numpy() - uv = f0 == 0 - f0 = norm_f0(f0, uv, hparams) +def interp_f0(f0, uv=None): + if uv is None: + uv = f0 == 0 + f0 = norm_f0(f0) if sum(uv) == len(f0): - f0[uv] = 0 + f0[uv] = -np.inf elif sum(uv) > 0: f0[uv] = np.interp(np.where(uv)[0], np.where(~uv)[0], f0[~uv]) - uv = torch.FloatTensor(uv) - f0 = torch.FloatTensor(f0) - if is_torch: - f0 = f0.to(device) - return f0, uv + return denorm_f0(f0, uv=None), uv -def denorm_f0(f0, uv, hparams, pitch_padding=None, min=None, max=None): - if hparams['pitch_norm'] == 'standard': - f0 = f0 * hparams['f0_std'] + hparams['f0_mean'] - if hparams['pitch_norm'] == 'log': - f0 = 2 ** f0 - if min is not None: - f0 = f0.clamp(min=min) - if max is not None: - f0 = f0.clamp(max=max) - if uv is not None and hparams['use_uv']: +def denorm_f0(f0, uv, pitch_padding=None): + f0 = 2 ** f0 + if uv is not None: f0[uv > 0] = 0 if pitch_padding is not None: f0[pitch_padding] = 0 diff --git a/utils/text_encoder.py b/utils/text_encoder.py index 5147ec057..605b7e80e 100644 --- a/utils/text_encoder.py +++ b/utils/text_encoder.py @@ -32,7 +32,7 @@ def __init__(self, vocab_list): def encode(self, sentence): """Converts a space-separated string of phones to a list of ids.""" - phones = sentence.strip().split() + phones = sentence.strip().split() if isinstance(sentence, str) else sentence return [self.vocab_list.index(ph) + self.num_reserved_ids if ph != PAD else PAD_INDEX for ph in phones] def decode(self, ids, strip_padding=False): From e97b51f266ce18d4dd502420cab7410e03d3256c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 11 Mar 2023 23:59:34 +0800 Subject: [PATCH 035/475] Fix FileNotFoundError --- src/acoustic_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 26d42f7b2..ad684df26 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -35,7 +35,7 @@ def __init__(self, prefix, shuffle=False): super().__init__(shuffle) self.data_dir = hparams['binary_data_dir'] self.prefix = prefix - self.sizes = np.load(f'{self.data_dir}/{self.prefix}_lengths.npy') + self.sizes = np.load(f'{self.data_dir}/{self.prefix}.lengths') self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') def __getitem__(self, index): From 82a522a486bd5ed3b056ed28116a40f9a2d104db Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 00:29:52 +0800 Subject: [PATCH 036/475] Fix file loading error --- data_gen/acoustic.py | 2 +- src/acoustic_task.py | 4 ++-- utils/indexed_datasets.py | 16 ++++++++-------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 29cd76a54..065fba1ca 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -125,7 +125,7 @@ def check_coverage(self): def process_data_split(self, prefix, multiprocess=False, apply_augmentation=False): data_dir = hparams['binary_data_dir'] args = [] - builder = IndexedDatasetBuilder(data_dir, name=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) + builder = IndexedDatasetBuilder(data_dir, prefix=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) lengths = [] total_sec = 0 total_raw_sec = 0 diff --git 
a/src/acoustic_task.py b/src/acoustic_task.py index ad684df26..16c556944 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -35,8 +35,8 @@ def __init__(self, prefix, shuffle=False): super().__init__(shuffle) self.data_dir = hparams['binary_data_dir'] self.prefix = prefix - self.sizes = np.load(f'{self.data_dir}/{self.prefix}.lengths') - self.indexed_ds = IndexedDataset(f'{self.data_dir}/{self.prefix}') + self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) + self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) def __getitem__(self, index): sample = item = self.indexed_ds[index] diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index c82872a36..5da8eb49f 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -6,12 +6,12 @@ class IndexedDataset: - def __init__(self, path, num_cache=0): + def __init__(self, path, prefix, num_cache=0): super().__init__() self.path = path self.data_file = None - self.data_offsets = np.load(f"{path}.idx") - self.data_file = open(f"{path}.data", 'rb', buffering=-1) + self.data_offsets = np.load(os.path.join(path, f'{prefix}.idx')) + self.data_file = open(os.path.join(path, f'{prefix}.data'), 'rb', buffering=-1) self.cache = [] self.num_cache = num_cache @@ -40,10 +40,10 @@ def __len__(self): return len(self.data_offsets) class IndexedDatasetBuilder: - def __init__(self, path, name, allowed_attr=None): + def __init__(self, path, prefix, allowed_attr=None): self.path = path - self.name = name - self.out_file = open(os.path.join(path, f'{name}.data'), 'wb') + self.prefix = prefix + self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') self.byte_offsets = [0] if allowed_attr is not None: self.allowed_attr = set(allowed_attr) @@ -60,7 +60,7 @@ def add_item(self, item): def finalize(self): self.out_file.close() - with open(os.path.join(self.path, f'{self.name}.idx'), 'wb') as f: + with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: # noinspection PyTypeChecker np.save(f, self.byte_offsets[:-1]) @@ -76,7 +76,7 @@ def finalize(self): for i in tqdm(range(size)): builder.add_item(items[i]) builder.finalize() - ds = IndexedDataset(ds_path) + ds = IndexedDataset(ds_path, 'example') for i in tqdm(range(10000)): idx = random.randint(0, size - 1) assert (ds[idx]['a'] == items[idx]['a']).all() From aece559e954ecb99aebde8f2d27095975d158b26 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 00:50:45 +0800 Subject: [PATCH 037/475] Fix dtype --- data_gen/acoustic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 065fba1ca..259bee998 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -207,8 +207,8 @@ def process_item(self, item_name, meta_data, binarization_args): ) if uv.all(): # All unvoiced raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') - processed_input['f0'] = torch.from_numpy(gt_f0) - processed_input['f0_coarse'] = torch.from_numpy(gt_f0_coarse) + processed_input['f0'] = torch.from_numpy(gt_f0).float() + processed_input['f0_coarse'] = torch.from_numpy(gt_f0_coarse).long() if self.binarization_args['with_uv']: processed_input['uv'] = torch.from_numpy(uv) From 6169d109ec24309c1dfb0717469362004bf3f6c1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:05:51 +0800 Subject: [PATCH 038/475] Fix variable undefined --- modules/fastspeech/acoustic_encoder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 4b5c48eb1..ecdf468cf 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -125,5 +125,5 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal else: spk_embed = 0 - ret = {'decoder_inp': decoder_inp + pitch_embed + key_shift_embed + speed_embed + spk_embed, 'f0_denorm': f0_denorm} + ret = {'decoder_inp': decoder_inp + pitch_embed + key_shift_embed + speed_embed + spk_embed, 'f0_denorm': f0} return ret From 65a75a4b562b360d0c3cb054c529a390a9709f4d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:09:28 +0800 Subject: [PATCH 039/475] Fix denorm f0 --- src/acoustic_task.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 16c556944..14fe32123 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -261,13 +261,7 @@ def validation_step(self, sample, batch_idx): key_shift=key_shift, speed=speed, ref_mels=None, infer=True ) - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - gt_f0 = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - pred_f0 = self.pe(model_out['mel_out'])['f0_denorm_pred'] # pe predict from Pred mel - else: - gt_f0 = denorm_f0(sample['f0'], sample['uv']) - pred_f0 = gt_f0 - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], gt_f0=gt_f0, pred_f0=pred_f0) + self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], f0=model_out['f0_denorm']) self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') return outputs @@ -288,17 +282,16 @@ def _validation_end(self, outputs): ############ # validation plots ############ - def plot_wav(self, batch_idx, gt_mel, pred_mel, gt_f0=None, pred_f0=None): + def plot_wav(self, batch_idx, gt_mel, pred_mel, f0=None): gt_mel = gt_mel[0].cpu().numpy() pred_mel = pred_mel[0].cpu().numpy() - gt_f0 = gt_f0[0].cpu().numpy() - pred_f0 = pred_f0[0].cpu().numpy() + f0 = f0[0].cpu().numpy() if batch_idx not in self.logged_gt_wav: - gt_wav = self.vocoder.spec2wav(gt_mel, f0=gt_f0) + gt_wav = self.vocoder.spec2wav(gt_mel, f0=f0) self.logger.experiment.add_audio(f'gt_{batch_idx}', gt_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) self.logged_gt_wav.add(batch_idx) - pred_wav = self.vocoder.spec2wav(pred_mel, f0=pred_f0) + pred_wav = self.vocoder.spec2wav(pred_mel, f0=f0) self.logger.experiment.add_audio(f'pred_{batch_idx}', pred_wav, sample_rate=hparams['audio_sample_rate'], global_step=self.global_step) From 643bb9ad662832c18966db1f639ac7ebb8289372 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:11:18 +0800 Subject: [PATCH 040/475] Fix KeyError --- src/acoustic_task.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 14fe32123..2ad41ece5 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -229,7 +229,7 @@ def run_model(self, sample, return_output=False, infer=False): def _training_step(self, sample, batch_idx, _): log_outputs = self.run_model(sample) total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) - log_outputs['batch_size'] = sample['txt_tokens'].size()[0] + log_outputs['batch_size'] = sample['tokens'].size()[0] log_outputs['lr'] = self.scheduler.get_lr()[0] return total_loss, log_outputs @@ -380,8 +380,8 @@ def 
after_infer(self, predictions): f0_pred = f0_pred[mel_pred_mask] str_phs = None - if self.phone_encoder is not None and 'txt_tokens' in prediction: - str_phs = self.phone_encoder.decode(prediction['txt_tokens'], strip_padding=True) + if self.phone_encoder is not None and 'tokens' in prediction: + str_phs = self.phone_encoder.decode(prediction['tokens'], strip_padding=True) gen_dir = os.path.join(hparams['work_dir'], f'generated_{self.trainer.global_step}_{hparams["gen_dir_name"]}') wav_pred = self.vocoder.spec2wav(mel_pred, f0=f0_pred) From a9a7e315b9eb6a01b5a1e4e41777623f29ef00c1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:16:29 +0800 Subject: [PATCH 041/475] Add debug --- utils/indexed_datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 5da8eb49f..2d4bdd4c6 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -68,7 +68,7 @@ def finalize(self): if __name__ == "__main__": import random from tqdm import tqdm - ds_path = '/tmp/indexed_ds_example' + ds_path = './checkpoints/indexed_ds_example' size = 100 items = [{"a": np.random.normal(size=[10000, 10]), "b": np.random.normal(size=[10000, 10])} for i in range(size)] From d8ec1e2450a63ce0c400512eab9105f7d30e5c6b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:18:30 +0800 Subject: [PATCH 042/475] Fix AttributeNotFound --- utils/indexed_datasets.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 2d4bdd4c6..ec48e4c5d 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -47,6 +47,8 @@ def __init__(self, path, prefix, allowed_attr=None): self.byte_offsets = [0] if allowed_attr is not None: self.allowed_attr = set(allowed_attr) + else: + self.allowed_attr = None def add_item(self, item): if self.allowed_attr is not None: From 001de79e231bf17975ec86ab46946d68a161c4d9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:20:25 +0800 Subject: [PATCH 043/475] Fix index length --- utils/indexed_datasets.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index ec48e4c5d..5c8f84581 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -16,7 +16,7 @@ def __init__(self, path, prefix, num_cache=0): self.num_cache = num_cache def check_index(self, i): - if i < 0 or i >= len(self.data_offsets): + if i < 0 or i >= len(self.data_offsets) - 1: raise IndexError('index out of range') def __del__(self): @@ -37,7 +37,7 @@ def __getitem__(self, i): return item def __len__(self): - return len(self.data_offsets) + return len(self.data_offsets) - 1 class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): @@ -64,7 +64,7 @@ def finalize(self): self.out_file.close() with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: # noinspection PyTypeChecker - np.save(f, self.byte_offsets[:-1]) + np.save(f, self.byte_offsets) if __name__ == "__main__": From 9c8d9128d77e895e6af55435d151ff378cb310a8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 01:40:02 +0800 Subject: [PATCH 044/475] Try fix dataloader --- src/acoustic_task.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 2ad41ece5..51832eb44 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -36,9 +36,11 @@ def __init__(self, prefix, shuffle=False): 
self.data_dir = hparams['binary_data_dir'] self.prefix = prefix self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) - self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) + self.indexed_ds = None def __getitem__(self, index): + if self.indexed_ds is None: + self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) sample = item = self.indexed_ds[index] return sample # max_frames = hparams['max_frames'] From c7d04079290d81c96424691f9dea0a64980b2bc9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 02:05:04 +0800 Subject: [PATCH 045/475] Fix broken discrete f0 --- data_gen/acoustic.py | 5 ++--- modules/fastspeech/acoustic_encoder.py | 2 +- src/acoustic_task.py | 5 +---- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 259bee998..eea272804 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -27,7 +27,7 @@ from utils.phoneme_utils import build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" -ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'f0_coarse', 'uv', 'key_shift', 'speed'] +ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'uv', 'key_shift', 'speed'] class AcousticBinarizer(BaseBinarizer): @@ -202,13 +202,12 @@ def process_item(self, item_name, meta_data, binarization_args): } # get ground truth f0 - gt_f0, gt_f0_coarse, uv = get_pitch_parselmouth( + gt_f0, _, uv = get_pitch_parselmouth( wav, length, hparams, interp_uv=self.binarization_args['interp_uv'] ) if uv.all(): # All unvoiced raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') processed_input['f0'] = torch.from_numpy(gt_f0).float() - processed_input['f0_coarse'] = torch.from_numpy(gt_f0_coarse).long() if self.binarization_args['with_uv']: processed_input['uv'] = torch.from_numpy(uv) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index ecdf468cf..61c899f03 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -5,7 +5,7 @@ from modules.commons.common_layers import Embedding, Linear from modules.fastspeech.tts_modules import FastSpeech2Encoder, mel2ph_to_dur from utils.hparams import hparams -from utils.pitch_utils import f0_to_coarse, denorm_f0 +from utils.pitch_utils import f0_to_coarse from utils.text_encoder import PAD_INDEX diff --git a/src/acoustic_task.py b/src/acoustic_task.py index 51832eb44..a845fe965 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -83,10 +83,7 @@ def collater(self, samples): return {} txt_lengths = torch.LongTensor([s['tokens'].numel() for s in samples]) tokens = utils.collate_1d([s['tokens'] for s in samples], 0) - if hparams['f0_embed_type'] == 'continuous': - f0 = utils.collate_1d([s['f0'] for s in samples], 0.0) - else: - f0 = utils.collate_1d([s['f0_coarse'] for s in samples], 0) + f0 = utils.collate_1d([s['f0'] for s in samples], 0.0) mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples]) mel2ph = utils.collate_1d([s['mel2ph'] for s in samples], 0) mels = utils.collate_2d([s['mel'] for s in samples], 0.0) From 2817f4b3f551f3de3db2a7c4cf4e67c83d156e54 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 11:10:13 +0800 Subject: [PATCH 046/475] Remove gradio --- inference/gradio/gradio_settings.yaml | 29 -------- inference/gradio/infer.py | 96 --------------------------- 2 files changed, 125 deletions(-) delete mode 100644 inference/gradio/gradio_settings.yaml delete mode 100644 inference/gradio/infer.py diff --git 
a/inference/gradio/gradio_settings.yaml b/inference/gradio/gradio_settings.yaml deleted file mode 100644 index 13de72c19..000000000 --- a/inference/gradio/gradio_settings.yaml +++ /dev/null @@ -1,29 +0,0 @@ -title: 'DiffSinger' -description: | - This model is trained on 5 hours single female singing voice samples of Opencpop dataset. (该模型在开源数据集Opencpop的5小时单人歌声上训练。) - - Please assign pitch and duration values to each Chinese character. The corresponding pitch and duration value of each character should be separated by a | separator. It is necessary to ensure that the note window separated by the separator is consistent with the number of Chinese characters (AP or SP is also viewed as a Chinese character). (请给每个汉字分配音高和时值, 每个字对应的音高和时值需要用|分隔符隔开。需要保证分隔符分割出来的音符窗口与汉字个数(AP或SP也算一个汉字)一致。) - - You can click one of the examples to load them. (你可以点击下方示例,加载示例曲谱。) - - Note: This space is running on CPU, inference times will be higher. (该Demo是在Huggingface提供的CPU上运行的, 其推理速度在本地会更高一些。) - -article: | - Link to Github REPO -example_inputs: - - |- - 你 说 你 不 SP 懂 为 何 在 这 时 牵 手 APD#4/Eb4 | D#4/Eb4 | D#4/Eb4 | D#4/Eb4 | rest | D#4/Eb4 | D4 | D4 | D4 | D#4/Eb4 | F4 | D#4/Eb4 | D4 | rest0.113740 | 0.329060 | 0.287950 | 0.133480 | 0.150900 | 0.484730 | 0.242010 | 0.180820 | 0.343570 | 0.152050 | 0.266720 | 0.280310 | 0.633300 | 0.444590 - - |- - 小酒窝长睫毛AP是你最美的记号C#4/Db4 | F#4/Gb4 | G#4/Ab4 | A#4/Bb4 F#4/Gb4 | F#4/Gb4 C#4/Db4 | C#4/Db4 | rest | C#4/Db4 | A#4/Bb4 | G#4/Ab4 | A#4/Bb4 | G#4/Ab4 | F4 | C#4/Db40.407140 | 0.376190 | 0.242180 | 0.509550 0.183420 | 0.315400 0.235020 | 0.361660 | 0.223070 | 0.377270 | 0.340550 | 0.299620 | 0.344510 | 0.283770 | 0.323390 | 0.360340 - - |- - 小酒窝长睫毛AP那是可爱猪宝宝C#4/Db4 | F#4/Gb4 | G#4/Ab4 | A#4/Bb4 F#4/Gb4 | F#4/Gb4 C#4/Db4 | C#4/Db4 | rest | C#4/Db4 | A#4/Bb4 | G#4/Ab4 | A#4/Bb4 | G#4/Ab4 | F4 | C#4/Db40.407140 | 0.376190 | 0.242180 | 0.509550 0.183420 | 0.315400 0.235020 | 0.361660 | 0.223070 | 0.377270 | 0.340550 | 0.299620 | 0.344510 | 0.283770 | 0.323390 | 0.360340 - - |- - 我真的SP爱你SP句句不轻易D4 | A4 | F#4 | rest | A4 | D4 | rest | B4 | A4 F#4 | F#4 | A4 | A40.8 | 0.4 | 0.967 | 0.3 | 0.4 | 0.967 | 0.4 | 0.8 | 0.4 0.4 | 0.25 | 0.967 | 0.9 - - |- - 好冷啊 AP 我在东北玩泥巴F4 | F4 | D4 | rest | D4 | D4 | C4 | C4 | B3 | C4 | D40.5 | 0.3 | 0.3 | 0.3 | 0.2 | 0.2 | 0.2 | 0.2 | 0.25 | 0.25 | 0.4 - -#inference_cls: inference.ds_cascade.DiffSingerCascadeInfer -#exp_name: 0303_opencpop_ds58_midi - -inference_cls: inference.ds_e2e.DiffSingerE2EInfer -exp_name: 0228_opencpop_ds100_rel \ No newline at end of file diff --git a/inference/gradio/infer.py b/inference/gradio/infer.py deleted file mode 100644 index c5c264d71..000000000 --- a/inference/gradio/infer.py +++ /dev/null @@ -1,96 +0,0 @@ -''' - NOTE: this script is *isolated* from other scripts, which means - it may not be compatible with the current version. 
-''' - -import importlib -import re - -import gradio as gr -import yaml -from gradio.inputs import Textbox - -from basics.base_svs_infer import BaseSVSInfer -from utils.hparams import set_hparams -from utils.hparams import hparams as hp -import numpy as np - - -class GradioInfer: - def __init__(self, exp_name, inference_cls, title, description, article, example_inputs): - self.exp_name = exp_name - self.title = title - self.description = description - self.article = article - self.example_inputs = example_inputs - pkg = ".".join(inference_cls.split(".")[:-1]) - cls_name = inference_cls.split(".")[-1] - self.inference_cls = getattr(importlib.import_module(pkg), cls_name) - - def greet(self, text, notes, notes_duration): - PUNCS = '。?;:' - sents = re.split(rf'([{PUNCS}])', text.replace('\n', ',')) - sents_notes = re.split(rf'([{PUNCS}])', notes.replace('\n', ',')) - sents_notes_dur = re.split(rf'([{PUNCS}])', notes_duration.replace('\n', ',')) - - if sents[-1] not in list(PUNCS): - sents = sents + [''] - sents_notes = sents_notes + [''] - sents_notes_dur = sents_notes_dur + [''] - - audio_outs = [] - s, n, n_dur = "", "", "" - for i in range(0, len(sents), 2): - if len(sents[i]) > 0: - s += sents[i] + sents[i + 1] - n += sents_notes[i] + sents_notes[i+1] - n_dur += sents_notes_dur[i] + sents_notes_dur[i+1] - if len(s) >= 400 or (i >= len(sents) - 2 and len(s) > 0): - audio_out = self.infer_ins.infer_once({ - 'text': s, - 'notes': n, - 'notes_duration': n_dur, - }) - audio_out = audio_out * 32767 - audio_out = audio_out.astype(np.int16) - audio_outs.append(audio_out) - audio_outs.append(np.zeros(int(hp['audio_sample_rate'] * 0.3)).astype(np.int16)) - s = "" - n = "" - audio_outs = np.concatenate(audio_outs) - return hp['audio_sample_rate'], audio_outs - - def run(self): - set_hparams(exp_name=self.exp_name, print_hparams=False) - infer_cls = self.inference_cls - self.infer_ins: BaseSVSInfer = infer_cls(hp) - example_inputs = self.example_inputs - for i in range(len(example_inputs)): - text, notes, notes_dur = example_inputs[i].split('') - example_inputs[i] = [text, notes, notes_dur] - - iface = gr.Interface(fn=self.greet, - inputs=[ - Textbox(lines=2, placeholder=None, default=example_inputs[0][0], label="input text"), - Textbox(lines=2, placeholder=None, default=example_inputs[0][1], label="input note"), - Textbox(lines=2, placeholder=None, default=example_inputs[0][2], label="input duration")] - , - outputs="audio", - allow_flagging="never", - title=self.title, - description=self.description, - article=self.article, - examples=example_inputs, - enable_queue=True) - iface.launch(share=True,)# cache_examples=True) - - -if __name__ == '__main__': - gradio_config = yaml.safe_load(open('inference/gradio/gradio_settings.yaml', encoding='utf-8')) - g = GradioInfer(**gradio_config) - g.run() - - -# python inference/gradio/infer.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name 0303_opencpop_ds58_midi -# python inference/ds_cascade.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name 0303_opencpop_ds58_midi -# CUDA_VISIBLE_DEVICES=3 python inference/gradio/infer.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name 0228_opencpop_ds100_rel \ No newline at end of file From 33080238e3849119e1d1c8d1bf9112af57ec30a5 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 11:10:31 +0800 Subject: [PATCH 047/475] Clean up hparams --- configs/acoustic.yaml | 4 -- configs/base.yaml | 5 -- src/acoustic_task.py | 125 ++++++++++++------------------------ 
src/diff/diffusion.py | 47 -------------- src/vocoders/nsf_hifigan.py | 4 +- 5 files changed, 43 insertions(+), 142 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 0464d330a..0fde14476 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -15,7 +15,6 @@ test_prefixes: [ vocoder: NsfHifiGAN vocoder_ckpt: checkpoints/nsf_hifigan/model -use_nsf: true audio_sample_rate: 44100 audio_num_mel_bins: 128 hop_size: 512 # Hop size. @@ -63,14 +62,11 @@ use_pitch_embed: true f0_embed_type: continuous use_key_shift_embed: false use_speed_embed: false -use_gt_f0: false # for midi exp -use_gt_dur: false # for further midi exp K_step: 1000 timesteps: 1000 max_beta: 0.02 rel_pos: true -gaussian_start: true pndm_speedup: 10 hidden_size: 256 residual_layers: 20 diff --git a/configs/base.yaml b/configs/base.yaml index ce3340dbb..b52bcdac5 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -27,7 +27,6 @@ binarization_args: endless_ds: true max_frames: 1550 -max_input_tokens: 1550 audio_num_mel_bins: 80 audio_sample_rate: 22050 hop_size: 256 # For 22050Hz, 275 ~= 12.5 ms (0.0125 * sample_rate) @@ -89,12 +88,8 @@ train_set_name: 'train' valid_set_name: 'valid' vocoder: '' vocoder_ckpt: '' -profile_infer: false out_wav_norm: false save_gt: false save_f0: false gen_dir_name: '' num_valid_plots: 5 -num_test_samples: 0 -use_gt_dur: false -use_gt_f0: false diff --git a/src/acoustic_task.py b/src/acoustic_task.py index a845fe965..cf03265c4 100644 --- a/src/acoustic_task.py +++ b/src/acoustic_task.py @@ -41,42 +41,8 @@ def __init__(self, prefix, shuffle=False): def __getitem__(self, index): if self.indexed_ds is None: self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) - sample = item = self.indexed_ds[index] + sample = self.indexed_ds[index] return sample - # max_frames = hparams['max_frames'] - # spec = torch.Tensor(item['mel'])[:max_frames] - # # energy = (spec.exp() ** 2).sum(-1).sqrt() - # mel2ph = torch.LongTensor(item['mel2ph'])[:max_frames] if 'mel2ph' in item else None - # f0, uv = interp_f0(item['f0'][:max_frames]) - # phone = torch.LongTensor(item['phone'][:hparams['max_input_tokens']]) - # pitch = torch.LongTensor(item.get('pitch'))[:max_frames] - # sample = { - # 'id': index, - # 'item_name': item['item_name'], - # 'text': item['txt'], - # 'txt_token': phone, - # 'mel': spec, - # 'pitch': pitch, - # 'f0': f0, - # 'uv': uv, - # 'mel2ph': mel2ph, - # 'mel_nonpadding': spec.abs().sum(-1) > 0, - # } - # if hparams['use_energy_embed']: - # sample['energy'] = item['energy'] - # if hparams.get('use_key_shift_embed', False): - # sample['key_shift'] = item['key_shift'] - # if hparams.get('use_speed_embed', False): - # sample['speed'] = item['speed'] - # if hparams['use_spk_embed']: - # sample['spk_embed'] = torch.Tensor(item['spk_embed']) - # if hparams['use_spk_id']: - # sample['spk_id'] = item['spk_id'] - # sample['pitch_midi'] = torch.LongTensor(item['pitch_midi'])[:hparams['max_frames']] - # sample['midi_dur'] = torch.FloatTensor(item['midi_dur'])[:hparams['max_frames']] - # sample['is_slur'] = torch.LongTensor(item['is_slur'])[:hparams['max_frames']] - # sample['word_boundary'] = torch.LongTensor(item['word_boundary'])[:hparams['max_frames']] - # return sample def collater(self, samples): if len(samples) == 0: @@ -315,22 +281,19 @@ def test_step(self, sample, batch_idx): mel2ph = sample['mel2ph'] f0 = sample['f0'] ref_mels = None - if hparams['profile_infer']: - pass + outputs = self.model( + txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, + 
ref_mels=ref_mels, infer=True + ) + sample['outputs'] = self.model.out2mel(outputs['mel_out']) + sample['mel2ph_pred'] = outputs['mel2ph'] + if hparams.get('pe_enable') is not None and hparams['pe_enable']: + sample['f0'] = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel + sample['f0_pred'] = self.pe(sample['outputs'])['f0_denorm_pred'] # pe predict from Pred mel else: - outputs = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, - ref_mels=ref_mels, infer=True - ) - sample['outputs'] = self.model.out2mel(outputs['mel_out']) - sample['mel2ph_pred'] = outputs['mel2ph'] - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - sample['f0'] = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - sample['f0_pred'] = self.pe(sample['outputs'])['f0_denorm_pred'] # pe predict from Pred mel - else: - sample['f0'] = denorm_f0(sample['f0'], sample['uv']) - sample['f0_pred'] = outputs.get('f0_denorm') - return self.after_infer(sample) + sample['f0'] = denorm_f0(sample['f0'], sample['uv']) + sample['f0_pred'] = outputs.get('f0_denorm') + return self.after_infer(sample) def test_end(self, outputs): self.saving_result_pool.close() @@ -339,7 +302,7 @@ def test_end(self, outputs): return {} def after_infer(self, predictions): - if self.saving_result_pool is None and not hparams['profile_infer']: + if self.saving_result_pool is None: self.saving_result_pool = Pool(min(int(os.getenv('N_PROC', os.cpu_count())), 16)) self.saving_results_futures = [] predictions = utils.unpack_dict_to_list(predictions) @@ -384,41 +347,35 @@ def after_infer(self, predictions): gen_dir = os.path.join(hparams['work_dir'], f'generated_{self.trainer.global_step}_{hparams["gen_dir_name"]}') wav_pred = self.vocoder.spec2wav(mel_pred, f0=f0_pred) - if not hparams['profile_infer']: - os.makedirs(gen_dir, exist_ok=True) - os.makedirs(f'{gen_dir}/wavs', exist_ok=True) - os.makedirs(f'{gen_dir}/plot', exist_ok=True) - os.makedirs(os.path.join(hparams['work_dir'], 'P_mels_npy'), exist_ok=True) - os.makedirs(os.path.join(hparams['work_dir'], 'G_mels_npy'), exist_ok=True) + os.makedirs(gen_dir, exist_ok=True) + os.makedirs(f'{gen_dir}/wavs', exist_ok=True) + os.makedirs(f'{gen_dir}/plot', exist_ok=True) + os.makedirs(os.path.join(hparams['work_dir'], 'P_mels_npy'), exist_ok=True) + os.makedirs(os.path.join(hparams['work_dir'], 'G_mels_npy'), exist_ok=True) + self.saving_results_futures.append( + self.saving_result_pool.apply_async(self.save_result, args=[ + wav_pred, mel_pred, 'P', item_name, text, gen_dir, str_phs, mel2ph_pred, f0_gt, f0_pred])) + + if mel_gt is not None and hparams['save_gt']: + wav_gt = self.vocoder.spec2wav(mel_gt, f0=f0_gt) self.saving_results_futures.append( self.saving_result_pool.apply_async(self.save_result, args=[ - wav_pred, mel_pred, 'P', item_name, text, gen_dir, str_phs, mel2ph_pred, f0_gt, f0_pred])) - - if mel_gt is not None and hparams['save_gt']: - wav_gt = self.vocoder.spec2wav(mel_gt, f0=f0_gt) - self.saving_results_futures.append( - self.saving_result_pool.apply_async(self.save_result, args=[ - wav_gt, mel_gt, 'G', item_name, text, gen_dir, str_phs, mel2ph_gt, f0_gt, f0_pred])) - if hparams['save_f0']: - import matplotlib.pyplot as plt - # f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams) - f0_pred_ = f0_pred - f0_gt_, _, _ = get_pitch_parselmouth(wav_gt, len(mel_gt), hparams) - fig = plt.figure() - plt.plot(f0_pred_, label=r'$f0_P$') - plt.plot(f0_gt_, label=r'$f0_G$') - plt.legend() - plt.tight_layout() - 
plt.savefig(f'{gen_dir}/plot/[F0][{item_name}]{text}.png', format='png') - plt.close(fig) - - t.set_description( - f'Pred_shape: {mel_pred.shape}, gt_shape: {mel_gt.shape}') - else: - if 'gen_wav_time' not in self.stats: - self.stats['gen_wav_time'] = 0 - self.stats['gen_wav_time'] += len(wav_pred) / hparams['audio_sample_rate'] - print('gen_wav_time: ', self.stats['gen_wav_time']) + wav_gt, mel_gt, 'G', item_name, text, gen_dir, str_phs, mel2ph_gt, f0_gt, f0_pred])) + if hparams['save_f0']: + import matplotlib.pyplot as plt + # f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams) + f0_pred_ = f0_pred + f0_gt_, _, _ = get_pitch_parselmouth(wav_gt, len(mel_gt), hparams) + fig = plt.figure() + plt.plot(f0_pred_, label=r'$f0_P$') + plt.plot(f0_gt_, label=r'$f0_G$') + plt.legend() + plt.tight_layout() + plt.savefig(f'{gen_dir}/plot/[F0][{item_name}]{text}.png', format='png') + plt.close(fig) + + t.set_description( + f'Pred_shape: {mel_pred.shape}, gt_shape: {mel_gt.shape}') return {} diff --git a/src/diff/diffusion.py b/src/diff/diffusion.py index 2e12ecd2d..a02d8a843 100644 --- a/src/diff/diffusion.py +++ b/src/diff/diffusion.py @@ -240,18 +240,6 @@ def forward(self, txt_tokens, mel2ph=None, spk_embed=None, self.norm_spec(ref_mels), cond, ret, self.K_step, b, device ) else: - ''' - ret['fs2_mel'] = ret['mel_out'] - fs2_mels = ret['mel_out'] - t = self.K_step - fs2_mels = self.norm_spec(fs2_mels) - fs2_mels = fs2_mels.transpose(1, 2)[:, None, :, :] - x = self.q_sample(x_start=fs2_mels, t=torch.tensor([t - 1], device=device).long()) - if hparams.get('gaussian_start') is not None and hparams['gaussian_start']: - print('===> gaussion start.') - shape = (cond.shape[0], 1, self.mel_bins, cond.shape[2]) - x = torch.randn(shape, device=device) - ''' t = self.K_step shape = (cond.shape[0], 1, self.mel_bins, cond.shape[2]) x = torch.randn(shape, device=device) @@ -322,38 +310,3 @@ def cwt2f0_norm(self, cwt_spec, mean, std, mel2ph): def out2mel(self, x): return x - - -class OfflineGaussianDiffusion(GaussianDiffusion): - def forward(self, txt_tokens, mel2ph=None, spk_embed=None, - ref_mels=None, f0=None, uv=None, energy=None, infer=False, **kwargs): - b, *_, device = *txt_tokens.shape, txt_tokens.device - - ret = self.fs2(txt_tokens, mel2ph, spk_embed, ref_mels, f0, uv, energy, - skip_decoder=True, infer=True, **kwargs) - cond = ret['decoder_inp'].transpose(1, 2) - fs2_mels = ref_mels[1] - ref_mels = ref_mels[0] - - if not infer: - t = torch.randint(0, self.K_step, (b,), device=device).long() - x = ref_mels - x = self.norm_spec(x) - x = x.transpose(1, 2)[:, None, :, :] # [B, 1, M, T] - ret['diff_loss'] = self.p_losses(x, t, cond) - else: - t = self.K_step - fs2_mels = self.norm_spec(fs2_mels) - fs2_mels = fs2_mels.transpose(1, 2)[:, None, :, :] - - x = self.q_sample(x_start=fs2_mels, t=torch.tensor([t - 1], device=device).long()) - - if hparams.get('gaussian_start') is not None and hparams['gaussian_start']: - print('===> gaussion start.') - shape = (cond.shape[0], 1, self.mel_bins, cond.shape[2]) - x = torch.randn(shape, device=device) - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t): - x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) - x = x[:, 0].transpose(1, 2) - ret['mel_out'] = self.denorm_spec(x) - return ret diff --git a/src/vocoders/nsf_hifigan.py b/src/vocoders/nsf_hifigan.py index 61b101f51..704d61391 100644 --- a/src/vocoders/nsf_hifigan.py +++ b/src/vocoders/nsf_hifigan.py @@ -44,7 +44,7 @@ def spec2wav_torch(self, mel, **kwargs): # 
mel: [B, T, bins] # log10 to log mel c = 2.30259 * c f0 = kwargs.get('f0') # [B, T] - if f0 is not None and hparams.get('use_nsf'): + if f0 is not None: y = self.model(c, f0).view(-1) else: y = self.model(c).view(-1) @@ -74,7 +74,7 @@ def spec2wav(self, mel, **kwargs): # log10 to log mel c = 2.30259 * c f0 = kwargs.get('f0') - if f0 is not None and hparams.get('use_nsf'): + if f0 is not None: f0 = torch.FloatTensor(f0[None, :]).to(self.device) y = self.model(c, f0).view(-1) else: From b0eab6444857efa849880e31e1ce89950bc90e1f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 11:13:17 +0800 Subject: [PATCH 048/475] Reset base config path --- pipelines/no_midi_preparation.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pipelines/no_midi_preparation.ipynb b/pipelines/no_midi_preparation.ipynb index 7eee472ac..20909a21f 100644 --- a/pipelines/no_midi_preparation.ipynb +++ b/pipelines/no_midi_preparation.ipynb @@ -1171,7 +1171,7 @@ " test_prefixes += sorted(random.sample(training_cases, 10))\n", "\n", "configs = {\n", - " 'base_config': ['configs/acoustic/nomidi.yaml'],\n", + " 'base_config': ['configs/acoustic.yaml'],\n", " 'speakers': [dataset_name],\n", " 'raw_data_dir': [f'data/{full_name}/raw'],\n", " 'binary_data_dir': f'data/{full_name}/binary',\n", @@ -1471,7 +1471,7 @@ " test_prefixes += [f'{i}:{n}' for n in sorted(random.sample(training_cases[i], count))]\n", "\n", "configs = {\n", - " 'base_config': ['configs/acoustic/nomidi.yaml'],\n", + " 'base_config': ['configs/acoustic.yaml'],\n", " 'speakers': speakers,\n", " 'num_spk': len(speakers),\n", " 'use_spk_id': True,\n", From 735620076a503234d2e7a2d52846048886a23fcd Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 11:14:59 +0800 Subject: [PATCH 049/475] Adjust multiprocessing binarize config --- configs/acoustic.yaml | 1 + data_gen/acoustic.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 0fde14476..2f4e8b7d4 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -25,6 +25,7 @@ fmax: 16000 min_level_db: -120 binarization_args: + num_workers: 4 with_spk_embed: false with_align: true with_uv: false diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index eea272804..9e1409908 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -26,7 +26,7 @@ from utils.multiprocess_utils import chunked_multiprocess_run from utils.phoneme_utils import build_phoneme_list -os.environ["OMP_NUM_THREADS"] = "1" +# os.environ["OMP_NUM_THREADS"] = "1" ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'uv', 'key_shift', 'speed'] @@ -158,7 +158,7 @@ def postprocess(_item): if multiprocess: # code for parallel processing - num_workers = int(os.getenv('N_PROC', hparams.get('ds_workers', os.cpu_count() // 3))) + num_workers = int(self.binarization_args.get('num_workers', os.getenv('N_PROC', os.cpu_count() // 3))) for item in tqdm( chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), total=len(list(self.meta_data_iterator(prefix))) From cc21e6d149d6fc3f2e34cb52c4cc10daa39e54c7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 12:42:17 +0800 Subject: [PATCH 050/475] Configure multiprocessing binarize --- basics/base_binarizer.py | 2 +- configs/acoustic.yaml | 2 +- data_gen/acoustic.py | 12 +++++++----- utils/multiprocess_utils.py | 5 +---- 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 
76b0608d5..18679c864 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -135,7 +135,7 @@ def process(self): def check_coverage(self): raise NotImplementedError() - def process_data_split(self, prefix, multiprocess=False, apply_augmentation=False): + def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): raise NotImplementedError() def arrange_data_augmentation(self, prefix): diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 2f4e8b7d4..2932abd22 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -25,7 +25,7 @@ fmax: 16000 min_level_db: -120 binarization_args: - num_workers: 4 + num_workers: 0 with_spk_embed: false with_align: true with_uv: false diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 9e1409908..b7c79a64f 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -65,8 +65,11 @@ def load_meta_data(self, raw_data_dir, ds_id): def process(self): super().process() self.process_data_split('valid') - # self.process_data_split('test') - self.process_data_split('train', apply_augmentation=len(self.augmentation_args) > 0) + self.process_data_split( + 'train', + num_workers=int(self.binarization_args.get('num_workers', os.getenv('N_PROC', 0))), + apply_augmentation=len(self.augmentation_args) > 0 + ) def check_coverage(self): # Group by phonemes in the dictionary. @@ -122,7 +125,7 @@ def check_coverage(self): f' (+) {sorted(unrecognizable_phones)}\n' f' (-) {sorted(missing_phones)}') - def process_data_split(self, prefix, multiprocess=False, apply_augmentation=False): + def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): data_dir = hparams['binary_data_dir'] args = [] builder = IndexedDatasetBuilder(data_dir, prefix=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) @@ -156,9 +159,8 @@ def postprocess(_item): lengths.append(aug_item['length']) total_sec += aug_item['seconds'] - if multiprocess: + if num_workers > 0: # code for parallel processing - num_workers = int(self.binarization_args.get('num_workers', os.getenv('N_PROC', os.cpu_count() // 3))) for item in tqdm( chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), total=len(list(self.meta_data_iterator(prefix))) diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index e77638296..00d389ae4 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -1,4 +1,3 @@ -import os import re import traceback from multiprocessing import Queue, Process, current_process @@ -24,12 +23,10 @@ def chunked_worker(worker_id, map_func, args, results_queue=None, init_ctx_func= traceback.print_exc() results_queue.put((job_idx, None)) -def chunked_multiprocess_run(map_func, args, num_workers=None, ordered=True, init_ctx_func=None, q_max_size=1000): +def chunked_multiprocess_run(map_func, args, num_workers, ordered=True, init_ctx_func=None, q_max_size=1000): args = zip(range(len(args)), args) args = list(args) n_jobs = len(args) - if num_workers is None: - num_workers = int(os.getenv('N_PROC', os.cpu_count())) results_queues = [] if ordered: for i in range(num_workers): From 5faccdb6a220f7c884a083442ff8a06cee7fbd11 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 13:33:12 +0800 Subject: [PATCH 051/475] Remove OMP_NUM_THREADS --- data_gen/acoustic.py | 1 - data_gen/binarize.py | 4 ---- 2 files changed, 5 deletions(-) diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index b7c79a64f..0d9dca2e7 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -26,7 
+26,6 @@ from utils.multiprocess_utils import chunked_multiprocess_run from utils.phoneme_utils import build_phoneme_list -# os.environ["OMP_NUM_THREADS"] = "1" ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'uv', 'key_shift', 'speed'] diff --git a/data_gen/binarize.py b/data_gen/binarize.py index 9780f9d8c..63295839f 100644 --- a/data_gen/binarize.py +++ b/data_gen/binarize.py @@ -1,7 +1,3 @@ -import os - -os.environ["OMP_NUM_THREADS"] = "1" - import importlib from utils.hparams import set_hparams, hparams From b0e9d1a5b098b4a38e2d358cd56db70987f501bd Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 13:33:47 +0800 Subject: [PATCH 052/475] Optimize collate_1d and collate_2d --- utils/__init__.py | 34 +++++++--------------------------- 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/utils/__init__.py b/utils/__init__.py index f936ed8e5..d5298d415 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,17 +1,12 @@ import glob -import logging import re import time -from collections import defaultdict import os import sys -import shutil import types import numpy as np import torch import torch.nn.functional as F -import torch.distributed as dist -from torch import nn def tensors_to_scalars(metrics): @@ -41,38 +36,23 @@ def update(self, val, n=1): self.avg = self.sum / self.cnt -def collate_1d(values, pad_idx=0, left_pad=False, shift_right=False, max_len=None, shift_id=1): +def collate_1d(values, pad_value=0, max_len=None): """Convert a list of 1d tensors into a padded 2d tensor.""" size = max(v.size(0) for v in values) if max_len is None else max_len - res = values[0].new(len(values), size).fill_(pad_idx) - - def copy_tensor(src, dst): - assert dst.numel() == src.numel() - if shift_right: - dst[1:] = src[:-1] - dst[0] = shift_id - else: - dst.copy_(src) + res = torch.full((len(values), size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device) for i, v in enumerate(values): - copy_tensor(v, res[i][size - len(v):] if left_pad else res[i][:len(v)]) + res[i, :len(v)] = v return res -def collate_2d(values, pad_idx=0, left_pad=False, shift_right=False, max_len=None): +def collate_2d(values, pad_value=0, max_len=None): """Convert a list of 2d tensors into a padded 3d tensor.""" - size = max(v.size(0) for v in values) if max_len is None else max_len - res = values[0].new(len(values), size, values[0].shape[1]).fill_(pad_idx) - - def copy_tensor(src, dst): - assert dst.numel() == src.numel() - if shift_right: - dst[1:] = src[:-1] - else: - dst.copy_(src) + size = ((max(v.size(0) for v in values) if max_len is None else max_len), values[0].shape[1]) + res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device) for i, v in enumerate(values): - copy_tensor(v, res[i][size - len(v):] if left_pad else res[i][:len(v)]) + res[i, :len(v), :] = v return res From f6c7961f91c37728fa23101f8fe733b826206a43 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 17:01:31 +0800 Subject: [PATCH 053/475] Add back OMP_NUM_THREADS --- data_gen/acoustic.py | 1 + 1 file changed, 1 insertion(+) diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 0d9dca2e7..68c3689c4 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -26,6 +26,7 @@ from utils.multiprocess_utils import chunked_multiprocess_run from utils.phoneme_utils import build_phoneme_list +os.environ["OMP_NUM_THREADS"] = "1" ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'uv', 'key_shift', 'speed'] From 
48008997a4f540e0b9515fdfe4a2329fb84f85a5 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 12 Mar 2023 18:32:49 +0800
Subject: [PATCH 054/475] Combine collate 1d and 2d into Nd

---
 src/acoustic_task.py | 10 +++++-----
 utils/__init__.py    | 20 ++++++--------------
 2 files changed, 11 insertions(+), 19 deletions(-)

diff --git a/src/acoustic_task.py b/src/acoustic_task.py
index cf03265c4..620ee51b3 100644
--- a/src/acoustic_task.py
+++ b/src/acoustic_task.py
@@ -48,11 +48,11 @@ def collater(self, samples):
         if len(samples) == 0:
             return {}
         txt_lengths = torch.LongTensor([s['tokens'].numel() for s in samples])
-        tokens = utils.collate_1d([s['tokens'] for s in samples], 0)
-        f0 = utils.collate_1d([s['f0'] for s in samples], 0.0)
+        tokens = utils.collate_nd([s['tokens'] for s in samples], 0)
+        f0 = utils.collate_nd([s['f0'] for s in samples], 0.0)
         mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples])
-        mel2ph = utils.collate_1d([s['mel2ph'] for s in samples], 0)
-        mels = utils.collate_2d([s['mel'] for s in samples], 0.0)
+        mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0)
+        mels = utils.collate_nd([s['mel'] for s in samples], 0.0)
         batch = {
             'nsamples': len(samples),
             'txt_lengths': txt_lengths,
@@ -63,7 +63,7 @@ def collater(self, samples):
             'f0': f0,
         }
         if hparams['use_energy_embed']:
-            batch['energy'] = utils.collate_1d([s['energy'] for s in samples], 0.0)
+            batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0)
         if hparams.get('use_key_shift_embed', False):
             batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])
         if hparams.get('use_speed_embed', False):
diff --git a/utils/__init__.py b/utils/__init__.py
index d5298d415..b93b6aae5 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -36,23 +36,15 @@ def update(self, val, n=1):
         self.avg = self.sum / self.cnt

-def collate_1d(values, pad_value=0, max_len=None):
-    """Convert a list of 1d tensors into a padded 2d tensor."""
-    size = max(v.size(0) for v in values) if max_len is None else max_len
-    res = torch.full((len(values), size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device)
-
-    for i, v in enumerate(values):
-        res[i, :len(v)] = v
-    return res
-
-
-def collate_2d(values, pad_value=0, max_len=None):
-    """Convert a list of 2d tensors into a padded 3d tensor."""
-    size = ((max(v.size(0) for v in values) if max_len is None else max_len), values[0].shape[1])
+def collate_nd(values, pad_value=0, max_len=None):
+    """
+    Pad a list of Nd tensors on their first dimension and stack them into an (N+1)d tensor.
+    """
+    size = ((max(v.size(0) for v in values) if max_len is None else max_len), *values[0].shape[1:])
     res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device)

     for i, v in enumerate(values):
-        res[i, :len(v), :] = v
+        res[i, :len(v), ...]
= v return res From 7e32911a395385dd75006f7a8960003eb2be7899 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 21:19:19 +0800 Subject: [PATCH 055/475] Remove uv --- configs/acoustic.yaml | 6 ++---- configs/base.yaml | 5 +---- data_gen/acoustic.py | 6 ++---- 3 files changed, 5 insertions(+), 12 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 2932abd22..ae3d466ff 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -25,12 +25,10 @@ fmax: 16000 min_level_db: -120 binarization_args: + label_format: grid + shuffle: true num_workers: 0 - with_spk_embed: false - with_align: true - with_uv: false interp_uv: true - shuffle: true #augmentation_args: # random_pitch_shifting: # range: [-5., 5.] diff --git a/configs/base.yaml b/configs/base.yaml index b52bcdac5..fa603122b 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -19,10 +19,7 @@ binary_data_dir: '' binarizer_cls: basics.base_binarizer.BaseBinarizer binarization_args: shuffle: false - with_txt: true - with_align: true - with_spk_embed: true - with_f0: true + num_workers: 0 endless_ds: true diff --git a/data_gen/acoustic.py b/data_gen/acoustic.py index 68c3689c4..9d9bbfaac 100644 --- a/data_gen/acoustic.py +++ b/data_gen/acoustic.py @@ -20,14 +20,14 @@ from basics.base_binarizer import BaseBinarizer, BinarizationError from data_gen.data_gen_utils import get_pitch_parselmouth, get_mel2ph_torch from modules.fastspeech.tts_modules import LengthRegulator -from src.vocoders.vocoder_utils import VOCODERS +from utils.vocoder_utils import VOCODERS from utils.hparams import hparams from utils.indexed_datasets import IndexedDatasetBuilder from utils.multiprocess_utils import chunked_multiprocess_run from utils.phoneme_utils import build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" -ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'uv', 'key_shift', 'speed'] +ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'key_shift', 'speed'] class AcousticBinarizer(BaseBinarizer): @@ -210,8 +210,6 @@ def process_item(self, item_name, meta_data, binarization_args): if uv.all(): # All unvoiced raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') processed_input['f0'] = torch.from_numpy(gt_f0).float() - if self.binarization_args['with_uv']: - processed_input['uv'] = torch.from_numpy(uv) # get ground truth dur processed_input['mel2ph'] = get_mel2ph_torch(self.lr, processed_input['ph_dur'], length, hparams) From c7c5b9c863a4b9c149229fd2ee7fca749830e56e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 22:03:59 +0800 Subject: [PATCH 056/475] Re-organize project structure --- .gitignore | 2 +- README.md | 21 +- augmentation/spec_stretch.py | 4 +- basics/base_svs_infer.py | 2 +- configs/acoustic.yaml | 4 +- configs/base.yaml | 2 +- {onnx => deployment}/.gitignore | 0 .../export/export_acoustic.py | 22 +- .../export/export_nsf_hifigan.py | 2 +- {onnx => deployment}/infer/infer_acoustic.py | 10 +- .../infer/infer_nsf_hifigan.py | 8 +- {onnx => deployment}/requirements.txt | 0 ...E-SVS-onnx.md => README-SVS-deployment.md} | 4 +- docs/README-SVS-opencpop-cascade.md | 8 +- docs/README-SVS-opencpop-e2e.md | 6 +- docs/README-SVS-opencpop-pndm.md | 6 +- inference/ds_cascade.py | 4 +- inference/vocoder/val_nsf_hifigan.py | 4 +- {src => modules}/diff/diffusion.py | 623 +++++++++--------- src/diff/net.py => modules/diff/wavenet.py | 5 +- modules/fastspeech/acoustic_encoder.py | 4 +- modules/nsf_hifigan/env.py | 7 - modules/nsf_hifigan/models.py | 166 +---- 
modules/nsf_hifigan/nvSTFT.py | 8 +- modules/nsf_hifigan/utils.py | 57 +- modules/vocoders/__init__.py | 2 + {src => modules}/vocoders/ddsp.py | 2 +- {src => modules}/vocoders/nsf_hifigan.py | 204 +++--- .../vocoders/registry.py | 0 {pipelines => preparation}/.gitignore | 0 .../acoustic_preparation.ipynb | 34 +- .../assets/2001000001.lab | 0 .../assets/2001000001.wav | Bin {pipelines => preparation}/requirements.txt | 0 .../utils/distribution.py | 0 {pipelines => preparation}/utils/slicer2.py | 0 {data_gen => preprocessing}/acoustic.py | 4 +- {data_gen => scripts}/binarize.py | 0 main.py => scripts/infer.py | 2 +- run.py => scripts/train.py | 4 +- vocode.py => scripts/vocode.py | 0 src/vocoders/__init__.py | 2 - src/acoustic_task.py => training/acoustic.py | 8 +- training/diffsinger.py | 21 - .../binarizer_utils.py | 0 45 files changed, 508 insertions(+), 754 deletions(-) rename {onnx => deployment}/.gitignore (100%) rename {onnx => deployment}/export/export_acoustic.py (98%) rename {onnx => deployment}/export/export_nsf_hifigan.py (99%) rename {onnx => deployment}/infer/infer_acoustic.py (73%) rename {onnx => deployment}/infer/infer_nsf_hifigan.py (65%) rename {onnx => deployment}/requirements.txt (100%) rename docs/{README-SVS-onnx.md => README-SVS-deployment.md} (96%) rename {src => modules}/diff/diffusion.py (93%) rename src/diff/net.py => modules/diff/wavenet.py (97%) create mode 100644 modules/vocoders/__init__.py rename {src => modules}/vocoders/ddsp.py (99%) rename {src => modules}/vocoders/nsf_hifigan.py (97%) rename src/vocoders/vocoder_utils.py => modules/vocoders/registry.py (100%) rename {pipelines => preparation}/.gitignore (100%) rename pipelines/no_midi_preparation.ipynb => preparation/acoustic_preparation.ipynb (97%) rename {pipelines => preparation}/assets/2001000001.lab (100%) rename {pipelines => preparation}/assets/2001000001.wav (100%) rename {pipelines => preparation}/requirements.txt (100%) rename {pipelines => preparation}/utils/distribution.py (100%) rename {pipelines => preparation}/utils/slicer2.py (100%) rename {data_gen => preprocessing}/acoustic.py (99%) rename {data_gen => scripts}/binarize.py (100%) rename main.py => scripts/infer.py (98%) rename run.py => scripts/train.py (97%) rename vocode.py => scripts/vocode.py (100%) delete mode 100644 src/vocoders/__init__.py rename src/acoustic_task.py => training/acoustic.py (98%) delete mode 100644 training/diffsinger.py rename data_gen/data_gen_utils.py => utils/binarizer_utils.py (100%) diff --git a/.gitignore b/.gitignore index 1a2c1815a..ac2836be5 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,7 @@ __pycache__/ local_tools/ *.ckpt *.pth -(!/pipelines/assets)/*.wav +(!/preparation/assets)/*.wav infer_out/ *.onnx data/* diff --git a/README.md b/README.md index a386d5080..236f50f43 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,8 @@ This is a cleaner version of Diffsinger, which provides: - fewer code: scripts unused in the DiffSinger are marked **\*isolated\***; - better readability: many important functions are annotated (however, **we assume the reader already knows how the neural networks work**); -- abstract classes: the bass classes are filtered out into the "basics/" folder and are annotated. Other classes inherent from the base classes. -- better file structre: tts-related files are filtered out into the "tts/" folder, as they are not used in DiffSinger. -- **(new) Much condensed version of the preprocessing, training, and inference pipeline**. 
The preprocessing pipeline is at 'preprocessing/opencpop.py', the training pipeline is at 'training/diffsinger.py', the inference pipeline is at 'inference/ds_cascade.py' or 'inference/ds_e2e.py'.
+- abstract classes: the base classes are filtered out into the "basics/" folder and are annotated. Other classes directly inherit from the base classes.
+- re-organized project structure: pipelines are separated into preparation, preprocessing, augmentation, training, inference and deployment

 ## Progress since we forked into this repository
@@ -35,7 +34,7 @@ pip install -r requirements.txt

 ### Building your own dataset

-This [pipeline](pipelines/no_midi_preparation.ipynb) will guide you from installing dependencies to formatting your recordings and generating the final configuration file.
+This [pipeline](preparation/acoustic_preparation.ipynb) will guide you from installing dependencies to formatting your recordings and generating the final configuration file.

 ### Preprocessing

@@ -43,35 +42,35 @@ The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop/) dataset.

 ```sh
 export PYTHONPATH=.
-CUDA_VISIBLE_DEVICES=0 python data_gen/binarize.py --config configs/acoustic.yaml
+CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/acoustic.yaml
 ```

 ### Training

 The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop/) dataset.

 ```sh
-CUDA_VISIBLE_DEVICES=0 python run.py --config configs/acoustic.yaml --exp_name $MY_DS_EXP_NAME --reset
+CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/acoustic.yaml --exp_name $MY_DS_EXP_NAME --reset
 ```

 ### Inference

 #### Infer from *.ds file

 ```sh
-python main.py path/to/your.ds --exp $MY_DS_EXP_NAME
+python scripts/infer.py path/to/your.ds --exp $MY_DS_EXP_NAME
 ```

-See more supported arguments with `python main.py -h`. See examples of *.ds files in the [samples/](samples/) folder.
+See more supported arguments with `python scripts/infer.py -h`. See examples of *.ds files in the [samples/](samples/) folder.

 ### Deployment

 #### Export model to ONNX format

-Please see this [documentation](docs/README-SVS-onnx.md) before you run the following command:
+Please see this [documentation](docs/README-SVS-deployment.md) before you run the following command:

 ```sh
-python onnx/export/export_acoustic.py --exp $MY_DS_EXP_NAME
+python deployment/export/export_acoustic.py --exp $MY_DS_EXP_NAME
 ```

-See more supported arguments with `python onnx/export/export_acoustic.py -h`.
+See more supported arguments with `python deployment/export/export_acoustic.py -h`.
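For a quick smoke test of an exported model, `onnxruntime` can be used directly. The following is a minimal sketch patterned on `deployment/infer/infer_acoustic.py` from this patch; the tensor names (`tokens`, `durations`, `f0`, `speedup`, `mel`), the dummy input values, and the model path are assumptions for illustration, not a documented contract:

```python
import numpy as np
import onnxruntime as ort

# Load the exported acoustic model (example path; compare deployment/infer/infer_acoustic.py).
session = ort.InferenceSession(
    'deployment/assets/my_exp.onnx',
    providers=['CUDAExecutionProvider', 'CPUExecutionProvider'],
)

# Dummy inputs: a batch of 10 phoneme tokens, per-phoneme frame durations,
# a per-frame f0 curve, and the diffusion speedup factor.
tokens = np.zeros((1, 10), dtype=np.int64)
durations = np.full((1, 10), 20, dtype=np.int64)
f0 = np.full((1, int(durations.sum())), 440.0, dtype=np.float32)
speedup = np.array(50, dtype=np.int64)

mel = session.run(['mel'], {
    'tokens': tokens,
    'durations': durations,
    'f0': f0,
    'speedup': speedup,
})[0]
print(mel.shape)  # expected roughly (1, n_frames, audio_num_mel_bins)
```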
#### Use DiffSinger via OpenUTAU editor diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 0a7656a7d..3a6244083 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -4,9 +4,9 @@ import torch from basics.base_augmentation import BaseAugmentation -from data_gen.data_gen_utils import get_pitch_parselmouth, get_mel2ph_torch +from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from modules.fastspeech.tts_modules import LengthRegulator -from src.vocoders.vocoder_utils import VOCODERS +from modules.vocoders.registry import VOCODERS from utils.hparams import hparams from utils.pitch_utils import f0_to_coarse diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 07b67b732..85aeabeb9 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -5,7 +5,7 @@ import torch from pypinyin import lazy_pinyin -from src.vocoders.vocoder_utils import VOCODERS +from modules.vocoders.registry import VOCODERS from utils.hparams import set_hparams, hparams from utils.phoneme_utils import build_g2p_dictionary, build_phoneme_list from utils.text_encoder import TokenTextEncoder diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index ae3d466ff..09ffbed28 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -1,7 +1,7 @@ base_config: - configs/base.yaml -task_cls: src.acoustic_task.AcousticTask +task_cls: training.acoustic.AcousticTask num_spk: 1 speakers: - opencpop @@ -43,7 +43,7 @@ binarization_args: raw_data_dir: 'data/opencpop/raw' binary_data_dir: 'data/opencpop/binary' -binarizer_cls: data_gen.acoustic.AcousticBinarizer +binarizer_cls: preprocessing.acoustic.AcousticBinarizer g2p_dictionary: dictionaries/opencpop-extension.txt spec_min: [-5] spec_max: [0] diff --git a/configs/base.yaml b/configs/base.yaml index fa603122b..5302021a0 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -16,7 +16,7 @@ save_codes: sort_by_len: true raw_data_dir: '' binary_data_dir: '' -binarizer_cls: basics.base_binarizer.BaseBinarizer +binarizer_cls: '' binarization_args: shuffle: false num_workers: 0 diff --git a/onnx/.gitignore b/deployment/.gitignore similarity index 100% rename from onnx/.gitignore rename to deployment/.gitignore diff --git a/onnx/export/export_acoustic.py b/deployment/export/export_acoustic.py similarity index 98% rename from onnx/export/export_acoustic.py rename to deployment/export/export_acoustic.py index 2fd1f3910..b4189bb96 100644 --- a/onnx/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -24,8 +24,8 @@ from modules.commons.common_layers import Mish from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder -from src.diff.diffusion import beta_schedule -from src.diff.net import AttrDict +from modules.diff.diffusion import beta_schedule +from modules.diff.wavenet import AttrDict from utils import load_ckpt from utils.hparams import hparams, set_hparams from utils.phoneme_utils import build_phoneme_list @@ -66,10 +66,10 @@ def forward(self, dur): class FastSpeech2Acoustic(nn.Module): - def __init__(self, dictionary): + def __init__(self, vocab_size): super().__init__() self.lr = LengthRegulator() - self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], PAD_INDEX) + self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) self.dur_embed = Linear(1, hparams['hidden_size']) self.encoder = FastSpeech2AcousticEncoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], hparams['enc_ffn_kernel_size'], 
num_heads=hparams['num_heads']) @@ -456,7 +456,7 @@ def forward(self, condition, speedup): def build_fs2_model(device, ckpt_steps=None): model = FastSpeech2Acoustic( - dictionary=TokenTextEncoder(vocab_list=build_phoneme_list()) + vocab_size=len(TokenTextEncoder(vocab_list=build_phoneme_list())) ) model.eval() load_ckpt(model, hparams['work_dir'], 'model.fs2', ckpt_steps=ckpt_steps, strict=True) @@ -1236,7 +1236,7 @@ def export_phonemes_txt(phonemes_txt_path:str): if args.out: out = os.path.join(cwd, args.out) if not os.path.isabs(args.out) else args.out else: - out = f'onnx/assets/{exp}' + out = f'deployment/assets/{exp}' os.chdir(root_dir) sys.argv = [ 'inference/ds_cascade.py', @@ -1245,9 +1245,9 @@ def export_phonemes_txt(phonemes_txt_path:str): '--infer' ] - os.makedirs(f'onnx/temp', exist_ok=True) - diff_model_path = f'onnx/temp/diffusion.onnx' - fs2_model_path = f'onnx/temp/fs2.onnx' + os.makedirs(f'deployment/temp', exist_ok=True) + diff_model_path = f'deployment/temp/diffusion.onnx' + fs2_model_path = f'deployment/temp/fs2.onnx' spk_name_pattern = r'[0-9A-Za-z_-]+' spk_export_paths = None frozen_spk_name = None @@ -1260,11 +1260,11 @@ def export_phonemes_txt(phonemes_txt_path:str): if '=' in spk_export: alias, mix = spk_export.split('=', maxsplit=1) assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' - spk_export_paths.append({'mix': mix, 'path': f'onnx/temp/{exp}.{alias}.emb'}) + spk_export_paths.append({'mix': mix, 'path': f'deployment/temp/{exp}.{alias}.emb'}) else: assert re.fullmatch(spk_name_pattern, spk_export) is not None, \ f'Invalid alias \'{spk_export}\' for speaker mix.' - spk_export_paths.append({'mix': spk_export, 'path': f'onnx/temp/{exp}.{spk_export}.emb'}) + spk_export_paths.append({'mix': spk_export, 'path': f'deployment/temp/{exp}.{spk_export}.emb'}) elif args.freeze_spk is not None: assert '=' in args.freeze_spk or '|' not in args.freeze_spk, \ 'You must specify an alias with \'NAME=\' for each speaker mix.' 
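The speaker-mix export handling above accepts specs of the form `ALIAS=spk1|spk2` or a bare speaker name. A minimal sketch of that parsing logic, mirroring the `split('=', maxsplit=1)` and `re.fullmatch` checks in `export_acoustic.py` (the `exp` name in the output path is a placeholder, not taken from the script):

```python
import re

SPK_NAME_PATTERN = r'[0-9A-Za-z_-]+'  # same pattern as spk_name_pattern above

def parse_spk_export(spk_export: str, exp: str = 'my_exp') -> dict:
    """Turn 'ALIAS=spk1|spk2' (or a bare speaker name) into a mix spec and an output path."""
    if '=' in spk_export:
        alias, mix = spk_export.split('=', maxsplit=1)
    else:
        alias = mix = spk_export
    assert re.fullmatch(SPK_NAME_PATTERN, alias) is not None, \
        f'Invalid alias \'{alias}\' for speaker mix.'
    return {'mix': mix, 'path': f'deployment/temp/{exp}.{alias}.emb'}

print(parse_spk_export('lead=alice|bob'))
# {'mix': 'alice|bob', 'path': 'deployment/temp/my_exp.lead.emb'}
print(parse_spk_export('alice'))
# {'mix': 'alice', 'path': 'deployment/temp/my_exp.alice.emb'}
```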
diff --git a/onnx/export/export_nsf_hifigan.py b/deployment/export/export_nsf_hifigan.py similarity index 99% rename from onnx/export/export_nsf_hifigan.py rename to deployment/export/export_nsf_hifigan.py index 8957e4dba..28290637a 100644 --- a/onnx/export/export_nsf_hifigan.py +++ b/deployment/export/export_nsf_hifigan.py @@ -313,7 +313,7 @@ def export(model_path): '--config', 'configs/acoustic.yaml', ] - path = 'onnx/assets/nsf_hifigan2.onnx' + path = 'deployment/assets/nsf_hifigan2.onnx' export(path) simplify(path, path) print(f'| export \'NSF-HiFiGAN\' to \'{path}\'.') diff --git a/onnx/infer/infer_acoustic.py b/deployment/infer/infer_acoustic.py similarity index 73% rename from onnx/infer/infer_acoustic.py rename to deployment/infer/infer_acoustic.py index 134cc60c8..b8e02b897 100644 --- a/onnx/infer/infer_acoustic.py +++ b/deployment/infer/infer_acoustic.py @@ -8,9 +8,9 @@ # os.add_dll_directory(r'D:\NVIDIA GPU Computing Toolkit\CUDA\v11.6\bin') # os.add_dll_directory(r'D:\NVIDIA GPU Computing Toolkit\cuDNN\bin') -tokens = np.load('onnx/assets/tokens.npy') -durations = np.load('onnx/assets/durations.npy') -f0 = np.load('onnx/assets/f0_denorm.npy') +tokens = np.load('deployment/assets/tokens.npy') +durations = np.load('deployment/assets/durations.npy') +f0 = np.load('deployment/assets/f0_denorm.npy') speedup = np.array(50, dtype=np.int64) print('tokens', tokens.shape) @@ -19,7 +19,7 @@ options = ort.SessionOptions() session = ort.InferenceSession( - 'onnx/assets/1220_zhibin_ds1000.onnx', + 'deployment/assets/1220_zhibin_ds1000.onnx', providers=['CUDAExecutionProvider', 'CPUExecutionProvider'], sess_options=options ) @@ -31,4 +31,4 @@ print('mel', mel.shape) print('cost', end - start) -np.save('onnx/assets/mel_test.npy', mel) +np.save('deployment/assets/mel_test.npy', mel) diff --git a/onnx/infer/infer_nsf_hifigan.py b/deployment/infer/infer_nsf_hifigan.py similarity index 65% rename from onnx/infer/infer_nsf_hifigan.py rename to deployment/infer/infer_nsf_hifigan.py index ec99c24a8..49ecc05ca 100644 --- a/onnx/infer/infer_nsf_hifigan.py +++ b/deployment/infer/infer_nsf_hifigan.py @@ -4,14 +4,14 @@ import onnxruntime as ort from scipy.io import wavfile -mel = np.load('onnx/assets/mel.npy') -f0 = np.load('onnx/assets/f0.npy') +mel = np.load('deployment/assets/mel.npy') +f0 = np.load('deployment/assets/f0.npy') print('mel', mel.shape) print('f0', f0.shape) session = ort.InferenceSession( - 'onnx/assets/nsf_hifigan.onnx', + 'deployment/assets/nsf_hifigan.onnx', providers=['CPUExecutionProvider'] ) @@ -23,4 +23,4 @@ print('cost', end - start) -wavfile.write('onnx/assets/waveform.wav', 44100, wav[0]) +wavfile.write('deployment/assets/waveform.wav', 44100, wav[0]) diff --git a/onnx/requirements.txt b/deployment/requirements.txt similarity index 100% rename from onnx/requirements.txt rename to deployment/requirements.txt diff --git a/docs/README-SVS-onnx.md b/docs/README-SVS-deployment.md similarity index 96% rename from docs/README-SVS-onnx.md rename to docs/README-SVS-deployment.md index 41e6423ef..3fa0f3836 100644 --- a/docs/README-SVS-onnx.md +++ b/docs/README-SVS-deployment.md @@ -26,7 +26,7 @@ The `onnxruntime` package is required to run inference with ONNX model and ONNXR Run with the command ```bash -python onnx/export/export_acoustic.py --exp EXP [--out OUT] +python deployment/export/export_acoustic.py --exp EXP [--out OUT] ``` where `EXP` is the name of experiment, `OUT` is the output directory. 
@@ -37,7 +37,7 @@ Note: DPM-Solver acceleration is not currently included, but PNDM is wrapped int ### 2. Inference with ONNXRuntime -See `onnx/infer/infer_acoustic.py` for details. +See `deployment/infer/infer_acoustic.py` for details. #### Issues related to CUDAExecutionProvider diff --git a/docs/README-SVS-opencpop-cascade.md b/docs/README-SVS-opencpop-cascade.md index acecd3b49..2e3556020 100644 --- a/docs/README-SVS-opencpop-cascade.md +++ b/docs/README-SVS-opencpop-cascade.md @@ -18,7 +18,7 @@ b) Run the following scripts to pack the dataset for training/inference. ```sh export PYTHONPATH=. -CUDA_VISIBLE_DEVICES=0 python data_gen/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml +CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml # `data/binary/opencpop-midi-dp` will be generated. ``` @@ -56,20 +56,20 @@ export MY_DS_EXP_NAME=0303_opencpop_ds58_midi ### 2. Training Example First, you need a pre-trained FFT-Singer checkpoint. You can use the pre-trained model, or train FFT-Singer from scratch, run: ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/cascade/opencs/aux_rel.yaml --exp_name $MY_FS_EXP_NAME --reset +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/cascade/opencs/aux_rel.yaml --exp_name $MY_FS_EXP_NAME --reset ``` Then, to train DiffSinger, run: ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME --reset +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME --reset ``` Remember to adjust the "fs2_ckpt" parameter in `configs/midi/cascade/opencs/ds60_rel.yaml` to fit your path. ### 3. Inference from packed test set ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer ``` We also provide: diff --git a/docs/README-SVS-opencpop-e2e.md b/docs/README-SVS-opencpop-e2e.md index 0391f2ede..4dfd0a709 100644 --- a/docs/README-SVS-opencpop-e2e.md +++ b/docs/README-SVS-opencpop-e2e.md @@ -24,7 +24,7 @@ b) Run the following scripts to pack the dataset for training/inference. ```sh export PYTHONPATH=. -CUDA_VISIBLE_DEVICES=0 python data_gen/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml +CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml # `data/binary/opencpop-midi-dp` will be generated. ``` @@ -60,12 +60,12 @@ export MY_DS_EXP_NAME=0228_opencpop_ds100_rel ### 2. Training Example ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset ``` ### 3. 
Inference from packed test set ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer ``` We also provide: diff --git a/docs/README-SVS-opencpop-pndm.md b/docs/README-SVS-opencpop-pndm.md index 248c11fc6..8ec2b89f0 100644 --- a/docs/README-SVS-opencpop-pndm.md +++ b/docs/README-SVS-opencpop-pndm.md @@ -31,7 +31,7 @@ b) Run the following scripts to pack the dataset for training/inference. ```sh export PYTHONPATH=. -CUDA_VISIBLE_DEVICES=0 python data_gen/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml +CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml # `data/binary/opencpop-midi-dp` will be generated. ``` @@ -67,12 +67,12 @@ export MY_DS_EXP_NAME=0228_opencpop_ds100_rel ### 2. Training Example ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset ``` ### 3. Inference from packed test set ```sh -CUDA_VISIBLE_DEVICES=0 python run.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer +CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer ``` We also provide: diff --git a/inference/ds_cascade.py b/inference/ds_cascade.py index da55b00fd..0c4ddb338 100644 --- a/inference/ds_cascade.py +++ b/inference/ds_cascade.py @@ -2,7 +2,7 @@ from basics.base_svs_infer import BaseSVSInfer from utils import load_ckpt from utils.hparams import hparams -from src.diff.diffusion import GaussianDiffusion +from modules.diff.diffusion import GaussianDiffusion from modules.fastspeech.tts_modules import LengthRegulator import librosa import numpy as np @@ -11,7 +11,7 @@ class DiffSingerCascadeInfer(BaseSVSInfer): def build_model(self, ckpt_steps=None): model = GaussianDiffusion( - phone_encoder=self.ph_encoder, + vocab_size=len(self.ph_encoder), out_dims=hparams['audio_num_mel_bins'], timesteps=hparams['timesteps'], K_step=hparams['K_step'], diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/vocoder/val_nsf_hifigan.py index ab8ef21be..79a50b11f 100644 --- a/inference/vocoder/val_nsf_hifigan.py +++ b/inference/vocoder/val_nsf_hifigan.py @@ -7,8 +7,8 @@ import torchcrepe import tqdm -from data_gen.data_gen_utils import get_pitch_parselmouth -from src.vocoders.nsf_hifigan import NsfHifiGAN +from utils.binarizer_utils import get_pitch_parselmouth +from modules.vocoders.nsf_hifigan import NsfHifiGAN from utils.audio import save_wav from utils.hparams import set_hparams, hparams diff --git a/src/diff/diffusion.py b/modules/diff/diffusion.py similarity index 93% rename from src/diff/diffusion.py rename to modules/diff/diffusion.py index a02d8a843..42f40e229 100644 --- a/src/diff/diffusion.py +++ b/modules/diff/diffusion.py @@ -1,312 +1,311 @@ -from collections import deque -from functools import partial -from inspect import isfunction - -import numpy as np -import torch -import torch.nn.functional as F -from torch import nn -from tqdm import tqdm - -from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic -from src.diff.net import DiffNet -from training.diffsinger 
import Batch2Loss -from utils.hparams import hparams - - -DIFF_DECODERS = { - 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), -} - - -def exists(x): - return x is not None - - -def default(val, d): - if exists(val): - return val - return d() if isfunction(d) else d - - -def extract(a, t, x_shape): - b, *_ = t.shape - out = a.gather(-1, t) - return out.reshape(b, *((1,) * (len(x_shape) - 1))) - - -def noise_like(shape, device, repeat=False): - repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) - noise = lambda: torch.randn(shape, device=device) - return repeat_noise() if repeat else noise() - - -def linear_beta_schedule(timesteps, max_beta=hparams.get('max_beta', 0.01)): - """ - linear schedule - """ - betas = np.linspace(1e-4, max_beta, timesteps) - return betas - - -def cosine_beta_schedule(timesteps, s=0.008): - """ - cosine schedule - as proposed in https://openreview.net/forum?id=-NEXDKk8gZ - """ - steps = timesteps + 1 - x = np.linspace(0, steps, steps) - alphas_cumprod = np.cos(((x / steps) + s) / (1 + s) * np.pi * 0.5) ** 2 - alphas_cumprod = alphas_cumprod / alphas_cumprod[0] - betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1]) - return np.clip(betas, a_min=0, a_max=0.999) - - -beta_schedule = { - "cosine": cosine_beta_schedule, - "linear": linear_beta_schedule, -} - - -class GaussianDiffusion(nn.Module): - def __init__(self, phone_encoder, out_dims, timesteps=1000, K_step=1000, - loss_type=hparams.get('diff_loss_type', 'l1'), betas=None, spec_min=None, - spec_max=None): - super().__init__() - self.denoise_fn = DIFF_DECODERS[hparams['diff_decoder_type']](hparams) - self.fs2 = FastSpeech2Acoustic(phone_encoder) - self.mel_bins = out_dims - - if exists(betas): - betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas - else: - if 'schedule_type' in hparams.keys(): - betas = beta_schedule[hparams['schedule_type']](timesteps) - else: - betas = cosine_beta_schedule(timesteps) - - alphas = 1. - betas - alphas_cumprod = np.cumprod(alphas, axis=0) - alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) - - timesteps, = betas.shape - self.num_timesteps = int(timesteps) - self.K_step = K_step - self.loss_type = loss_type - - self.noise_list = deque(maxlen=4) - - to_torch = partial(torch.tensor, dtype=torch.float32) - - self.register_buffer('betas', to_torch(betas)) - self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) - self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) - - # calculations for diffusion q(x_t | x_{t-1}) and others - self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) - self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) - self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) - self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) - self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) - - # calculations for posterior q(x_{t-1} | x_t, x_0) - posterior_variance = betas * (1. - alphas_cumprod_prev) / (1. - alphas_cumprod) - # above: equal to 1. / (1. / (1. 
- alpha_cumprod_tm1) + alpha_t / beta_t) - self.register_buffer('posterior_variance', to_torch(posterior_variance)) - # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain - self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) - self.register_buffer('posterior_mean_coef1', to_torch( - betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) - self.register_buffer('posterior_mean_coef2', to_torch( - (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod))) - - self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']]) - self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']]) - - def q_mean_variance(self, x_start, t): - mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start - variance = extract(1. - self.alphas_cumprod, t, x_start.shape) - log_variance = extract(self.log_one_minus_alphas_cumprod, t, x_start.shape) - return mean, variance, log_variance - - def predict_start_from_noise(self, x_t, t, noise): - return ( - extract(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - - extract(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise - ) - - def q_posterior(self, x_start, x_t, t): - posterior_mean = ( - extract(self.posterior_mean_coef1, t, x_t.shape) * x_start + - extract(self.posterior_mean_coef2, t, x_t.shape) * x_t - ) - posterior_variance = extract(self.posterior_variance, t, x_t.shape) - posterior_log_variance_clipped = extract(self.posterior_log_variance_clipped, t, x_t.shape) - return posterior_mean, posterior_variance, posterior_log_variance_clipped - - def p_mean_variance(self, x, t, cond, clip_denoised: bool): - noise_pred = self.denoise_fn(x, t, cond=cond) - x_recon = self.predict_start_from_noise(x, t=t, noise=noise_pred) - - if clip_denoised: - x_recon.clamp_(-1., 1.) - - model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) - return model_mean, posterior_variance, posterior_log_variance - - @torch.no_grad() - def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False): - b, *_, device = *x.shape, x.device - model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, cond=cond, clip_denoised=clip_denoised) - noise = noise_like(x.shape, device, repeat_noise) - # no noise when t == 0 - nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) - return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise - - @torch.no_grad() - def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=False): - """ - Use the PLMS method from [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778). 
- """ - - def get_x_pred(x, noise_t, t): - a_t = extract(self.alphas_cumprod, t, x.shape) - a_prev = extract(self.alphas_cumprod, torch.max(t-interval, torch.zeros_like(t)), x.shape) - a_t_sq, a_prev_sq = a_t.sqrt(), a_prev.sqrt() - - x_delta = (a_prev - a_t) * ((1 / (a_t_sq * (a_t_sq + a_prev_sq))) * x - 1 / (a_t_sq * (((1 - a_prev) * a_t).sqrt() + ((1 - a_t) * a_prev).sqrt())) * noise_t) - x_pred = x + x_delta - - return x_pred - - noise_list = self.noise_list - noise_pred = self.denoise_fn(x, t, cond=cond) - - if len(noise_list) == 0: - x_pred = get_x_pred(x, noise_pred, t) - noise_pred_prev = self.denoise_fn(x_pred, max(t-interval, 0), cond=cond) - noise_pred_prime = (noise_pred + noise_pred_prev) / 2 - elif len(noise_list) == 1: - noise_pred_prime = (3 * noise_pred - noise_list[-1]) / 2 - elif len(noise_list) == 2: - noise_pred_prime = (23 * noise_pred - 16 * noise_list[-1] + 5 * noise_list[-2]) / 12 - elif len(noise_list) >= 3: - noise_pred_prime = (55 * noise_pred - 59 * noise_list[-1] + 37 * noise_list[-2] - 9 * noise_list[-3]) / 24 - - x_prev = get_x_pred(x, noise_pred_prime, t) - noise_list.append(noise_pred) - - return x_prev - - def q_sample(self, x_start, t, noise=None): - noise = default(noise, lambda: torch.randn_like(x_start)) - return ( - extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + - extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise - ) - - def p_losses(self, x_start, t, cond, noise=None, nonpadding=None): - noise = default(noise, lambda: torch.randn_like(x_start)) - - x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) - x_recon = self.denoise_fn(x_noisy, t, cond) - - if self.loss_type == 'l1': - if nonpadding is not None: - loss = ((noise - x_recon).abs() * nonpadding.unsqueeze(1)).mean() - else: - # print('are you sure w/o nonpadding?') - loss = (noise - x_recon).abs().mean() - - elif self.loss_type == 'l2': - loss = F.mse_loss(noise, x_recon) - else: - raise NotImplementedError() - - return loss - - def forward(self, txt_tokens, mel2ph=None, spk_embed=None, - ref_mels=None, f0=None, energy=None, infer=False, **kwargs): - ''' - conditioning diffusion, use fastspeech2 encoder output as the condition - ''' - ret = self.fs2(txt_tokens, mel2ph=mel2ph, f0=f0, spk_embed_id=spk_embed, infer=infer, **kwargs) - cond = ret['decoder_inp'].transpose(1, 2) - b, *_, device = *txt_tokens.shape, txt_tokens.device - - if not infer: - Batch2Loss.module4( - self.p_losses, - self.norm_spec(ref_mels), cond, ret, self.K_step, b, device - ) - else: - t = self.K_step - shape = (cond.shape[0], 1, self.mel_bins, cond.shape[2]) - x = torch.randn(shape, device=device) - if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1: - # obsolete: pndm_speedup, now use dpm_solver. - # self.noise_list = deque(maxlen=4) - # iteration_interval = hparams['pndm_speedup'] - # for i in tqdm(reversed(range(0, t, iteration_interval)), desc='sample time step', - # total=t // iteration_interval): - # x = self.p_sample_plms(x, torch.full((b,), i, device=device, dtype=torch.long), iteration_interval, - # cond) - - from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver - ## 1. Define the noise schedule. - noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) - - ## 2. Convert your discrete-time `model` to the continuous-time - # noise prediction model. Here is an example for a diffusion model - ## `model` with the noise prediction type ("noise") . 
- def my_wrapper(fn): - def wrapped(x, t, **kwargs): - ret = fn(x, t, **kwargs) - self.bar.update(1) - return ret - return wrapped - - model_fn = model_wrapper( - my_wrapper(self.denoise_fn), - noise_schedule, - model_type="noise", # or "x_start" or "v" or "score" - model_kwargs={"cond": cond} - ) - - ## 3. Define dpm-solver and sample by singlestep DPM-Solver. - ## (We recommend singlestep DPM-Solver for unconditional sampling) - ## You can adjust the `steps` to balance the computation - ## costs and the sample quality. - dpm_solver = DPM_Solver(model_fn, noise_schedule) - - steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps) - x = dpm_solver.sample( - x, - steps=steps, - order=3, - skip_type="time_uniform", - method="singlestep", - ) - self.bar.close() - else: - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t): - x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) - x = x[:, 0].transpose(1, 2) - if mel2ph is not None: # for singing - ret['mel_out'] = self.denorm_spec(x) * ((mel2ph > 0).float()[:, :, None]) - else: - ret['mel_out'] = self.denorm_spec(x) - return ret - - def norm_spec(self, x): - return (x - self.spec_min) / (self.spec_max - self.spec_min) * 2 - 1 - - def denorm_spec(self, x): - return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min - - def cwt2f0_norm(self, cwt_spec, mean, std, mel2ph): - return self.fs2.cwt2f0_norm(cwt_spec, mean, std, mel2ph) - - def out2mel(self, x): - return x +from collections import deque +from functools import partial +from inspect import isfunction + +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn +from tqdm import tqdm + +from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic +from modules.diff.wavenet import DiffNet +from utils.hparams import hparams + + +DIFF_DENOISERS = { + 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), +} + + +def exists(x): + return x is not None + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def extract(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() + + +def linear_beta_schedule(timesteps, max_beta=hparams.get('max_beta', 0.01)): + """ + linear schedule + """ + betas = np.linspace(1e-4, max_beta, timesteps) + return betas + + +def cosine_beta_schedule(timesteps, s=0.008): + """ + cosine schedule + as proposed in https://openreview.net/forum?id=-NEXDKk8gZ + """ + steps = timesteps + 1 + x = np.linspace(0, steps, steps) + alphas_cumprod = np.cos(((x / steps) + s) / (1 + s) * np.pi * 0.5) ** 2 + alphas_cumprod = alphas_cumprod / alphas_cumprod[0] + betas = 1 - (alphas_cumprod[1:] / alphas_cumprod[:-1]) + return np.clip(betas, a_min=0, a_max=0.999) + + +beta_schedule = { + "cosine": cosine_beta_schedule, + "linear": linear_beta_schedule, +} + + +class GaussianDiffusion(nn.Module): + def __init__(self, vocab_size, out_dims, timesteps=1000, K_step=1000, + loss_type=hparams.get('diff_loss_type', 'l1'), betas=None, spec_min=None, + spec_max=None): + super().__init__() + self.denoise_fn = DIFF_DENOISERS[hparams['diff_decoder_type']](hparams) + self.fs2 = 
FastSpeech2Acoustic(vocab_size=vocab_size) + self.mel_bins = out_dims + + if exists(betas): + betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas + else: + if 'schedule_type' in hparams.keys(): + betas = beta_schedule[hparams['schedule_type']](timesteps) + else: + betas = cosine_beta_schedule(timesteps) + + alphas = 1. - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.K_step = K_step + self.loss_type = loss_type + + self.noise_list = deque(maxlen=4) + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer('betas', to_torch(betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + posterior_variance = betas * (1. - alphas_cumprod_prev) / (1. - alphas_cumprod) + # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t) + self.register_buffer('posterior_variance', to_torch(posterior_variance)) + # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain + self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) + self.register_buffer('posterior_mean_coef1', to_torch( + betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) + self.register_buffer('posterior_mean_coef2', to_torch( + (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod))) + + self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']]) + self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']]) + + def q_mean_variance(self, x_start, t): + mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + variance = extract(1. - self.alphas_cumprod, t, x_start.shape) + log_variance = extract(self.log_one_minus_alphas_cumprod, t, x_start.shape) + return mean, variance, log_variance + + def predict_start_from_noise(self, x_t, t, noise): + return ( + extract(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - + extract(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise + ) + + def q_posterior(self, x_start, x_t, t): + posterior_mean = ( + extract(self.posterior_mean_coef1, t, x_t.shape) * x_start + + extract(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = extract(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = extract(self.posterior_log_variance_clipped, t, x_t.shape) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance(self, x, t, cond, clip_denoised: bool): + noise_pred = self.denoise_fn(x, t, cond=cond) + x_recon = self.predict_start_from_noise(x, t=t, noise=noise_pred) + + if clip_denoised: + x_recon.clamp_(-1., 1.) 
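+        # x_recon above is the denoiser's estimate of x_0 (optionally clamped into the
+        # normalized spec range [-1, 1]); it parameterizes q(x_{t-1} | x_t, x_0) below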
+ + model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False): + b, *_, device = *x.shape, x.device + model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, cond=cond, clip_denoised=clip_denoised) + noise = noise_like(x.shape, device, repeat_noise) + # no noise when t == 0 + nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=False): + """ + Use the PLMS method from [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778). + """ + + def get_x_pred(x, noise_t, t): + a_t = extract(self.alphas_cumprod, t, x.shape) + a_prev = extract(self.alphas_cumprod, torch.max(t-interval, torch.zeros_like(t)), x.shape) + a_t_sq, a_prev_sq = a_t.sqrt(), a_prev.sqrt() + + x_delta = (a_prev - a_t) * ((1 / (a_t_sq * (a_t_sq + a_prev_sq))) * x - 1 / (a_t_sq * (((1 - a_prev) * a_t).sqrt() + ((1 - a_t) * a_prev).sqrt())) * noise_t) + x_pred = x + x_delta + + return x_pred + + noise_list = self.noise_list + noise_pred = self.denoise_fn(x, t, cond=cond) + + if len(noise_list) == 0: + x_pred = get_x_pred(x, noise_pred, t) + noise_pred_prev = self.denoise_fn(x_pred, max(t-interval, 0), cond=cond) + noise_pred_prime = (noise_pred + noise_pred_prev) / 2 + elif len(noise_list) == 1: + noise_pred_prime = (3 * noise_pred - noise_list[-1]) / 2 + elif len(noise_list) == 2: + noise_pred_prime = (23 * noise_pred - 16 * noise_list[-1] + 5 * noise_list[-2]) / 12 + elif len(noise_list) >= 3: + noise_pred_prime = (55 * noise_pred - 59 * noise_list[-1] + 37 * noise_list[-2] - 9 * noise_list[-3]) / 24 + + x_prev = get_x_pred(x, noise_pred_prime, t) + noise_list.append(noise_pred) + + return x_prev + + def q_sample(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + return ( + extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise + ) + + def p_losses(self, x_start, t, cond, noise=None, nonpadding=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + x_recon = self.denoise_fn(x_noisy, t, cond) + + if self.loss_type == 'l1': + if nonpadding is not None: + loss = ((noise - x_recon).abs() * nonpadding.unsqueeze(1)).mean() + else: + # print('are you sure w/o nonpadding?') + loss = (noise - x_recon).abs().mean() + + elif self.loss_type == 'l2': + loss = F.mse_loss(noise, x_recon) + else: + raise NotImplementedError() + + return loss + + def forward(self, txt_tokens, mel2ph=None, spk_embed=None, + ref_mels=None, f0=None, infer=False, **kwargs): + """ + conditioning diffusion, use fastspeech2 encoder output as the condition + """ + ret = self.fs2(txt_tokens, mel2ph=mel2ph, f0=f0, spk_embed_id=spk_embed, infer=infer, **kwargs) + cond = ret['decoder_inp'].transpose(1, 2) + b, *_, device = *txt_tokens.shape, txt_tokens.device + + if not infer: + spec = self.norm_spec(ref_mels) + t = torch.randint(0, self.K_step, (b,), device=device).long() + norm_spec = spec.transpose(1, 2)[:, None, :, :] # [B, 1, M, T] + ret['diff_loss'] = self.p_losses(norm_spec, t, cond=cond) + else: + t = self.K_step + shape 
= (cond.shape[0], 1, self.mel_bins, cond.shape[2]) + x = torch.randn(shape, device=device) + if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1: + # obsolete: pndm_speedup, now use dpm_solver. + # self.noise_list = deque(maxlen=4) + # iteration_interval = hparams['pndm_speedup'] + # for i in tqdm(reversed(range(0, t, iteration_interval)), desc='sample time step', + # total=t // iteration_interval): + # x = self.p_sample_plms(x, torch.full((b,), i, device=device, dtype=torch.long), iteration_interval, + # cond) + + from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver + ## 1. Define the noise schedule. + noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) + + ## 2. Convert your discrete-time `model` to the continuous-time + # noise prediction model. Here is an example for a diffusion model + ## `model` with the noise prediction type ("noise") . + def my_wrapper(fn): + def wrapped(x, t, **kwargs): + ret = fn(x, t, **kwargs) + self.bar.update(1) + return ret + return wrapped + + model_fn = model_wrapper( + my_wrapper(self.denoise_fn), + noise_schedule, + model_type="noise", # or "x_start" or "v" or "score" + model_kwargs={"cond": cond} + ) + + ## 3. Define dpm-solver and sample by singlestep DPM-Solver. + ## (We recommend singlestep DPM-Solver for unconditional sampling) + ## You can adjust the `steps` to balance the computation + ## costs and the sample quality. + dpm_solver = DPM_Solver(model_fn, noise_schedule) + + steps = t // hparams["pndm_speedup"] + self.bar = tqdm(desc="sample time step", total=steps) + x = dpm_solver.sample( + x, + steps=steps, + order=3, + skip_type="time_uniform", + method="singlestep", + ) + self.bar.close() + else: + for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t): + x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) + x = x[:, 0].transpose(1, 2) + if mel2ph is not None: # for singing + ret['mel_out'] = self.denorm_spec(x) * ((mel2ph > 0).float()[:, :, None]) + else: + ret['mel_out'] = self.denorm_spec(x) + return ret + + def norm_spec(self, x): + return (x - self.spec_min) / (self.spec_max - self.spec_min) * 2 - 1 + + def denorm_spec(self, x): + return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min + + def cwt2f0_norm(self, cwt_spec, mean, std, mel2ph): + return self.fs2.cwt2f0_norm(cwt_spec, mean, std, mel2ph) + + def out2mel(self, x): + return x diff --git a/src/diff/net.py b/modules/diff/wavenet.py similarity index 97% rename from src/diff/net.py rename to modules/diff/wavenet.py index 13f2efaad..45f47c0cf 100644 --- a/src/diff/net.py +++ b/modules/diff/wavenet.py @@ -9,9 +9,6 @@ from utils.hparams import hparams from modules.commons.common_layers import Mish -Linear = nn.Linear -ConvTranspose2d = nn.ConvTranspose2d - class AttrDict(dict): def __init__(self, *args, **kwargs): @@ -62,7 +59,7 @@ class ResidualBlock(nn.Module): def __init__(self, encoder_hidden, residual_channels, dilation): super().__init__() self.dilated_conv = Conv1d(residual_channels, 2 * residual_channels, 3, padding=dilation, dilation=dilation) - self.diffusion_projection = Linear(residual_channels, residual_channels) + self.diffusion_projection = nn.Linear(residual_channels, residual_channels) self.conditioner_projection = Conv1d(encoder_hidden, 2 * residual_channels, 1) self.output_projection = Conv1d(residual_channels, 2 * residual_channels, 1) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 
61c899f03..d4ac65741 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -37,9 +37,9 @@ def forward(self, txt_tokens, dur_embed): return x class FastSpeech2Acoustic(nn.Module): - def __init__(self, dictionary): + def __init__(self, vocab_size): super().__init__() - self.txt_embed = Embedding(len(dictionary), hparams['hidden_size'], PAD_INDEX) + self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) self.dur_embed = Linear(1, hparams['hidden_size']) self.encoder = FastSpeech2AcousticEncoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], diff --git a/modules/nsf_hifigan/env.py b/modules/nsf_hifigan/env.py index 26e0961e6..b4b8de71f 100644 --- a/modules/nsf_hifigan/env.py +++ b/modules/nsf_hifigan/env.py @@ -9,10 +9,3 @@ def __init__(self, *args, **kwargs): def __getattr__(self, item): return self[item] - - -def build_env(config, config_name, path): - t_path = os.path.join(path, config_name) - if config != t_path: - os.makedirs(path, exist_ok=True) - shutil.copyfile(config, os.path.join(path, config_name)) diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 6f24f617a..1c3006d61 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -1,12 +1,14 @@ -import os import json -from .env import AttrDict +import os + import numpy as np import torch -import torch.nn.functional as F import torch.nn as nn -from torch.nn import Conv1d, ConvTranspose1d, AvgPool1d, Conv2d -from torch.nn.utils import weight_norm, remove_weight_norm, spectral_norm +import torch.nn.functional as F +from torch.nn import Conv1d, ConvTranspose1d +from torch.nn.utils import weight_norm, remove_weight_norm + +from .env import AttrDict from .utils import init_weights, get_padding LRELU_SLOPE = 0.1 @@ -279,157 +281,3 @@ def remove_weight_norm(self): l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) - - -class DiscriminatorP(torch.nn.Module): - def __init__(self, period, kernel_size=5, stride=3, use_spectral_norm=False): - super(DiscriminatorP, self).__init__() - self.period = period - norm_f = weight_norm if use_spectral_norm == False else spectral_norm - self.convs = nn.ModuleList([ - norm_f(Conv2d(1, 32, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(32, 128, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(128, 512, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(512, 1024, (kernel_size, 1), (stride, 1), padding=(get_padding(5, 1), 0))), - norm_f(Conv2d(1024, 1024, (kernel_size, 1), 1, padding=(2, 0))), - ]) - self.conv_post = norm_f(Conv2d(1024, 1, (3, 1), 1, padding=(1, 0))) - - def forward(self, x): - fmap = [] - - # 1d to 2d - b, c, t = x.shape - if t % self.period != 0: # pad first - n_pad = self.period - (t % self.period) - x = F.pad(x, (0, n_pad), "reflect") - t = t + n_pad - x = x.view(b, c, t // self.period, self.period) - - for l in self.convs: - x = l(x) - x = F.leaky_relu(x, LRELU_SLOPE) - fmap.append(x) - x = self.conv_post(x) - fmap.append(x) - x = torch.flatten(x, 1, -1) - - return x, fmap - - -class MultiPeriodDiscriminator(torch.nn.Module): - def __init__(self, periods=None): - super(MultiPeriodDiscriminator, self).__init__() - self.periods = periods if periods is not None else [2, 3, 5, 7, 11] - self.discriminators = nn.ModuleList() - for period in self.periods: - self.discriminators.append(DiscriminatorP(period)) 
- - def forward(self, y, y_hat): - y_d_rs = [] - y_d_gs = [] - fmap_rs = [] - fmap_gs = [] - for i, d in enumerate(self.discriminators): - y_d_r, fmap_r = d(y) - y_d_g, fmap_g = d(y_hat) - y_d_rs.append(y_d_r) - fmap_rs.append(fmap_r) - y_d_gs.append(y_d_g) - fmap_gs.append(fmap_g) - - return y_d_rs, y_d_gs, fmap_rs, fmap_gs - - -class DiscriminatorS(torch.nn.Module): - def __init__(self, use_spectral_norm=False): - super(DiscriminatorS, self).__init__() - norm_f = weight_norm if use_spectral_norm == False else spectral_norm - self.convs = nn.ModuleList([ - norm_f(Conv1d(1, 128, 15, 1, padding=7)), - norm_f(Conv1d(128, 128, 41, 2, groups=4, padding=20)), - norm_f(Conv1d(128, 256, 41, 2, groups=16, padding=20)), - norm_f(Conv1d(256, 512, 41, 4, groups=16, padding=20)), - norm_f(Conv1d(512, 1024, 41, 4, groups=16, padding=20)), - norm_f(Conv1d(1024, 1024, 41, 1, groups=16, padding=20)), - norm_f(Conv1d(1024, 1024, 5, 1, padding=2)), - ]) - self.conv_post = norm_f(Conv1d(1024, 1, 3, 1, padding=1)) - - def forward(self, x): - fmap = [] - for l in self.convs: - x = l(x) - x = F.leaky_relu(x, LRELU_SLOPE) - fmap.append(x) - x = self.conv_post(x) - fmap.append(x) - x = torch.flatten(x, 1, -1) - - return x, fmap - - -class MultiScaleDiscriminator(torch.nn.Module): - def __init__(self): - super(MultiScaleDiscriminator, self).__init__() - self.discriminators = nn.ModuleList([ - DiscriminatorS(use_spectral_norm=True), - DiscriminatorS(), - DiscriminatorS(), - ]) - self.meanpools = nn.ModuleList([ - AvgPool1d(4, 2, padding=2), - AvgPool1d(4, 2, padding=2) - ]) - - def forward(self, y, y_hat): - y_d_rs = [] - y_d_gs = [] - fmap_rs = [] - fmap_gs = [] - for i, d in enumerate(self.discriminators): - if i != 0: - y = self.meanpools[i - 1](y) - y_hat = self.meanpools[i - 1](y_hat) - y_d_r, fmap_r = d(y) - y_d_g, fmap_g = d(y_hat) - y_d_rs.append(y_d_r) - fmap_rs.append(fmap_r) - y_d_gs.append(y_d_g) - fmap_gs.append(fmap_g) - - return y_d_rs, y_d_gs, fmap_rs, fmap_gs - - -def feature_loss(fmap_r, fmap_g): - loss = 0 - for dr, dg in zip(fmap_r, fmap_g): - for rl, gl in zip(dr, dg): - loss += torch.mean(torch.abs(rl - gl)) - - return loss * 2 - - -def discriminator_loss(disc_real_outputs, disc_generated_outputs): - loss = 0 - r_losses = [] - g_losses = [] - for dr, dg in zip(disc_real_outputs, disc_generated_outputs): - r_loss = torch.mean((1 - dr) ** 2) - g_loss = torch.mean(dg ** 2) - loss += (r_loss + g_loss) - r_losses.append(r_loss.item()) - g_losses.append(g_loss.item()) - - return loss, r_losses, g_losses - - -def generator_loss(disc_outputs): - loss = 0 - gen_losses = [] - for dg in disc_outputs: - l = torch.mean((1 - dg) ** 2) - gen_losses.append(l) - loss += l - - return loss, gen_losses diff --git a/modules/nsf_hifigan/nvSTFT.py b/modules/nsf_hifigan/nvSTFT.py index 99ca6437b..b2d509308 100644 --- a/modules/nsf_hifigan/nvSTFT.py +++ b/modules/nsf_hifigan/nvSTFT.py @@ -1,14 +1,10 @@ -import math import os os.environ["LRU_CACHE_CAPACITY"] = "3" -import random import torch import torch.utils.data import numpy as np import librosa -from librosa.util import normalize from librosa.filters import mel as librosa_mel_fn -from scipy.io.wavfile import read import soundfile as sf import torch.nn.functional as F @@ -56,7 +52,7 @@ def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): def dynamic_range_decompression_torch(x, C=1): return torch.exp(x) / C -class STFT(): +class STFT: def __init__(self, sr=22050, n_mels=80, n_fft=1024, win_size=1024, hop_length=256, fmin=20, fmax=11025, clip_val=1e-5): 
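         # sr is the target sample rate; n_fft, win_size and hop_length define the
         # STFT frames; n_mels, fmin and fmax configure the mel filterbank; clip_val
         # is a small floor used to avoid log(0) in dynamic range compression.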
self.target_sr = sr @@ -124,5 +120,3 @@ def __call__(self, audiopath): audio, sr = load_wav_to_torch(audiopath, target_sr=self.target_sr) spect = self.get_mel(audio.unsqueeze(0)).squeeze(0) return spect - -stft = STFT() diff --git a/modules/nsf_hifigan/utils.py b/modules/nsf_hifigan/utils.py index 84bff024f..ea12791b8 100644 --- a/modules/nsf_hifigan/utils.py +++ b/modules/nsf_hifigan/utils.py @@ -1,22 +1,6 @@ -import glob -import os import matplotlib -import torch -from torch.nn.utils import weight_norm -matplotlib.use("Agg") -import matplotlib.pylab as plt - - -def plot_spectrogram(spectrogram): - fig, ax = plt.subplots(figsize=(10, 2)) - im = ax.imshow(spectrogram, aspect="auto", origin="lower", - interpolation='none') - plt.colorbar(im, ax=ax) - fig.canvas.draw() - plt.close() - - return fig +matplotlib.use("Agg") def init_weights(m, mean=0.0, std=0.01): @@ -25,44 +9,5 @@ def init_weights(m, mean=0.0, std=0.01): m.weight.data.normal_(mean, std) -def apply_weight_norm(m): - classname = m.__class__.__name__ - if classname.find("Conv") != -1: - weight_norm(m) - - def get_padding(kernel_size, dilation=1): return int((kernel_size*dilation - dilation)/2) - - -def load_checkpoint(filepath, device): - assert os.path.isfile(filepath) - print("Loading '{}'".format(filepath)) - checkpoint_dict = torch.load(filepath, map_location=device) - print("Complete.") - return checkpoint_dict - - -def save_checkpoint(filepath, obj): - print("Saving checkpoint to {}".format(filepath)) - torch.save(obj, filepath) - print("Complete.") - - -def del_old_checkpoints(cp_dir, prefix, n_models=2): - pattern = os.path.join(cp_dir, prefix + '????????') - cp_list = glob.glob(pattern) # get checkpoint paths - cp_list = sorted(cp_list)# sort by iter - if len(cp_list) > n_models: # if more than n_models models are found - for cp in cp_list[:-n_models]:# delete the oldest models other than lastest n_models - open(cp, 'w').close()# empty file contents - os.unlink(cp)# delete file (move to trash when using Colab) - - -def scan_checkpoint(cp_dir, prefix): - pattern = os.path.join(cp_dir, prefix + '????????') - cp_list = glob.glob(pattern) - if len(cp_list) == 0: - return None - return sorted(cp_list)[-1] - diff --git a/modules/vocoders/__init__.py b/modules/vocoders/__init__.py new file mode 100644 index 000000000..607db7af8 --- /dev/null +++ b/modules/vocoders/__init__.py @@ -0,0 +1,2 @@ +from modules.vocoders import ddsp +from modules.vocoders import nsf_hifigan diff --git a/src/vocoders/ddsp.py b/modules/vocoders/ddsp.py similarity index 99% rename from src/vocoders/ddsp.py rename to modules/vocoders/ddsp.py index 72a5d2da9..29243c46b 100644 --- a/src/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -6,7 +6,7 @@ import numpy as np from librosa.filters import mel as librosa_mel_fn from basics.base_vocoder import BaseVocoder -from src.vocoders.vocoder_utils import register_vocoder +from modules.vocoders.registry import register_vocoder from utils.hparams import hparams class DotDict(dict): diff --git a/src/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py similarity index 97% rename from src/vocoders/nsf_hifigan.py rename to modules/vocoders/nsf_hifigan.py index 704d61391..d232718a7 100644 --- a/src/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,102 +1,102 @@ -import os - -import torch - -from modules.nsf_hifigan.models import load_model -from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT -from basics.base_vocoder import BaseVocoder -from src.vocoders.vocoder_utils import 
register_vocoder -from utils.hparams import hparams - - -@register_vocoder -class NsfHifiGAN(BaseVocoder): - def __init__(self, device=None): - if device is None: - device = 'cuda' if torch.cuda.is_available() else 'cpu' - self.device = device - model_path = hparams['vocoder_ckpt'] - assert os.path.exists(model_path), 'HifiGAN model file is not found!' - print('| Load HifiGAN: ', model_path) - self.model, self.h = load_model(model_path, device=self.device) - - def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] - if self.h.sampling_rate != hparams['audio_sample_rate']: - print('Mismatch parameters: hparams[\'audio_sample_rate\']=', hparams['audio_sample_rate'], '!=', - self.h.sampling_rate, '(vocoder)') - if self.h.num_mels != hparams['audio_num_mel_bins']: - print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=', hparams['audio_num_mel_bins'], '!=', - self.h.num_mels, '(vocoder)') - if self.h.n_fft != hparams['fft_size']: - print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.h.n_fft, '(vocoder)') - if self.h.win_size != hparams['win_size']: - print('Mismatch parameters: hparams[\'win_size\']=', hparams['win_size'], '!=', self.h.win_size, - '(vocoder)') - if self.h.hop_size != hparams['hop_size']: - print('Mismatch parameters: hparams[\'hop_size\']=', hparams['hop_size'], '!=', self.h.hop_size, - '(vocoder)') - if self.h.fmin != hparams['fmin']: - print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.h.fmin, '(vocoder)') - if self.h.fmax != hparams['fmax']: - print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') - with torch.no_grad(): - c = mel.transpose(2, 1) # [B, T, bins] - # log10 to log mel - c = 2.30259 * c - f0 = kwargs.get('f0') # [B, T] - if f0 is not None: - y = self.model(c, f0).view(-1) - else: - y = self.model(c).view(-1) - return y - - def spec2wav(self, mel, **kwargs): - if self.h.sampling_rate != hparams['audio_sample_rate']: - print('Mismatch parameters: hparams[\'audio_sample_rate\']=', hparams['audio_sample_rate'], '!=', - self.h.sampling_rate, '(vocoder)') - if self.h.num_mels != hparams['audio_num_mel_bins']: - print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=', hparams['audio_num_mel_bins'], '!=', - self.h.num_mels, '(vocoder)') - if self.h.n_fft != hparams['fft_size']: - print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.h.n_fft, '(vocoder)') - if self.h.win_size != hparams['win_size']: - print('Mismatch parameters: hparams[\'win_size\']=', hparams['win_size'], '!=', self.h.win_size, - '(vocoder)') - if self.h.hop_size != hparams['hop_size']: - print('Mismatch parameters: hparams[\'hop_size\']=', hparams['hop_size'], '!=', self.h.hop_size, - '(vocoder)') - if self.h.fmin != hparams['fmin']: - print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.h.fmin, '(vocoder)') - if self.h.fmax != hparams['fmax']: - print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') - with torch.no_grad(): - c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(self.device) - # log10 to log mel - c = 2.30259 * c - f0 = kwargs.get('f0') - if f0 is not None: - f0 = torch.FloatTensor(f0[None, :]).to(self.device) - y = self.model(c, f0).view(-1) - else: - y = self.model(c).view(-1) - wav_out = y.cpu().numpy() - return wav_out - - @staticmethod - def wav2spec(inp_path, keyshift=0, speed=1, device=None): - if device is None: - device = 'cuda' if 
torch.cuda.is_available() else 'cpu' - sampling_rate = hparams['audio_sample_rate'] - num_mels = hparams['audio_num_mel_bins'] - n_fft = hparams['fft_size'] - win_size = hparams['win_size'] - hop_size = hparams['hop_size'] - fmin = hparams['fmin'] - fmax = hparams['fmax'] - stft = STFT(sampling_rate, num_mels, n_fft, win_size, hop_size, fmin, fmax) - with torch.no_grad(): - wav_torch, _ = load_wav_to_torch(inp_path, target_sr=stft.target_sr) - mel_torch = stft.get_mel(wav_torch.unsqueeze(0).to(device), keyshift=keyshift, speed=speed).squeeze(0).T - # log mel to log10 mel - mel_torch = 0.434294 * mel_torch - return wav_torch.cpu().numpy(), mel_torch.cpu().numpy() +import os + +import torch + +from modules.nsf_hifigan.models import load_model +from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT +from basics.base_vocoder import BaseVocoder +from modules.vocoders.registry import register_vocoder +from utils.hparams import hparams + + +@register_vocoder +class NsfHifiGAN(BaseVocoder): + def __init__(self, device=None): + if device is None: + device = 'cuda' if torch.cuda.is_available() else 'cpu' + self.device = device + model_path = hparams['vocoder_ckpt'] + assert os.path.exists(model_path), 'HifiGAN model file is not found!' + print('| Load HifiGAN: ', model_path) + self.model, self.h = load_model(model_path, device=self.device) + + def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] + if self.h.sampling_rate != hparams['audio_sample_rate']: + print('Mismatch parameters: hparams[\'audio_sample_rate\']=', hparams['audio_sample_rate'], '!=', + self.h.sampling_rate, '(vocoder)') + if self.h.num_mels != hparams['audio_num_mel_bins']: + print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=', hparams['audio_num_mel_bins'], '!=', + self.h.num_mels, '(vocoder)') + if self.h.n_fft != hparams['fft_size']: + print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.h.n_fft, '(vocoder)') + if self.h.win_size != hparams['win_size']: + print('Mismatch parameters: hparams[\'win_size\']=', hparams['win_size'], '!=', self.h.win_size, + '(vocoder)') + if self.h.hop_size != hparams['hop_size']: + print('Mismatch parameters: hparams[\'hop_size\']=', hparams['hop_size'], '!=', self.h.hop_size, + '(vocoder)') + if self.h.fmin != hparams['fmin']: + print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.h.fmin, '(vocoder)') + if self.h.fmax != hparams['fmax']: + print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') + with torch.no_grad(): + c = mel.transpose(2, 1) # [B, T, bins] + # log10 to log mel + c = 2.30259 * c + f0 = kwargs.get('f0') # [B, T] + if f0 is not None: + y = self.model(c, f0).view(-1) + else: + y = self.model(c).view(-1) + return y + + def spec2wav(self, mel, **kwargs): + if self.h.sampling_rate != hparams['audio_sample_rate']: + print('Mismatch parameters: hparams[\'audio_sample_rate\']=', hparams['audio_sample_rate'], '!=', + self.h.sampling_rate, '(vocoder)') + if self.h.num_mels != hparams['audio_num_mel_bins']: + print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=', hparams['audio_num_mel_bins'], '!=', + self.h.num_mels, '(vocoder)') + if self.h.n_fft != hparams['fft_size']: + print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.h.n_fft, '(vocoder)') + if self.h.win_size != hparams['win_size']: + print('Mismatch parameters: hparams[\'win_size\']=', hparams['win_size'], '!=', self.h.win_size, + '(vocoder)') + if self.h.hop_size != 
hparams['hop_size']: + print('Mismatch parameters: hparams[\'hop_size\']=', hparams['hop_size'], '!=', self.h.hop_size, + '(vocoder)') + if self.h.fmin != hparams['fmin']: + print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.h.fmin, '(vocoder)') + if self.h.fmax != hparams['fmax']: + print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') + with torch.no_grad(): + c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(self.device) + # log10 to log mel + c = 2.30259 * c + f0 = kwargs.get('f0') + if f0 is not None: + f0 = torch.FloatTensor(f0[None, :]).to(self.device) + y = self.model(c, f0).view(-1) + else: + y = self.model(c).view(-1) + wav_out = y.cpu().numpy() + return wav_out + + @staticmethod + def wav2spec(inp_path, keyshift=0, speed=1, device=None): + if device is None: + device = 'cuda' if torch.cuda.is_available() else 'cpu' + sampling_rate = hparams['audio_sample_rate'] + num_mels = hparams['audio_num_mel_bins'] + n_fft = hparams['fft_size'] + win_size = hparams['win_size'] + hop_size = hparams['hop_size'] + fmin = hparams['fmin'] + fmax = hparams['fmax'] + stft = STFT(sampling_rate, num_mels, n_fft, win_size, hop_size, fmin, fmax) + with torch.no_grad(): + wav_torch, _ = load_wav_to_torch(inp_path, target_sr=stft.target_sr) + mel_torch = stft.get_mel(wav_torch.unsqueeze(0).to(device), keyshift=keyshift, speed=speed).squeeze(0).T + # log mel to log10 mel + mel_torch = 0.434294 * mel_torch + return wav_torch.cpu().numpy(), mel_torch.cpu().numpy() diff --git a/src/vocoders/vocoder_utils.py b/modules/vocoders/registry.py similarity index 100% rename from src/vocoders/vocoder_utils.py rename to modules/vocoders/registry.py diff --git a/pipelines/.gitignore b/preparation/.gitignore similarity index 100% rename from pipelines/.gitignore rename to preparation/.gitignore diff --git a/pipelines/no_midi_preparation.ipynb b/preparation/acoustic_preparation.ipynb similarity index 97% rename from pipelines/no_midi_preparation.ipynb rename to preparation/acoustic_preparation.ipynb index 20909a21f..4ee131a39 100644 --- a/pipelines/no_midi_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -480,9 +480,9 @@ "\n", "We use [Montreal Forced Aligner](https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner) to do forced phoneme alignment.\n", "\n", - "To run MFA alignment, please first run the following cell to resample all recordings to 16 kHz. The resampled recordings and copies of the phoneme labels will be saved at `pipelines/segments/`. Also, the folder `pipelines/textgrids/` will be created for temporarily storing aligned TextGrids.\n", + "To run MFA alignment, please first run the following cell to resample all recordings to 16 kHz. The resampled recordings and copies of the phoneme labels will be saved at `preparation/segments/`. Also, the folder `preparation/textgrids/` will be created for temporarily storing aligned TextGrids.\n", "\n", - "WARNING: This will overwrite all files in `pipelines/segments/` and `pipelines/textgrids/`.\n" + "WARNING: This will overwrite all files in `preparation/segments/` and `preparation/textgrids/`.\n" ] }, { @@ -592,7 +592,7 @@ "\n", "In this section, we run some scripts to reduce errors for long utterances and detect `AP`s which have not been labeled before. 
The optimized TextGrids can be saved for future use if you specify a backup directory in the following cell.\n", "\n", - "Edit the path and adjust arguments according to your needs in the following cell before you run it. Optimized results will be saved at `pipelines/textgrids/revised/`.\n" + "Edit the path and adjust arguments according to your needs in the following cell before you run it. Optimized results will be saved at `preparation/textgrids/revised/`.\n" ] }, { @@ -788,7 +788,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "`TextGrid` saved in `pipelines/textgrids/revised` can be edited via [Praat](https://github.com/praat/praat). You may examine these files and fix label errors by yourself if you want a more accurate model with higher performance. However, this is not required since manual labeling takes much time.\n", + "`TextGrid` saved in `preparation/textgrids/revised` can be edited via [Praat](https://github.com/praat/praat). You may examine these files and fix label errors by yourself if you want a more accurate model with higher performance. However, this is not required since manual labeling takes much time.\n", "\n", "Run the following cell to see summary of word-level pitch coverage of your dataset. (Data may not be accurate due to octave errors in pitch extraction.)\n" ] @@ -958,7 +958,7 @@ "source": [ "Now that the dataset and transcriptions have been saved, you can run the following cell to clean up all temporary files generated by pipelines above.\n", "\n", - "WARNING: This will remove `pipelines/segments/` and `pipelines/textgrids/` folders. You should specify a directory in the following cell to back up your TextGrids if you want them for future use.\n" + "WARNING: This will remove `preparation/segments/` and `preparation/textgrids/` folders. 
You should specify a directory in the following cell to back up your TextGrids if you want them for future use.\n" ] }, { @@ -1229,20 +1229,20 @@ "print('============ Linux ============\\n'\n", " 'export PYTHONPATH=.\\n'\n", " 'export CUDA_VISIBLE_DEVICES=0\\n'\n", - " f'python data_gen/binarize.py --config data/{full_name}/config.yaml\\n'\n", - " f'python run.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", + " f'python scripts/binarize.py --config data/{full_name}/config.yaml\\n'\n", + " f'python scripts/train.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", "\n", "print('===== Windows (PowerShell) =====\\n'\n", " '$env:PYTHONPATH=\".\"\\n'\n", " '$env:CUDA_VISIBLE_DEVICES=0\\n'\n", - " f'python data_gen/binarize.py --config data/{full_name}/config.yaml\\n'\n", - " f'python run.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", + " f'python scripts/binarize.py --config data/{full_name}/config.yaml\\n'\n", + " f'python scripts/train.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", "\n", "print('===== Windows (Command Prompt) =====\\n'\n", " 'set PYTHONPATH=.\\n'\n", " 'set CUDA_VISIBLE_DEVICES=0\\n'\n", - " f'python data_gen/binarize.py --config data/{full_name}/config.yaml\\n'\n", - " f'python run.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", + " f'python scripts/binarize.py --config data/{full_name}/config.yaml\\n'\n", + " f'python scripts/train.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", "\n", "print(f'If you want to train your model on another machine (like a remote GPU), please copy the whole \\'data/{full_name}/\\' folder.')\n" ] @@ -1531,20 +1531,20 @@ "print('============ Linux ============\\n'\n", " 'export PYTHONPATH=.\\n'\n", " 'export CUDA_VISIBLE_DEVICES=0\\n'\n", - " f'python data_gen/binarize.py --config data/{model_full_name}/config.yaml\\n'\n", - " f'python run.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", + " f'python scripts/binarize.py --config data/{model_full_name}/config.yaml\\n'\n", + " f'python scripts/train.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", "\n", "print('===== Windows (PowerShell) =====\\n'\n", " '$env:PYTHONPATH=\".\"\\n'\n", " '$env:CUDA_VISIBLE_DEVICES=0\\n'\n", - " f'python data_gen/binarize.py --config data/{model_full_name}/config.yaml\\n'\n", - " f'python run.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", + " f'python scripts/binarize.py --config data/{model_full_name}/config.yaml\\n'\n", + " f'python scripts/train.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", "\n", "print('===== Windows (Command Prompt) =====\\n'\n", " 'set PYTHONPATH=.\\n'\n", " 'set CUDA_VISIBLE_DEVICES=0\\n'\n", - " f'python data_gen/binarize.py --config data/{model_full_name}/config.yaml\\n'\n", - " f'python run.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", + " f'python scripts/binarize.py --config data/{model_full_name}/config.yaml\\n'\n", + " f'python scripts/train.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n", "\n", "print(f'To preprocess the selected datasets, please make sure these directories exist:')\n", "for d in raw_data_dirs:\n", diff --git a/pipelines/assets/2001000001.lab b/preparation/assets/2001000001.lab similarity index 100% rename 
from pipelines/assets/2001000001.lab rename to preparation/assets/2001000001.lab diff --git a/pipelines/assets/2001000001.wav b/preparation/assets/2001000001.wav similarity index 100% rename from pipelines/assets/2001000001.wav rename to preparation/assets/2001000001.wav diff --git a/pipelines/requirements.txt b/preparation/requirements.txt similarity index 100% rename from pipelines/requirements.txt rename to preparation/requirements.txt diff --git a/pipelines/utils/distribution.py b/preparation/utils/distribution.py similarity index 100% rename from pipelines/utils/distribution.py rename to preparation/utils/distribution.py diff --git a/pipelines/utils/slicer2.py b/preparation/utils/slicer2.py similarity index 100% rename from pipelines/utils/slicer2.py rename to preparation/utils/slicer2.py diff --git a/data_gen/acoustic.py b/preprocessing/acoustic.py similarity index 99% rename from data_gen/acoustic.py rename to preprocessing/acoustic.py index 9d9bbfaac..a489213bb 100644 --- a/data_gen/acoustic.py +++ b/preprocessing/acoustic.py @@ -18,9 +18,9 @@ from tqdm import tqdm from basics.base_binarizer import BaseBinarizer, BinarizationError -from data_gen.data_gen_utils import get_pitch_parselmouth, get_mel2ph_torch +from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from modules.fastspeech.tts_modules import LengthRegulator -from utils.vocoder_utils import VOCODERS +from modules.vocoders.registry import VOCODERS from utils.hparams import hparams from utils.indexed_datasets import IndexedDatasetBuilder from utils.multiprocess_utils import chunked_multiprocess_run diff --git a/data_gen/binarize.py b/scripts/binarize.py similarity index 100% rename from data_gen/binarize.py rename to scripts/binarize.py diff --git a/main.py b/scripts/infer.py similarity index 98% rename from main.py rename to scripts/infer.py index 8bd0c9a2f..2e8d88c37 100644 --- a/main.py +++ b/scripts/infer.py @@ -15,7 +15,7 @@ from utils.spk_utils import parse_commandline_spk_mix sys.path.insert(0, '/') -root_dir = os.path.dirname(os.path.abspath(__file__)) +root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) os.environ['PYTHONPATH'] = f'"{root_dir}"' parser = argparse.ArgumentParser(description='Run DiffSinger inference') diff --git a/run.py b/scripts/train.py similarity index 97% rename from run.py rename to scripts/train.py index 40d119f4b..bb2d51f47 100644 --- a/run.py +++ b/scripts/train.py @@ -1,6 +1,7 @@ import importlib -import os + from utils.hparams import set_hparams, hparams + set_hparams(print_hparams=False) def run_task(): @@ -13,4 +14,3 @@ def run_task(): if __name__ == '__main__': run_task() - diff --git a/vocode.py b/scripts/vocode.py similarity index 100% rename from vocode.py rename to scripts/vocode.py diff --git a/src/vocoders/__init__.py b/src/vocoders/__init__.py deleted file mode 100644 index 8c39a7364..000000000 --- a/src/vocoders/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from src.vocoders import ddsp -from src.vocoders import nsf_hifigan diff --git a/src/acoustic_task.py b/training/acoustic.py similarity index 98% rename from src/acoustic_task.py rename to training/acoustic.py index 620ee51b3..07d707cf7 100644 --- a/src/acoustic_task.py +++ b/training/acoustic.py @@ -14,7 +14,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder -from data_gen.data_gen_utils import get_pitch_parselmouth +from utils.binarizer_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules 
import mel2ph_to_dur from utils import audio from utils.hparams import hparams @@ -24,8 +24,8 @@ from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder -from .diff.diffusion import GaussianDiffusion -from .vocoders.vocoder_utils import get_vocoder_cls +from modules.diff.diffusion import GaussianDiffusion +from modules.vocoders.registry import get_vocoder_cls matplotlib.use('Agg') @@ -109,7 +109,7 @@ def build_phone_encoder(): def build_model(self): mel_bins = hparams['audio_num_mel_bins'] self.model = GaussianDiffusion( - phone_encoder=self.phone_encoder, + vocab_size=len(self.phone_encoder), out_dims=mel_bins, timesteps=hparams['timesteps'], K_step=hparams['K_step'], diff --git a/training/diffsinger.py b/training/diffsinger.py deleted file mode 100644 index 59565b0c2..000000000 --- a/training/diffsinger.py +++ /dev/null @@ -1,21 +0,0 @@ -import torch - - -class Batch2Loss: - @staticmethod - def module4(diff_main_loss, # modules - norm_spec, decoder_inp_t, ret, K_step, batch_size, device): # variables - ''' - training diffusion using spec as input and decoder_inp as condition. - - Args: - norm_spec: (normalized) spec - decoder_inp_t: (transposed) decoder_inp - Returns: - ret['diff_loss'] - ''' - t = torch.randint(0, K_step, (batch_size,), device=device).long() - norm_spec = norm_spec.transpose(1, 2)[:, None, :, :] # [B, 1, M, T] - ret['diff_loss'] = diff_main_loss(norm_spec, t, cond=decoder_inp_t) - # nonpadding = (mel2ph != 0).float() - # ret['diff_loss'] = self.p_losses(x, t, cond, nonpadding=nonpadding) diff --git a/data_gen/data_gen_utils.py b/utils/binarizer_utils.py similarity index 100% rename from data_gen/data_gen_utils.py rename to utils/binarizer_utils.py From 1505e2a5e44a353a7f8ee745026d2fb99b2c8459 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 22:14:07 +0800 Subject: [PATCH 057/475] Support configuring num_workers of preprocessing in notebook --- preparation/acoustic_preparation.ipynb | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index 4ee131a39..d348bf4f6 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1063,7 +1063,13 @@ "> 1. The number of data pieces applied with the $ k $th augmentation will be $ scale_{k} $ times than those not applied with the $ k $th augmentation.\n", "> 2. The number of data pieces applied with at least one type of augmentation will be $ \\sum_{i = 1}^{n} scale_{i} $ times than those not applied with any augmentation (purely raw data).\n", "\n", - "#### 4.2.3 Training and validating\n", + "#### 4.2.3 Preprocessing\n", + "\n", + "##### `binarize_num_workers`\n", + "\n", + "Multiprocessing can speed up the preprocessing but may consume more CPU, GPU and memory. This value indicates whether to enable multiprocessing, and how many workers to use if multiprocessing is enabled. 
Set this value to `0` if you do not want to use multiprocessing.\n",
+    "\n",
+    "#### 4.2.4 Training and validating\n",
     "\n",
     "##### `test_prefixes`\n",
     "\n",
@@ -1124,6 +1130,9 @@
     "    'scale': 2.\n",
     "}\n",
     "\n",
+    "# Preprocessing\n",
+    "binarize_num_workers = 0\n",
+    "\n",
     "# Training and validating\n",
     "test_prefixes = [\n",
     "\n",
@@ -1175,6 +1184,9 @@
     "    'speakers': [dataset_name],\n",
     "    'raw_data_dir': [f'data/{full_name}/raw'],\n",
     "    'binary_data_dir': f'data/{full_name}/binary',\n",
+    "    'binarization_args': {\n",
+    "        'num_workers': binarize_num_workers\n",
+    "    },\n",
     "    'residual_channels': residual_channels,\n",
     "    'residual_layers': residual_layers,\n",
     "    'f0_embed_type': f0_embed_type,\n",
@@ -1400,6 +1412,9 @@
     "    'scale': 1.5\n",
     "}\n",
     "\n",
+    "# Preprocessing\n",
+    "binarize_num_workers = 0\n",
+    "\n",
     "# Training and validating\n",
     "test_prefixes = [\n",
     "\n",
@@ -1477,6 +1492,9 @@
     "    'use_spk_id': True,\n",
     "    'raw_data_dir': raw_data_dirs,\n",
     "    'binary_data_dir': f'data/{model_full_name}/binary',\n",
+    "    'binarization_args': {\n",
+    "        'num_workers': binarize_num_workers\n",
+    "    },\n",
     "    'residual_channels': residual_channels,\n",
     "    'residual_layers': residual_layers,\n",
     "    'f0_embed_type': f0_embed_type,\n",

From b8ca1481ddbb19ba6357c9e53f82bc7f5ee8410f Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 12 Mar 2023 22:24:05 +0800
Subject: [PATCH 058/475] Update README.md

---
 README.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 236f50f43..4e3e71178 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,10 @@
 # Usage of Refactor Branch
 This is a cleaner version of Diffsinger, which provides:
-- fewer code: scripts unused in the DiffSinger are marked **\*isolated\***;
+- less code: scripts unused or obsolete in DiffSinger are removed;
 - better readability: many important functions are annotated (however, **we assume the reader already knows how the neural networks work**);
 - abstract classes: the bass classes are filtered out into the "basics/" folder and are annotated. Other classes directly inherent from the base classes.
-- re-organized project structure: pipelines are seperated into preparation, preprocessing, augmentation, training, inference and deployment
+- re-organized project structure: pipelines are separated into preparation, preprocessing, augmentation, training, inference and deployment.
+- main command-line entries are collected into the "scripts/" folder.
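+
+For example (the dataset path and experiment name below are illustrative), the preprocessing and training entry points are now invoked as:
+
+```bash
+export PYTHONPATH=.
+python scripts/binarize.py --config data/my_dataset/config.yaml
+python scripts/train.py --config data/my_dataset/config.yaml --exp_name my_experiment --reset
+```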
## Progress since we forked into this repository From d1d6b48873e8f27706cc0e748a0a62621268f3fe Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 22:48:20 +0800 Subject: [PATCH 059/475] pathlib --- scripts/infer.py | 4 ++-- scripts/vocode.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/infer.py b/scripts/infer.py index 2e8d88c37..17e7975db 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -2,6 +2,7 @@ import argparse import json import os +import pathlib import sys import numpy as np @@ -14,8 +15,7 @@ from utils.slur_utils import merge_slurs from utils.spk_utils import parse_commandline_spk_mix -sys.path.insert(0, '/') -root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) os.environ['PYTHONPATH'] = f'"{root_dir}"' parser = argparse.ArgumentParser(description='Run DiffSinger inference') diff --git a/scripts/vocode.py b/scripts/vocode.py index b9251e225..eb2604ca0 100644 --- a/scripts/vocode.py +++ b/scripts/vocode.py @@ -1,6 +1,7 @@ # coding=utf8 import argparse import os +import pathlib import sys import numpy as np @@ -12,8 +13,7 @@ from utils.audio import save_wav from utils.hparams import set_hparams, hparams -sys.path.insert(0, '/') -root_dir = os.path.dirname(os.path.abspath(__file__)) +root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) os.environ['PYTHONPATH'] = f'"{root_dir}"' parser = argparse.ArgumentParser(description='Run DiffSinger vocoder') From 08ffa85c69383a9b6f96b298e8f10684b687f15e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 23:01:33 +0800 Subject: [PATCH 060/475] Remove debug --- scripts/infer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/infer.py b/scripts/infer.py index 17e7975db..522dc75ce 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -72,7 +72,6 @@ if not args.title: name += key_suffix print(f'音调基于原音频{key_suffix}') -params = params[:1] if args.gender is not None: assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' 
From 7304f2cc29b6a4df62200f21e78841997e5dfe07 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 23:24:39 +0800 Subject: [PATCH 061/475] Fix PYTHONPATH --- scripts/infer.py | 7 ++++--- scripts/vocode.py | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/infer.py b/scripts/infer.py index 522dc75ce..38859d2d9 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -5,6 +5,10 @@ import pathlib import sys +root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) +os.environ['PYTHONPATH'] = root_dir +sys.path.insert(0, root_dir) + import numpy as np import torch @@ -15,9 +19,6 @@ from utils.slur_utils import merge_slurs from utils.spk_utils import parse_commandline_spk_mix -root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) -os.environ['PYTHONPATH'] = f'"{root_dir}"' - parser = argparse.ArgumentParser(description='Run DiffSinger inference') parser.add_argument('proj', type=str, help='Path to the input file') parser.add_argument('--exp', type=str, required=True, help='Selection of model') diff --git a/scripts/vocode.py b/scripts/vocode.py index eb2604ca0..80d667e18 100644 --- a/scripts/vocode.py +++ b/scripts/vocode.py @@ -4,6 +4,10 @@ import pathlib import sys +root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) +os.environ['PYTHONPATH'] = root_dir +sys.path.insert(0, root_dir) + import numpy as np import torch import tqdm @@ -13,9 +17,6 @@ from utils.audio import save_wav from utils.hparams import set_hparams, hparams -root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) -os.environ['PYTHONPATH'] = f'"{root_dir}"' - parser = argparse.ArgumentParser(description='Run DiffSinger vocoder') parser.add_argument('mel', type=str, help='Path to the input file') parser.add_argument('--exp', type=str, required=False, help='Read vocoder class and path from chosen experiment') From 764a65b56c4464f620292222db68287fe606599a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 12 Mar 2023 23:28:07 +0800 Subject: [PATCH 062/475] Remove example_run --- basics/base_svs_infer.py | 40 +--------------------------------------- inference/ds_cascade.py | 34 ++++------------------------------ 2 files changed, 5 insertions(+), 69 deletions(-) diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 85aeabeb9..8d6331fb7 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -6,7 +6,7 @@ from pypinyin import lazy_pinyin from modules.vocoders.registry import VOCODERS -from utils.hparams import set_hparams, hparams +from utils.hparams import hparams from utils.phoneme_utils import build_g2p_dictionary, build_phoneme_list from utils.text_encoder import TokenTextEncoder @@ -147,41 +147,3 @@ def infer_once(self, inp, return_mel=False): output = self.forward_model(inp, return_mel=return_mel) output = self.postprocess_output(output) return output - - @classmethod - def example_run(cls, inp, target='infer_out/example_out.wav'): - # settings hparams - set_hparams(print_hparams=False) - - # call the model - infer_ins = cls(hparams) - out = infer_ins.infer_once(inp) - - # output to file - os.makedirs(os.path.dirname(target), exist_ok=True) - print(f'| save audio: {target}') - from utils.audio import save_wav - save_wav(out, target, hparams['audio_sample_rate']) - -# if __name__ == '__main__': -# debug -# a = BaseSVSInfer(hparams) -# a.preprocess_input({'text': '你 说 你 不 SP 懂 为 何 在 这 时 牵 手 AP', -# 'notes': 'D#4/Eb4 | D#4/Eb4 | D#4/Eb4 | D#4/Eb4 | rest | D#4/Eb4 | D4 | D4 | D4 | D#4/Eb4 | F4 | D#4/Eb4 | D4 | rest', -# 
'notes_duration': '0.113740 | 0.329060 | 0.287950 | 0.133480 | 0.150900 | 0.484730 | 0.242010 | 0.180820 | 0.343570 | 0.152050 | 0.266720 | 0.280310 | 0.633300 | 0.444590' -# }) - -# b = { -# 'text': '小酒窝长睫毛AP是你最美的记号', -# 'notes': 'C#4/Db4 | F#4/Gb4 | G#4/Ab4 | A#4/Bb4 F#4/Gb4 | F#4/Gb4 C#4/Db4 | C#4/Db4 | rest | C#4/Db4 | A#4/Bb4 | G#4/Ab4 | A#4/Bb4 | G#4/Ab4 | F4 | C#4/Db4', -# 'notes_duration': '0.407140 | 0.376190 | 0.242180 | 0.509550 0.183420 | 0.315400 0.235020 | 0.361660 | 0.223070 | 0.377270 | 0.340550 | 0.299620 | 0.344510 | 0.283770 | 0.323390 | 0.360340' -# } -# c = { -# 'text': '小酒窝长睫毛AP是你最美的记号', -# 'ph_seq': 'x iao j iu w o ch ang ang j ie ie m ao AP sh i n i z ui m ei d e j i h ao', -# 'note_seq': 'C#4/Db4 C#4/Db4 F#4/Gb4 F#4/Gb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 F#4/Gb4 F#4/Gb4 F#4/Gb4 C#4/Db4 C#4/Db4 C#4/Db4 rest C#4/Db4 C#4/Db4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 F4 F4 C#4/Db4 C#4/Db4', -# 'note_dur_seq': '0.407140 0.407140 0.376190 0.376190 0.242180 0.242180 0.509550 0.509550 0.183420 0.315400 0.315400 0.235020 0.361660 0.361660 0.223070 0.377270 0.377270 0.340550 0.340550 0.299620 0.299620 0.344510 0.344510 0.283770 0.283770 0.323390 0.323390 0.360340 0.360340', -# 'is_slur_seq': '0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0' -# } # input like Opencpop dataset. -# a.preprocess_input(b) -# a.preprocess_input(c, input_type='phoneme') diff --git a/inference/ds_cascade.py b/inference/ds_cascade.py index 0c4ddb338..15d5741fb 100644 --- a/inference/ds_cascade.py +++ b/inference/ds_cascade.py @@ -172,7 +172,7 @@ def input_to_batch(self, item): midi_dur = torch.FloatTensor(item['midi_dur'])[None, :hparams['max_frames']].to(self.device) is_slur = torch.LongTensor(item['is_slur'])[None, :hparams['max_frames']].to(self.device) mel2ph = None - log2f0 = None + f0 = None if item['ph_dur'] is not None: print('Using manual phone duration') ph_acc = np.around( @@ -192,7 +192,7 @@ def input_to_batch(self, item): t_max = (len(f0_seq) - 1) * f0_timestep dt = hparams['hop_size'] / hparams['audio_sample_rate'] f0_interp = np.interp(np.arange(0, t_max, dt), f0_timestep * np.arange(len(f0_seq)), f0_seq) - log2f0 = torch.FloatTensor(np.log2(f0_interp))[None, :].to(self.device) + f0 = torch.FloatTensor(f0_interp)[None, :].to(self.device) else: print('Using automatic pitch curve') @@ -244,7 +244,7 @@ def input_to_batch(self, item): 'midi_dur': midi_dur, 'is_slur': is_slur, 'mel2ph': mel2ph, - 'log2f0': log2f0, + 'f0': f0, 'key_shift': key_shift, 'speed': speed } @@ -263,7 +263,7 @@ def forward_model(self, inp, return_mel=False): spk_mix_embed = None output = self.model(txt_tokens, spk_mix_embed=spk_mix_embed, ref_mels=None, infer=True, pitch_midi=sample['pitch_midi'], midi_dur=sample['midi_dur'], - is_slur=sample['is_slur'], mel2ph=sample['mel2ph'], f0=sample['log2f0'], + is_slur=sample['is_slur'], mel2ph=sample['mel2ph'], f0=sample['f0'], key_shift=sample['key_shift'], speed=sample['speed']) mel_out = output['mel_out'] # [B, T, M] f0_pred = output['f0_denorm'] @@ -272,29 +272,3 @@ def forward_model(self, inp, return_mel=False): wav_out = self.run_vocoder(mel_out, f0=f0_pred) wav_out = wav_out.cpu().numpy() return wav_out[0] - - -if __name__ == '__main__': - inp = { - "text": "SP 哥 哥 SP 哥 哥 AP 你 给 我 买 这 个 你 女 朋 友 知 道 了 SP 不 会 生 气 吧 AP 真 好 吃 SP 哥 你 尝 一 口 SP 哥 哥 AP 你 女 朋 友 要 是 知 道 我 俩 吃 一 SP 同 一 个 棒 棒 糖 你 女 朋 友 不 会 吃 醋 吧 AP 哥 哥 你 骑 着 小 SP 小 电 动 车 带 着 我 你 女 朋 友 知 道 了 SP 不 会 揍 我 吧 AP 好 可 怕 你 女 朋 友 SP 不 像 我 SP 我 只 会 心 疼 哥 啊 哥 啊 SP", - "ph_seq": "SP g e g e SP g e 
g e AP n i g e w o m ai zh ei g e n i n v p eng y ou zh i d ao l e SP b u h ui sh eng q i b a SP zh en h ao ch i SP g e n i ch ang y i k ou SP g e g e e e AP n i n v p eng y ou y ao sh i zh i d ao w o l iang ch i y i SP t ong y i g e b ang b ang t ang n i n v p eng y ou b u h ui ch i c u b a a a a a AP g e g e n i q i zh uo x iao SP x iao d ian d ong ch e d ai zh uo w o n i n v p eng y ou zh i d ao l e SP b u h ui z ou w o b a AP h ao k e p a n i n v p eng y ou SP b u x iang w o SP w o zh i h ui x in t eng g i y e g i y e e e SP", - "note_seq": "rest D#4 D#4 C4 C4 rest G4 G4 D4 D4 rest D4 D4 F4 F4 F4 F4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 F4 F4 F4 F4 F4 F4 rest D4 D4 F4 F4 F4 F4 F4 F4 A3 A3 rest E4 E4 C4 C4 C4 C4 rest G4 G4 C4 C4 D4 D4 E4 E4 C4 C4 rest C5 C5 C5 C5 A4 F4 rest C5 C5 C5 C5 C5 C5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 C5 C5 A#4 A#4 A4 A4 C5 C5 G#4 G#4 rest B4 B4 B4 B4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G4 G4 F4 F4 D#4 C4 A#3 G#3 rest D4 D4 C4 C4 C4 C4 E4 E4 F#4 F#4 C4 C4 rest C4 C4 F4 F4 E4 E4 E4 E4 F#4 F#4 D4 D4 B3 B3 E4 E4 F#4 F#4 E4 E4 F#4 F#4 F4 F4 F4 F4 C#4 C#4 rest G#4 G#4 B4 B4 A#4 A#4 F4 F4 F4 F4 rest A#4 A#4 G#4 G#4 A#4 A#4 G4 G4 G4 G4 E4 E4 D4 D4 rest C4 C4 A#3 A#3 G#3 G#3 rest D#4 D#4 C#4 C#4 D4 D4 G4 G4 G4 G4 E4 E4 E4 E4 E4 E4 E4 E4 D4 C4 rest", - "note_dur_seq": "0.4583333 0.25 0.25 0.1666667 0.1666667 0.2916666 0.1666667 0.1666667 0.2083333 0.2083333 0.8750001 0.08333325 0.08333325 0.08333325 0.08333325 0.08333349 0.08333349 0.25 0.25 0.1666665 0.1666665 0.125 0.125 0.125 0.125 0.125 0.125 0.1041667 0.1041667 0.1666667 0.1666667 0.125 0.125 0.1458333 0.1458333 0.08333349 0.08333349 0.08333302 0.08333349 0.08333349 0.2083335 0.2083335 0.375 0.375 0.333333 0.333333 0.25 0.25 2.125 0.125 0.125 0.166667 0.166667 0.125 0.125 0.08333349 0.166666 0.166666 0.125 0.125 0.166667 0.166667 0.08333302 0.08333302 0.125 0.125 1.291667 0.291667 0.291667 0.166666 0.166666 0.08333397 0.08333302 0.791667 0.125 0.125 0.166666 0.166666 0.08333397 0.08333397 0.08333302 0.08333302 0.08333302 0.08333302 0.125 0.125 0.125 0.125 0.08333397 0.08333397 0.166666 0.166666 0.166667 0.166667 0.125 0.125 0.125 0.125 0.145833 0.166667 0.166667 0.125 0.125 0.125 0.125 0.104167 0.104167 0.08333302 0.08333302 0.125 0.125 0.125 0.125 0.166667 0.166667 0.08333302 0.08333302 0.145833 0.145833 0.08333397 0.08333397 0.291666 0.291666 0.354167 0.354167 0.3125 0.3125 0.0625 0.0625 0.0625 0.0625 0.08333302 0.08333397 0.833333 0.270834 0.270834 0.1875 0.1875 0.25 0.25 0.104166 0.104166 0.208334 0.208334 0.125 0.125 0.4583321 0.145834 0.145834 0.229166 0.229166 0.2291679 0.2291679 0.229166 0.229166 0.291666 0.291666 0.08333397 0.08333397 0.104166 0.104166 0.1041679 0.1041679 0.1875 0.1875 0.08333206 0.08333206 0.1875 0.1875 0.1041679 0.1041679 0.229166 0.229166 0.125 0.125 0.5 0.104166 0.104166 0.1666679 0.1666679 0.2083321 0.2083321 0.208334 0.208334 0.291666 0.291666 0.7291679 0.2708321 0.2708321 0.208334 0.208334 0.125 0.125 0.1875 0.1875 0.208334 0.208334 0.229166 0.229166 0.229166 0.229166 0.1875 0.2916679 0.2916679 0.3125 0.3125 0.4583321 0.4583321 0.270834 0.166666 0.166666 0.1666679 0.1666679 0.166666 0.166666 0.229166 0.229166 0.3125 0.3125 0.0625 0.0625 0.270834 0.270834 0.0625 0.0625 0.270834 0.270834 0.104166 0.229166 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0", - "ph_dur": "0.398333 0.06 0.19 0.06 0.166667 0.216667 0.075 0.106667 0.06 0.208333 0.83 0.045 0.053334 0.03 0.053333 0.03 0.053334 0.03 0.175 0.075 0.140185 0.026481 0.08 0.045 0.08 0.045 0.08 0.045 0.074167 0.03 0.11887 0.047797 0.08 0.045 0.120832 0.025001 0.083333 0.038333 0.045 0.053333 0.03 0.121223 0.08711 0.255 0.12 0.273333 0.06 0.25 2.035 0.09 0.094341 0.030659 0.086668 0.079999 0.125 0.023333 0.06 0.121666 0.045 0.071647 0.053353 0.138932 0.027735 0.049297 0.034036 0.125 1.216667 0.075 0.186666 0.105 0.166666 0.083334 0.083333 0.746668 0.044999 0.08 0.045 0.106666 0.06 0.053335 0.029999 0.053332 0.030001 0.058332 0.025001 0.069697 0.055303 0.096569 0.028431 0.053333 0.030001 0.108162 0.058504 0.083734 0.082933 0.08 0.045 0.125 0.006268 0.139565 0.121667 0.045 0.08 0.045 0.08 0.045 0.074166 0.030001 0.053332 0.030001 0.088875 0.036125 0.08 0.045 0.106666 0.060001 0.053334 0.029999 0.100833 0.045 0.053335 0.029999 0.107224 0.184443 0.176946 0.177221 0.287498 0.025002 0.0625 0.0625 0.0625 0.083333 0.083334 0.773334 0.059999 0.210835 0.059999 0.1425 0.045 0.136473 0.113527 0.079165 0.025002 0.133333 0.075001 0.125 0.398333 0.059999 0.100834 0.045 0.184168 0.044998 0.154169 0.074999 0.169167 0.059999 0.216665 0.075001 0.053335 0.029999 0.05789 0.046276 0.067486 0.036682 0.127499 0.060001 0.053333 0.029999 0.094126 0.093374 0.074167 0.030001 0.169165 0.060001 0.125 0.455 0.045 0.074165 0.030001 0.106667 0.060001 0.163332 0.045 0.163336 0.044998 0.291666 0.669168 0.059999 0.180832 0.09 0.133335 0.074999 0.08 0.045 0.127501 0.059999 0.133333 0.075001 0.184168 0.044998 0.229166 0.127501 0.059999 0.201668 0.09 0.2675 0.045 0.458332 0.225834 0.045 0.106667 0.059999 0.121668 0.045 0.106667 0.059999 0.169167 0.059999 0.2675 0.045 0.032501 0.029999 0.210835 0.059999 0.047501 0.014999 0.270834 0.104166 0.229166 0.05", - "f0_timestep": "0.005", - "f0_seq": "333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.4 331.5 326.6 326. 325.6 325.2 325.2 324.8 324.1 323.4 322.8 322.7 322.7 322.4 322.2 321.8 322.2 323. 324.4 326.2 328.1 330.3 332.6 334.4 336.6 339. 341.2 342.8 344. 345. 345.4 342.7 335.9 327.1 318.4 311.5 306.3 303.3 302.5 304.3 306.8 309.2 311.6 314.1 316.5 318.9 321.3 323.8 323.6 322.1 321.4 321.5 320.9 320.2 318.8 316.3 313.5 311. 308.3 305.6 302.6 299.8 296.8 293.6 290.3 286.8 281.2 270.9 262.6 256.2 251.3 246.8 243.8 241. 238.3 234.9 231.7 229.4 227.3 224.9 222.1 220.7 218.4 218.9 221.1 223.4 225.7 227.9 230.2 232.5 234.8 237. 239.3 241.6 243.8 246.1 248.4 250.7 252.9 255.2 257.5 259.7 262. 264.3 266.5 268.8 271.1 273.4 275.6 277.9 280.2 282.4 284.7 287. 289.3 291.5 293.8 296.1 298.3 300.6 302.9 305.1 307.4 309.7 312. 314.2 316.5 318.8 321. 
323.3 325.6 327.9 330.1 332.4 334.7 336.9 339.2 341.5 343.8 346.2 349.1 353.2 358.6 364.6 370.2 374.7 378.1 380.5 386.6 393.9 399.1 402.2 404.2 406.5 408.8 410.6 411.2 410.4 406.9 400.4 392.2 387. 379.8 368. 368. 380.7 381.9 387.3 392.7 398.1 403.5 408.9 413.1 410.8 398. 394.7 392.7 390. 387.7 386.9 386. 384.4 382.6 379.9 376.6 373.2 369.9 367.2 364.3 360.9 357.5 353.2 348.6 344.4 340.4 336.1 330. 318.1 307.2 299.8 294.6 288.7 282.6 277.3 273.3 268.6 264. 260.1 256.5 252.1 247. 241.9 239.8 239. 237.4 236.2 236.6 237. 237.5 237.9 238.3 238.7 239.1 239.5 239.9 240.3 240.7 241.2 241.6 242. 242.4 242.8 243.2 243.6 244. 244.4 244.9 245.3 245.7 246.1 246.5 246.9 247.3 247.7 248.1 248.5 249. 249.4 249.8 250.2 250.6 251. 251.4 251.8 252.2 252.7 253.1 253.5 253.9 254.3 254.7 255.1 255.5 255.9 256.4 256.8 257.2 257.6 258. 258.4 258.8 259.2 259.6 260.1 260.5 260.9 261.3 261.7 262.1 262.5 262.9 263.3 263.8 264.2 264.6 265. 265.4 265.8 266.2 266.6 267. 267.5 267.9 268.3 268.7 269.1 269.5 269.9 270.3 270.7 271.2 271.6 272. 272.4 272.8 273.2 273.6 274. 274.4 274.8 275.3 275.7 276.1 276.5 276.9 277.3 277.7 278.1 278.5 279. 279.4 279.8 280.2 280.6 281. 281.4 281.8 282.2 282.7 283.1 283.5 283.9 284.3 284.7 285.1 285.5 285.9 286.4 286.8 287.2 287.6 288. 288.4 288.8 289.2 289.6 290.1 290.5 290.9 291.3 291.7 292.1 292.5 292.9 293.3 293.8 294.2 294.6 295. 295.4 295.8 296.2 296.6 297. 297.4 297.9 298.3 298.7 299.1 299.5 299.9 300.3 300.7 301.1 301.6 302. 302.4 302.8 303.2 303.6 304. 304.4 304.8 305.3 305.7 306.1 306.5 306.9 307.3 307.7 308. 307.5 299.9 295.3 294.5 293.6 292.7 291.7 291.4 290. 287.5 284.9 281.1 276.1 272.9 269.4 263.9 261.1 259. 260.5 266.8 274.6 287.1 297.7 303.6 308.4 312.5 315.6 317.8 321. 324.3 327.7 330.4 332.4 334.2 336.5 339.3 341.8 343.1 344.3 344.8 344.9 344.6 344.9 346.5 348.5 348.8 349.2 349.4 348.3 347.5 346.2 344.2 344.3 345.9 348.9 351.1 350.3 347. 342.8 337. 331.3 326.9 323.1 318.4 311.7 304.9 298.2 291.5 284.8 278.1 272.6 270.6 268.3 266. 263.7 261.6 259.8 258.4 257.3 248.1 238.3 228.5 225. 226.1 228.5 235.5 236.6 237.6 244.9 254. 263.2 272.3 281.4 290.5 299.6 308.7 317.8 326.9 334.5 336.3 337.7 338.2 338.2 337.9 337.5 337. 336.5 336.1 335.6 335.1 334.8 334.1 332.7 331.3 330. 328.5 327.4 326.2 324.7 322.7 320.3 317.9 315.9 314.6 313.8 313.7 311.1 305. 298.9 292.8 286.7 280.6 274.5 268.4 263.9 263.2 266.2 265.9 264.8 263.8 261.9 259. 256.3 254.1 252.4 251.3 250.3 249.8 249.6 249.3 248.2 247.3 248. 247.2 246.3 246.8 248.1 248.5 251.7 257.2 263.3 267. 269.5 271.3 273.2 280.1 293.2 297.8 300.5 302.4 303.9 306.4 311.1 316.6 319.8 322. 323.8 325.7 327. 328.1 330. 330.8 331. 330.6 329.1 325.8 321.7 318.3 316.3 314. 310.8 306.6 301.3 295.1 292.4 289.1 277.3 262.3 253.6 251.2 248.7 246.2 244.8 242.8 240.4 240. 241. 242.1 243.2 244.2 245.3 246.4 247.4 248.5 249.6 250.6 251.7 252.8 258.3 263.9 267.4 267.7 269.8 279.7 284. 286.4 292.8 299.6 302.9 302.6 302.4 302.9 304. 305.2 306.4 307.5 308.8 310.9 313.7 314.1 314.2 313.6 310.9 305.8 302.3 297.4 293.6 290.2 287.5 284.7 279.9 273.8 274.6 275.5 276.3 277.1 277.9 278.7 279.5 280.4 281.2 282. 282.8 283.6 284.4 285.3 286.1 286.9 287.7 288.5 289.3 291. 293.6 294.9 295.2 295.3 294.8 292.5 289.1 286.4 290.4 294.5 298.5 302.5 306.5 310.6 314.6 319.1 332. 334.8 335.6 336. 336.3 336.4 335.9 335. 333.6 332.4 330.8 328.4 325. 321.2 318.2 315.4 313.3 311.3 309.5 308. 306.5 304.8 303.6 302.1 300.1 296.9 292.2 286.7 281.6 278.9 274.3 269.4 265.4 262.4 260. 258.2 255.9 253.1 249.9 247. 244.6 242.7 241. 237.7 229.1 226.6 227.8 231.5 235. 
236.7 239.3 241.8 244.3 246.9 249.4 252. 254.5 257. 259.6 262.1 264.6 267.2 269.7 272.2 273.8 275. 276.2 276. 275. 273. 270.7 268.6 265.3 263.6 267.4 271.8 276.2 280.7 285.1 289.5 294. 298.4 301.1 305.2 310.6 312. 314.6 320.3 325.1 327.9 329.7 331. 331.4 331.7 331.6 330.5 327.7 320.3 309.3 300.4 296.9 294.8 296.7 299.6 302.6 305.6 308.6 311.6 314.6 317.6 320.6 323.6 326.6 329.6 332.6 335.6 338.6 341.6 344.6 347.6 350.6 352.7 347.6 345.6 345.6 345.5 345.5 345.7 345.8 344.6 344.2 342.5 341.5 340.7 339.9 339.1 338.8 337.9 338.4 339. 339.2 339.7 341. 341.6 343.5 344.6 346. 347.2 347.1 346.1 345.8 343.1 341.8 340. 337.8 336.3 335. 333. 332.9 334.5 336.4 338.3 340.2 342. 343.9 345.8 347.7 349.5 351.4 353.3 355.2 357. 358.9 360.8 362.7 364.5 366.4 368.3 370.1 372. 373.9 375.8 377.6 379.5 381.4 383.3 385.1 387. 388.9 390.8 392.6 394.5 396.1 396.2 391.9 393.7 393. 384.5 378.5 378.1 378.6 379. 378.4 378.4 378.2 377.2 375.6 373.9 371.9 370.5 368.4 366.3 363.9 360.1 355.6 352.3 349.7 346.8 344.3 341.6 338.8 336.6 334.7 332.7 330.4 327.2 322.1 316.9 312. 307.8 304.9 302.6 300.4 298.4 296.2 293.8 291.2 287.5 277.9 266.6 260.9 262.4 264.2 263.8 263.4 263.1 262.7 262.4 262. 260.9 258.8 257.8 254.5 252.8 250.2 248.2 246.6 245.5 245.1 245. 243.9 243. 241.7 240. 238.4 237. 235.5 234.2 233.1 232.2 231. 229.5 228. 226.5 224.8 224. 223.2 222.1 220.9 220. 219.7 219.4 219.3 219. 218.2 216.9 215.1 212.9 211.9 211.2 210.9 210.5 210.2 210. 209.7 208.9 207.6 205.7 204. 202.7 201.7 200.8 199.3 197.6 195.2 192.5 191.8 192.1 192.4 192.7 193. 193.3 193.6 193.9 194.2 194.5 194.8 195.1 195.4 195.7 196. 196.3 196.6 196.9 197.2 197.5 197.8 198.1 198.4 198.7 199. 199.3 199.6 199.9 200.2 200.5 200.8 201.1 201.4 201.7 202. 202.3 202.6 202.9 203.2 203.5 203.8 204.1 204.4 204.7 205. 205.3 205.6 205.9 206.2 206.5 206.8 207.1 207.4 207.7 208. 208.3 208.6 208.9 209.2 209.5 209.8 210.1 210.4 210.7 211. 211.3 211.6 211.9 212.2 212.5 212.8 213.1 213.4 213.7 214. 214.3 214.6 214.9 215.2 215.5 215.8 216.1 216.4 216.7 217. 217.3 217.6 217.9 218.2 218.5 218.8 219.1 219.4 219.7 220. 220.3 220.6 220.9 221.2 221.5 221.8 222.1 222.4 222.7 223. 223.3 223.6 223.9 224.2 224.5 224.8 225.1 225.4 225.7 226. 226.3 226.6 226.9 227.2 227.5 227.8 228.1 228.4 228.7 229. 229.3 229.6 229.9 230.2 230.5 230.8 231.1 231.4 231.7 232. 232.3 232.6 232.9 233.2 233.5 233.8 234.1 234.4 234.7 235. 235.3 235.6 235.9 236.2 236.5 236.8 237.1 237.4 237.7 238. 238.3 238.6 238.9 239.2 239.5 239.8 240.1 240.4 240.7 241. 241.3 241.6 241.9 242.2 242.5 242.8 243.1 243.4 243.7 244. 244.3 244.6 244.9 245.2 245.5 245.8 246.1 246.4 246.7 247. 247.3 247.6 247.9 248.2 248.5 248.8 249.1 249.4 249.7 250. 250.3 250.6 250.9 251.2 251.5 251.8 252.1 252.4 252.7 253. 253.3 253.6 253.9 254.2 254.5 254.8 255.1 255.4 255.7 256. 256.3 256.6 256.9 257.2 257.5 257.8 258.1 258.4 258.7 259. 259.3 259.6 259.9 260.2 260.5 260.8 261.1 261.4 261.7 262. 262.3 262.6 262.9 263.2 263.5 263.8 264.1 264.4 264.7 265. 265.3 265.6 265.9 266.2 266.5 266.8 267.1 267.4 267.7 268. 268.3 268.6 268.9 269.2 269.5 269.8 270.1 270.4 270.7 271. 271.3 271.6 271.9 272.2 272.5 272.8 273.1 273.4 273.7 274. 274.3 274.6 274.9 275.2 275.5 275.8 276.1 276.4 276.7 277. 277.3 277.6 277.9 278.2 278.5 278.8 279.1 279.4 279.7 280. 280.3 280.6 280.9 281.2 281.5 281.8 282.1 282.4 282.7 283. 283.3 283.6 283.9 284.2 284.5 284.8 285.1 285.4 285.7 286. 286.3 286.6 286.9 287.2 287.5 287.8 288.1 288.4 288.7 289. 289.3 289.6 289.9 290.2 290.5 290.8 291.1 291.4 291.7 292. 292.3 292.6 292.9 293.2 293.5 293.8 294.1 294.4 294.7 295. 
295.3 295.6 295.9 296.2 296.5 296.8 297.1 297.4 297.7 298. 298.3 298.6 298.9 299.2 299.5 299.8 300.1 300.4 300.7 301. 301.3 301.6 301.9 302.2 302.5 302.8 303.1 303.4 303.7 304. 304.3 304.6 304.9 305.2 305.5 305.8 306.1 306.4 306.7 307. 307.3 307.6 307.9 308.2 308.5 308.8 309.1 309.4 309.7 310. 310.3 310.6 310.9 311.2 311.5 311.8 312.1 312.4 312.7 313. 313.3 313.6 313.9 314.2 314.5 314.8 315.1 315.4 315.7 316. 316.3 316.6 316.9 317.2 317.5 317.8 318.1 318.4 318.7 319. 319.3 319.6 319.9 320.2 320.5 320.8 321.1 321.4 321.7 322. 322.3 322.6 322.9 323.2 323.1 323.4 323.7 323.5 324.3 324.2 322.9 323.1 323.5 324.1 325.4 328.2 330.4 331.4 333.8 335.4 336.6 338.2 337.9 335.7 334.5 333. 330.5 326. 318.6 308.9 301.4 296.4 291.6 287.1 282.3 277. 272.5 269.8 268.4 267.1 266.5 263.8 261.4 261.4 261.5 261.6 261.7 261.7 261.8 261.9 262. 262. 262.1 262.2 262.3 262.4 262.4 262.5 262.6 262.7 262.7 262.8 262.9 263. 263. 263.1 263.2 263.3 263.3 263.4 262.3 260.3 257.7 255.8 255.5 255.1 255.2 256.4 258.6 262.4 272.7 276.9 283.4 289.9 296.5 303. 309.5 316.1 322.6 329.1 335.7 342.2 348.7 355.3 361.8 368.3 374.9 381.4 384.8 386.2 387.4 388.4 389.4 390.4 391.3 391.5 391.2 391.3 392.2 392.6 393.4 393.6 390.4 382. 375.9 375.5 376.9 374.9 374.6 374.4 371.8 368.4 365.2 361.8 354.3 347.7 342. 336.7 332.3 328.5 324.5 320.6 316.8 312.5 308.8 303.7 297.7 295.6 293.5 291.4 289.3 287.2 285.1 283. 280.9 278.8 276.7 274.6 272.5 270.4 268.3 266.2 264.1 261.9 259.8 257.7 255.6 256.2 259. 259.3 258.6 258.3 257.3 256.5 256.2 256.4 257.1 257.9 258.9 260.9 263.4 267.3 271.7 275.5 278.8 283.8 288.3 291.8 297.2 304.1 311. 317.9 324.8 331.7 333.9 335.4 336.6 337.4 337.9 338.1 337.3 335.5 331.4 325.9 319.7 316. 314.3 312.7 311. 309.3 307.7 306. 304.3 302.7 301. 299.3 297.7 295.4 292.4 290. 290.9 290.3 288.1 281.6 274.1 266.8 258.8 254.5 250.6 248.6 248.5 248.7 247. 243.8 242.2 244.2 246.3 248.4 250.5 252.5 254.6 256.7 258.7 260.8 262.9 264.9 267. 269.1 271.1 273.2 275.3 277.3 279.4 281.5 283.5 284.7 285.7 286.8 287.9 289. 290. 291.1 292.2 293.3 294.3 295.4 296.5 297.6 298.6 299.7 300.8 301.9 302.9 304. 305.1 306.1 307.2 308.3 309.4 310.4 311.5 312.6 313.7 314.7 315.8 316.9 318. 319. 320.1 321.2 322.2 323.3 324.4 325.5 326.5 327.6 328.7 329.8 330.8 331.9 333. 334.1 335.1 336.2 337.3 338.3 339.4 340.5 341.6 342.6 343.7 344.8 345.9 346.9 348. 349.1 350.2 351.2 352.3 353.4 354.5 355.5 356.6 357.7 358.7 359.8 360.9 362. 363. 364.1 365.2 366.3 367.3 368.4 369.5 370.6 371.6 372.7 373.8 374.8 375.9 377. 378.1 379.1 380.2 381.3 382.4 383.4 384.5 385.6 386.7 387.7 388.8 389.9 390.9 392. 393.1 394.2 395.2 396.3 397.4 398.5 399.5 400.6 401.7 402.8 403.8 404.9 406. 407.1 408.1 409.2 410.3 411.3 412.4 413.5 414.6 415.6 416.7 417.8 418.9 419.9 421. 422.1 423.2 424.2 425.3 426.4 427.4 428.5 429.6 430.7 431.7 432.8 433.9 435. 436. 437.1 438.2 439.3 440.3 441.4 442.5 443.5 444.6 445.7 446.8 447.8 448.9 450. 451.1 452.1 453.2 454.3 455.4 456.4 457.5 458.6 459.7 460.7 461.8 462.9 463.9 465. 466.1 467.2 468.2 469.3 470.4 471.5 472.5 473.6 474.7 475.8 476.8 477.9 479. 480. 481.1 482.2 483.3 484.3 485.4 486.5 487.6 488.6 489.7 490.8 491.9 492.9 494. 495.1 496.1 497.2 498.3 499.4 500.4 501.5 502.6 503.7 504.7 505.8 506.9 508. 509. 510.1 511.2 512.3 513.3 514.4 515.5 516.5 517.6 518.7 519.8 520.8 521.9 523. 524.1 525.1 526.2 527.3 528.4 529.4 530.5 531.6 532.6 533.7 534.8 535.9 536.9 538. 539.1 536.9 534.2 531.6 526.1 516.3 504.8 493.7 484.8 479. 475.5 476.7 480.7 486. 490.6 494.6 499.1 505.1 510.4 514. 
516.6 518.3 519.7 521.4 523.9 525.8 527.7 529.3 531.3 534.2 537.2 540. 542.5 543.7 544.7 545.4 545.5 544.7 544.1 543.3 541.5 539.2 535.5 527.8 516.7 506. 490.2 474.4 466.4 462.4 461.1 459.6 456.4 471.1 485.9 500.6 500.4 495.2 494.9 495.5 496.1 498.2 500.2 502. 504.6 507.8 510.8 512.7 514.3 515.7 517. 518. 518.8 519.4 520.2 521.9 523.5 524.9 526.3 527.3 527.9 528.1 527.8 527.3 527. 526.7 526.5 526.2 525.9 524.8 523.9 522.4 520.7 518.7 515.7 510.8 504.5 497.7 490.5 483. 473.7 464.2 458.5 453.2 449.2 445.5 441.3 436.6 431.7 426.6 421.2 414.2 406.9 398.7 391.3 385.7 380.4 373.9 367.1 358.3 350.5 344.6 342.7 344.1 345.5 346.8 348.2 349.6 350.9 352.3 353.7 355.1 356.4 357.8 359.2 360.5 361.9 363.3 364.6 366. 367.4 368.8 370.1 371.5 372.9 374.2 375.6 377. 378.3 379.7 381.1 382.5 383.8 385.2 386.6 387.9 389.3 390.7 392. 393.4 394.8 396.2 397.5 398.9 400.3 401.6 403. 404.4 405.7 407.1 408.5 409.9 411.2 412.6 414. 415.3 416.7 418.1 419.4 420.8 422.2 423.6 424.9 426.3 427.7 429. 430.4 431.8 433.1 434.5 435.9 437.3 438.6 440. 441.4 442.7 444.1 445.5 446.8 448.2 449.6 451. 452.3 453.7 455.1 456.4 457.8 459.2 460.6 461.9 463.3 464.7 466. 467.4 468.8 470.1 471.5 472.9 474.3 475.6 477. 478.4 479.7 481.1 482.5 483.8 485.2 486.6 488. 489.3 490.7 492.1 493.4 494.8 496.2 497.5 498.9 500.3 501.7 503. 504.4 505.8 507.1 508.5 509.9 511.2 512.6 514. 515.4 516.7 518.1 519.5 520.8 522.2 523.6 524.9 526.3 527.7 529.1 530.4 531.8 533.2 534.5 535.9 537.3 538.6 540. 541.4 542.8 544.1 545.5 546.9 548.2 549.6 551. 552.3 553.7 555.1 556.5 557.8 559.2 560.6 561.9 563.3 564.7 566. 567.4 568.8 567.4 558.8 541.2 519.8 507.3 496.6 489.2 485.4 483.5 483. 483.5 484.3 485.7 487.6 491.8 501.2 513. 517.6 520. 523.1 526.3 529. 531.6 533.7 533.9 532.1 530.9 529.2 526.7 522.9 518.4 514.6 510.8 507.6 504.4 501.6 499.3 496.8 492. 483.6 479.1 476.2 472.9 469.8 466.4 461.1 453.9 445.5 432.3 420.7 413.3 411.1 415.8 411.4 410.8 410. 413.1 422.2 435.6 443.3 445.1 445.4 442.6 439. 437.2 436.6 435.6 436.6 439.1 441.5 445.1 450.3 457. 463.8 469.4 474.2 478.4 484.1 489.6 494.7 499.5 503.5 507.1 512.7 517.9 523.1 528.3 532.2 534.1 535.4 536.3 536.4 536.4 536.9 537.2 536.9 536.4 535.9 534.6 533. 530.7 528.2 525.2 520.2 514.9 508.3 502.2 496.2 487.8 478.1 470.1 471.9 474.3 476.6 478.5 480.2 481.9 483.2 483.4 482.6 480.5 477.8 474.5 470.6 466.4 461.4 457.1 454.6 452.1 449.4 451.2 458.2 460.2 462.2 464.3 466.3 468.3 470.3 472.4 474.4 476.4 478.4 476.1 474.3 473.7 472.9 472.3 471.9 471.4 470.5 469.9 469.1 469.6 473.4 480.5 488.6 496.3 500.7 502.9 504.1 504.9 505.1 505.1 504.7 503.9 501.9 498.9 495.2 489.6 484.6 481.7 479.1 477.1 475.1 472.3 467.9 461.6 455.4 449.9 446.4 443.7 442. 442. 443.4 445.3 448.7 452.1 455.5 459.2 464.7 474.5 481.2 485.8 489.2 491.1 492.7 495.1 498.6 502.6 503.2 505.7 514. 524.5 530.9 533.3 534.6 536.3 537.1 535.6 528.1 514.9 502.8 496.6 488.2 475.4 460.7 448. 439. 432.3 425.2 421.9 419.5 417.4 416.8 421.7 429.1 419.2 422.8 426.4 430. 433.6 437.2 440.8 444.4 448. 451.6 455.2 458.8 462.4 466. 469.6 473.2 476.8 480.4 484. 487.6 491.2 494.8 498.2 497.9 498.1 498.5 499.1 499.8 500.2 500.1 499.6 498.8 498. 497.2 496.4 495.7 495.1 493.6 491.5 488.9 485.8 481.8 476.3 469.9 463.7 458.1 452.8 439.7 421.6 408.6 403.5 396.4 391.1 386.4 381.5 376.3 371.6 368.1 363.9 359.2 352.1 346.6 342.9 339.4 335.4 330.4 326.5 326.9 330.4 333.9 337.1 340.3 343.5 346.7 350. 353.2 356.4 359.6 362.8 366.1 369.3 372.5 375.7 378.9 382.1 385.4 388.6 391.8 395. 398.2 401.5 404.7 407.9 411.1 414.3 417.5 411. 403.1 398. 
394.7 392.3 390.5 389.5 390.4 394.1 402.7 413.2 422.4 430.8 436.7 443.5 450.1 457. 463.8 470.7 478.2 490.8 504.2 518.4 530.4 539.2 544.8 548.4 555.9 564.3 574.6 583.3 588.7 592.5 595.9 599.3 601.2 599.7 595.5 591. 586.4 580.5 573.9 567.4 561.7 557.5 550.9 537.3 519. 509.7 503.4 490. 477.3 465.6 457.1 451.2 441.3 426.2 411.1 416.7 423.7 428.9 431.2 432.6 433.6 434.8 435.9 436.6 437.2 437.3 436.5 432.6 418.4 403.1 402.5 407.6 411.1 414.3 417.1 421.3 430.2 449. 459.5 460.7 460.1 459.5 459.2 457.9 456.9 455.8 454.7 453.8 452.8 450.9 448.6 444.9 441. 439. 438.1 436.3 435.2 432.9 430.9 425.8 420.8 416.7 414. 411.6 409.7 407.4 405.2 402.7 400.3 398.6 397.4 396.3 394.7 394.1 395.8 401.6 410. 416.5 419.4 420. 421.3 423.1 423. 419.8 415.7 412.7 407. 395.2 381.9 369.2 356.4 347.1 346.5 347.5 348.9 350.3 351.6 352.7 355.4 361.6 363.9 367.9 372.9 377. 381.6 388. 395.2 404.1 413.7 422.4 430.3 436.1 440.4 444.1 448.5 453.4 456.8 459.5 461.6 463.3 464.7 466.2 468.6 469.7 470.3 471.1 472.3 473.2 474.6 474.1 473.6 471.9 469. 465.5 461.6 454.3 445.9 440.6 435.2 430.9 427.7 424.4 419.8 411.5 399.6 391.7 387.6 384.8 382. 379.4 376.8 374.3 371.4 367.9 362.8 354.5 342.1 326.4 310.7 310.2 311.6 314.2 319. 323.2 325.9 334.5 351. 369. 379.4 383.3 386. 392.7 403. 406.6 407.6 408.1 408.8 409.7 411.7 416.3 419. 420.3 420.5 419.4 416.9 415.4 414.6 412.9 412.9 412.2 410.4 406.4 399.2 384.6 369.1 366.8 366.7 368.1 369.9 371.5 373.2 382.4 383. 375.3 366.7 362.9 359.5 357.4 360.1 360.2 364.1 368.5 372.1 376.7 382.8 386.4 390.4 395.1 399.5 402.2 403.3 404.2 405. 406.1 408.3 409.1 409.1 408.9 408.3 407.1 406.6 405.8 405. 404. 402.7 401. 397.5 393.2 388.2 382.7 388.8 398.8 402.9 403.3 403.8 404.3 404.8 405.3 405.8 406.2 406.7 407.2 407.7 408.2 408.7 409.1 409.6 410.1 410.6 411.1 411.6 412. 412.5 413. 413.5 414. 414.5 414.9 415.4 415.9 416.4 416.9 417.4 417.8 418.3 418.8 419.3 419.8 420.2 420.7 421.2 421.7 420.1 418.2 416.8 415.8 415.1 414.2 413.9 414.4 414.9 415.5 416.3 417. 417.9 419. 419.7 420. 420.1 420. 419.8 419.4 419. 418.5 417.9 417. 416.1 415.4 414.5 413. 412.1 419. 422.4 423.9 425.4 426.8 428.3 429.8 431.3 432.7 434.2 435.7 437.2 438.6 440.1 441.6 443.1 444.5 446. 447.5 449. 450.4 451.9 453.4 454.8 456.3 457.8 459.3 460.7 462.2 463.7 465.2 466.6 468.1 469.6 471.1 472.5 474. 475.5 477. 478.4 479.9 481.4 482.9 484.3 485.8 487.3 488.8 490.2 491.7 493.2 494.7 496.1 497.6 496.8 487.3 474.3 464.8 455.9 446.5 442.6 441.8 442. 443.1 443.1 443.3 443.6 443.3 442.3 441.4 439.5 437.7 435.9 434.3 432.5 430.7 428.8 426.3 423.4 420.3 416. 410.8 405.1 400.1 395.8 392.6 389.5 385.9 381.5 377.3 373.7 369.6 364.6 361.8 355.8 341.2 330.4 326.8 324.9 323.1 328.2 330.2 336. 344.6 345.1 337.5 330.8 326.2 322.1 317.9 315. 313. 311.1 308.8 305.9 302.6 300.1 298.5 297.6 296.9 296.2 295.5 294.5 293.4 292.1 290.5 288.6 286. 283.2 280.1 278.1 276.7 275.3 273.9 272.7 271.6 270.2 268.4 266.8 264.3 261.3 259.3 257.3 255.9 254.5 253.2 251.8 250.3 249. 247.6 246.2 244.7 242.3 239.9 237.7 235.9 234.6 233.6 232.5 231.1 229.5 227.8 225.7 222.8 219.1 216.5 215.2 214.1 213.2 212.4 211.5 210.2 206.6 201.6 198.9 197.4 197.3 197.9 198.5 199.1 199.8 200.4 201. 201.6 202.3 202.9 203.5 204.2 204.8 205.4 206. 206.7 207.3 207.9 208.5 209.2 209.8 210.4 211. 211.7 212.3 212.9 213.6 214.2 214.8 215.4 216.1 216.7 217.3 217.9 218.6 219.2 219.8 220.5 221.1 221.7 222.3 223. 223.6 224.2 224.8 225.5 226.1 226.7 227.3 228. 228.6 229.2 229.9 230.5 231.1 231.7 232.4 233. 233.6 234.2 234.9 235.5 236.1 236.8 237.4 238. 238.6 239.3 239.9 240.5 241.1 241.8 242.4 243. 
243.6 244.3 244.9 245.5 246.2 246.8 247.4 248. 248.7 249.3 249.9 250.5 251.2 251.8 252.4 253.1 253.7 254.3 254.9 255.6 256.2 256.8 257.4 258.1 258.7 259.3 260. 260.6 261.2 261.8 262.5 263.1 263.7 264.3 265. 265.6 266.2 266.8 267.5 268.1 268.7 269.4 270. 270.6 271.2 271.9 272.5 273.1 273.7 274.4 275. 275.6 276.3 276.9 277.5 278.1 278.8 279.4 280. 280.6 281.3 281.9 282.5 283.1 283.8 284.4 285. 285.7 286.3 286.9 287.5 288.2 288.8 289.4 290. 290.7 291.3 291.9 292.6 293.2 293.8 294.4 295.1 295.7 296.3 296.9 297.6 298.2 298.8 299.4 300.1 302.4 305.2 306.5 306.7 306.7 306.1 305.4 304.4 303.1 301.9 300.9 299.6 298.5 297.4 296.3 294.9 293.9 293.3 293. 292.8 292.9 293.1 293.2 293.4 293.7 294.3 294.8 295.9 297.3 298.9 300.5 301.5 301.7 300.9 299.4 296.3 292.4 289.6 289.6 289.5 289.5 289.4 289.4 289.3 289.3 289.2 289.2 289.2 289.1 289.1 289. 289. 288.9 288.9 288.8 288.8 291.3 293.5 294.7 295.6 296. 295.5 294.6 293.2 291.3 289.4 287.6 285.9 284.1 282.5 278.9 275.9 273.5 271.5 269.8 268.2 266.1 264.2 262.2 260.6 259.3 258.2 257.2 255.9 253.7 251.9 250.9 250.5 250.1 249.4 248.5 246.7 245.9 248.4 250.2 251.1 251.8 253.4 255.1 257.2 258.3 259.1 259.9 261.2 263.2 264.2 264.5 264.2 263.8 263.2 262.6 262.2 261.5 256.1 253.7 252.1 251.6 252.9 254.2 255.5 256.8 258.2 259.5 260.8 262.1 263.4 264.7 266.1 267.4 268.7 270. 271.3 272.7 274. 275.3 276.6 277.9 279.3 280.6 281.9 283.2 284.5 285.9 287.2 287.7 287.7 287.8 288.7 289.7 291.4 294. 296.3 297.3 297.6 299.2 311.8 312.5 313. 313.7 315. 317. 321. 336.7 339.7 351.4 363.6 371.5 373.8 376.3 378.4 379.7 379.9 379.8 379.3 378.6 377.8 377. 376.1 374.4 367. 360.8 355.7 345.6 333.7 331.3 328.9 326.5 324.1 321.7 319.3 316.9 314.5 312. 309.6 307.2 304.8 302.4 300. 297.6 295.2 292.8 290.4 288. 284.8 280.8 275.9 271.1 266.5 263.9 262.1 260.8 259.9 258.6 257.6 256.7 256. 255.2 254.7 254.1 253.4 252.7 252.3 252. 251.7 251.2 250.6 250.8 251. 251.2 251.4 251.6 251.8 252. 252.2 252.4 252.6 252.8 253. 253.2 253.4 253.7 253.9 254.1 254.3 254.5 254.7 254.9 255.1 255.3 255.5 255.7 255.9 256.1 256.3 256.5 256.7 256.9 257.1 257.3 257.5 257.7 257.9 258.1 258.3 258.5 258.8 259. 259.2 259.4 259.6 259.8 260. 260.2 260.4 260.6 260.8 261. 261.2 261.4 261.6 261.8 262. 262.2 262.4 262.6 262.8 263. 263.2 263.4 263.6 263.9 264.1 264.3 264.5 264.7 264.9 265.1 265.3 265.5 265.7 265.9 266.1 266.3 266.5 266.7 266.9 267.1 267.3 267.5 267.7 267.9 268.1 268.3 268.5 268.7 269. 269.2 269.4 269.6 269.8 269.5 267.9 266.5 264.8 263.7 262.6 261.6 260.8 259.6 258.5 256.8 255.6 254.3 252. 249.9 256. 262.1 268.1 274.2 280.3 286.4 292.4 298.5 304.6 310.7 313.2 316.9 324.2 335.8 343.4 355.9 367.1 374. 376.8 378.8 380.8 382.9 384.9 385.7 385. 383.5 381.7 380. 378.3 376.9 375.2 367.8 359. 353. 350.7 348. 344.5 341.7 338.7 335.7 333.3 330.1 325.5 321.5 319.5 316.9 315.1 312.5 308.7 305.1 302.3 299. 296.3 295.1 294.5 302.6 317.7 333. 348.3 363.6 366. 366.1 366.2 366.3 367.1 367.9 368.4 368.2 365.1 362. 359.3 356.3 353.2 350.2 347.2 344.1 341.1 338.9 336.7 329.8 326. 322.5 319.8 316. 310.6 304.9 298.5 292.6 288.7 289.2 291. 293.5 296. 298.5 301. 303.5 306. 308.4 310.9 313.4 315.9 318.4 320.9 323.4 325.9 328.4 330.9 333.4 335.9 338.3 340.8 343.3 345.8 348.3 350.2 351. 352. 352.6 352.5 352. 350.6 348.5 346. 342.4 338.1 335. 333.4 332.7 332.5 332.7 332.9 333.2 333.9 335.3 338.4 341.6 343.3 347.5 351.7 355.8 360. 364.2 368.3 372.5 376.7 380.8 385. 389.1 393.3 397.5 401.6 405.4 407.3 407.8 408.6 409.6 410.4 411.4 412.6 413.4 413.8 414. 413.6 412.5 411.3 410.1 408.6 406.9 404.7 402.4 399.8 396.2 391.7 386.7 380. 
375.4 371.6 368.2 364.3 360. 355.9 351.4 347.6 342.3 336.5 330.6 324.8 319.1 314.6 311.3 308.9 306.2 304.3 303. 301.3 298.6 292.6 288.6 288.1 288.5 288.9 289.3 289.6 290. 290.4 290.8 291.2 291.3 290.9 290.7 290.3 289.7 288.3 286.4 284.4 283. 281.3 280.2 279.3 277.9 274.8 270.3 266.3 263.8 261.6 260.1 258. 256.2 254.6 253.7 254.3 255.1 252.7 251.9 250.5 249.3 249.1 249.4 249.4 250.2 250.3 251.2 252.5 257.4 265.8 273.2 277.1 282.7 286.4 298.3 310.6 318.9 324. 325.6 338.1 351.3 357.7 361.8 366.2 370.8 374.1 376.6 380. 383.4 387.4 392.6 397.4 399.6 400.9 400.7 399. 397.5 396.4 394.9 392.5 389.1 385.8 381.8 375.7 368.7 363.1 358.7 353.8 348.1 341.1 332.7 325.7 320.6 315.4 313.6 311.4 307.1 302.2 299.6 299. 298.5 298. 297.5 296.9 296.4 295.9 295.3 294.8 294.3 293.7 293.2 292.7 292.2 292.6 293.7 294.5 296.5 299.9 303.9 307.9 311.8 316.1 323.1 328.4 332.4 335.8 338.4 340.8 343.8 347.4 350.6 354.7 358.1 360.5 361.9 362.2 362.3 361.7 361.2 360. 358.5 356.9 354.9 352.5 350. 349.7 349.4 349.1 348.7 348.4 348.1 347.8 347.5 347.2 346.9 346.5 346.2 345.9 345.6 345.3 345. 344.6 344.3 344. 343.3 340.2 332.2 332. 332.4 333. 333.8 335. 336.3 337.8 339.6 340.9 341.4 341.2 340.6 339.6 336.9 340.5 344. 347.6 351.1 354.7 358.2 361.8 365.3 364.3 364.4 364.6 364.8 365. 365.3 365.3 364.9 364.4 363.4 362.4 361.8 361.2 360.7 359.7 358.4 356.9 355.4 354. 352.3 350.8 349. 347.3 345. 341.3 336.7 332.4 328.9 326.5 323.4 320.3 317.5 314.9 312.3 309.9 307.4 304.4 302.1 300.3 298.9 297.7 296.7 296. 294.9 294. 292.8 291. 287.4 282.2 278.8 275.1 272.7 270.3 268.7 267.6 266.6 265.7 264.6 263.9 262.5 260.6 257.3 251.2 249.3 247.2 245.8 243.9 242.2 240.5 239.5 241.6 241.4 242.6 243.7 244.9 246. 247.2 248.3 249.5 250.6 251.8 252.9 254.1 255.2 256.4 257.5 258.7 259.8 261. 262.1 263.3 264.4 265.6 266.7 267.9 269. 270.1 271.3 272.4 273.6 274.7 275.9 277. 278.2 279.3 280.5 281.6 282.8 283.9 285.1 286.2 287.4 288.5 289.7 290.8 292. 293.1 294.3 295.4 296.6 297.7 298.9 300. 301.2 302.3 303.4 304.6 305.7 306.9 308. 309.2 310.3 311.5 312.6 313.8 314.9 316.1 317.2 318.4 319.5 320.7 321.8 323. 324.1 325.3 326.4 327.6 328.7 329.9 331. 332.2 333.3 334.5 335.6 336.7 337.9 339. 340.2 341.3 342.5 343.6 344.8 345.9 347.1 348.2 349.4 350.5 349.7 349.7 351.5 358.8 373.4 376.6 379.5 381.6 382.3 382.6 382.8 382.3 380.5 376.3 377.6 384.7 393.1 402.1 414. 422.1 431.9 445.6 465.1 486.9 506.5 515.8 521.7 526.3 528.1 524.8 519.6 514.1 510.3 507.1 504. 498.8 493.4 491.4 492.8 494.1 495.5 496.8 498.1 499.5 500.8 502.2 503.5 504.9 506.2 507.5 508.9 510.2 511.6 512.9 514.3 515.6 512.2 519.7 532.3 541.2 545.3 548. 551. 554.2 557.3 560.3 563.2 565.6 567.5 568.6 569.5 569.1 567.4 564.7 562. 559.4 556.7 553.3 549. 544.8 540.8 535.3 525.2 511.7 504.8 500.7 495.4 488.5 481.2 473.9 465.1 449.6 442. 436.6 431.5 422. 409.6 403. 398.6 392.7 391.2 390.4 390. 389.5 388.8 387.5 384.7 383.1 381.4 379.3 376.5 372.7 368.2 364.8 361.9 360.3 359. 351.1 337. 326. 323.3 320.2 318.2 316.6 315. 313.7 312.6 310.9 309.5 308.3 306.6 304.3 303.2 303.4 300.9 307.8 314.7 321.6 328.5 337.6 346.9 348.8 351.7 353.6 355. 356.3 357.9 360.2 362.5 365.3 367.8 371.2 375. 379.9 386.1 391.2 394.5 398. 400.7 403.1 404.9 405.6 405.2 403.4 400.2 395.9 391.5 387.6 384.2 380.9 377.5 374.9 371.7 368.5 364.8 360.1 354.6 349.7 345.5 341.1 337. 332.8 328.7 324.6 320.6 317.1 313.4 309.8 305.5 299.2 293.1 289.2 285.5 282. 279. 276.3 273.7 271.1 269.5 275.3 281.1 286.9 292.7 298.5 304.2 310. 315.8 321.6 327.4 333.2 339. 344.8 350.6 356.4 362.2 368. 373.8 379.6 385.4 391.2 397. 402.8 408.6 414.4 420.2 426. 
431.8 437.6 443.4 449.2 455. 460.8 466.6 472.4 478.2 483.9 489.7 495.5 501.3 507.1 512.9 518.7 524.5 530.3 536.1 541.9 547.7 553.5 559.3 565.1 570.9 576.7 582.5 588.3 594.1 599.9 605.7 611.5 617.3 623.1 628.9 634.7 640.5 646.3 652.1 657.8 644.4 639.3 635.3 628.3 618. 616.8 622. 623.2 625.2 630. 629.7 618.5 612.1 610.5 588.4 587.9 594. 603.9 613.8 623.7 633.5 643.4 653.3 671.5 696.7 713. 708.5 698.6 685.4 672.6 664.9 663. 666.4 675.9 689.9 703.5 724.9 743.6 752.5 743.8 721.4 701.2 668.6 648.4 645.4 642.1 645.2 655.2 660.4 663.5 670.3 670.9 677.2 691.5 701.5 702.4 707.1 704.6 673.6 678.1 686.6 688.4 690.7 671.3 652. 632.7 613.3 594. 574.7 555.3 536. 516.7 497.3 478. 458.7 439.3 426.5 435.8 441.5 445. 446.2 447. 447.4 447. 445.7 444.3 444. 443.5 444. 445.9 448.9 452. 455.8 458.7 461.4 464.2 467.8 473.2 479.2 482.8 485. 485.8 483.8 481.2 479.8 479.4 479.8 480.5 483.4 481.6 477.9 474.2 470.5 466.8 463.1 459.4 455.7 452. 448.3 444.6 440.9 437.2 433.5 429.8 426.1 422.4 418.7 415. 411.3 407.6 403.9 400.2 400. 405. 409.8 415. 418. 421.3 424.3 426.8 428.2 428.7 428.7 428.2 427.5 426. 424. 421. 416.6 412.8 410.3 405.2 405.5 408.4 411.4 414.4 417.4 420.4 423.3 426.3 428.2 430.1 434.9 442.8 450. 466.3 482.8 495.9 509.5 524.5 537.3 545.6 553.4 558.3 559.9 556.1 546.5 533.1 511.3 490.8 475.2 462.6 454.6 449.3 444.8 440.9 435. 428.8 425.5 423.7 422.7 422.5 421.5 416.7 413.9 404.8 394.4 386.6 383.4 381.4 381.8 382.2 382.7 383.1 383.6 384. 384.5 384.9 385.4 385.8 386.2 386.7 387.1 387.6 388. 388.5 388.9 389.4 389.8 390.2 390.7 391.1 391.6 392. 392.5 392.9 395.1 399.9 403.3 399.5 402.4 404.1 401.9 400.1 397.1 390.8 389.2 388.9 388.5 385.8 383. 380.8 378.6 376.5 374.1 370.8 367.5 364.7 362.5 360.6 359. 357.7 356.2 353.8 343.6 333.5 329.7 326.6 321.2 321.2 322.2 323.3 324.4 325.6 326.7 327.8 329. 330.1 331.2 332.4 333.5 334.6 335.8 336.9 338. 339.2 340.3 341.4 343. 345.6 347.6 347.6 346.7 342.8 335.8 328.6 323.9 319.5 310.9 305.3 300.1 297.9 298.8 298.7 298.2 298.4 299.4 303.1 305.8 307.2 309.2 309.5 309.8 310.7 311.1 313. 315.7 317.8 319.3 320.2 321.6 324. 326.5 329.2 332.2 335.4 338.3 340.7 342.4 343.8 345.2 347. 349.6 351.3 351.8 352.5 353.1 353.6 354.3 354.7 354.7 354.1 352.2 350. 347.5 344.9 342.8 341.1 339.5 338.2 336.9 335.6 334.3 332.8 330.6 328.1 325. 321.3 316.4 311.1 306.6 303.3 300.6 297.2 293.6 289.8 286.2 282.9 279.8 276.6 273.9 271.8 269.7 266.5 261.2 254.3 249.2 243.6 243. 243.3 243.6 243.9 244.2 244.5 244.8 245.1 245.4 245.7 246. 246.3 246.6 246.9 247.2 247.5 247.8 237. 237. 237. 236.7 236.5 236.3 236.1 236.1 236.1 236.1 235.9 235.5 235. 234.9 234.7 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 235.3 236.2 236.9 237.7 239.4 240.3 241.8 243.3 244. 245.9 247.7 249.4 251.7 252.7 254.1 255.3 256.4 257.1 258.8 259.4 260.4 261.5 262.5 263.3 263.6 263.7 264.4 264.4 265. 265.4 265.4 265.4 265.4 265.6 266. 266.9 268. 269. 270.1 270.1 270.1 270.7 270.6 269.5 265.8 259.6 255.9 254.1 253.1 252.5 252.1 251.8 251.5 251.1 250.6 248.9 246.9 244.7 243.2 242.7 242.3 242.4 242.7 242.7 242.7 242.6 241.8 240.2 239.8 239.7 239.5 239.9 240.3 240.4 240.2 240.3 240. 239.5 238.8 238.1 237.4 236.7 236. 235.3 234.6 233.9 233.2 232.4 231.7 231. 230.3 229.6 228.9 228.2 227.5 226.7 226. 225.3 224.6 223.9 223.2 222.5 221.8 221.1 220.4 220.7 222.1 219.8 216.4 215.1 213.3 213.5 212.6 211.4 210.5 208.4 205.8 205.1 203.8 202.8 202.6 202.4 201.9 201.5 201.3 200.9 201. 200. 199.1 198.9 197.7 195.5 195. 194.9 194.9 195. 
195.1 195.2 195.3 195.4 195.4 195.5 195.6 195.7 195.8 195.8 195.9 196. 196.1 196.2 196.3 196.3 196.4 196.5 196.6 196.7 196.8 196.8 196.9 197. 197.1 197.2 197.2 197.3 197.4 197.5 197.6 197.7 197.7 197.9 199. 199.7 202. 205.1 207.5 210.5 212.6 214.6 216.4 218.2 219.6 220.4 221. 221.2 221.7 221.7 221.5 221.4 221.4 221.6 222. 222.6 222.7 222.2 221.7 221.2 220.7 222. 223.3 224.6 226. 227.3 228.6 230. 231.3 232.6 234. 235.3 236.6 238. 239.3 240.6 241.9 243.3 244.6 245.9 247.3 248.6 249.9 251.3 252.6 253.9 255.2 256.6 257.9 259.2 260.6 261.9 263.2 264.6 265.9 267.2 268.6 269.9 271.2 272.5 273.9 275.2 276.5 277.9 279.2 280.5 281.9 283.2 284.5 285.8 287.2 288.5 289.8 291.2 292.5 293.8 295.2 296.5 297.8 299.2 300.5 301.4 303. 305.4 306.6 307. 311.8 312.8 313.5 314.3 315.6 316.6 316.1 315.6 315.1 314.6 314.1 313.6 313.1 312.6 312.1 311.6 311.1 310.6 310.1 309.6 309.1 308.6 308.1 307.6 307.1 306.6 306.1 304.6 301. 297.6 295.7 293.3 290. 286.9 284.6 282.6 280.6 278.6 276.5 274.5 272.4 269.4 265.1 260.6 256.1 252. 249.4 248.7 248. 244.7 238.7 240. 250.8 262.8 274.7 286.7 290.8 285.4 283.9 286.9 288.6 293.3 298.3 302.8 307.8 309.9 310.5 310.4 309.8 308.7 306.7 302.8 299. 296.4 288.2 296.1 302.4 302.6 302.7 303.8 312.7 321.6 330.6 339.5 348.4 357.3 366.3 375.2 384.1 385. 385.4 384.8 383.5 383.2 383.8 384.4 384.9 384.2 384.2 382.8 373.7 358.6 355.9 354.6 354.3 354.5 356.7 360.3 362.3 363.3 372.1 377.1 377.8 380.6 386.3 391.3 397.7 407.5 411.8 414.8 417.7 419.9 420.2 425.7 424. 421.8 419.7 417.5 415.4 413.3 411.1 409. 406.8 404.7 402.5 400.4 398.2 396.1 394. 391.8 389.7 387.5 385.4 383.2 381.1 379. 376.8 374.7 372.5 370.4 368.2 366.1 363.9 361.8 359.7 357.5 355.4 353.2 351.1 348.9 346.8 344.7 347.2 356.2 366.2 372.3 377.4 376.1 380. 390.8 400. 408.3 417.4 422.3 424.9 426. 427.4 427.7 427.7 427.3 425.5 422.4 419.2 414.7 410.4 407.2 405.7 408.7 405.3 403.7 402.2 400.7 399.2 397.7 396.2 394.7 391. 386.2 382.1 378.4 374.9 370.9 366.4 362.8 360.1 357.9 355.7 353.1 350.2 347.3 345.8 344.8 343.4 342.1 341.2 340.8 340.7 340.9 341. 340.8 340.7 340.8 341.2 341.4 341.2 340.8 339.9 338.9 337.8 336.4 335.1 334. 333.1 333. 333.8 335. 336.5 337.9 339.2 340.6 341.6 342.5 343.3 343.9 344.1 341.6 338.3 334.7 332.6 328.5 325.1 323.5 322.5 319. 313.3 309.7 312.1 316.4 319.5 320.4 321.3 322.2 323.1 324. 324.9 325.8 326.6 327.5 328.4 329.3 330.2 331.1 332. 332.9 333.8 334.7 335.6 336.5 337.3 338.2 339.1 340. 340.8 340.4 339.7 338.7 337.6 336.7 336.7 336.7 336.7 336.6 336.6 336.6 336.6 336.9 338.2 339.7 341.3 342.1 342.1 341.7 341. 339.6 337.5 334.8 332.2 329.4 326.6 324.1 321.5 319.4 317.5 316.2 315.3 314.5 313.8 313.2 312.3 311.4 310.6 309.9 309.3 308.7 308.3 308.3 308.4 308.6 308.7 308.7 308.5 307.9 307. 305.8 305.1 303.9 302.4 300.6 299. 298.2 297.8 297.5 297.5 297.6 297.6 297.5 297.3 296.3 294. 291. 288.5 286.2 283.4 280. 277.4 274.5 271.4 268.9 267.5 266.9 266.4 266.2 266.2 266.3 266.4 266.4 266.7 267. 267.3 267.4 267.3 267.1 266.3 265.5 264.8 263.3 262.1 260. 
258.4 257.4 256.7 256.1 255.3 253.8 249.4 247.9 247.2 246.7 246.4 245.7 245.2 244.3 243.9 243.3", - "input_type": "phoneme" - } # Manual pitch curve mode - inp2 = { - 'text': 'SP 还 记 得 那 场 音 乐 会 的 烟 火 SP 还 记 得 那 个 凉 凉 的 深 秋 SP 还 记 得 人 潮 把 你 推 向 了 我 SP 游 乐 园 拥 挤 的 正 是 时 候 SP 一 个 夜 晚 坚 持 不 睡 的 等 候 SP 一 起 泡 温 泉 奢 侈 的 享 受 SP 有 一 次 日 记 里 愚 蠢 的 困 惑 SP 因 为 你 的 微 笑 幻 化 成 风 SP 你 大 大 的 勇 敢 保 护 着 我 SP 我 小 小 的 关 怀 喋 喋 不 休 SP 感 谢 我 们 一 起 走 了 那 么 久 SP 又 再 一 次 回 到 凉 凉 深 秋 SP 给 你 我 的 手 SP 像 温 柔 野 兽 SP 把 自 由 交 给 草 原 的 辽 阔 SP 我 们 小 手 拉 大 手 SP 一 起 郊 游 SP 今 天 别 想 太 多 SP 你 是 我 的 梦 SP 像 北 方 的 风 SP 吹 着 南 方 暖 洋 洋 的 哀 愁 SP 我 们 小 手 拉 大 手 SP 今 天 加 油 SP 向 昨 天 挥 挥 手 SP', - 'ph_seq': 'SP h ai j i d e n a ch ang y in y ve h ui d e y an h uo uo SP h ai j i d e n a g e l iang l iang d e sh en en q iu iu SP h ai j i d e r en ch ao b a n i t ui x iang l e w o o SP y ou l e y van y ong j i d e zh eng sh i sh i h ou ou SP y i g e y e w an j ian ch i b u sh ui d e d eng h ou ou SP y i q i p ao w en q van sh e ch i d e x iang iang sh ou ou SP y ou y i c i r i j i l i y v ch un d e k un h uo uo SP y in w ei n i d e w ei x iao h uan h ua ch eng f eng eng SP n i d a d a d e y ong g an b ao h u zh e w o o SP w o x iao x iao d e g uan h uai d ie d ie b u x iu iu SP g an x ie w o m en y i q i z ou l e n a m e j iu iu SP y ou z ai y i c i h ui d ao ao l iang l iang sh en q iu iu SP g ei n i w o d e sh ou SP x iang w en r ou y e sh ou SP b a z i y ou j iao g ei c ao y van d e l iao iao k uo uo uo SP w o m en x iao sh ou l a d a sh ou SP y i q i j iao iao y ou SP j in t ian b ie x iang t ai d uo uo SP n i sh i w o d e m eng SP x iang b ei f ang d e f eng SP ch ui zh e n an f ang n uan y ang y ang d e ai ai ch ou ou ou SP w o m en x iao sh ou l a d a sh ou SP j in t ian j ia ia y ou SP x iang z uo t ian h ui h ui ui sh ou ou ou SP', - 'note_seq': 'rest G3 G3 G3 G3 A3 A3 C4 C4 D4 D4 E4 E4 A4 A4 G4 G4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 D4 D4 E4 E4 E4 D4 rest D4 D4 E4 E4 D4 D4 C#4 C#4 D4 D4 G4 G4 B3 B3 D4 D4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 A3 A3 A3 A3 G3 rest G3 G3 G3 G3 A3 A3 C4 C4 D4 D4 E4 E4 A4 A4 G4 G4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 D4 D4 E4 E4 E4 D4 rest D4 D4 E4 E4 D4 D4 C#4 C#4 D4 D4 G4 G4 B3 B3 D4 D4 E4 E4 D4 D4 D4 D4 C4 rest C4 C4 D4 D4 C4 C4 B3 B3 C4 C4 F4 F4 A3 A3 C4 C4 D4 D4 D4 D4 C4 rest E4 E4 F4 F4 E4 E4 D4 D4 E4 E4 F4 F4 E4 E4 D4 D4 E4 E4 E4 E4 F4 rest F4 F4 G4 G4 F4 F4 G4 G4 F4 F4 E4 E4 D4 D4 C4 C4 D4 D4 D4 D4 E4 rest E4 E4 E4 E4 D4 D4 C#4 C#4 E4 E4 E4 E4 D4 D4 D4 D4 D4 D4 C#4 C#4 C#4 C#4 D4 rest D4 D4 D4 D4 E4 E4 F#4 F#4 D4 D4 G4 G4 A4 G4 G4 G4 G4 F#4 F#4 F#4 F#4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest G4 G4 A4 A4 G4 G4 A4 A4 B4 B4 C5 C5 E4 E4 E4 E4 G4 G4 A4 A4 A4 G4 G4 rest C4 C4 D4 D4 C4 C4 F4 F4 E4 E4 D4 D4 C4 C4 rest F4 F4 E4 E4 D4 D4 C4 C4 C4 rest C4 C4 D4 D4 A3 A3 C4 C4 E4 E4 E4 E4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest E4 E4 F4 F4 E4 E4 F4 F4 G4 G4 rest G4 G4 A4 A4 G4 G4 A4 A4 B4 B4 C5 C5 E4 E4 E4 E4 G4 A4 A4 A4 G4 G4 rest C4 C4 D4 D4 C4 C4 F4 F4 E4 E4 D4 D4 C4 C4 rest F4 F4 E4 E4 D4 D4 C4 C4 C4 rest C4 C4 D4 D4 A3 A3 C4 C4 C4 C4 D4 D4 D4 C4 C4 rest', - 'note_dur_seq': '8.076923 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.3028846 0.389423 0.2307692 0.2307692 0.2307692 0.2307692 
0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1298077 0.1298077 0.3317308 0.2307692 0.2307692 0.2884615 0.403846 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2740385 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.2307692 0.2307692 0.2740385 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2884615 0.403846 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1153846 0.1153846 0.3461539 0.2740385 0.2740385 0.2307692 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2740385 0.418269 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.2596154 0.2596154 0.3173077 0.346154 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1442308 0.1442308 0.4182692 0.360577 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1586538 0.1586538 0.3894231 0.375 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.2307692 0.2307692 0.1586538 0.1586538 0.4615385 0.302885 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.1298077 0.1298077 0.3317308 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1442308 0.1442308 0.3461539 0.432692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5480769 0.5480769 0.375 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5480769 0.5480769 0.375 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1153846 0.1153846 0.3461539 0.2740385 0.2740385 0.4182692 0.375 0.317308 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.375 0.375 0.317308 0.2307692 0.2307692 0.4615385 0.4615385 0.2740385 0.2740385 0.1875 0.2307692 0.2307692 0.230769 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.4615385 0.4615385 0.1442308 0.1442308 0.4326923 0.346154 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5913461 0.5913461 0.331731 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.5913461 0.5913461 0.331731 0.2307692 0.2307692 0.2307692 
0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1298077 0.3317308 0.2884615 0.2884615 0.4038461 0.3028846 0.389423 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.3894231 0.3894231 0.302885 0.2307692 0.2307692 0.4615385 0.4615385 0.2740385 0.2740385 0.1875 0.1730769 0.1730769 0.288462 0.2307692 0.2307692 0.4615385 0.4615385 0.2307692 0.2307692 0.4615385 0.4615385 0.1298077 0.1298077 0.3317308 0.2163462 0.2163462 0.4759615 0.3894231 1', - 'is_slur_seq': '0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0', - 'ph_dur': '7.911923 0.165 0.12718 0.103589 0.185769 0.045 0.155769 0.075 0.13782 0.092949 0.185769 0.045 0.416538 0.045 0.124423 0.106346 0.416538 0.045 0.185769 0.045 0.155768 0.075001 0.230769 0.302885 0.314423 0.075 0.107052 0.123717 0.185769 0.045 0.167052 0.063717 0.17077 0.059999 0.170769 0.06 0.401666 0.059873 0.185769 0.045 0.290193 0.171346 0.129808 0.188012 0.143719 0.230769 0.288462 0.343847 0.059999 0.131732 0.099037 0.185769 0.045 0.185769 0.045 0.1475 0.083269 0.185769 0.045 0.371538 0.09 0.142179 0.088591 0.311539 0.15 0.155768 0.075001 0.177501 0.053268 0.230769 0.274038 0.393268 0.025002 0.187821 0.042948 0.185769 0.045 0.185769 0.045 0.150062 0.080708 0.185384 0.045385 0.356537 0.105001 0.119102 0.111668 0.311537 0.150002 0.356539 0.105 0.230769 0.274038 0.373269 0.045 0.102946 0.127823 0.185769 0.045 0.185769 0.045 0.185771 0.044998 0.108523 0.122246 0.401537 0.060001 0.111154 0.119616 0.416538 0.045 0.172755 0.058014 0.15577 0.074999 0.230769 0.288462 0.358846 0.045 0.111602 0.119167 0.15577 0.074999 0.162692 0.068077 0.15577 0.074999 0.131024 0.099745 0.311539 0.15 0.185769 0.045 0.317692 0.143847 0.115385 0.196152 0.150002 0.274038 0.230769 0.373271 0.044998 0.124934 0.105835 0.118781 0.111988 0.185771 0.044998 0.155768 0.075001 0.127177 0.103592 0.41654 0.044998 0.127563 0.103207 0.41654 0.044998 0.129546 0.101223 0.14532 0.085449 0.230769 0.274038 0.393268 0.025002 0.17686 0.053909 0.170768 0.060001 0.185771 0.044998 0.185767 0.045002 0.114741 0.116028 0.356539 0.105 0.150062 0.080708 0.301085 0.160454 0.290259 0.17128 0.259615 0.317308 0.300961 0.045193 0.15673 0.074039 0.203528 0.027241 0.197818 0.032951 0.169616 0.061153 0.151668 0.079102 0.41654 0.044998 0.132499 0.09827 0.356535 0.105003 0.385771 0.075768 0.144231 0.418269 0.317951 0.042625 0.103847 0.126923 0.154811 0.075958 0.185767 0.045002 0.170772 0.059998 0.127372 0.103397 0.416536 0.045002 0.139617 0.091152 0.386538 0.075001 0.312758 0.148781 0.158654 0.389423 0.314999 0.060001 0.116088 0.114681 0.185767 0.045002 
0.155768 0.075001 0.185771 0.044998 0.087241 0.143528 0.34532 0.116219 0.182818 0.047951 0.356539 0.105 0.155768 0.075001 0.154998 0.075771 0.158654 0.461538 0.257883 0.045002 0.128524 0.102245 0.202945 0.027824 0.097816 0.132954 0.155772 0.074997 0.168716 0.062054 0.129808 0.271729 0.060001 0.164615 0.066154 0.323973 0.137566 0.308973 0.152565 0.144231 0.346154 0.372691 0.060001 0.185771 0.044998 0.185767 0.045002 0.185771 0.044998 0.109038 0.121731 0.548077 0.240002 0.134998 0.185767 0.045002 0.185771 0.044998 0.185767 0.045002 0.109233 0.121536 0.548077 0.330002 0.044998 0.116408 0.114361 0.185767 0.045002 0.170577 0.060192 0.185771 0.044998 0.111991 0.118778 0.41654 0.044998 0.185767 0.045002 0.386538 0.075001 0.115385 0.245194 0.10096 0.274038 0.418269 0.375 0.281472 0.035835 0.170768 0.060001 0.15301 0.077759 0.10942 0.121349 0.386538 0.075001 0.127177 0.103592 0.313074 0.148464 0.375 0.272302 0.045006 0.101024 0.129745 0.356543 0.104996 0.274038 0.10878 0.07872 0.230769 0.140773 0.089996 0.129741 0.101028 0.41654 0.044998 0.114749 0.11602 0.326536 0.135002 0.385962 0.075577 0.144231 0.432692 0.301156 0.044998 0.1114 0.11937 0.185771 0.044998 0.185771 0.044998 0.119479 0.11129 0.591346 0.196736 0.134995 0.185771 0.044998 0.155765 0.075005 0.185771 0.044998 0.11967 0.111099 0.591346 0.226735 0.104996 0.144809 0.08596 0.155765 0.075005 0.155765 0.075005 0.170772 0.059998 0.151668 0.079102 0.41654 0.044998 0.185771 0.044998 0.401533 0.189813 0.211423 0.120308 0.288462 0.403846 0.302885 0.344417 0.045006 0.170764 0.060005 0.132129 0.09864 0.118671 0.112099 0.401533 0.060005 0.146937 0.083832 0.323286 0.138252 0.389423 0.197889 0.104996 0.155772 0.074997 0.358343 0.103195 0.274038 0.142502 0.044998 0.173077 0.153459 0.135002 0.124492 0.106277 0.341543 0.119995 0.121677 0.109093 0.29225 0.169289 0.129808 0.173413 0.158318 0.216346 0.475962 0.389423 0.05', - 'f0_timestep': '0.', - 'f0_seq': None, # Automatic pitch curve mode - 'input_type': 'phoneme' - } - DiffSingerCascadeInfer.example_run(inp) From f6ef6b55b9cff52aa148bc0037f8eb880c10b90c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 13 Mar 2023 00:19:19 +0800 Subject: [PATCH 063/475] Fix missing spk_id --- preprocessing/acoustic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/acoustic.py b/preprocessing/acoustic.py index a489213bb..00d8617f1 100644 --- a/preprocessing/acoustic.py +++ b/preprocessing/acoustic.py @@ -27,7 +27,7 @@ from utils.phoneme_utils import build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" -ACOUSTIC_ITEM_ATTRIBUTES = ['mel', 'tokens', 'mel2ph', 'f0', 'key_shift', 'speed'] +ACOUSTIC_ITEM_ATTRIBUTES = ['spk_id', 'mel', 'tokens', 'mel2ph', 'f0', 'key_shift', 'speed'] class AcousticBinarizer(BaseBinarizer): From a0cd33d1af45edc45706cf2f2326a9921b83f1bd Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 13 Mar 2023 12:36:54 +0800 Subject: [PATCH 064/475] Fix size concat --- utils/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/__init__.py b/utils/__init__.py index b93b6aae5..8f0430161 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -40,7 +40,7 @@ def collate_nd(values, pad_value=0, max_len=None): """ Pad a list of Nd tensors on their first dimension and stack them into a (N+1)d tensor. 
""" - size = ((max(v.size(0) for v in values) if max_len is None else max_len), values[0].shape[1:]) + size = ((max(v.size(0) for v in values) if max_len is None else max_len), *values[0].shape[1:]) res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device) for i, v in enumerate(values): From 3292cd7697c5d2458585d839b6b4dcae2e694477 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 15 Mar 2023 12:10:04 +0800 Subject: [PATCH 065/475] Re-organize model structure and add migrating scripts --- basics/base_model.py | 18 +++ basics/base_task.py | 11 +- configs/acoustic.yaml | 3 +- configs/base.yaml | 2 - deployment/export/export_acoustic.py | 28 +++-- inference/ds_cascade.py | 27 ++-- modules/diff/diffusion.py | 45 +++---- modules/fastspeech/acoustic_encoder.py | 30 +++-- modules/toplevel/acoustic_model.py | 37 ++++++ .../{acoustic.py => acoustic_binarizer.py} | 0 scripts/migrate.py | 41 ++++++ training/{acoustic.py => acoustic_task.py} | 117 +++++------------- utils/__init__.py | 57 +++++---- utils/pl_utils.py | 5 +- 14 files changed, 227 insertions(+), 194 deletions(-) create mode 100644 basics/base_model.py create mode 100644 modules/toplevel/acoustic_model.py rename preprocessing/{acoustic.py => acoustic_binarizer.py} (100%) create mode 100644 scripts/migrate.py rename training/{acoustic.py => acoustic_task.py} (77%) diff --git a/basics/base_model.py b/basics/base_model.py new file mode 100644 index 000000000..a933be613 --- /dev/null +++ b/basics/base_model.py @@ -0,0 +1,18 @@ +from torch import nn + + +class CategorizedModule(nn.Module): + @property + def category(self): + raise NotImplementedError() + + def check_category(self, category): + if category is None: + raise RuntimeError('Category is not specified in this checkpoint.\n' + 'If this is a checkpoint in the old format, please consider ' + 'migrating it to the new format via the following command:\n' + 'python scripts/migrate.py -i -o ') + elif category != self.category: + raise RuntimeError('Category mismatches!\n' + f'This checkpoint is of the category \'{category}\', ' + f'but a checkpoint of category \'{self.category}\' is required.') diff --git a/basics/base_task.py b/basics/base_task.py index 4eaaf589d..223b86116 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -3,6 +3,8 @@ import matplotlib +from basics.base_model import CategorizedModule + matplotlib.use('Agg') from utils.hparams import hparams, set_hparams @@ -79,12 +81,6 @@ def __init__(self, *args, **kwargs): def build_model(self): raise NotImplementedError - def load_ckpt(self, ckpt_base_dir, current_model_name=None, model_name='model', force=True, strict=True): - # This function is updated on 2021.12.13 - if current_model_name is None: - current_model_name = model_name - utils.load_ckpt(self.__getattr__(current_model_name), ckpt_base_dir, current_model_name, force, strict) - def on_epoch_start(self): self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} @@ -337,7 +333,8 @@ def on_load_checkpoint(self, checkpoint): pass def on_save_checkpoint(self, checkpoint): - pass + if isinstance(self.model, CategorizedModule): + checkpoint['category'] = self.model.category def on_sanity_check_start(self): pass diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 09ffbed28..d11c94b2d 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -1,7 +1,7 @@ base_config: - configs/base.yaml -task_cls: training.acoustic.AcousticTask +task_cls: training.acoustic_task.AcousticTask num_spk: 1 
 speakers:
   - opencpop
@@ -48,7 +48,6 @@ g2p_dictionary: dictionaries/opencpop-extension.txt
 spec_min: [-5]
 spec_max: [0]
 keep_bins: 128
-mel_loss: "ssim:0.5|l1:0.5"
 mel_vmin: -6. #-6.
 mel_vmax: 1.5
 save_f0: true

diff --git a/configs/base.yaml b/configs/base.yaml
index 5302021a0..8ec348538 100644
--- a/configs/base.yaml
+++ b/configs/base.yaml
@@ -62,13 +62,11 @@ optimizer_adam_beta1: 0.9
 optimizer_adam_beta2: 0.98
 weight_decay: 0
 clip_grad_norm: 1
-mel_loss: l1:0.5|ssim:0.5 # l1|l2|gdl|ssim or l1:0.5|ssim:0.5
 dur_loss: mse # huber|mol
 
 ###########
 # train and eval
 ###########
-load_ckpt: ''
 save_ckpt: true
 save_best: false
 num_ckpt_keep: 3

diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py
index b4189bb96..0f91db312 100644
--- a/deployment/export/export_acoustic.py
+++ b/deployment/export/export_acoustic.py
@@ -22,6 +22,8 @@
 import torch.nn.functional as F
 from torch.nn import Linear, Embedding
 
+
+from basics.base_model import CategorizedModule
 from modules.commons.common_layers import Mish
 from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder
 from modules.diff.diffusion import beta_schedule
@@ -65,7 +67,7 @@ def forward(self, dur):
         return mel2ph
 
 
-class FastSpeech2Acoustic(nn.Module):
+class FastSpeech2Acoustic(CategorizedModule):
     def __init__(self, vocab_size):
         super().__init__()
         self.lr = LengthRegulator()
@@ -92,6 +94,10 @@ def __init__(self, vocab_size):
         if hparams['use_spk_id']:
             self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size'])
 
+    @property
+    def category(self):
+        return 'acoustic'
+
     def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None):
         durations *= tokens > 0
         mel2ph = self.lr.forward(durations)
@@ -325,7 +331,7 @@ def forward(self, x):
         return x * d + m
 
 
-class GaussianDiffusion(nn.Module):
+class GaussianDiffusion(CategorizedModule):
     def __init__(self, out_dims, timesteps=1000, k_step=1000, spec_min=None, spec_max=None):
         super().__init__()
         self.mel_bins = out_dims
@@ -367,6 +373,10 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, spec_min=None, spec_ma
         self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']])
         self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']])
 
+    @property
+    def category(self):
+        return 'acoustic'
+
     def build_submodules(self):
         # Move registered buffers into submodules after loading state dict.
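         # (The schedule tensors are first registered on this top-level module so the
         # trained state dict loads unchanged; re-registering them on submodules such
         # as naive_noise_predictor below presumably keeps each separately exported
         # graph self-contained.)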
         self.naive_noise_predictor.register_buffer('sqrt_recip_alphas_cumprod', self.sqrt_recip_alphas_cumprod)
@@ -459,7 +469,8 @@ def build_fs2_model(device, ckpt_steps=None):
         vocab_size=len(TokenTextEncoder(vocab_list=build_phoneme_list()))
     )
     model.eval()
-    load_ckpt(model, hparams['work_dir'], 'model.fs2', ckpt_steps=ckpt_steps, strict=True)
+    load_ckpt(model, hparams['work_dir'], 'model.fs2', ckpt_steps=ckpt_steps,
+              required_category='acoustic', strict=True)
     model.to(device)
     return model
 
@@ -473,7 +484,8 @@ def build_diff_model(device, ckpt_steps=None):
         spec_max=hparams['spec_max'],
     )
     model.eval()
-    load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps, strict=False)
+    load_ckpt(model, hparams['work_dir'], 'model.diffusion', ckpt_steps=ckpt_steps,
+              required_category='acoustic', strict=False)
     model.build_submodules()
     model.to(device)
     return model
@@ -1190,11 +1202,13 @@ def merge(fs2_path, diff_path, target_path):
     print('FastSpeech2 and GaussianDiffusion models merged.')
     onnx.save(merged_model, target_path)
 
-def export_phonemes_txt(phonemes_txt_path:str):
-    textEncoder = TokenTextEncoder(vocab_list=build_phoneme_list())
-    textEncoder.store_to_file(phonemes_txt_path)
+
+def export_phonemes_txt(path: str):
+    TokenTextEncoder(vocab_list=build_phoneme_list()).store_to_file(path)
+
 
 if __name__ == '__main__':
+    # print('Oops, exporting ')
     parser = argparse.ArgumentParser(description='Export DiffSinger acoustic model to ONNX format.')
     parser.add_argument('--exp', type=str, required=True, help='experiment to export')
    parser.add_argument('--ckpt', type=int, required=False, help='checkpoint training steps')

diff --git a/inference/ds_cascade.py b/inference/ds_cascade.py
index 15d5741fb..f82f679d8 100644
--- a/inference/ds_cascade.py
+++ b/inference/ds_cascade.py
@@ -2,24 +2,21 @@
 from basics.base_svs_infer import BaseSVSInfer
 from utils import load_ckpt
 from utils.hparams import hparams
-from modules.diff.diffusion import GaussianDiffusion
 from modules.fastspeech.tts_modules import LengthRegulator
+from modules.toplevel.acoustic_model import DiffSingerAcoustic
 import librosa
 import numpy as np
 
 
 class DiffSingerCascadeInfer(BaseSVSInfer):
     def build_model(self, ckpt_steps=None):
-        model = GaussianDiffusion(
+        model = DiffSingerAcoustic(
             vocab_size=len(self.ph_encoder),
-            out_dims=hparams['audio_num_mel_bins'],
-            timesteps=hparams['timesteps'],
-            K_step=hparams['K_step'],
-            loss_type=hparams['diff_loss_type'],
-            spec_min=hparams['spec_min'], spec_max=hparams['spec_max'],
+            out_dims=hparams['audio_num_mel_bins']
         )
         model.eval()
-        load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps)
+        load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps,
+                  required_category='acoustic', strict=True, device=self.device)
         return model
 
     def preprocess_word_level_input(self, inp):
@@ -261,14 +258,12 @@ def forward_model(self, inp, return_mel=False):
             spk_mix_embed = torch.stack(spk_mix_embed, dim=1).sum(dim=1)
         else:
             spk_mix_embed = None
-        output = self.model(txt_tokens, spk_mix_embed=spk_mix_embed, ref_mels=None, infer=True,
-                            pitch_midi=sample['pitch_midi'], midi_dur=sample['midi_dur'],
-                            is_slur=sample['is_slur'], mel2ph=sample['mel2ph'], f0=sample['f0'],
-                            key_shift=sample['key_shift'], speed=sample['speed'])
-        mel_out = output['mel_out']  # [B, T, M]
-        f0_pred = output['f0_denorm']
+        f0 = sample['f0']
+        mel = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'],
+                         key_shift=sample['key_shift'], speed=sample['speed'],
+                         spk_mix_embed=spk_mix_embed, infer=True)
         if return_mel:
-            return mel_out.cpu(), f0_pred.cpu()
-        wav_out = self.run_vocoder(mel_out, f0=f0_pred)
+            return mel.cpu(), f0.cpu()
+        wav_out = self.run_vocoder(mel, f0=f0)
         wav_out = wav_out.cpu().numpy()
         return wav_out[0]

diff --git a/modules/diff/diffusion.py b/modules/diff/diffusion.py
index 42f40e229..b42801110 100644
--- a/modules/diff/diffusion.py
+++ b/modules/diff/diffusion.py
@@ -8,11 +8,9 @@
 from torch import nn
 from tqdm import tqdm
 
-from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic
 from modules.diff.wavenet import DiffNet
 from utils.hparams import hparams
 
-
 DIFF_DENOISERS = {
     'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']),
 }
@@ -68,13 +66,12 @@ def cosine_beta_schedule(timesteps, s=0.008):
 
 
 class GaussianDiffusion(nn.Module):
-    def __init__(self, vocab_size, out_dims, timesteps=1000, K_step=1000,
-                 loss_type=hparams.get('diff_loss_type', 'l1'), betas=None, spec_min=None,
-                 spec_max=None):
+    def __init__(self, out_dims, timesteps=1000, k_step=1000,
+                 denoiser_type=None, loss_type=None, betas=None,
+                 spec_min=None, spec_max=None):
         super().__init__()
-        self.denoise_fn = DIFF_DENOISERS[hparams['diff_decoder_type']](hparams)
-        self.fs2 = FastSpeech2Acoustic(vocab_size=vocab_size)
-        self.mel_bins = out_dims
+        self.denoise_fn = DIFF_DENOISERS[denoiser_type](hparams)
+        self.out_dims = out_dims
 
         if exists(betas):
             betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas
@@ -90,7 +87,7 @@ def __init__(self, vocab_size, out_dims, timesteps=1000, K_step=1000,
 
         timesteps, = betas.shape
         self.num_timesteps = int(timesteps)
-        self.K_step = K_step
+        self.K_step = k_step
         self.loss_type = loss_type
 
         self.noise_list = deque(maxlen=4)
@@ -189,7 +186,7 @@ def get_x_pred(x, noise_t, t):
             noise_pred_prime = (3 * noise_pred - noise_list[-1]) / 2
         elif len(noise_list) == 2:
             noise_pred_prime = (23 * noise_pred - 16 * noise_list[-1] + 5 * noise_list[-2]) / 12
-        elif len(noise_list) >= 3:
+        else:
             noise_pred_prime = (55 * noise_pred - 59 * noise_list[-1] + 37 * noise_list[-2] - 9 * noise_list[-3]) / 24
 
         x_prev = get_x_pred(x, noise_pred_prime, t)
@@ -224,23 +221,21 @@ def p_losses(self, x_start, t, cond, noise=None, nonpadding=None):
 
         return loss
 
-    def forward(self, txt_tokens, mel2ph=None, spk_embed=None,
-                ref_mels=None, f0=None, infer=False, **kwargs):
+    def forward(self, condition, gt_spec=None, infer=True):
         """
             conditioning diffusion, use fastspeech2 encoder output as the condition
         """
-        ret = self.fs2(txt_tokens, mel2ph=mel2ph, f0=f0, spk_embed_id=spk_embed, infer=infer, **kwargs)
-        cond = ret['decoder_inp'].transpose(1, 2)
-        b, *_, device = *txt_tokens.shape, txt_tokens.device
+        cond = condition.transpose(1, 2)
+        b, device = condition.shape[0], condition.device
 
         if not infer:
-            spec = self.norm_spec(ref_mels)
+            spec = self.norm_spec(gt_spec)
             t = torch.randint(0, self.K_step, (b,), device=device).long()
             norm_spec = spec.transpose(1, 2)[:, None, :, :]  # [B, 1, M, T]
-            ret['diff_loss'] = self.p_losses(norm_spec, t, cond=cond)
+            return self.p_losses(norm_spec, t, cond=cond)
        else:
            t = self.K_step
-            shape = (cond.shape[0], 1, self.mel_bins, cond.shape[2])
+            shape = (cond.shape[0], 1, self.out_dims, cond.shape[2])
            x = torch.randn(shape, device=device)
            if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1:
                # obsolete: pndm_speedup, now use dpm_solver.
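
Note on the refactored interface above: GaussianDiffusion no longer builds the
FastSpeech2 encoder itself; it consumes a precomputed condition tensor and returns
either the diffusion loss (training) or a denormalized mel spectrogram (inference),
with DiffSingerAcoustic (added later in this patch) wiring the two together. A
minimal usage sketch follows; it assumes hparams has already been loaded (the
denoiser lambda still reads the global hparams), and the hidden size of 256 and
the 'l2' loss type are assumptions rather than values taken from this patch:

    import torch
    from modules.diff.diffusion import GaussianDiffusion

    B, T, M = 1, 100, 128                 # batch size, frames, mel bins (assumed)
    condition = torch.randn(B, T, 256)    # stand-in for FastSpeech2Acoustic output
    gt_mel = torch.randn(B, T, M)         # random stand-in for a ground-truth mel
    diffusion = GaussianDiffusion(out_dims=M, timesteps=1000, k_step=1000,
                                  denoiser_type='wavenet', loss_type='l2',
                                  spec_min=[-5], spec_max=[0])
    loss = diffusion(condition, gt_spec=gt_mel, infer=False)  # training: scalar loss
    mel = diffusion(condition, infer=True)                    # inference: [B, T, M] mel
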
@@ -291,21 +286,11 @@ def wrapped(x, t, **kwargs):
             else:
                 for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t):
                     x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond)
-            x = x[:, 0].transpose(1, 2)
-        if mel2ph is not None:  # for singing
-            ret['mel_out'] = self.denorm_spec(x) * ((mel2ph > 0).float()[:, :, None])
-        else:
-            ret['mel_out'] = self.denorm_spec(x)
-        return ret
+            x = x.squeeze(1).transpose(1, 2)  # [B, T, M]
+            return self.denorm_spec(x)
 
     def norm_spec(self, x):
         return (x - self.spec_min) / (self.spec_max - self.spec_min) * 2 - 1
 
     def denorm_spec(self, x):
         return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min
-
-    def cwt2f0_norm(self, cwt_spec, mean, std, mel2ph):
-        return self.fs2.cwt2f0_norm(cwt_spec, mean, std, mel2ph)
-
-    def out2mel(self, x):
-        return x

diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py
index d4ac65741..7c8ded9ff 100644
--- a/modules/fastspeech/acoustic_encoder.py
+++ b/modules/fastspeech/acoustic_encoder.py
@@ -63,15 +63,15 @@ def __init__(self, vocab_size):
         if hparams['use_spk_id']:
             self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size'])
 
-    def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=False, **kwarg):
+    def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=False, **kwargs):
         B, T = txt_tokens.shape
         dur = mel2ph_to_dur(mel2ph, T).float()
         dur_embed = self.dur_embed(dur[:, :, None])
         encoder_out = self.encoder(txt_tokens, dur_embed)
 
-        decoder_inp = F.pad(encoder_out, [0, 0, 1, 0])
+        encoder_out = F.pad(encoder_out, [0, 0, 1, 0])
         mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]])
-        decoder_inp = torch.gather(decoder_inp, 1, mel2ph_)
+        condition = torch.gather(encoder_out, 1, mel2ph_)
 
         nframes = mel2ph.size(1)
         delta_l = nframes - f0.size(1)
@@ -85,9 +85,10 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal
         else:
             f0_mel = (1 + f0 / 700).log()
         pitch_embed = self.pitch_embed(f0_mel[:, :, None])
+        condition += pitch_embed
 
         if hparams.get('use_key_shift_embed', False):
-            key_shift = kwarg['key_shift']
+            key_shift = kwargs['key_shift']
             if len(key_shift.shape) == 1:
                 key_shift_embed = self.key_shift_embed(key_shift[:, None, None])
             else:
@@ -96,11 +97,10 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal
                     key_shift = torch.cat((key_shift, torch.FloatTensor([[x[-1]] * delta_l for x in key_shift]).to(key_shift.device)), 1)
                 key_shift = key_shift[:, :nframes]
                 key_shift_embed = self.key_shift_embed(key_shift[:, :, None])
-        else:
-            key_shift_embed = 0
+            condition += key_shift_embed
 
         if hparams.get('use_speed_embed', False):
-            speed = kwarg['speed']
+            speed = kwargs['speed']
             if len(speed.shape) == 1:
                 speed_embed = self.speed_embed(speed[:, None, None])
             else:
@@ -109,12 +109,12 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal
                     speed = torch.cat((speed, torch.FloatTensor([[x[-1]] * delta_l for x in speed]).to(speed.device)), 1)
                 speed = speed[:, :nframes]
                 speed_embed = self.speed_embed(speed[:, :, None])
-        else:
-            speed_embed = 0
-
+            condition += speed_embed
+
         if hparams['use_spk_id']:
-            if infer:
-                spk_embed = kwarg.get('spk_mix_embed')  # (1, t, 256)
+            spk_mix_embed = kwargs.get('spk_mix_embed')
+            if spk_mix_embed is not None:
+                spk_embed = spk_mix_embed
                 mix_frames = spk_embed.size(1)
                 if mix_frames > nframes:
                     spk_embed = spk_embed[:, :nframes, :]
@@ -122,8 +122,6 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal
                 spk_embed = torch.cat((spk_embed, spk_embed[:, -1:, :].repeat(1, nframes - mix_frames, 1)), dim=1)
             else:
                 spk_embed = self.spk_embed(spk_embed_id)[:, None, :]
-        else:
-            spk_embed = 0
+            condition += spk_embed
 
-        ret = {'decoder_inp': decoder_inp + pitch_embed + key_shift_embed + speed_embed + spk_embed, 'f0_denorm': f0}
-        return ret
+        return condition

diff --git a/modules/toplevel/acoustic_model.py b/modules/toplevel/acoustic_model.py
new file mode 100644
index 000000000..4a36743f7
--- /dev/null
+++ b/modules/toplevel/acoustic_model.py
@@ -0,0 +1,37 @@
+from basics.base_model import CategorizedModule
+from modules.diff.diffusion import GaussianDiffusion
+from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic
+from utils.hparams import hparams
+
+
+class DiffSingerAcoustic(CategorizedModule):
+    def __init__(self, vocab_size, out_dims):
+        super().__init__()
+        self.fs2 = FastSpeech2Acoustic(
+            vocab_size=vocab_size
+        )
+        self.diffusion = GaussianDiffusion(
+            out_dims=out_dims,
+            timesteps=hparams['timesteps'],
+            k_step=hparams['K_step'],
+            denoiser_type=hparams['diff_decoder_type'],
+            loss_type=hparams['diff_loss_type'],
+            spec_min=hparams['spec_min'],
+            spec_max=hparams['spec_max']
+        )
+
+    @property
+    def category(self):
+        return 'acoustic'
+
+    def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None,
+                spk_embed_id=None, gt_mel=None, infer=True, **kwargs):
+        condition = self.fs2(txt_tokens, mel2ph, f0, key_shift=key_shift, speed=speed,
+                             spk_embed_id=spk_embed_id, **kwargs)
+        if infer:
+            mel = self.diffusion(condition, infer=True)
+            mel *= ((mel2ph > 0).float()[:, :, None])
+            return mel
+        else:
+            loss = self.diffusion(condition, gt_spec=gt_mel, infer=False)
+            return loss

diff --git a/preprocessing/acoustic.py b/preprocessing/acoustic_binarizer.py
similarity index 100%
rename from preprocessing/acoustic.py
rename to preprocessing/acoustic_binarizer.py

diff --git a/scripts/migrate.py b/scripts/migrate.py
new file mode 100644
index 000000000..cec5fe50e
--- /dev/null
+++ b/scripts/migrate.py
@@ -0,0 +1,41 @@
+import argparse
+import pathlib
+from collections import OrderedDict
+
+import torch
+
+
+parser = argparse.ArgumentParser(description='Migrate checkpoint files of MIDI-less acoustic models from old format')
+parser.add_argument('-i', '--input', required=True, type=str, help='Path to the input file')
+parser.add_argument('-o', '--output', required=True, type=str, help='Path to the output file')
+parser.add_argument('--overwrite', required=False, default=False,
+                    action='store_true', help='Overwrite the existing file')
+args = parser.parse_args()
+
+input_ckpt = pathlib.Path(args.input).resolve()
+output_ckpt = pathlib.Path(args.output).resolve()
+assert input_ckpt.exists(), 'The input file does not exist.'
+assert args.overwrite or not output_ckpt.exists(), \
+    'The output file already exists or is the same as the input file.\n' \
+    'This is not recommended because migration scripts may not be stable, ' \
+    'and you may be at risk of losing your model.\n' \
+    'If you are sure to OVERWRITE the existing file, please re-run this script with the \'--overwrite\' argument.'
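+
+# Migration flow: load the old checkpoint, drop the stale optimizer state (its
+# entries would no longer line up with the re-organized module tree), move every
+# non-fs2 'model.*' key under 'model.diffusion.*', tag the checkpoint with its
+# category, and save it to the output path.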
+ +ckpt_loaded = torch.load(input_ckpt) +if 'category' in ckpt_loaded: + print('This checkpoint file is already in the new format.') + exit(0) +state_dict: OrderedDict = ckpt_loaded['state_dict'] +ckpt_loaded['optimizer_states'][0]['state'].clear() +new_state_dict = OrderedDict() +for key in state_dict: + if key.startswith('model.fs2'): + # keep model.fs2.xxx + new_state_dict[key] = state_dict[key] + else: + # model.xxx => model.diffusion.xxx + path = key.split('.', maxsplit=1)[1] + new_state_dict[f'model.diffusion.{path}'] = state_dict[key] +ckpt_loaded['category'] = 'acoustic' +ckpt_loaded['state_dict'] = new_state_dict +torch.save(ckpt_loaded, output_ckpt) diff --git a/training/acoustic.py b/training/acoustic_task.py similarity index 77% rename from training/acoustic.py rename to training/acoustic_task.py index 07d707cf7..a8324d309 100644 --- a/training/acoustic.py +++ b/training/acoustic_task.py @@ -14,18 +14,17 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder -from utils.binarizer_utils import get_pitch_parselmouth from modules.fastspeech.tts_modules import mel2ph_to_dur +from modules.toplevel.acoustic_model import DiffSingerAcoustic +from modules.vocoders.registry import get_vocoder_cls from utils import audio +from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams from utils.indexed_datasets import IndexedDataset from utils.phoneme_utils import build_phoneme_list -from utils.pitch_utils import denorm_f0 from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder -from modules.diff.diffusion import GaussianDiffusion -from modules.vocoders.registry import get_vocoder_cls matplotlib.use('Agg') @@ -86,19 +85,6 @@ def __init__(self): self.saving_result_pool = None self.saving_results_futures = None self.stats = {} - self.mse_loss_fn = torch.nn.MSELoss() - mel_losses = hparams['mel_loss'].split('|') - self.loss_and_lambda = {} - for i, l in enumerate(mel_losses): - if l == '': - continue - if ':' in l: - l, lbd = l.split(':') - lbd = float(lbd) - else: - lbd = 1.0 - self.loss_and_lambda[l] = lbd - print('| Mel losses:', self.loss_and_lambda) self.logged_gt_wav = set() @staticmethod @@ -107,17 +93,10 @@ def build_phone_encoder(): return TokenTextEncoder(vocab_list=phone_list) def build_model(self): - mel_bins = hparams['audio_num_mel_bins'] - self.model = GaussianDiffusion( + self.model = DiffSingerAcoustic( vocab_size=len(self.phone_encoder), - out_dims=mel_bins, - timesteps=hparams['timesteps'], - K_step=hparams['K_step'], - loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], + out_dims=hparams['audio_num_mel_bins'] ) - if hparams['load_ckpt'] != '': - self.load_ckpt(hparams['load_ckpt'], strict=True) utils.print_arch(self.model) return self.model @@ -156,36 +135,32 @@ def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) def run_model(self, sample, return_output=False, infer=False): - ''' + """ steps: 1. run the full model, calc the main loss 2. 
calculate loss for dur_predictor, pitch_predictor, energy_predictor - ''' + """ txt_tokens = sample['tokens'] # [B, T_t] target = sample['mels'] # [B, T_s, 80] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] - energy = sample.get('energy') key_shift = sample.get('key_shift') speed = sample.get('speed') - if infer: - if hparams['use_spk_id']: - spk_embed = self.model.fs2.spk_embed(sample['spk_ids'])[:, None, :] - elif hparams['use_spk_embed']: - spk_embed = sample['spk_embed'] - else: - spk_embed = None - output = self.model(txt_tokens, mel2ph=mel2ph, spk_mix_embed=spk_embed, ref_mels=target, - f0=f0, energy=energy, key_shift=key_shift, speed=speed, infer=infer) + if hparams['use_spk_id']: + spk_embed_id = sample['spk_ids'] + # elif hparams['use_spk_embed']: + # spk_embed = sample['spk_embed'] else: - spk_embed = sample.get('spk_ids') if hparams['use_spk_id'] else sample.get('spk_embed') - output = self.model(txt_tokens, mel2ph=mel2ph, spk_embed=spk_embed, ref_mels=target, - f0=f0, energy=energy, key_shift=key_shift, speed=speed, infer=infer) + spk_embed_id = None + output = self.model(txt_tokens, mel2ph=mel2ph, f0=f0, + key_shift=key_shift, speed=speed, + spk_embed_id=spk_embed_id, + gt_mel=target, infer=infer) losses = {} - if 'diff_loss' in output: - losses['mel'] = output['diff_loss'] + if not infer: + losses['mel'] = output if not return_output: return losses else: @@ -199,35 +174,18 @@ def _training_step(self, sample, batch_idx, _): return total_loss, log_outputs def validation_step(self, sample, batch_idx): - outputs = {} - txt_tokens = sample['tokens'] # [B, T_t] - - energy = sample.get('energy') - key_shift = sample.get('key_shift') - speed = sample.get('speed') - # spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - - outputs['losses'], model_out = self.run_model(sample, return_output=True, infer=False) - - outputs['total_loss'] = sum(outputs['losses'].values()) - outputs['nsamples'] = sample['nsamples'] + losses = self.run_model(sample, return_output=False, infer=False) + total_loss = sum(losses.values()) + outputs = { + 'losses': losses, + 'total_loss': total_loss, 'nsamples': sample['nsamples'] + } outputs = utils.tensors_to_scalars(outputs) + if batch_idx < hparams['num_valid_plots']: - if hparams['use_spk_id']: - spk_embed = self.model.fs2.spk_embed(sample['spk_ids'])[:, None, :] - elif hparams['use_spk_embed']: - spk_embed = sample['spk_embed'] - else: - spk_embed = None - model_out = self.model( - txt_tokens, spk_mix_embed=spk_embed, mel2ph=mel2ph, f0=f0, energy=energy, - key_shift=key_shift, speed=speed, ref_mels=None, infer=True - ) - - self.plot_wav(batch_idx, sample['mels'], model_out['mel_out'], f0=model_out['f0_denorm']) - self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'], name=f'diffmel_{batch_idx}') + _, mel_pred = self.run_model(sample, return_output=True, infer=True) + self.plot_wav(batch_idx, sample['mels'], mel_pred, f0=sample['f0']) + self.plot_mel(batch_idx, sample['mels'], mel_pred, name=f'diffmel_{batch_idx}') return outputs @@ -276,23 +234,8 @@ def test_start(self): self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() def test_step(self, sample, batch_idx): - spk_embed = sample.get('spk_embed') if not hparams['use_spk_id'] else sample.get('spk_ids') - txt_tokens = sample['tokens'] - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - ref_mels = None - outputs = self.model( - txt_tokens, spk_embed=spk_embed, mel2ph=mel2ph, f0=f0, - ref_mels=ref_mels, 
infer=True - ) - sample['outputs'] = self.model.out2mel(outputs['mel_out']) - sample['mel2ph_pred'] = outputs['mel2ph'] - if hparams.get('pe_enable') is not None and hparams['pe_enable']: - sample['f0'] = self.pe(sample['mels'])['f0_denorm_pred'] # pe predict from GT mel - sample['f0_pred'] = self.pe(sample['outputs'])['f0_denorm_pred'] # pe predict from Pred mel - else: - sample['f0'] = denorm_f0(sample['f0'], sample['uv']) - sample['f0_pred'] = outputs.get('f0_denorm') + _, mel_pred = self.run_model(sample, return_output=True, infer=True) + sample['outputs'] = mel_pred return self.after_infer(sample) def test_end(self, outputs): diff --git a/utils/__init__.py b/utils/__init__.py index 8f0430161..2f41cf2f0 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -4,10 +4,14 @@ import os import sys import types +from collections import OrderedDict + import numpy as np import torch import torch.nn.functional as F +from basics.base_model import CategorizedModule + def tensors_to_scalars(metrics): new_metrics = {} @@ -147,7 +151,8 @@ def unpack_dict_to_list(samples): return samples_ -def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', ckpt_steps=None, force=True, strict=True): +def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', required_category=None, + ckpt_steps=None, strict=True, device='cpu'): if os.path.isfile(ckpt_base_dir): ckpt_base_dir = os.path.dirname(ckpt_base_dir) checkpoint_path = [ckpt_base_dir] @@ -165,30 +170,32 @@ def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', ckpt_steps=None, key=lambda x: int(re.findall(f'model_ckpt_steps_(\d+).ckpt', x.replace('\\', '/'))[0]) ) ] - if len(checkpoint_path) > 0: - checkpoint_path = checkpoint_path[-1] - state_dict = torch.load(checkpoint_path, map_location="cpu")["state_dict"] - state_dict = {k[len(prefix_in_ckpt) + 1:]: v for k, v in state_dict.items() - if k.startswith(f'{prefix_in_ckpt}.')} - if not strict: - cur_model_state_dict = cur_model.state_dict() - unmatched_keys = [] - for key, param in state_dict.items(): - if key in cur_model_state_dict: - new_param = cur_model_state_dict[key] - if new_param.shape != param.shape: - unmatched_keys.append(key) - print("| Unmatched keys: ", key, new_param.shape, param.shape) - for key in unmatched_keys: - del state_dict[key] - cur_model.load_state_dict(state_dict, strict=strict) - print(f"| load '{prefix_in_ckpt}' from '{checkpoint_path}'.") - else: - e_msg = f"| ckpt not found in {ckpt_base_dir}." - if force: - assert False, e_msg - else: - print(e_msg) + assert len(checkpoint_path) > 0, f'| ckpt not found in {ckpt_base_dir}.' 
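The loader being rewritten here keeps two behaviors from the old version: stripping the `prefix_in_ckpt.` namespace off every key, and, in non-strict mode, filtering out shape-mismatched tensors before calling `load_state_dict`. A minimal sketch under those assumptions (helper name hypothetical):

```python
from collections import OrderedDict

import torch

def load_matching_weights(model: torch.nn.Module, ckpt_path: str, prefix: str = 'model'):
    # 1) keep only keys under `prefix.` and strip that prefix
    state = torch.load(ckpt_path, map_location='cpu')['state_dict']
    state = OrderedDict(
        (k[len(prefix) + 1:], v) for k, v in state.items() if k.startswith(f'{prefix}.')
    )
    # 2) drop entries whose shapes no longer fit the current model;
    # load_state_dict(strict=False) tolerates missing keys, not shape clashes
    current = model.state_dict()
    for key in [k for k, v in state.items() if k in current and current[k].shape != v.shape]:
        del state[key]
    model.load_state_dict(state, strict=False)
```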
+ checkpoint_path = checkpoint_path[-1] + ckpt_loaded = torch.load(checkpoint_path, map_location=device) + if required_category is not None: + if not isinstance(cur_model, CategorizedModule): + raise TypeError(f'The \'{required_category}\' argument can only be used ' + f'on a \'basics.base_model.CategorizedModule\'.') + cur_model.check_category(ckpt_loaded.get('category')) + state_dict = ckpt_loaded['state_dict'] + state_dict = OrderedDict({ + k[len(prefix_in_ckpt) + 1:]: v + for k, v in state_dict.items() if k.startswith(f'{prefix_in_ckpt}.') + }) + if not strict: + cur_model_state_dict = cur_model.state_dict() + unmatched_keys = [] + for key, param in state_dict.items(): + if key in cur_model_state_dict: + new_param = cur_model_state_dict[key] + if new_param.shape != param.shape: + unmatched_keys.append(key) + print('| Unmatched keys: ', key, new_param.shape, param.shape) + for key in unmatched_keys: + del state_dict[key] + cur_model.load_state_dict(state_dict, strict=strict) + print(f'| load \'{prefix_in_ckpt}\' from \'{checkpoint_path}\'.') def remove_padding(x, padding_idx=0): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 3df2c435b..b69e76390 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -2,10 +2,11 @@ from torch.nn import DataParallel from torch.nn.parallel import DistributedDataParallel +from basics.base_model import CategorizedModule + matplotlib.use('Agg') import glob import itertools -import subprocess import threading import traceback @@ -474,7 +475,7 @@ def num_gpus(self): def data_parallel(self): return self.use_dp or self.use_ddp - def get_model(self): + def get_model(self) -> CategorizedModule: is_dp_module = isinstance(self.model, (DDP, DP)) model = self.model.module if is_dp_module else self.model return model From 29d4bf6b58fc997ef3f0bed4aca10093349ed6e7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 15 Mar 2023 17:20:55 +0800 Subject: [PATCH 066/475] Rename attribute, remove useless comments --- deployment/export/export_acoustic.py | 5 ++--- modules/diff/diffusion.py | 6 +++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index 0f91db312..1dd79a293 100644 --- a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -335,7 +335,7 @@ class GaussianDiffusion(CategorizedModule): def __init__(self, out_dims, timesteps=1000, k_step=1000, spec_min=None, spec_max=None): super().__init__() self.mel_bins = out_dims - self.K_step = k_step + self.k_step = k_step self.denoise_fn = DiffNet(out_dims) self.naive_noise_predictor = NaiveNoisePredictor() @@ -402,7 +402,7 @@ def forward(self, condition, speedup): device = condition.device n_frames = condition.shape[2] - step_range = torch.arange(0, self.K_step, speedup, dtype=torch.long, device=device).flip(0) + step_range = torch.arange(0, self.k_step, speedup, dtype=torch.long, device=device).flip(0) x = torch.randn((1, 1, self.mel_bins, n_frames), device=device) if speedup > 1: @@ -1208,7 +1208,6 @@ def export_phonemes_txt(path: str): if __name__ == '__main__': - # print('Oops, exporting ') parser = argparse.ArgumentParser(description='Export DiffSinger acoustic model to ONNX format.') parser.add_argument('--exp', type=str, required=True, help='experiment to export') parser.add_argument('--ckpt', type=int, required=False, help='checkpoint training steps') diff --git a/modules/diff/diffusion.py b/modules/diff/diffusion.py index b42801110..dbf386225 100644 --- a/modules/diff/diffusion.py +++ 
b/modules/diff/diffusion.py @@ -87,7 +87,7 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, timesteps, = betas.shape self.num_timesteps = int(timesteps) - self.K_step = k_step + self.k_step = k_step self.loss_type = loss_type self.noise_list = deque(maxlen=4) @@ -230,11 +230,11 @@ def forward(self, condition, gt_spec=None, infer=True): if not infer: spec = self.norm_spec(gt_spec) - t = torch.randint(0, self.K_step, (b,), device=device).long() + t = torch.randint(0, self.k_step, (b,), device=device).long() norm_spec = spec.transpose(1, 2)[:, None, :, :] # [B, 1, M, T] return self.p_losses(norm_spec, t, cond=cond) else: - t = self.K_step + t = self.k_step shape = (cond.shape[0], 1, self.out_dims, cond.shape[2]) x = torch.randn(shape, device=device) if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1: From 329e4229cf40efc100a19bc6d4c777f1cff9692d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 15 Mar 2023 18:46:31 +0800 Subject: [PATCH 067/475] Rename module --- modules/diff/diffusion.py | 4 ++-- modules/diff/wavenet.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/diff/diffusion.py b/modules/diff/diffusion.py index dbf386225..04263139e 100644 --- a/modules/diff/diffusion.py +++ b/modules/diff/diffusion.py @@ -8,11 +8,11 @@ from torch import nn from tqdm import tqdm -from modules.diff.wavenet import DiffNet +from modules.diff.wavenet import WaveNet from utils.hparams import hparams DIFF_DENOISERS = { - 'wavenet': lambda hp: DiffNet(hp['audio_num_mel_bins']), + 'wavenet': lambda hp: WaveNet(hp['audio_num_mel_bins']), } diff --git a/modules/diff/wavenet.py b/modules/diff/wavenet.py index 45f47c0cf..93085e40b 100644 --- a/modules/diff/wavenet.py +++ b/modules/diff/wavenet.py @@ -84,7 +84,7 @@ def forward(self, x, conditioner, diffusion_step): return (x + residual) / sqrt(2.0), skip -class DiffNet(nn.Module): +class WaveNet(nn.Module): def __init__(self, in_dims=80): super().__init__() self.params = params = AttrDict( From 4263d455aae2f22f9938f2de7905b3bf7445ebe6 Mon Sep 17 00:00:00 2001 From: yxlllc <33565655+yxlllc@users.noreply.github.com> Date: Wed, 15 Mar 2023 22:16:17 +0800 Subject: [PATCH 068/475] fix bugs of f0 alignment --- inference/ds_cascade.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/inference/ds_cascade.py b/inference/ds_cascade.py index f82f679d8..e219343b4 100644 --- a/inference/ds_cascade.py +++ b/inference/ds_cascade.py @@ -258,7 +258,13 @@ def forward_model(self, inp, return_mel=False): spk_mix_embed = torch.stack(spk_mix_embed, dim=1).sum(dim=1) else: spk_mix_embed = None + mel2ph = sample['mel2ph'] f0 = sample['f0'] + nframes = mel2ph.size(1) + delta_l = nframes - f0.size(1) + if delta_l > 0: + f0 = torch.cat((f0,torch.FloatTensor([[x[-1]] * delta_l for x in f0]).to(f0.device)),1) + f0 = f0[:, :nframes] mel = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], key_shift=sample['key_shift'], speed=sample['speed'], spk_mix_embed=spk_mix_embed, infer=True) From bc50ea3366b0c4fbaf2abd13481d772a52241078 Mon Sep 17 00:00:00 2001 From: yxlllc <33565655+yxlllc@users.noreply.github.com> Date: Wed, 15 Mar 2023 22:41:37 +0800 Subject: [PATCH 069/475] fix bug --- configs/acoustic.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index d11c94b2d..6746e96ca 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -43,7 +43,7 @@ binarization_args: raw_data_dir: 'data/opencpop/raw' binary_data_dir: 'data/opencpop/binary' 
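The config fix just below tracks the `preprocessing/acoustic.py` → `preprocessing/acoustic_binarizer.py` rename from earlier in this series: `binarizer_cls` is a dotted import path resolved at runtime, so it must follow the file. A sketch of the usual resolution pattern (the project's actual loader may differ):

```python
import importlib

def resolve_dotted_path(dotted: str):
    # 'preprocessing.acoustic_binarizer.AcousticBinarizer'
    # -> import module 'preprocessing.acoustic_binarizer', fetch 'AcousticBinarizer'
    module_name, cls_name = dotted.rsplit('.', maxsplit=1)
    return getattr(importlib.import_module(module_name), cls_name)
```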
-binarizer_cls: preprocessing.acoustic.AcousticBinarizer +binarizer_cls: preprocessing.acoustic_binarizer.AcousticBinarizer g2p_dictionary: dictionaries/opencpop-extension.txt spec_min: [-5] spec_max: [0] From 3d4970a7f0ff18fdab4b9d3274a699d56b1cd499 Mon Sep 17 00:00:00 2001 From: yxlllc <33565655+yxlllc@users.noreply.github.com> Date: Wed, 15 Mar 2023 22:51:26 +0800 Subject: [PATCH 070/475] fix bug --- configs/base.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/configs/base.yaml b/configs/base.yaml index 8ec348538..9e8e4448c 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -7,7 +7,6 @@ debug: false save_codes: - configs - modules - - src - utils ############# From 5a698691b5909ca04ac1810a91be29443450a9cc Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 16 Mar 2023 00:14:02 +0800 Subject: [PATCH 071/475] Save code in 'training/' directory --- configs/base.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/configs/base.yaml b/configs/base.yaml index 9e8e4448c..6043eb568 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -7,6 +7,7 @@ debug: false save_codes: - configs - modules + - training - utils ############# From 6ec49456bcb06306a08fb877583696899a58335f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 16 Mar 2023 00:42:51 +0800 Subject: [PATCH 072/475] Remove max_frames --- basics/base_dataset.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/basics/base_dataset.py b/basics/base_dataset.py index ffea96bb4..ac34ebd0d 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -49,8 +49,7 @@ def num_tokens(self, index): def size(self, index): """Return an example's size as a float or tuple. This value is used when filtering a dataset with ``--max-positions``.""" - size = min(self._sizes[index], hparams['max_frames']) - return size + return self._sizes[index] def ordered_indices(self): """Return an ordered list of indices. 
Batches will be constructed based @@ -66,4 +65,4 @@ def ordered_indices(self): @property def num_workers(self): - return int(os.getenv('NUM_WORKERS', hparams['ds_workers'])) + return int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 0))) From 93a485a57481b4e9173cf4a6f986e50b823ae3fc Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 18 Mar 2023 00:26:17 +0800 Subject: [PATCH 073/475] Re-implement inference; refactor code --- basics/base_svs_infer.py | 129 +---------- configs/acoustic.yaml | 2 - configs/base.yaml | 1 - deployment/export/export_acoustic.py | 5 +- deployment/export/export_nsf_hifigan.py | 2 +- docs/README-SVS-opencpop-cascade.md | 2 +- inference/ds_acoustic.py | 216 +++++++++++++++++++ inference/ds_cascade.py | 275 ------------------------ inference/vocoder/val_nsf_hifigan.py | 2 +- modules/fastspeech/acoustic_encoder.py | 33 +-- scripts/infer.py | 22 +- training/acoustic_task.py | 36 ++-- utils/infer_utils.py | 17 +- utils/spk_utils.py | 1 - 14 files changed, 272 insertions(+), 471 deletions(-) create mode 100644 inference/ds_acoustic.py delete mode 100644 inference/ds_cascade.py diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 8d6331fb7..5becc7416 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -1,149 +1,40 @@ # coding=utf8 -import json -import os import torch -from pypinyin import lazy_pinyin -from modules.vocoders.registry import VOCODERS from utils.hparams import hparams -from utils.phoneme_utils import build_g2p_dictionary, build_phoneme_list -from utils.text_encoder import TokenTextEncoder class BaseSVSInfer: """ Base class for SVS inference models. - 1. *example_run* and *infer_once*: - the overall pipeline; - 2. *build_vocoder* and *run_vocoder*: - a HifiGAN vocoder; - 3. *preprocess_word_level_input*: - convert words to phonemes, add slurs. - Subclasses should define: 1. *build_model*: how to build the model; - 2. *forward_model*: + 2. *run_model*: how to run the model (typically, generate a mel-spectrogram and pass it to the pre-built vocoder); 3. *preprocess_input*: how to preprocess user input. + 4. *infer_once* + infer from raw inputs to the final outputs """ - def __init__(self, hparams, device=None, load_model=True, load_vocoder=True, ckpt_steps=None): + def __init__(self, device=None): if device is None: device = 'cuda' if torch.cuda.is_available() else 'cpu' - self.hparams = hparams self.device = device - - if load_model: - phone_list = build_phoneme_list() - self.ph_encoder = TokenTextEncoder(vocab_list=phone_list) - self.pinyin2phs = build_g2p_dictionary() - if hparams['use_spk_id']: - with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: - self.spk_map = json.load(f) - assert isinstance(self.spk_map, dict) and len(self.spk_map) > 0, 'Invalid or empty speaker map!' - assert len(self.spk_map) == len(set(self.spk_map.values())), 'Duplicate speaker id in speaker map!' 
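The docstring above pins down the subclass contract; a hypothetical skeleton that satisfies it (`DiffSingerAcousticInfer`, added later in this patch, is the real example):

```python
from basics.base_svs_infer import BaseSVSInfer

class MyInfer(BaseSVSInfer):  # hypothetical subclass
    def build_model(self, ckpt_steps=None):
        raise NotImplementedError  # construct the network and load weights

    def preprocess_input(self, inp):
        raise NotImplementedError  # one .ds segment -> model batch

    def run_model(self, param, return_mel=False):
        raise NotImplementedError  # batch -> mel (and on to the vocoder)

    def infer_once(self, param):
        return self.run_model(self.preprocess_input(param))
```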
- self.model = self.build_model(ckpt_steps=ckpt_steps) - self.model.eval() - self.model.to(self.device) - if load_vocoder: - self.vocoder = self.build_vocoder() - self.vocoder.model.eval() - self.vocoder.model.to(self.device) + self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] + self.spk_map = {} def build_model(self, ckpt_steps=None): raise NotImplementedError - def forward_model(self, inp, return_mel): + def preprocess_input(self, inp): raise NotImplementedError - def build_vocoder(self): - if hparams['vocoder'] in VOCODERS: - vocoder = VOCODERS[hparams['vocoder']]() - else: - vocoder = VOCODERS[hparams['vocoder'].split('.')[-1]]() - return vocoder - - def run_vocoder(self, c, **kwargs): - y = self.vocoder.spec2wav_torch(c, **kwargs) - return y[None] - - def preprocess_word_level_input(self, inp): - # Pypinyin can't solve polyphonic words - text_raw = inp['text'].replace('最长', '最常').replace('长睫毛', '常睫毛') \ - .replace('那么长', '那么常').replace('多长', '多常') \ - .replace('很长', - '很常') # We hope someone could provide a better g2p module for us by opening pull requests. - - # lyric - pinyins = lazy_pinyin(text_raw, strict=False) - ph_per_word_lst = [' '.join(self.pinyin2phs[pinyin.strip()]) - for pinyin in pinyins - if pinyin.strip() in self.pinyin2phs] - - # Note - note_per_word_lst = [x.strip() for x in inp['notes'].split('|') if x.strip() != ''] - mididur_per_word_lst = [x.strip() for x in inp['notes_duration'].split('|') if x.strip() != ''] - - if not len(note_per_word_lst) == len(ph_per_word_lst) == len(mididur_per_word_lst): - raise RuntimeError('The number of words does\'t match the number of notes\' windows: ' - f'{len(ph_per_word_lst)} {len(note_per_word_lst)} {len(mididur_per_word_lst)}\n', - 'You should split the note(s) for each word by | mark.') - - note_lst = [] - ph_lst = [] - midi_dur_lst = [] - is_slur = [] - for idx, ph_per_word in enumerate(ph_per_word_lst): - # for phs in one word: - # single ph like ['ai'] or multiple phs like ['n', 'i'] - ph_in_this_word = ph_per_word.split() - - # for notes in one word: - # single note like ['D4'] or multiple notes like ['D4', 'E4'] which means a 'slur' here. - note_in_this_word = note_per_word_lst[idx].split() - midi_dur_in_this_word = mididur_per_word_lst[idx].split() - # process for the model input - # Step 1. - # Deal with note of 'not slur' case or the first note of 'slur' case - # j ie - # F#4/Gb4 F#4/Gb4 - # 0 0 - for ph in ph_in_this_word: - ph_lst.append(ph) - note_lst.append(note_in_this_word[0]) - midi_dur_lst.append(midi_dur_in_this_word[0]) - is_slur.append(0) - # step 2. - # Deal with the 2nd, 3rd... notes of 'slur' case - # j ie ie - # F#4/Gb4 F#4/Gb4 C#4/Db4 - # 0 0 1 - if len(note_in_this_word) > 1: # is_slur = True, we should repeat the YUNMU to match the 2nd, 3rd... notes. 
- for idx in range(1, len(note_in_this_word)): - ph_lst.append(ph_in_this_word[-1]) - note_lst.append(note_in_this_word[idx]) - midi_dur_lst.append(midi_dur_in_this_word[idx]) - is_slur.append(1) - ph_seq = ' '.join(ph_lst) - - if not len(ph_lst) == len(note_lst) == len(midi_dur_lst): - raise RuntimeError('The number of words does\'t match the number of notes\' windows: ' - f'{len(ph_lst)} {len(note_lst)} {len(midi_dur_lst)}\n', - 'You should split the note(s) for each word by | mark.') - return ph_seq, note_lst, midi_dur_lst, is_slur - - def preprocess_input(self, inp, input_type): + def run_model(self, param, return_mel): raise NotImplementedError - def postprocess_output(self, output): - return output - - def infer_once(self, inp, return_mel=False): - inp = self.preprocess_input(inp, input_type=inp['input_type'] if inp.get('input_type') else 'word') - output = self.forward_model(inp, return_mel=return_mel) - output = self.postprocess_output(output) - return output + def infer_once(self, param): + raise NotImplementedError() diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 6746e96ca..13dbb9de3 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -52,8 +52,6 @@ mel_vmin: -6. #-6. mel_vmax: 1.5 save_f0: true -max_frames: 8000 -use_midi: false use_spk_embed: false use_spk_id: false use_pitch_embed: true diff --git a/configs/base.yaml b/configs/base.yaml index 6043eb568..89e7c87dc 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -23,7 +23,6 @@ binarization_args: endless_ds: true -max_frames: 1550 audio_num_mel_bins: 80 audio_sample_rate: 22050 hop_size: 256 # For 22050Hz, 275 ~= 12.5 ms (0.0125 * sample_rate) diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index 1dd79a293..da1bfda9e 100644 --- a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -981,9 +981,6 @@ def _save_speaker_embed(path: str, spk_embed: np.ndarray): def export(fs2_path, diff_path, ckpt_steps=None, expose_gender=False, expose_velocity=False, spk_export_list=None, frozen_spk=None): - if hparams.get('use_midi', True) or not hparams['use_pitch_embed']: - raise NotImplementedError('Only checkpoints of MIDI-less mode are supported.') - # Build models to export device = 'cuda' if torch.cuda.is_available() else 'cpu' fs2 = FastSpeech2Wrapper( @@ -1252,7 +1249,7 @@ def export_phonemes_txt(path: str): out = f'deployment/assets/{exp}' os.chdir(root_dir) sys.argv = [ - 'inference/ds_cascade.py', + 'inference/ds_acoustic.py', '--exp_name', exp, '--infer' diff --git a/deployment/export/export_nsf_hifigan.py b/deployment/export/export_nsf_hifigan.py index 28290637a..52c7070cd 100644 --- a/deployment/export/export_nsf_hifigan.py +++ b/deployment/export/export_nsf_hifigan.py @@ -309,7 +309,7 @@ def export(model_path): if __name__ == '__main__': sys.argv = [ - 'inference/ds_cascade.py', + 'inference/ds_acoustic.py', '--config', 'configs/acoustic.yaml', ] diff --git a/docs/README-SVS-opencpop-cascade.md b/docs/README-SVS-opencpop-cascade.md index 2e3556020..225380bf4 100644 --- a/docs/README-SVS-opencpop-cascade.md +++ b/docs/README-SVS-opencpop-cascade.md @@ -82,7 +82,7 @@ Remember to put the pre-trained models in `checkpoints` directory. ### 4. 
Inference from raw inputs ```sh -python inference/ds_cascade.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME +python inference/ds_acoustic.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME ``` Raw inputs: ``` diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py new file mode 100644 index 000000000..329287722 --- /dev/null +++ b/inference/ds_acoustic.py @@ -0,0 +1,216 @@ +import json +import os + +import numpy as np +import torch + +from basics.base_svs_infer import BaseSVSInfer +from modules.fastspeech.tts_modules import LengthRegulator +from modules.toplevel.acoustic_model import DiffSingerAcoustic +from modules.vocoders.registry import VOCODERS +from utils import load_ckpt +from utils.hparams import hparams +from utils.infer_utils import resample_align_curve +from utils.phoneme_utils import build_phoneme_list +from utils.text_encoder import TokenTextEncoder + + +class DiffSingerAcousticInfer(BaseSVSInfer): + def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=None): + super().__init__(device=device) + if load_model: + self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) + if hparams['use_spk_id']: + with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: + self.spk_map = json.load(f) + assert isinstance(self.spk_map, dict) and len(self.spk_map) > 0, 'Invalid or empty speaker map!' + assert len(self.spk_map) == len(set(self.spk_map.values())), 'Duplicate speaker id in speaker map!' + self.model = self.build_model(ckpt_steps=ckpt_steps) + self.lr = LengthRegulator().to(self.device) + if load_vocoder: + self.vocoder = self.build_vocoder() + self.vocoder.model.eval() + self.vocoder.model.to(self.device) + + def build_model(self, ckpt_steps=None): + model = DiffSingerAcoustic( + vocab_size=len(self.ph_encoder), + out_dims=hparams['audio_num_mel_bins'] + ).eval().to(self.device) + load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps, + required_category='acoustic', strict=True, device=self.device) + return model + + def build_vocoder(self): + if hparams['vocoder'] in VOCODERS: + vocoder = VOCODERS[hparams['vocoder']]() + else: + vocoder = VOCODERS[hparams['vocoder'].split('.')[-1]]() + vocoder.model.eval() + vocoder.model.to(self.device) + return vocoder + + def preprocess_input(self, param): + """ + :param param: one segment in the .ds file + :return: batch of the model inputs + """ + batch = {} + txt_tokens = torch.LongTensor([self.ph_encoder.encode(param['ph_seq'])]).to(self.device) # => [B, T_txt] + batch['tokens'] = txt_tokens + + ph_dur = torch.from_numpy(np.array(param['ph_dur'].split(), np.float32)).to(self.device) + ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() + durations = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device))[None] # => [B=1, T_txt] + mel2ph = self.lr(durations, txt_tokens == 0) # => [B=1, T] + batch['mel2ph'] = mel2ph + length = mel2ph.size(1) # => T + + if hparams['use_spk_id']: + spk_mix_map = param.get('spk_mix') # { spk_name: value } or { spk_name: "value value value ..." } + dynamic = False + if spk_mix_map is None: + # Get the first speaker + for name in self.spk_map.keys(): + spk_mix_map = {name: 1.0} + break + else: + for name in spk_mix_map: + assert name in self.spk_map, f'Speaker \'{name}\' not found.' 
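A few lines up, `preprocess_input` converts per-phoneme durations in seconds into integer frame counts by rounding *cumulative* boundaries rather than individual durations, so rounding error never accumulates across phonemes. A small numeric sketch of the same trick (hop and sample-rate values illustrative):

```python
import torch

timestep = 512 / 44100                     # hop_size / audio_sample_rate
ph_dur = torch.tensor([0.12, 0.30, 0.08])  # seconds per phoneme
ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / timestep + 0.5).long()
durations = torch.diff(ph_acc, dim=0, prepend=torch.tensor([0]))
# durations sums to exactly ph_acc[-1] frames; rounding each phoneme
# independently could drift by up to one frame per phoneme.
```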
+ + if len(spk_mix_map) == 1: + print(f'Using speaker \'{list(spk_mix_map.keys())[0]}\'') + elif any([isinstance(val, str) for val in spk_mix_map.values()]): + print_mix = '|'.join(spk_mix_map.keys()) + print(f'Using dynamic speaker mix \'{print_mix}\'') + dynamic = True + else: + print_mix = '|'.join([f'{n}:{"%.3f" % spk_mix_map[n]}' for n in spk_mix_map]) + print(f'Using static speaker mix \'{print_mix}\'') + + spk_mix_id_list = [] + spk_mix_value_list = [] + if dynamic: + for name, values in spk_mix_map.items(): + spk_mix_id_list.append(self.spk_map[name]) + if isinstance(values, str): + # this speaker has a variable proportion + cur_spk_mix_value = torch.from_numpy(resample_align_curve( + np.array(values.split(), 'float32'), + original_timestep=float(param['spk_mix_timestep']), + target_timestep=self.timestep, + align_length=length + )).to(self.device)[None] # => [B=1, T] + assert torch.all(cur_spk_mix_value >= 0.), \ + f'Speaker mix checks failed.\n' \ + f'Proportions of speaker \'{name}\' on some frames are negative.' + else: + # this speaker has a constant proportion + assert values >= 0., f'Speaker mix checks failed.\n' \ + f'Proportion of speaker \'{name}\' is negative.' + cur_spk_mix_value = torch.full( + (1, length), fill_value=values, + dtype=torch.float32, device=self.device + ) + spk_mix_value_list.append(cur_spk_mix_value) + spk_mix_id = torch.LongTensor(spk_mix_id_list).to(self.device)[None, None] # => [B=1, 1, N] + spk_mix_value = torch.stack(spk_mix_value_list, dim=2) # [B=1, T] => [B=1, T, N] + spk_mix_value_sum = torch.sum(spk_mix_value, dim=2, keepdim=True) # => [B=1, T, 1] + assert torch.all(spk_mix_value_sum > 0.), \ + f'Speaker mix checks failed.\n' \ + f'Proportions of speaker mix on some frames sum to zero.' + spk_mix_value /= spk_mix_value_sum # normalize + else: + for name, value in spk_mix_map.items(): + spk_mix_id_list.append(self.spk_map[name]) + assert value >= 0., f'Speaker mix checks failed.\n' \ + f'Proportion of speaker \'{name}\' is negative.' + spk_mix_value_list.append(value) + spk_mix_id = torch.LongTensor(spk_mix_id_list).to(self.device)[None, None] # => [B=1, 1, N] + spk_mix_value = torch.FloatTensor(spk_mix_value_list).to(self.device)[None, None] # => [B=1, 1, N] + spk_mix_value_sum = spk_mix_value.sum() + assert spk_mix_value_sum > 0., f'Speaker mix checks failed.\n' \ + f'Proportions of speaker mix sum to zero.' 
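Once the per-frame proportions are normalized (the line just below), the blend applied later in `run_model` reduces to a broadcasted weighted sum over the speaker axis. A shape-checked sketch with hypothetical sizes:

```python
import torch
import torch.nn as nn

spk_embed = nn.Embedding(4, 256)                  # num_spk=4, hidden_size=256 (illustrative)
spk_mix_id = torch.tensor([[[0, 2]]])             # [B=1, 1, N=2] speakers in the mix
spk_mix_value = torch.softmax(torch.randn(1, 100, 2), dim=2)  # [B=1, T=100, N=2], rows sum to 1
spk_mix_embed = torch.sum(
    spk_embed(spk_mix_id) * spk_mix_value.unsqueeze(3),  # [1, 1, N, H] * [1, T, N, 1]
    dim=2
)
assert spk_mix_embed.shape == (1, 100, 256)       # one blended embedding per frame
```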
+ spk_mix_value /= spk_mix_value_sum # normalize + + batch['spk_mix_id'] = spk_mix_id + batch['spk_mix_value'] = spk_mix_value + + batch['f0'] = torch.from_numpy(resample_align_curve( + np.array(param['f0_seq'].split(), np.float32), + original_timestep=float(param['f0_timestep']), + target_timestep=self.timestep, + align_length=length + )).to(self.device)[None] + + if hparams.get('use_key_shift_embed', False): + shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] + if isinstance(param['gender'], float): # static gender value + gender = param['gender'] + print(f'Using static gender value: {gender:.3f}') + key_shift_value = gender * shift_max if gender >= 0 else gender * abs(shift_min) + batch['key_shift'] = torch.FloatTensor([key_shift_value]).to(self.device)[:, None] # => [B=1, T=1] + else: + print('Using dynamic gender curve') + gender_seq = resample_align_curve( + np.array(param['gender'].split(), np.float32), + original_timestep=float(param['gender_timestep']), + target_timestep=self.timestep, + align_length=length + ) + gender_mask = gender_seq >= 0 + key_shift_seq = gender_seq * (gender_mask * shift_max + (1 - gender_mask) * abs(shift_min)) + batch['key_shift'] = torch.clip( + torch.from_numpy(key_shift_seq).to(self.device)[None], # => [B=1, T] + min=shift_min, max=shift_max + ) + + if hparams.get('use_speed_embed', False): + if param['velocity'] is None: + print('Using default velocity curve') + batch['speed'] = torch.FloatTensor([1.]).to(self.device)[:, None] # => [B=1, T=1] + else: + print('Using manual velocity curve') + speed_min, speed_max = hparams['augmentation_args']['random_time_stretching']['range'] + speed_seq = resample_align_curve( + np.array(param['velocity'].split(), np.float32), + original_timestep=float(param['velocity_timestep']), + target_timestep=self.timestep, + align_length=length + ) + batch['speed'] = torch.clip( + torch.from_numpy(speed_seq).to(self.device)[None], # => [B=1, T] + min=speed_min, max=speed_max + ) + + return batch + + @torch.no_grad() + def run_model(self, sample, return_mel=False): + txt_tokens = sample['tokens'] + if hparams['use_spk_id']: + # perform mixing on spk embed + spk_mix_embed = torch.sum( + self.model.fs2.spk_embed(sample['spk_mix_id']) * sample['spk_mix_value'].unsqueeze(3), # => [B, T, N, H] + dim=2, keepdim=False + ) # => [B, T, H] + else: + spk_mix_embed = None + mel_pred = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], + key_shift=sample.get('key_shift'), speed=sample.get('speed'), + spk_mix_embed=spk_mix_embed, infer=True) + return mel_pred + + @torch.no_grad() + def run_vocoder(self, spec, **kwargs): + y = self.vocoder.spec2wav_torch(spec, **kwargs) + return y[None] + + def infer_once(self, param, return_mel=False): + batch = self.preprocess_input(param) + mel = self.run_model(batch, return_mel=True) + if return_mel: + return mel.cpu(), batch['f0'].cpu() + else: + waveform = self.run_vocoder(mel, f0=batch['f0']) + return waveform.view(-1).cpu().numpy() diff --git a/inference/ds_cascade.py b/inference/ds_cascade.py deleted file mode 100644 index e219343b4..000000000 --- a/inference/ds_cascade.py +++ /dev/null @@ -1,275 +0,0 @@ -import torch -from basics.base_svs_infer import BaseSVSInfer -from utils import load_ckpt -from utils.hparams import hparams -from modules.fastspeech.tts_modules import LengthRegulator -from modules.toplevel.acoustic_model import DiffSingerAcoustic -import librosa -import numpy as np - - -class DiffSingerCascadeInfer(BaseSVSInfer): - def 
build_model(self, ckpt_steps=None): - model = DiffSingerAcoustic( - vocab_size=len(self.ph_encoder), - out_dims=hparams['audio_num_mel_bins'] - ) - model.eval() - load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps, - required_category='acoustic', strict=True, device=self.device) - return model - - def preprocess_word_level_input(self, inp): - return super().preprocess_word_level_input(inp) - - def preprocess_phoneme_level_input(self, inp): - ph_seq = inp['ph_seq'] - note_lst = inp['note_seq'].split() - midi_dur_lst = inp['note_dur_seq'].split() - is_slur = np.array(inp['is_slur_seq'].split(), 'float') - ph_dur = None - f0_timestep = float(inp['f0_timestep']) - f0_seq = None - gender_timestep = None - gender = 0. - if inp['f0_seq'] is not None: - f0_seq = np.array(inp['f0_seq'].split(), 'float') - if inp.get('gender') is not None: - if isinstance(inp['gender'], str): - gender_timestep = float(inp['gender_timestep']) - gender = np.array(inp['gender'].split(), 'float') - else: - gender = float(inp['gender']) - velocity_timestep = None - velocity = None - if inp.get('velocity') is not None: - velocity_timestep = float(inp['velocity_timestep']) - velocity = np.array(inp['velocity'].split(), 'float') - ph_seq_lst = ph_seq.split() - if inp['ph_dur'] is not None: - ph_dur = np.array(inp['ph_dur'].split(), 'float') - if not len(note_lst) == len(ph_seq_lst) == len(midi_dur_lst) == len(ph_dur): - raise RuntimeError(f'The number of notes, phones and durations mismatch:' - f'{len(note_lst)} {len(ph_seq.split())} {len(midi_dur_lst)} {len(ph_dur)}') - else: - if not len(note_lst) == len(ph_seq.split()) == len(midi_dur_lst): - raise RuntimeError(f'The number of notes, phones and durations mismatch:' - f'{len(note_lst)} {len(ph_seq.split())} {len(midi_dur_lst)}') - print(f'Processed {len(ph_seq_lst)} tokens: {" ".join(ph_seq_lst)}') - - return ph_seq, note_lst, midi_dur_lst, is_slur, ph_dur, \ - f0_timestep, f0_seq, gender_timestep, gender, velocity_timestep, velocity - - def preprocess_input(self, inp, input_type='word'): - """ - :param inp: {'text': str, 'item_name': (str, optional), 'spk_name': (str, optional)} - :return: - """ - - item_name = inp.get('item_name', '') - if hparams['use_spk_id']: - spk_mix = inp.get('spk_mix') - if spk_mix is None: - for name in self.spk_map.keys(): - spk_mix = {name: 1.0} - break - else: - for name in spk_mix: - assert name in self.spk_map, f'Speaker \'{name}\' not found.' - if len(spk_mix) == 1: - print(f'Using speaker \'{list(spk_mix.keys())[0]}\'') - elif any([isinstance(val, list) for val in spk_mix.values()]): - print_mix = '|'.join(spk_mix.keys()) - print(f'Using dynamic speaker mix \'{print_mix}\'') - else: - print_mix = '|'.join([f'{n}:{"%.3f" % spk_mix[n]}' for n in spk_mix]) - print(f'Using static speaker mix \'{print_mix}\'') - else: - spk_mix = None - - # get ph seq, note lst, midi dur lst, is slur lst. - if input_type == 'word': - ph_seq, note_lst, midi_dur_lst, is_slur = self.preprocess_word_level_input(inp) - ph_dur = f0_timestep = f0_seq = gender_timestep = gender = velocity_timestep = velocity = None - elif input_type == 'phoneme': # like transcriptions.txt in Opencpop dataset. - ph_seq, note_lst, midi_dur_lst, is_slur, ph_dur, \ - f0_timestep, f0_seq, gender_timestep, gender, velocity_timestep, velocity = \ - self.preprocess_phoneme_level_input(inp) - else: - raise ValueError('Invalid input type. 
Must be \'word\' or \'phoneme\'.') - - # convert note lst to midi id; convert note dur lst to midi duration - try: - midis = [librosa.note_to_midi(x.split("/")[0]) if x != 'rest' else 0 - for x in note_lst] - midi_dur_lst = [float(x) for x in midi_dur_lst] - except Exception as e: - print(e) - print('Invalid Input Type.') - return None - - ph_token = self.ph_encoder.encode(ph_seq) - item = {'item_name': item_name, 'text': inp['text'], 'ph': ph_seq, 'spk_mix': spk_mix, - 'ph_token': ph_token, 'pitch_midi': np.asarray(midis), 'midi_dur': np.asarray(midi_dur_lst), - 'is_slur': np.asarray(is_slur), 'ph_dur': None, 'f0_timestep': 0., 'f0_seq': None} - item['ph_len'] = len(item['ph_token']) - if input_type == 'phoneme': - item['ph_dur'] = ph_dur - item['f0_timestep'] = f0_timestep - item['f0_seq'] = f0_seq - item['gender_timestep'] = gender_timestep - item['gender'] = gender - item['velocity_timestep'] = velocity_timestep - item['velocity'] = velocity - item['spk_mix_timestep'] = inp.get('spk_mix_timestep') - return item - - def input_to_batch(self, item): - item_names = [item['item_name']] - text = [item['text']] - ph = [item['ph']] - txt_tokens = torch.LongTensor(item['ph_token'])[None, :].to(self.device) - txt_lengths = torch.LongTensor([txt_tokens.shape[1]]).to(self.device) - if hparams['use_spk_id']: - spk_mix_map = item['spk_mix'] - dynamic_mix = any([isinstance(val, list) for val in spk_mix_map.values()]) - max_length = max([len(val) for val in spk_mix_map.values()]) if dynamic_mix else 0 - if dynamic_mix: - mix_value_list = [] - for spk_name in spk_mix_map: - if isinstance(spk_mix_map[spk_name], list): - mix_seq = spk_mix_map[spk_name] + \ - [spk_mix_map[spk_name][-1]] * (max_length - len(spk_mix_map[spk_name])) - else: - mix_seq = [spk_mix_map[spk_name]] * max_length - timestep = item['spk_mix_timestep'] - t_max = (max_length - 1) * timestep - dt = hparams['hop_size'] / hparams['audio_sample_rate'] - mix_value_list.append(np.interp(np.arange(0, t_max, dt), timestep * np.arange(max_length), mix_seq)) - mix_value_ndarray = np.stack(mix_value_list, axis=0) - assert np.all(mix_value_ndarray >= 0), 'All proportion values of speaker mix should be non-negative.' - frame_sum = mix_value_ndarray.sum(axis=0)[None, :] - assert np.all(frame_sum > 0), 'Proportions of speaker mix on some frames sum to zero.' - mix_value_list = list(mix_value_ndarray / frame_sum) - spk_mixes = { - torch.LongTensor([self.spk_map[n]]).to(self.device) : torch.FloatTensor(mix_value_list[i][None, :, None]).to(self.device) - for i, n in enumerate(spk_mix_map.keys()) - } - else: - assert all([val >= 0 for val in spk_mix_map.values()]), 'All proportion values of speaker mix should be non-negative.' - proportion_sum = sum(spk_mix_map.values()) - assert proportion_sum > 0, 'Proportions of speaker mix sum to zero.' 
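The hand-rolled `np.interp` resampling deleted above is what the new `resample_align_curve` helper (see the `utils/infer_utils.py` hunk later in this patch) generalizes: resample a control curve to the frame timestep, then pad or trim it to the target length. A usage sketch with illustrative values:

```python
import numpy as np

from utils.infer_utils import resample_align_curve

f0_points = np.array([440.0, 441.2, 439.5, 438.0], dtype=np.float32)
f0_frames = resample_align_curve(
    f0_points,
    original_timestep=0.01,       # the .ds file stores one point every 10 ms
    target_timestep=512 / 44100,  # one value per mel frame
    align_length=80               # length of mel2ph; pads with the last value
)
assert f0_frames.shape == (80,)
```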
- spk_mixes = { - torch.LongTensor([self.spk_map[n]]).to(self.device) : spk_mix_map[n] / proportion_sum - for n in spk_mix_map - } - else: - spk_mixes = None - pitch_midi = torch.LongTensor(item['pitch_midi'])[None, :hparams['max_frames']].to(self.device) - midi_dur = torch.FloatTensor(item['midi_dur'])[None, :hparams['max_frames']].to(self.device) - is_slur = torch.LongTensor(item['is_slur'])[None, :hparams['max_frames']].to(self.device) - mel2ph = None - f0 = None - if item['ph_dur'] is not None: - print('Using manual phone duration') - ph_acc = np.around( - np.add.accumulate(item['ph_dur']) * hparams['audio_sample_rate'] / hparams['hop_size'] + 0.5).astype( - 'int') - ph_dur = np.diff(ph_acc, prepend=0) - ph_dur = torch.LongTensor(ph_dur)[None, :hparams['max_frames']].to(self.device) - lr = LengthRegulator() - mel2ph = lr(ph_dur, txt_tokens == 0).detach() - else: - print('Using automatic phone duration') - - if item['f0_timestep'] > 0. and item['f0_seq'] is not None: - print('Using manual pitch curve') - f0_timestep = item['f0_timestep'] - f0_seq = item['f0_seq'] - t_max = (len(f0_seq) - 1) * f0_timestep - dt = hparams['hop_size'] / hparams['audio_sample_rate'] - f0_interp = np.interp(np.arange(0, t_max, dt), f0_timestep * np.arange(len(f0_seq)), f0_seq) - f0 = torch.FloatTensor(f0_interp)[None, :].to(self.device) - else: - print('Using automatic pitch curve') - - if hparams.get('use_key_shift_embed', False): - shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] - if isinstance(item['gender'], float): - print(f'Using static gender value: {item["gender"]:.3f}') - gender = item['gender'] - key_shift_value = gender * shift_max if gender >= 0 else gender * abs(shift_min) - key_shift = torch.FloatTensor([key_shift_value]).to(self.device) - else: - print('Using dynamic gender curve') - gender_timestep = item['gender_timestep'] - gender_seq = item['gender'] - gender_mask = gender_seq >= 0 - key_shift_seq = gender_seq * (gender_mask * shift_max + (1 - gender_mask) * abs(shift_min)) - t_max = (len(key_shift_seq) - 1) * gender_timestep - dt = hparams['hop_size'] / hparams['audio_sample_rate'] - key_shift_interp = np.interp(np.arange(0, t_max, dt), gender_timestep * np.arange(len(key_shift_seq)), key_shift_seq) - key_shift = torch.FloatTensor(key_shift_interp)[None, :].to(self.device) - else: - key_shift = None - - if hparams.get('use_speed_embed', False): - if item['velocity'] is None: - print('Using default velocity curve') - speed = torch.FloatTensor([1.]).to(self.device) - else: - print('Using manual velocity curve') - velocity_timestep = item['velocity_timestep'] - velocity_seq = item['velocity'] - speed_min, speed_max = hparams['augmentation_args']['random_time_stretching']['range'] - speed_seq = np.clip(velocity_seq, a_min=speed_min, a_max=speed_max) - t_max = (len(speed_seq) - 1) * velocity_timestep - dt = hparams['hop_size'] / hparams['audio_sample_rate'] - speed_interp = np.interp(np.arange(0, t_max, dt), velocity_timestep * np.arange(len(speed_seq)), speed_seq) - speed = torch.FloatTensor(speed_interp)[None, :].to(self.device) - else: - speed = None - - batch = { - 'item_name': item_names, - 'text': text, - 'ph': ph, - 'txt_tokens': txt_tokens, - 'txt_lengths': txt_lengths, - 'spk_mixes': spk_mixes, - 'pitch_midi': pitch_midi, - 'midi_dur': midi_dur, - 'is_slur': is_slur, - 'mel2ph': mel2ph, - 'f0': f0, - 'key_shift': key_shift, - 'speed': speed - } - return batch - - def forward_model(self, inp, return_mel=False): - sample = self.input_to_batch(inp) - 
txt_tokens = sample['txt_tokens'] # [B, T_t] - with torch.no_grad(): - if hparams['use_spk_id']: - spk_mixes = sample['spk_mixes'] - spk_mix_embed = [self.model.fs2.spk_embed(spk_id)[:, None, :] * spk_mixes[spk_id] for spk_id in - spk_mixes] - spk_mix_embed = torch.stack(spk_mix_embed, dim=1).sum(dim=1) - else: - spk_mix_embed = None - mel2ph = sample['mel2ph'] - f0 = sample['f0'] - nframes = mel2ph.size(1) - delta_l = nframes - f0.size(1) - if delta_l > 0: - f0 = torch.cat((f0,torch.FloatTensor([[x[-1]] * delta_l for x in f0]).to(f0.device)),1) - f0 = f0[:, :nframes] - mel = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], - key_shift=sample['key_shift'], speed=sample['speed'], - spk_mix_embed=spk_mix_embed, infer=True) - if return_mel: - return mel.cpu(), f0.cpu() - wav_out = self.run_vocoder(mel, f0=f0) - wav_out = wav_out.cpu().numpy() - return wav_out[0] diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/vocoder/val_nsf_hifigan.py index 79a50b11f..d649ff535 100644 --- a/inference/vocoder/val_nsf_hifigan.py +++ b/inference/vocoder/val_nsf_hifigan.py @@ -13,7 +13,7 @@ from utils.hparams import set_hparams, hparams sys.argv = [ - 'inference/svs/ds_cascade.py', + 'inference/svs/ds_acoustic.py', '--config', 'configs/acoustic.yaml', ] diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 7c8ded9ff..69daa2b33 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -63,7 +63,7 @@ def __init__(self, vocab_size): if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) - def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=False, **kwargs): + def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, spk_embed_id=None, **kwargs): B, T = txt_tokens.shape dur = mel2ph_to_dur(mel2ph, T).float() dur_embed = self.dur_embed(dur[:, :, None]) @@ -73,12 +73,6 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]]) condition = torch.gather(encoder_out, 1, mel2ph_) - nframes = mel2ph.size(1) - delta_l = nframes - f0.size(1) - if delta_l > 0: - f0 = torch.cat((f0,torch.FloatTensor([[x[-1]] * delta_l for x in f0]).to(f0.device)),1) - f0 = f0[:, :nframes] - if self.f0_embed_type == 'discrete': pitch = f0_to_coarse(f0) pitch_embed = self.pitch_embed(pitch) @@ -88,38 +82,17 @@ def forward(self, txt_tokens, mel2ph=None, f0=None, spk_embed_id=None, infer=Fal condition += pitch_embed if hparams.get('use_key_shift_embed', False): - key_shift = kwargs['key_shift'] - if len(key_shift.shape) == 1: - key_shift_embed = self.key_shift_embed(key_shift[:, None, None]) - else: - delta_l = nframes - key_shift.size(1) - if delta_l > 0: - key_shift = torch.cat((key_shift, torch.FloatTensor([[x[-1]] * delta_l for x in key_shift]).to(key_shift.device)), 1) - key_shift = key_shift[:, :nframes] - key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) + key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) condition += key_shift_embed if hparams.get('use_speed_embed', False): - speed = kwargs['speed'] - if len(speed.shape) == 1: - speed_embed = self.speed_embed(speed[:, None, None]) - else: - delta_l = nframes - speed.size(1) - if delta_l > 0: - speed = torch.cat((speed, torch.FloatTensor([[x[-1]] * delta_l for x in speed]).to(speed.device)), 1) - speed = speed[:, :nframes] - speed_embed = self.speed_embed(speed[:, :, None]) + 
speed_embed = self.speed_embed(speed[:, :, None]) condition += speed_embed if hparams['use_spk_id']: spk_mix_embed = kwargs.get('spk_mix_embed') if spk_mix_embed is not None: spk_embed = spk_mix_embed - mix_frames = spk_embed.size(1) - if mix_frames > nframes: - spk_embed = spk_embed[:, :nframes, :] - elif mix_frames > 1: - spk_embed = torch.cat((spk_embed, spk_embed[:, -1:, :].repeat(1, nframes - mix_frames, 1)), dim=1) else: spk_embed = self.spk_embed(spk_embed_id)[:, None, :] condition += spk_embed diff --git a/scripts/infer.py b/scripts/infer.py index 38859d2d9..9766613a3 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -12,7 +12,7 @@ import numpy as np import torch -from inference.ds_cascade import DiffSingerCascadeInfer +from inference.ds_acoustic import DiffSingerAcousticInfer from utils.audio import save_wav from utils.hparams import set_hparams, hparams from utils.infer_utils import cross_fade, trans_key @@ -59,9 +59,6 @@ '--infer' ] -if args.speedup > 0: - sys.argv += ['--hparams', f'pndm_speedup={args.speedup}'] - with open(args.proj, 'r', encoding='utf-8') as f: params = json.load(f) if not isinstance(params, list): @@ -78,6 +75,9 @@ assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' set_hparams(print_hparams=False) +if args.speedup > 0: + hparams['pndm_speedup'] = args.speedup + sample_rate = hparams['audio_sample_rate'] # Check for vocoder path @@ -87,26 +87,18 @@ infer_ins = None if len(params) > 0: - infer_ins = DiffSingerCascadeInfer(hparams, load_vocoder=not args.mel, ckpt_steps=args.ckpt) + infer_ins = DiffSingerAcousticInfer(load_vocoder=not args.mel, ckpt_steps=args.ckpt) spk_mix = parse_commandline_spk_mix(args.spk) if hparams['use_spk_id'] and args.spk is not None else None for param in params: if args.gender is not None and hparams.get('use_key_shift_embed'): param['gender'] = args.gender + if spk_mix is not None: param['spk_mix'] = spk_mix - elif 'spk_mix' in param: - param_spk_mix = param['spk_mix'] - for spk_name in param_spk_mix: - values = str(param_spk_mix[spk_name]).split() - if len(values) == 1: - param_spk_mix[spk_name] = float(values[0]) - else: - param_spk_mix[spk_name] = [float(v) for v in values] - if not hparams.get('use_midi', False): - merge_slurs(param) + merge_slurs(param) def infer_once(path: str, save_mel=False): diff --git a/training/acoustic_task.py b/training/acoustic_task.py index a8324d309..3c35f6767 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -46,30 +46,26 @@ def __getitem__(self, index): def collater(self, samples): if len(samples) == 0: return {} - txt_lengths = torch.LongTensor([s['tokens'].numel() for s in samples]) tokens = utils.collate_nd([s['tokens'] for s in samples], 0) f0 = utils.collate_nd([s['f0'] for s in samples], 0.0) - mel_lengths = torch.LongTensor([s['mel'].shape[0] for s in samples]) mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) - mels = utils.collate_nd([s['mel'] for s in samples], 0.0) + mel = utils.collate_nd([s['mel'] for s in samples], 0.0) batch = { - 'nsamples': len(samples), - 'txt_lengths': txt_lengths, + 'size': len(samples), 'tokens': tokens, - 'mel_lengths': mel_lengths, 'mel2ph': mel2ph, - 'mels': mels, + 'mel': mel, 'f0': f0, } - if hparams['use_energy_embed']: - batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) + # if hparams['use_energy_embed']: + # batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) if hparams.get('use_key_shift_embed', False): - batch['key_shift'] = torch.FloatTensor([s['key_shift'] 
for s in samples]) + batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] if hparams.get('use_speed_embed', False): - batch['speed'] = torch.FloatTensor([s['speed'] for s in samples]) - if hparams['use_spk_embed']: - spk_embed = torch.stack([s['spk_embed'] for s in samples]) - batch['spk_embed'] = spk_embed + batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] + # if hparams['use_spk_embed']: + # spk_embed = torch.stack([s['spk_embed'] for s in samples]) + # batch['spk_embed'] = spk_embed if hparams['use_spk_id']: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids @@ -141,7 +137,7 @@ def run_model(self, sample, return_output=False, infer=False): 2. calculate loss for dur_predictor, pitch_predictor, energy_predictor """ txt_tokens = sample['tokens'] # [B, T_t] - target = sample['mels'] # [B, T_s, 80] + target = sample['mel'] # [B, T_s, 80] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] key_shift = sample.get('key_shift') @@ -178,14 +174,14 @@ def validation_step(self, sample, batch_idx): total_loss = sum(losses.values()) outputs = { 'losses': losses, - 'total_loss': total_loss, 'nsamples': sample['nsamples'] + 'total_loss': total_loss, 'size': sample['size'] } outputs = utils.tensors_to_scalars(outputs) if batch_idx < hparams['num_valid_plots']: _, mel_pred = self.run_model(sample, return_output=True, infer=True) - self.plot_wav(batch_idx, sample['mels'], mel_pred, f0=sample['f0']) - self.plot_mel(batch_idx, sample['mels'], mel_pred, name=f'diffmel_{batch_idx}') + self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) + self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') return outputs @@ -194,7 +190,7 @@ def _validation_end(self, outputs): 'total_loss': utils.AvgrageMeter(), } for output in outputs: - n = output['nsamples'] + n = output['size'] for k, v in output['losses'].items(): if k not in all_losses_meter: all_losses_meter[k] = utils.AvgrageMeter() @@ -259,7 +255,7 @@ def after_infer(self, predictions): text = prediction.get('text').replace(':', '%3A')[:80] # remove paddings - mel_gt = prediction['mels'] + mel_gt = prediction['mel'] mel_gt_mask = np.abs(mel_gt).sum(-1) > 0 mel_gt = mel_gt[mel_gt_mask] mel2ph_gt = prediction.get('mel2ph') diff --git a/utils/infer_utils.py b/utils/infer_utils.py index 24f599e29..3a05e4daf 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -57,5 +57,20 @@ def trans_key(raw_data, key): else: warning_tag = True if warning_tag: - print("Warning:parts of f0_seq do not exist, please freeze the pitch line in the editor.\r\n") + print("Warning: parts of f0_seq do not exist, please freeze the pitch line in the editor.\r\n") return raw_data + + +def resample_align_curve(points: np.ndarray, original_timestep: float, target_timestep: float, align_length: int): + t_max = (len(points) - 1) * original_timestep + curve_interp = np.interp( + np.arange(0, t_max, target_timestep), + original_timestep * np.arange(len(points)), + points + ).astype(points.dtype) + delta_l = align_length - len(curve_interp) + if delta_l < 0: + curve_interp = curve_interp[:align_length] + elif delta_l > 0: + curve_interp = np.concatenate((curve_interp, np.full(delta_l, fill_value=curve_interp[-1])), axis=0) + return curve_interp diff --git a/utils/spk_utils.py b/utils/spk_utils.py index c35bd73c5..3b93dc08d 100644 --- a/utils/spk_utils.py +++ b/utils/spk_utils.py @@ -32,4 +32,3 @@ def parse_commandline_spk_mix(mix: str) -> dict: for name in 
proportion_map: proportion_map[name] /= sum_all_proportions return proportion_map - From debf4321cec76b03e2c49fcff477fb1d8c90cae9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 18 Mar 2023 01:06:40 +0800 Subject: [PATCH 074/475] Remove unused config --- configs/acoustic.yaml | 1 - configs/base.yaml | 1 - 2 files changed, 2 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 13dbb9de3..a951dc05b 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -54,7 +54,6 @@ save_f0: true use_spk_embed: false use_spk_id: false -use_pitch_embed: true f0_embed_type: continuous use_key_shift_embed: false use_speed_embed: false diff --git a/configs/base.yaml b/configs/base.yaml index 89e7c87dc..80a3b1966 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -47,7 +47,6 @@ num_heads: 2 enc_ffn_kernel_size: 9 ffn_act: gelu ffn_padding: 'SAME' -use_pitch_embed: true use_energy_embed: false use_spk_id: false use_spk_embed: false From 8889a9e9fd52b18841ee08cfa1f897d177f87488 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 18 Mar 2023 01:11:57 +0800 Subject: [PATCH 075/475] Change license to Apache 2.0 --- LICENSE | 222 ++++++++++++++++++++++++++++++++++++++++++++++++------ README.md | 2 + 2 files changed, 203 insertions(+), 21 deletions(-) diff --git a/LICENSE b/LICENSE index 563c90c56..58b657fc9 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,201 @@ -MIT License - -Copyright (c) 2021 Jinglin Liu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 Team OpenVPI + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 4e3e71178..c68603483 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,9 @@ OpenUTAU, an open-sourced SVS editor with modern GUI, has unofficial temporary s See the original [paper](https://arxiv.org/abs/2105.02446), the [docs/](docs/) folder and [releases](https://github.com/openvpi/DiffSinger/releases) for more details. 
+## License +This forked DiffSinger is licensed under the [Apache 2.0 License](LICENSE). --- From 99ce3536991fdb426adc30c2863a35217bb4387f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 18 Mar 2023 17:15:49 +0800 Subject: [PATCH 076/475] Support CSV format transcriptions --- configs/acoustic.yaml | 1 - preparation/acoustic_preparation.ipynb | 35 +++++++++++++++++++--- preprocessing/acoustic_binarizer.py | 40 +++++++++++++++++++------- 3 files changed, 60 insertions(+), 16 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index a951dc05b..fefb0913a 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -25,7 +25,6 @@ fmax: 16000 min_level_db: -120 binarization_args: - label_format: grid shuffle: true num_workers: 0 interp_uv: true diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index d348bf4f6..df686bacd 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -66,6 +66,7 @@ "outputs": [], "source": [ "import glob\n", + "import json\n", "import os\n", "import shutil\n", "import sys\n", @@ -875,7 +876,9 @@ "\n", "Please provide a unique name for your dataset, usually the name of the singer/speaker (whether real or virtual). For example, `opencpop` will be a good name for the dataset. You can also add tags to represent dataset version, model capacity or improvements. For example, `v2` represents the version, `large` represents the capacity, and `fix_br` means you fixed breaths since you trained your last model.\n", "\n", - "Please edit the following cell before you run it. Remember only using letters, numbers and underlines (`_`).\n" + "Please edit the following cell before you run it. Remember to use only letters, numbers and underscores (`_`).\n", + "\n", + "Format of the data labels: `csv` is a more comprehensive format newly introduced to this pipeline. If you want to generate the old label format, where attributes are separated by `|`, change this to `grid` in the following cell.\n" ] }, { @@ -895,8 +898,12 @@ "dataset_name = '???' 
# Required\n", "dataset_tags = '' # Optional\n", "\n", + "# Label format (will only use 'csv' in the future)\n", + "label_format = 'csv'\n", + "\n", "########################################\n", "\n", + "import csv\n", "import random\n", "import re\n", "\n", @@ -910,6 +917,8 @@ " full_name += f'_{dataset_tags}'\n", "assert not os.path.exists(f'../data/{full_name}'), f'The name \\'{full_name}\\' already exists in your \\'data\\' folder!'\n", "\n", + "assert label_format in ['csv', 'grid'], 'Label format must be \\'csv\\' or \\'grid\\'.'\n", + "\n", "print('Dataset name:', dataset_name)\n", "if dataset_tags != '':\n", " print('Tags:', dataset_tags)\n", @@ -946,9 +955,27 @@ " ph_seq = ' '.join(ph_seq)\n", " ph_dur = ' '.join([str(round(d, 6)) for d in ph_dur])\n", " soundfile.write(os.path.join(formatted_path, f'{name}.wav'), y, samplerate)\n", - " transcriptions.append(f'{name}|啊|{ph_seq}|rest|0|{ph_dur}|0')\n", - "with open(f'../data/{full_name}/raw/transcriptions.txt', 'w', encoding='utf8') as f:\n", - " print('\\n'.join(transcriptions), file=f)\n", + " if label_format == 'grid':\n", + " transcriptions.append(f'{name}|啊|{ph_seq}|rest|0|{ph_dur}|0')\n", + " else:\n", + " transcriptions.append({'name': name, 'ph_seq': ph_seq, 'ph_dur': ph_dur})\n", + "\n", + "with open(f'../data/{full_name}/raw/meta.json', 'w', encoding='utf8') as f:\n", + " meta = {\n", + " 'category': 'acoustic',\n", + " 'format': label_format\n", + " }\n", + " json.dump(meta, f, indent=4)\n", + "\n", + "if label_format == 'grid':\n", + " with open(f'../data/{full_name}/raw/transcriptions.txt', 'w', encoding='utf8') as f:\n", + " print('\\n'.join(transcriptions), file=f)\n", + "else:\n", + " with open(f'../data/{full_name}/raw/transcriptions.csv', 'w', encoding='utf8', newline='') as f:\n", + " writer = csv.DictWriter(f, fieldnames=['name', 'ph_seq', 'ph_dur'])\n", + " writer.writeheader()\n", + " writer.writerows(transcriptions)\n", + "\n", "print(f'All wavs and transcriptions saved at \\'data/{full_name}/raw/\\'.')\n" ] }, diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 00d8617f1..9266b5e6d 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -6,6 +6,7 @@ ph_seq: phoneme sequence ph_dur: phoneme durations """ +import csv import json import os import os.path @@ -36,19 +37,36 @@ def __init__(self): self.lr = LengthRegulator() def load_meta_data(self, raw_data_dir, ds_id): - utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), encoding='utf-8').readlines() + meta_info = { + 'category': 'acoustic', + 'format': 'grid' + } + meta_file = os.path.join(raw_data_dir, 'meta.json') + if os.path.exists(meta_file): + meta_info.update(json.load(open(meta_file, 'r', encoding='utf8'))) + category = meta_info['category'] + assert category == 'acoustic', \ + f'Dataset in \'{raw_data_dir}\' is of category \'{category}\', ' \ + f'but a dataset of category \'acoustic\' is required.' 
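For a concrete picture of the two label formats this loader distinguishes, here is a minimal sketch of reading a raw dataset the same way, with a few made-up placeholder values: the directory `data/demo/raw`, the item name and the phoneme labels are assumptions for illustration only, while the file names (`meta.json`, `transcriptions.csv`) and the fields (`category`, `format`, `name`, `ph_seq`, `ph_dur`) come from this patch.

import csv
import json
import os

# Defaults used when meta.json is absent; an existing meta.json,
# e.g. {"category": "acoustic", "format": "csv"}, overrides them.
meta = {'category': 'acoustic', 'format': 'grid'}
meta_file = 'data/demo/raw/meta.json'  # placeholder path
if os.path.exists(meta_file):
    meta.update(json.load(open(meta_file, 'r', encoding='utf8')))
assert meta['category'] == 'acoustic'

# transcriptions.csv carries one utterance per row, e.g.:
#   name,ph_seq,ph_dur
#   demo_001,SP a SP,0.24 0.51 0.37
if meta['format'] == 'csv':
    for row in csv.DictReader(open('data/demo/raw/transcriptions.csv', 'r', encoding='utf-8')):
        ph_seq = row['ph_seq'].split()
        ph_dur = [float(x) for x in row['ph_dur'].split()]
        assert len(ph_seq) == len(ph_dur)

The `grid` branch below keeps reading the old `|`-separated `transcriptions.txt` unchanged.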
+ meta_data_dict = {} - for utterance_label in utterance_labels: - if self.binarization_args.get('label_format', 'grid') == 'json': - label_dict = json.loads(utterance_label) - item_name = label_dict['item_name'] + if meta_info['format'] == 'csv': + for utterance_label in csv.DictReader( + open(os.path.join(raw_data_dir, 'transcriptions.csv'), 'r', encoding='utf-8') + ): + item_name = utterance_label['name'] temp_dict = { 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', - 'ph_seq': label_dict['ph_seq'].split(), - 'ph_dur': [float(x) for x in label_dict['ph_dur'].split()], + 'ph_seq': utterance_label['ph_seq'].split(), + 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], 'spk_id': ds_id } - else: + assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \ + f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' + meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + else: + utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), 'r', encoding='utf-8').readlines() + for utterance_label in utterance_labels: song_info = utterance_label.split('|') item_name = song_info[0] temp_dict = { @@ -57,9 +75,9 @@ def load_meta_data(self, raw_data_dir, ds_id): 'ph_dur': [float(x) for x in song_info[5].split()], 'spk_id': ds_id } - assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \ - f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' - meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \ + f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' + meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict self.items.update(meta_data_dict) def process(self): From 504e7d71d1f7940c32cbe0a99025c6156ebbced7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 03:30:02 +0800 Subject: [PATCH 077/475] Try to fix multiprocess binarize on Linux --- scripts/binarize.py | 2 ++ utils/multiprocess_utils.py | 9 +++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/binarize.py b/scripts/binarize.py index 63295839f..a7bd8d7f1 100644 --- a/scripts/binarize.py +++ b/scripts/binarize.py @@ -14,4 +14,6 @@ def binarize(): if __name__ == '__main__': + from multiprocessing import set_start_method + set_start_method("spawn", force=True) binarize() diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 00d389ae4..65e81e876 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -1,8 +1,8 @@ import re import traceback -from multiprocessing import Queue, Process, current_process +from torch.multiprocessing import Process, Manager, current_process -is_main_process = not bool(re.match(r'Process-\d+', current_process().name)) +is_main_process = not bool(re.match(r'(Process)|(SyncManager)-\d+', current_process().name)) def main_process_print(self, *args, sep=' ', end='\n', file=None): @@ -23,6 +23,7 @@ def chunked_worker(worker_id, map_func, args, results_queue=None, init_ctx_func= traceback.print_exc() results_queue.put((job_idx, None)) + def chunked_multiprocess_run(map_func, args, num_workers, ordered=True, init_ctx_func=None, q_max_size=1000): args = zip(range(len(args)), args) args = list(args) @@ -30,9 +31,9 @@ def chunked_multiprocess_run(map_func, args, num_workers, ordered=True, init_ctx results_queues = [] if ordered: for i in range(num_workers): - results_queues.append(Queue(maxsize=q_max_size // num_workers)) + results_queues.append(Manager().Queue(maxsize=q_max_size // num_workers)) else: - results_queue = 
Queue(maxsize=q_max_size) + results_queue = Manager().Queue(maxsize=q_max_size) for i in range(num_workers): results_queues.append(results_queue) workers = [] From ab2dd6943f18d02722df0e7f4036032e6288b86e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 03:51:39 +0800 Subject: [PATCH 078/475] Call set_start_method() only on non-Windows platforms --- scripts/binarize.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/binarize.py b/scripts/binarize.py index a7bd8d7f1..d17f3e25d 100644 --- a/scripts/binarize.py +++ b/scripts/binarize.py @@ -1,4 +1,6 @@ import importlib +import platform + from utils.hparams import set_hparams, hparams set_hparams() @@ -14,6 +16,7 @@ def binarize(): if __name__ == '__main__': - from multiprocessing import set_start_method - set_start_method("spawn", force=True) + if platform.system().lower() != 'windows': + from multiprocessing import set_start_method + set_start_method('spawn', force=True) binarize() From e28819e6e398e29acb20171f556d071c75480a8a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 14:16:50 +0800 Subject: [PATCH 079/475] Use mp.Pool instead of mp.Process --- scripts/binarize.py | 4 --- utils/multiprocess_utils.py | 65 +++++++++++++++---------------------- 2 files changed, 26 insertions(+), 43 deletions(-) diff --git a/scripts/binarize.py b/scripts/binarize.py index d17f3e25d..44256012f 100644 --- a/scripts/binarize.py +++ b/scripts/binarize.py @@ -1,5 +1,4 @@ import importlib -import platform from utils.hparams import set_hparams, hparams @@ -16,7 +15,4 @@ def binarize(): if __name__ == '__main__': - if platform.system().lower() != 'windows': - from multiprocessing import set_start_method - set_start_method('spawn', force=True) binarize() diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 65e81e876..09f29713f 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -1,8 +1,9 @@ +import platform import re import traceback -from torch.multiprocessing import Process, Manager, current_process +from torch.multiprocessing import Pool, Manager, current_process, get_context -is_main_process = not bool(re.match(r'(Process)|(SyncManager)-\d+', current_process().name)) +is_main_process = not bool(re.match(r'(Process)|(SyncManager)|(.*PoolWorker)-\d+', current_process().name)) def main_process_print(self, *args, sep=' ', end='\n', file=None): @@ -10,44 +11,30 @@ def main_process_print(self, *args, sep=' ', end='\n', file=None): print(self, *args, sep=sep, end=end, file=file) -def chunked_worker(worker_id, map_func, args, results_queue=None, init_ctx_func=None): - ctx = init_ctx_func(worker_id) if init_ctx_func is not None else None - for job_idx, arg in args: - try: - if ctx is not None: - res = map_func(*arg, ctx=ctx) - else: - res = map_func(*arg) - results_queue.put((job_idx, res)) - except: - traceback.print_exc() - results_queue.put((job_idx, None)) +def run_and_collect_once(args): + map_func = args[0] + map_func_args = args[1] + result_queue = args[2] + # noinspection PyBroadException + try: + res = map_func(*map_func_args) + result_queue.put(res) + except: + traceback.print_exc() + result_queue.put(None) -def chunked_multiprocess_run(map_func, args, num_workers, ordered=True, init_ctx_func=None, q_max_size=1000): - args = zip(range(len(args)), args) - args = list(args) +def chunked_multiprocess_run(map_func, args, num_workers, q_max_size=100): n_jobs = len(args) - results_queues = [] - if ordered: - for i in range(num_workers): - 
results_queues.append(Manager().Queue(maxsize=q_max_size // num_workers)) + queue = Manager().Queue(maxsize=q_max_size) + if platform.system().lower() != 'windows': + pool_creation_func = get_context('spawn').Pool else: - results_queue = Manager().Queue(maxsize=q_max_size) - for i in range(num_workers): - results_queues.append(results_queue) - workers = [] - for i in range(num_workers): - args_worker = args[i::num_workers] - p = Process(target=chunked_worker, args=( - i, map_func, args_worker, results_queues[i], init_ctx_func), daemon=True) - workers.append(p) - p.start() - for n_finished in range(n_jobs): - results_queue = results_queues[n_finished % num_workers] - job_idx, res = results_queue.get() - assert job_idx == n_finished or not ordered, (job_idx, n_finished) - yield res - for w in workers: - w.join() - w.close() + pool_creation_func = Pool + with pool_creation_func(processes=num_workers) as pool: + pool.map_async(run_and_collect_once, [(map_func, i_args, queue) for i_args in args]) + for n_finished in range(n_jobs): + res = queue.get() + yield res + pool.close() + pool.join() From 238378415231f4d0765fe0b0960775af06e9ab28 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 19:13:11 +0800 Subject: [PATCH 080/475] Change back to mp.Process --- utils/multiprocess_utils.py | 58 ++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 24 deletions(-) diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 09f29713f..5a8693804 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -1,7 +1,8 @@ import platform import re import traceback -from torch.multiprocessing import Pool, Manager, current_process, get_context + +from torch.multiprocessing import Manager, Process, current_process, get_context is_main_process = not bool(re.match(r'(Process)|(SyncManager)|(.*PoolWorker)-\d+', current_process().name)) @@ -11,30 +12,39 @@ def main_process_print(self, *args, sep=' ', end='\n', file=None): print(self, *args, sep=sep, end=end, file=file) -def run_and_collect_once(args): - map_func = args[0] - map_func_args = args[1] - result_queue = args[2] - # noinspection PyBroadException - try: - res = map_func(*map_func_args) - result_queue.put(res) - except: - traceback.print_exc() - result_queue.put(None) +def chunked_worker_run(map_func, args, results_queue=None): + for a in args: + # noinspection PyBroadException + try: + res = map_func(*a) + results_queue.put(res) + except: + traceback.print_exc() + results_queue.put(None) + +def chunked_multiprocess_run(map_func, args, num_workers, q_max_size=1000): + num_jobs = len(args) + if num_jobs < num_workers: + num_workers = num_jobs -def chunked_multiprocess_run(map_func, args, num_workers, q_max_size=100): - n_jobs = len(args) - queue = Manager().Queue(maxsize=q_max_size) + queues = [Manager().Queue(maxsize=q_max_size // num_workers) for _ in range(num_workers)] if platform.system().lower() != 'windows': - pool_creation_func = get_context('spawn').Pool + process_creation_func = get_context('spawn').Process else: - pool_creation_func = Pool - with pool_creation_func(processes=num_workers) as pool: - pool.map_async(run_and_collect_once, [(map_func, i_args, queue) for i_args in args]) - for n_finished in range(n_jobs): - res = queue.get() - yield res - pool.close() - pool.join() + process_creation_func = Process + + workers = [] + for i in range(num_workers): + worker = process_creation_func( + target=chunked_worker_run, args=(map_func, args[i::num_workers], queues[i]), daemon=True + ) + 
workers.append(worker) + worker.start() + + for i in range(num_jobs): + yield queues[i % num_workers].get() + + for worker in workers: + worker.join() + worker.close() From 6435242318110fdc0aa7583e7bc48d98c7f7d3c4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 19:57:58 +0800 Subject: [PATCH 081/475] Update process name regex --- utils/multiprocess_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 5a8693804..41eb13142 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -4,7 +4,7 @@ from torch.multiprocessing import Manager, Process, current_process, get_context -is_main_process = not bool(re.match(r'(Process)|(SyncManager)|(.*PoolWorker)-\d+', current_process().name)) +is_main_process = not bool(re.match(r'((.*Process)|(SyncManager)|(.*PoolWorker))-\d+', current_process().name)) def main_process_print(self, *args, sep=' ', end='\n', file=None): From 2665e067341fe8809d11891a77dc02b45f6ca638 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 21:21:49 +0800 Subject: [PATCH 082/475] Remove useless file --- utils/text_norm.py | 790 --------------------------------------------- 1 file changed, 790 deletions(-) delete mode 100644 utils/text_norm.py diff --git a/utils/text_norm.py b/utils/text_norm.py deleted file mode 100644 index d0973cebc..000000000 --- a/utils/text_norm.py +++ /dev/null @@ -1,790 +0,0 @@ -# coding=utf-8 -# Authors: -# 2019.5 Zhiyang Zhou (https://github.com/Joee1995/chn_text_norm.git) -# 2019.9 Jiayu DU -# -# requirements: -# - python 3.X -# notes: python 2.X WILL fail or produce misleading results - -import sys, os, argparse, codecs, string, re - -# ================================================================================ # -# basic constant -# ================================================================================ # -CHINESE_DIGIS = u'零一二三四五六七八九' -BIG_CHINESE_DIGIS_SIMPLIFIED = u'零壹贰叁肆伍陆柒捌玖' -BIG_CHINESE_DIGIS_TRADITIONAL = u'零壹貳參肆伍陸柒捌玖' -SMALLER_BIG_CHINESE_UNITS_SIMPLIFIED = u'十百千万' -SMALLER_BIG_CHINESE_UNITS_TRADITIONAL = u'拾佰仟萬' -LARGER_CHINESE_NUMERING_UNITS_SIMPLIFIED = u'亿兆京垓秭穰沟涧正载' -LARGER_CHINESE_NUMERING_UNITS_TRADITIONAL = u'億兆京垓秭穰溝澗正載' -SMALLER_CHINESE_NUMERING_UNITS_SIMPLIFIED = u'十百千万' -SMALLER_CHINESE_NUMERING_UNITS_TRADITIONAL = u'拾佰仟萬' - -ZERO_ALT = u'〇' -ONE_ALT = u'幺' -TWO_ALTS = [u'两', u'兩'] - -POSITIVE = [u'正', u'正'] -NEGATIVE = [u'负', u'負'] -POINT = [u'点', u'點'] -# PLUS = [u'加', u'加'] -# SIL = [u'杠', u'槓'] - -# 中文数字系统类型 -NUMBERING_TYPES = ['low', 'mid', 'high'] - -CURRENCY_NAMES = '(人民币|美元|日元|英镑|欧元|马克|法郎|加拿大元|澳元|港币|先令|芬兰马克|爱尔兰镑|' \ - '里拉|荷兰盾|埃斯库多|比塞塔|印尼盾|林吉特|新西兰元|比索|卢布|新加坡元|韩元|泰铢)' -CURRENCY_UNITS = '((亿|千万|百万|万|千|百)|(亿|千万|百万|万|千|百|)元|(亿|千万|百万|万|千|百|)块|角|毛|分)' -COM_QUANTIFIERS = '(匹|张|座|回|场|尾|条|个|首|阙|阵|网|炮|顶|丘|棵|只|支|袭|辆|挑|担|颗|壳|窠|曲|墙|群|腔|' \ - '砣|座|客|贯|扎|捆|刀|令|打|手|罗|坡|山|岭|江|溪|钟|队|单|双|对|出|口|头|脚|板|跳|枝|件|贴|' \ - '针|线|管|名|位|身|堂|课|本|页|家|户|层|丝|毫|厘|分|钱|两|斤|担|铢|石|钧|锱|忽|(千|毫|微)克|' \ - '毫|厘|分|寸|尺|丈|里|寻|常|铺|程|(千|分|厘|毫|微)米|撮|勺|合|升|斗|石|盘|碗|碟|叠|桶|笼|盆|' \ - '盒|杯|钟|斛|锅|簋|篮|盘|桶|罐|瓶|壶|卮|盏|箩|箱|煲|啖|袋|钵|年|月|日|季|刻|时|周|天|秒|分|旬|' \ - '纪|岁|世|更|夜|春|夏|秋|冬|代|伏|辈|丸|泡|粒|颗|幢|堆|条|根|支|道|面|片|张|颗|块)' - -# punctuation information are based on Zhon project (https://github.com/tsroten/zhon.git) -CHINESE_PUNC_STOP = '!?。。' -CHINESE_PUNC_NON_STOP = '"#$%&'()*+,-/:;<=>@[\]^_`{|}~⦅⦆「」、、〃《》「」『』【】〔〕〖〗〘〙〚〛〜〝〞〟〰〾〿–—‘’‛“”„‟…‧﹏' -CHINESE_PUNC_LIST = CHINESE_PUNC_STOP + CHINESE_PUNC_NON_STOP - - -# 
================================================================================ # -# basic class -# ================================================================================ # -class ChineseChar(object): - """ - 中文字符 - 每个字符对应简体和繁体, - e.g. 简体 = '负', 繁体 = '負' - 转换时可转换为简体或繁体 - """ - - def __init__(self, simplified, traditional): - self.simplified = simplified - self.traditional = traditional - # self.__repr__ = self.__str__ - - def __str__(self): - return self.simplified or self.traditional or None - - def __repr__(self): - return self.__str__() - - -class ChineseNumberUnit(ChineseChar): - """ - 中文数字/数位字符 - 每个字符除繁简体外还有一个额外的大写字符 - e.g. '陆' 和 '陸' - """ - - def __init__(self, power, simplified, traditional, big_s, big_t): - super(ChineseNumberUnit, self).__init__(simplified, traditional) - self.power = power - self.big_s = big_s - self.big_t = big_t - - def __str__(self): - return '10^{}'.format(self.power) - - @classmethod - def create(cls, index, value, numbering_type=NUMBERING_TYPES[1], small_unit=False): - - if small_unit: - return ChineseNumberUnit(power=index + 1, - simplified=value[0], traditional=value[1], big_s=value[1], big_t=value[1]) - elif numbering_type == NUMBERING_TYPES[0]: - return ChineseNumberUnit(power=index + 8, - simplified=value[0], traditional=value[1], big_s=value[0], big_t=value[1]) - elif numbering_type == NUMBERING_TYPES[1]: - return ChineseNumberUnit(power=(index + 2) * 4, - simplified=value[0], traditional=value[1], big_s=value[0], big_t=value[1]) - elif numbering_type == NUMBERING_TYPES[2]: - return ChineseNumberUnit(power=pow(2, index + 3), - simplified=value[0], traditional=value[1], big_s=value[0], big_t=value[1]) - else: - raise ValueError( - 'Counting type should be in {0} ({1} provided).'.format(NUMBERING_TYPES, numbering_type)) - - -class ChineseNumberDigit(ChineseChar): - """ - 中文数字字符 - """ - - def __init__(self, value, simplified, traditional, big_s, big_t, alt_s=None, alt_t=None): - super(ChineseNumberDigit, self).__init__(simplified, traditional) - self.value = value - self.big_s = big_s - self.big_t = big_t - self.alt_s = alt_s - self.alt_t = alt_t - - def __str__(self): - return str(self.value) - - @classmethod - def create(cls, i, v): - return ChineseNumberDigit(i, v[0], v[1], v[2], v[3]) - - -class ChineseMath(ChineseChar): - """ - 中文数位字符 - """ - - def __init__(self, simplified, traditional, symbol, expression=None): - super(ChineseMath, self).__init__(simplified, traditional) - self.symbol = symbol - self.expression = expression - self.big_s = simplified - self.big_t = traditional - - -CC, CNU, CND, CM = ChineseChar, ChineseNumberUnit, ChineseNumberDigit, ChineseMath - - -class NumberSystem(object): - """ - 中文数字系统 - """ - pass - - -class MathSymbol(object): - """ - 用于中文数字系统的数学符号 (繁/简体), e.g. 
- positive = ['正', '正'] - negative = ['负', '負'] - point = ['点', '點'] - """ - - def __init__(self, positive, negative, point): - self.positive = positive - self.negative = negative - self.point = point - - def __iter__(self): - for v in self.__dict__.values(): - yield v - - -# class OtherSymbol(object): -# """ -# 其他符号 -# """ -# -# def __init__(self, sil): -# self.sil = sil -# -# def __iter__(self): -# for v in self.__dict__.values(): -# yield v - - -# ================================================================================ # -# basic utils -# ================================================================================ # -def create_system(numbering_type=NUMBERING_TYPES[1]): - """ - 根据数字系统类型返回创建相应的数字系统,默认为 mid - NUMBERING_TYPES = ['low', 'mid', 'high']: 中文数字系统类型 - low: '兆' = '亿' * '十' = $10^{9}$, '京' = '兆' * '十', etc. - mid: '兆' = '亿' * '万' = $10^{12}$, '京' = '兆' * '万', etc. - high: '兆' = '亿' * '亿' = $10^{16}$, '京' = '兆' * '兆', etc. - 返回对应的数字系统 - """ - - # chinese number units of '亿' and larger - all_larger_units = zip( - LARGER_CHINESE_NUMERING_UNITS_SIMPLIFIED, LARGER_CHINESE_NUMERING_UNITS_TRADITIONAL) - larger_units = [CNU.create(i, v, numbering_type, False) - for i, v in enumerate(all_larger_units)] - # chinese number units of '十, 百, 千, 万' - all_smaller_units = zip( - SMALLER_CHINESE_NUMERING_UNITS_SIMPLIFIED, SMALLER_CHINESE_NUMERING_UNITS_TRADITIONAL) - smaller_units = [CNU.create(i, v, small_unit=True) - for i, v in enumerate(all_smaller_units)] - # digis - chinese_digis = zip(CHINESE_DIGIS, CHINESE_DIGIS, - BIG_CHINESE_DIGIS_SIMPLIFIED, BIG_CHINESE_DIGIS_TRADITIONAL) - digits = [CND.create(i, v) for i, v in enumerate(chinese_digis)] - digits[0].alt_s, digits[0].alt_t = ZERO_ALT, ZERO_ALT - digits[1].alt_s, digits[1].alt_t = ONE_ALT, ONE_ALT - digits[2].alt_s, digits[2].alt_t = TWO_ALTS[0], TWO_ALTS[1] - - # symbols - positive_cn = CM(POSITIVE[0], POSITIVE[1], '+', lambda x: x) - negative_cn = CM(NEGATIVE[0], NEGATIVE[1], '-', lambda x: -x) - point_cn = CM(POINT[0], POINT[1], '.', lambda x, - y: float(str(x) + '.' 
+ str(y))) - # sil_cn = CM(SIL[0], SIL[1], '-', lambda x, y: float(str(x) + '-' + str(y))) - system = NumberSystem() - system.units = smaller_units + larger_units - system.digits = digits - system.math = MathSymbol(positive_cn, negative_cn, point_cn) - # system.symbols = OtherSymbol(sil_cn) - return system - - -def chn2num(chinese_string, numbering_type=NUMBERING_TYPES[1]): - def get_symbol(char, system): - for u in system.units: - if char in [u.traditional, u.simplified, u.big_s, u.big_t]: - return u - for d in system.digits: - if char in [d.traditional, d.simplified, d.big_s, d.big_t, d.alt_s, d.alt_t]: - return d - for m in system.math: - if char in [m.traditional, m.simplified]: - return m - - def string2symbols(chinese_string, system): - int_string, dec_string = chinese_string, '' - for p in [system.math.point.simplified, system.math.point.traditional]: - if p in chinese_string: - int_string, dec_string = chinese_string.split(p) - break - return [get_symbol(c, system) for c in int_string], \ - [get_symbol(c, system) for c in dec_string] - - def correct_symbols(integer_symbols, system): - """ - 一百八 to 一百八十 - 一亿一千三百万 to 一亿 一千万 三百万 - """ - - if integer_symbols and isinstance(integer_symbols[0], CNU): - if integer_symbols[0].power == 1: - integer_symbols = [system.digits[1]] + integer_symbols - - if len(integer_symbols) > 1: - if isinstance(integer_symbols[-1], CND) and isinstance(integer_symbols[-2], CNU): - integer_symbols.append( - CNU(integer_symbols[-2].power - 1, None, None, None, None)) - - result = [] - unit_count = 0 - for s in integer_symbols: - if isinstance(s, CND): - result.append(s) - unit_count = 0 - elif isinstance(s, CNU): - current_unit = CNU(s.power, None, None, None, None) - unit_count += 1 - - if unit_count == 1: - result.append(current_unit) - elif unit_count > 1: - for i in range(len(result)): - if isinstance(result[-i - 1], CNU) and result[-i - 1].power < current_unit.power: - result[-i - 1] = CNU(result[-i - 1].power + - current_unit.power, None, None, None, None) - return result - - def compute_value(integer_symbols): - """ - Compute the value. - When current unit is larger than previous unit, current unit * all previous units will be used as all previous units. - e.g. 
'两千万' = 2000 * 10000 not 2000 + 10000 - """ - value = [0] - last_power = 0 - for s in integer_symbols: - if isinstance(s, CND): - value[-1] = s.value - elif isinstance(s, CNU): - value[-1] *= pow(10, s.power) - if s.power > last_power: - value[:-1] = list(map(lambda v: v * - pow(10, s.power), value[:-1])) - last_power = s.power - value.append(0) - return sum(value) - - system = create_system(numbering_type) - int_part, dec_part = string2symbols(chinese_string, system) - int_part = correct_symbols(int_part, system) - int_str = str(compute_value(int_part)) - dec_str = ''.join([str(d.value) for d in dec_part]) - if dec_part: - return '{0}.{1}'.format(int_str, dec_str) - else: - return int_str - - -def num2chn(number_string, numbering_type=NUMBERING_TYPES[1], big=False, - traditional=False, alt_zero=False, alt_one=False, alt_two=True, - use_zeros=True, use_units=True): - def get_value(value_string, use_zeros=True): - - striped_string = value_string.lstrip('0') - - # record nothing if all zeros - if not striped_string: - return [] - - # record one digits - elif len(striped_string) == 1: - if use_zeros and len(value_string) != len(striped_string): - return [system.digits[0], system.digits[int(striped_string)]] - else: - return [system.digits[int(striped_string)]] - - # recursively record multiple digits - else: - result_unit = next(u for u in reversed( - system.units) if u.power < len(striped_string)) - result_string = value_string[:-result_unit.power] - return get_value(result_string) + [result_unit] + get_value(striped_string[-result_unit.power:]) - - system = create_system(numbering_type) - - int_dec = number_string.split('.') - if len(int_dec) == 1: - int_string = int_dec[0] - dec_string = "" - elif len(int_dec) == 2: - int_string = int_dec[0] - dec_string = int_dec[1] - else: - raise ValueError( - "invalid input num string with more than one dot: {}".format(number_string)) - - if use_units and len(int_string) > 1: - result_symbols = get_value(int_string) - else: - result_symbols = [system.digits[int(c)] for c in int_string] - dec_symbols = [system.digits[int(c)] for c in dec_string] - if dec_string: - result_symbols += [system.math.point] + dec_symbols - - if alt_two: - liang = CND(2, system.digits[2].alt_s, system.digits[2].alt_t, - system.digits[2].big_s, system.digits[2].big_t) - for i, v in enumerate(result_symbols): - if isinstance(v, CND) and v.value == 2: - next_symbol = result_symbols[i + - 1] if i < len(result_symbols) - 1 else None - previous_symbol = result_symbols[i - 1] if i > 0 else None - if isinstance(next_symbol, CNU) and isinstance(previous_symbol, (CNU, type(None))): - if next_symbol.power != 1 and ((previous_symbol is None) or (previous_symbol.power != 1)): - result_symbols[i] = liang - - # if big is True, '两' will not be used and `alt_two` has no impact on output - if big: - attr_name = 'big_' - if traditional: - attr_name += 't' - else: - attr_name += 's' - else: - if traditional: - attr_name = 'traditional' - else: - attr_name = 'simplified' - - result = ''.join([getattr(s, attr_name) for s in result_symbols]) - - # if not use_zeros: - # result = result.strip(getattr(system.digits[0], attr_name)) - - if alt_zero: - result = result.replace( - getattr(system.digits[0], attr_name), system.digits[0].alt_s) - - if alt_one: - result = result.replace( - getattr(system.digits[1], attr_name), system.digits[1].alt_s) - - for i, p in enumerate(POINT): - if result.startswith(p): - return CHINESE_DIGIS[0] + result - - # ^10, 11, .., 19 - if len(result) >= 2 and result[1] in 
[SMALLER_CHINESE_NUMERING_UNITS_SIMPLIFIED[0], - SMALLER_CHINESE_NUMERING_UNITS_TRADITIONAL[0]] and \ - result[0] in [CHINESE_DIGIS[1], BIG_CHINESE_DIGIS_SIMPLIFIED[1], BIG_CHINESE_DIGIS_TRADITIONAL[1]]: - result = result[1:] - - return result - - -# ================================================================================ # -# different types of rewriters -# ================================================================================ # -class Cardinal: - """ - CARDINAL类 - """ - - def __init__(self, cardinal=None, chntext=None): - self.cardinal = cardinal - self.chntext = chntext - - def chntext2cardinal(self): - return chn2num(self.chntext) - - def cardinal2chntext(self): - return num2chn(self.cardinal) - - -class Digit: - """ - DIGIT类 - """ - - def __init__(self, digit=None, chntext=None): - self.digit = digit - self.chntext = chntext - - # def chntext2digit(self): - # return chn2num(self.chntext) - - def digit2chntext(self): - return num2chn(self.digit, alt_two=False, use_units=False) - - -class TelePhone: - """ - TELEPHONE类 - """ - - def __init__(self, telephone=None, raw_chntext=None, chntext=None): - self.telephone = telephone - self.raw_chntext = raw_chntext - self.chntext = chntext - - # def chntext2telephone(self): - # sil_parts = self.raw_chntext.split('') - # self.telephone = '-'.join([ - # str(chn2num(p)) for p in sil_parts - # ]) - # return self.telephone - - def telephone2chntext(self, fixed=False): - - if fixed: - sil_parts = self.telephone.split('-') - self.raw_chntext = ''.join([ - num2chn(part, alt_two=False, use_units=False) for part in sil_parts - ]) - self.chntext = self.raw_chntext.replace('', '') - else: - sp_parts = self.telephone.strip('+').split() - self.raw_chntext = ''.join([ - num2chn(part, alt_two=False, use_units=False) for part in sp_parts - ]) - self.chntext = self.raw_chntext.replace('', '') - return self.chntext - - -class Fraction: - """ - FRACTION类 - """ - - def __init__(self, fraction=None, chntext=None): - self.fraction = fraction - self.chntext = chntext - - def chntext2fraction(self): - denominator, numerator = self.chntext.split('分之') - return chn2num(numerator) + '/' + chn2num(denominator) - - def fraction2chntext(self): - numerator, denominator = self.fraction.split('/') - return num2chn(denominator) + '分之' + num2chn(numerator) - - -class Date: - """ - DATE类 - """ - - def __init__(self, date=None, chntext=None): - self.date = date - self.chntext = chntext - - # def chntext2date(self): - # chntext = self.chntext - # try: - # year, other = chntext.strip().split('年', maxsplit=1) - # year = Digit(chntext=year).digit2chntext() + '年' - # except ValueError: - # other = chntext - # year = '' - # if other: - # try: - # month, day = other.strip().split('月', maxsplit=1) - # month = Cardinal(chntext=month).chntext2cardinal() + '月' - # except ValueError: - # day = chntext - # month = '' - # if day: - # day = Cardinal(chntext=day[:-1]).chntext2cardinal() + day[-1] - # else: - # month = '' - # day = '' - # date = year + month + day - # self.date = date - # return self.date - - def date2chntext(self): - date = self.date - try: - year, other = date.strip().split('年', 1) - year = Digit(digit=year).digit2chntext() + '年' - except ValueError: - other = date - year = '' - if other: - try: - month, day = other.strip().split('月', 1) - month = Cardinal(cardinal=month).cardinal2chntext() + '月' - except ValueError: - day = date - month = '' - if day: - day = Cardinal(cardinal=day[:-1]).cardinal2chntext() + day[-1] - else: - month = '' - day = '' - chntext = year 
+ month + day - self.chntext = chntext - return self.chntext - - -class Money: - """ - MONEY类 - """ - - def __init__(self, money=None, chntext=None): - self.money = money - self.chntext = chntext - - # def chntext2money(self): - # return self.money - - def money2chntext(self): - money = self.money - pattern = re.compile(r'(\d+(\.\d+)?)') - matchers = pattern.findall(money) - if matchers: - for matcher in matchers: - money = money.replace(matcher[0], Cardinal(cardinal=matcher[0]).cardinal2chntext()) - self.chntext = money - return self.chntext - - -class Percentage: - """ - PERCENTAGE类 - """ - - def __init__(self, percentage=None, chntext=None): - self.percentage = percentage - self.chntext = chntext - - def chntext2percentage(self): - return chn2num(self.chntext.strip().strip('百分之')) + '%' - - def percentage2chntext(self): - return '百分之' + num2chn(self.percentage.strip().strip('%')) - - -# ================================================================================ # -# NSW Normalizer -# ================================================================================ # -class NSWNormalizer: - def __init__(self, raw_text): - self.raw_text = '^' + raw_text + '$' - self.norm_text = '' - - def _particular(self): - text = self.norm_text - pattern = re.compile(r"(([a-zA-Z]+)二([a-zA-Z]+))") - matchers = pattern.findall(text) - if matchers: - # print('particular') - for matcher in matchers: - text = text.replace(matcher[0], matcher[1] + '2' + matcher[2], 1) - self.norm_text = text - return self.norm_text - - def normalize(self, remove_punc=True): - text = self.raw_text - - # 规范化日期 - pattern = re.compile(r"\D+((([089]\d|(19|20)\d{2})年)?(\d{1,2}月(\d{1,2}[日号])?)?)") - matchers = pattern.findall(text) - if matchers: - # print('date') - for matcher in matchers: - text = text.replace(matcher[0], Date(date=matcher[0]).date2chntext(), 1) - - # 规范化金钱 - pattern = re.compile(r"\D+((\d+(\.\d+)?)[多余几]?" + CURRENCY_UNITS + r"(\d" + CURRENCY_UNITS + r"?)?)") - matchers = pattern.findall(text) - if matchers: - # print('money') - for matcher in matchers: - text = text.replace(matcher[0], Money(money=matcher[0]).money2chntext(), 1) - - # 规范化固话/手机号码 - # 手机 - # http://www.jihaoba.com/news/show/13680 - # 移动:139、138、137、136、135、134、159、158、157、150、151、152、188、187、182、183、184、178、198 - # 联通:130、131、132、156、155、186、185、176 - # 电信:133、153、189、180、181、177 - pattern = re.compile(r"\D((\+?86 ?)?1([38]\d|5[0-35-9]|7[678]|9[89])\d{8})\D") - matchers = pattern.findall(text) - if matchers: - # print('telephone') - for matcher in matchers: - text = text.replace(matcher[0], TelePhone(telephone=matcher[0]).telephone2chntext(), 1) - # 固话 - pattern = re.compile(r"\D((0(10|2[1-3]|[3-9]\d{2})-?)?[1-9]\d{6,7})\D") - matchers = pattern.findall(text) - if matchers: - # print('fixed telephone') - for matcher in matchers: - text = text.replace(matcher[0], TelePhone(telephone=matcher[0]).telephone2chntext(fixed=True), 1) - - # 规范化分数 - pattern = re.compile(r"(\d+/\d+)") - matchers = pattern.findall(text) - if matchers: - # print('fraction') - for matcher in matchers: - text = text.replace(matcher, Fraction(fraction=matcher).fraction2chntext(), 1) - - # 规范化百分数 - text = text.replace('%', '%') - pattern = re.compile(r"(\d+(\.\d+)?%)") - matchers = pattern.findall(text) - if matchers: - # print('percentage') - for matcher in matchers: - text = text.replace(matcher[0], Percentage(percentage=matcher[0]).percentage2chntext(), 1) - - # 规范化纯数+量词 - pattern = re.compile(r"(\d+(\.\d+)?)[多余几]?" 
+ COM_QUANTIFIERS) - matchers = pattern.findall(text) - if matchers: - # print('cardinal+quantifier') - for matcher in matchers: - text = text.replace(matcher[0], Cardinal(cardinal=matcher[0]).cardinal2chntext(), 1) - - # 规范化数字编号 - pattern = re.compile(r"(\d{4,32})") - matchers = pattern.findall(text) - if matchers: - # print('digit') - for matcher in matchers: - text = text.replace(matcher, Digit(digit=matcher).digit2chntext(), 1) - - # 规范化纯数 - pattern = re.compile(r"(\d+(\.\d+)?)") - matchers = pattern.findall(text) - if matchers: - # print('cardinal') - for matcher in matchers: - text = text.replace(matcher[0], Cardinal(cardinal=matcher[0]).cardinal2chntext(), 1) - - self.norm_text = text - self._particular() - - text = self.norm_text.lstrip('^').rstrip('$') - if remove_punc: - # Punctuations removal - old_chars = CHINESE_PUNC_LIST + string.punctuation # includes all CN and EN punctuations - new_chars = ' ' * len(old_chars) - del_chars = '' - text = text.translate(str.maketrans(old_chars, new_chars, del_chars)) - return text - - -def nsw_test_case(raw_text): - print('I:' + raw_text) - print('O:' + NSWNormalizer(raw_text).normalize()) - print('') - - -def nsw_test(): - nsw_test_case('固话:0595-23865596或23880880。') - nsw_test_case('固话:0595-23865596或23880880。') - nsw_test_case('手机:+86 19859213959或15659451527。') - nsw_test_case('分数:32477/76391。') - nsw_test_case('百分数:80.03%。') - nsw_test_case('编号:31520181154418。') - nsw_test_case('纯数:2983.07克或12345.60米。') - nsw_test_case('日期:1999年2月20日或09年3月15号。') - nsw_test_case('金钱:12块5,34.5元,20.1万') - nsw_test_case('特殊:O2O或B2C。') - nsw_test_case('3456万吨') - nsw_test_case('2938个') - nsw_test_case('938') - nsw_test_case('今天吃了115个小笼包231个馒头') - nsw_test_case('有62%的概率') - - -if __name__ == '__main__': - # nsw_test() - - p = argparse.ArgumentParser() - p.add_argument('ifile', help='input filename, assume utf-8 encoding') - p.add_argument('ofile', help='output filename') - p.add_argument('--to_upper', action='store_true', help='convert to upper case') - p.add_argument('--to_lower', action='store_true', help='convert to lower case') - p.add_argument('--has_key', action='store_true', help="input text has Kaldi's key as first field.") - p.add_argument('--log_interval', type=int, default=10000, help='log interval in number of processed lines') - args = p.parse_args() - - ifile = codecs.open(args.ifile, 'r', 'utf8') - ofile = codecs.open(args.ofile, 'w+', 'utf8') - - n = 0 - for l in ifile: - key = '' - text = '' - if args.has_key: - cols = l.split(maxsplit=1) - key = cols[0] - if len(cols) == 2: - text = cols[1] - else: - text = '' - else: - text = l - - # cases - if args.to_upper and args.to_lower: - sys.stderr.write('text norm: to_upper OR to_lower?') - exit(1) - if args.to_upper: - text = text.upper() - if args.to_lower: - text = text.lower() - - # NSW(Non-Standard-Word) normalization - text = NSWNormalizer(text).normalize() - - # - if args.has_key: - ofile.write(key + '\t' + text) - else: - ofile.write(text) - - n += 1 - if n % args.log_interval == 0: - sys.stderr.write("text norm: {} lines done.\n".format(n)) - - sys.stderr.write("text norm: {} lines done in total.\n".format(n)) - - ifile.close() - ofile.close() From bcb9346a2f5d8a484afa67a209a520d263eca5a9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 21:22:33 +0800 Subject: [PATCH 083/475] Optimize audio I/O --- modules/nsf_hifigan/nvSTFT.py | 37 ++++------------------------------- requirements.txt | 1 - utils/audio.py | 2 -- 3 files changed, 4 insertions(+), 36 deletions(-) diff --git 
a/modules/nsf_hifigan/nvSTFT.py b/modules/nsf_hifigan/nvSTFT.py index b2d509308..d4b416e88 100644 --- a/modules/nsf_hifigan/nvSTFT.py +++ b/modules/nsf_hifigan/nvSTFT.py @@ -5,40 +5,11 @@ import numpy as np import librosa from librosa.filters import mel as librosa_mel_fn -import soundfile as sf import torch.nn.functional as F -def load_wav_to_torch(full_path, target_sr=None, return_empty_on_exception=False): - sampling_rate = None - try: - data, sampling_rate = sf.read(full_path, always_2d=True)# than soundfile. - except Exception as ex: - print(f"'{full_path}' failed to load.\nException:") - print(ex) - if return_empty_on_exception: - return [], sampling_rate or target_sr or 48000 - else: - raise Exception(ex) - - if len(data.shape) > 1: - data = data[:, 0] - assert len(data) > 2# check duration of audio file is > 2 samples (because otherwise the slice operation was on the wrong dimension) - - if np.issubdtype(data.dtype, np.integer): # if audio data is type int - max_mag = -np.iinfo(data.dtype).min # maximum magnitude = min possible value of intXX - else: # if audio data is type fp32 - max_mag = max(np.amax(data), -np.amin(data)) - max_mag = (2**31)+1 if max_mag > (2**15) else ((2**15)+1 if max_mag > 1.01 else 1.0) # data should be either 16-bit INT, 32-bit INT or [-1 to 1] float32 - - data = torch.FloatTensor(data.astype(np.float32))/max_mag - - if (torch.isinf(data) | torch.isnan(data)).any() and return_empty_on_exception:# resample will crash with inf/NaN inputs. return_empty_on_exception will return empty arr instead of except - return [], sampling_rate or target_sr or 48000 - if target_sr is not None and sampling_rate != target_sr: - data = torch.from_numpy(librosa.core.resample(data.numpy(), orig_sr=sampling_rate, target_sr=target_sr)) - sampling_rate = target_sr - - return data, sampling_rate +def load_wav_to_torch(full_path, target_sr=None): + data, sr = librosa.load(full_path, sr=target_sr, mono=True) + return torch.from_numpy(data), sr def dynamic_range_compression(x, C=1, clip_val=1e-5): return np.log(np.clip(x, a_min=clip_val, a_max=None) * C) @@ -101,7 +72,7 @@ def get_mel(self, y, keyshift=0, speed=1, center=False): spec = torch.stft(y, n_fft_new, hop_length=hop_length_new, win_length=win_size_new, window=self.hann_window[keyshift_key], center=center, pad_mode='reflect', normalized=False, onesided=True, return_complex=False) # print(111,spec) - spec = torch.sqrt(spec.pow(2).sum(-1)+(1e-9)) + spec = torch.sqrt(spec.pow(2).sum(-1) + 1e-9) if keyshift != 0: size = n_fft // 2 + 1 resize = spec.size(1) diff --git a/requirements.txt b/requirements.txt index 8672c82fb..56c1439a9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,5 @@ resemblyzer==0.1.1.dev0 tensorboardX==2.5.1 -SoundFile==0.11.0 h5py==3.7.0 future==0.18.2 g2p-en==2.1.0 diff --git a/utils/audio.py b/utils/audio.py index aba7ab926..460358bb3 100644 --- a/utils/audio.py +++ b/utils/audio.py @@ -1,11 +1,9 @@ -import subprocess import matplotlib matplotlib.use('Agg') import librosa import librosa.filters import numpy as np -from scipy import signal from scipy.io import wavfile From 171671f8bcf60231f86c8f97d7420bc34b2b442e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 21:36:31 +0800 Subject: [PATCH 084/475] Fix TypeError aligning mel2ph --- utils/binarizer_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index 9db41b6d5..36e5a1a05 100644 --- a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -49,7 +49,7 
@@ def get_mel2ph_torch(lr, durs, length, hparams, device='cpu'): mel2ph = lr(ph_dur[None])[0] num_frames = mel2ph.shape[0] if num_frames < length: - mel2ph = torch.cat((mel2ph, torch.full(length - num_frames, mel2ph[-1])), dim=0) + mel2ph = torch.cat((mel2ph, torch.full((length - num_frames,), mel2ph[-1])), dim=0) elif num_frames > length: mel2ph = mel2ph[:length] return mel2ph From 59e3678074dc6374e78f47273504db376b12c8dd Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 22:03:59 +0800 Subject: [PATCH 085/475] Add missing device --- utils/binarizer_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index 36e5a1a05..fa1c9843d 100644 --- a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -49,7 +49,7 @@ def get_mel2ph_torch(lr, durs, length, hparams, device='cpu'): mel2ph = lr(ph_dur[None])[0] num_frames = mel2ph.shape[0] if num_frames < length: - mel2ph = torch.cat((mel2ph, torch.full((length - num_frames,), mel2ph[-1])), dim=0) + mel2ph = torch.cat((mel2ph, torch.full((length - num_frames,), fill_value=mel2ph[-1], device=device)), dim=0) elif num_frames > length: mel2ph = mel2ph[:length] return mel2ph From 83505844fba2db039899bcb79b2d42512eb12938 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 22:44:12 +0800 Subject: [PATCH 086/475] Fix epoch logging --- basics/base_task.py | 6 +----- configs/base.yaml | 2 -- training/acoustic_task.py | 3 +-- 3 files changed, 2 insertions(+), 9 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 223b86116..33923bb7e 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -174,7 +174,7 @@ def configure_optimizers(self): return [optm] def build_dataloader(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, endless=False, batch_by_size=True): + required_batch_size_multiple=-1, batch_by_size=True): devices_cnt = torch.cuda.device_count() if devices_cnt == 0: devices_cnt = 1 @@ -202,12 +202,8 @@ def shuffle_batches(batches): if shuffle: batches = shuffle_batches(list(batch_sampler)) - if endless: - batches = [b for _ in range(1000) for b in shuffle_batches(list(batch_sampler))] else: batches = batch_sampler - if endless: - batches = [b for _ in range(1000) for b in batches] num_workers = dataset.num_workers if self.trainer.use_ddp: num_replicas = dist.get_world_size() diff --git a/configs/base.yaml b/configs/base.yaml index 80a3b1966..709a32630 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -21,8 +21,6 @@ binarization_args: shuffle: false num_workers: 0 -endless_ds: true - audio_num_mel_bins: 80 audio_sample_rate: 22050 hop_size: 256 # For 22050Hz, 275 ~= 12.5 ms (0.0125 * sample_rate) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 3c35f6767..fd453930a 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -110,8 +110,7 @@ def build_scheduler(self, optimizer): @data_loader def train_dataloader(self): train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) - return self.build_dataloader(train_dataset, True, self.max_tokens, self.max_sentences, - endless=hparams['endless_ds']) + return self.build_dataloader(train_dataset, True, self.max_tokens, self.max_sentences) @data_loader def val_dataloader(self): From 27c11a2cffb7b25328f1c7c4002db46a2cd734f4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 19 Mar 2023 23:02:27 +0800 Subject: [PATCH 087/475] Simplify logging on epoch end --- basics/base_task.py | 
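Patches 084 and 085 above both touch the same torch.full call: the size argument must be a tuple (a bare int raises TypeError), and the padding tensor should be created on the target device. A standalone sketch of the corrected padding behavior (shapes and values are made up):

import torch

mel2ph = torch.tensor([1, 1, 2, 3, 3], dtype=torch.long)
length = 8
num_frames = mel2ph.shape[0]
if num_frames < length:
    # torch.full(length - num_frames, ...) raises TypeError; the size
    # must be a tuple such as (length - num_frames,) (patch 084), and
    # device= keeps the pad on the same device as mel2ph (patch 085).
    pad = torch.full((length - num_frames,), fill_value=mel2ph[-1],
                     dtype=mel2ph.dtype, device=mel2ph.device)
    mel2ph = torch.cat((mel2ph, pad), dim=0)
elif num_frames > length:
    mel2ph = mel2ph[:length]
print(mel2ph)  # -> tensor([1, 1, 2, 3, 3, 3, 3, 3])
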
9 +++++---- utils/pl_utils.py | 1 + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 33923bb7e..679f7cd65 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -130,10 +130,11 @@ def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) def on_epoch_end(self): - loss_outputs = {k: round(v.avg, 4) for k, v in self.training_losses_meter.items()} - print(f"\n==============\n " - f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. {loss_outputs}" - f"\n==============\n") + pass + # loss_outputs = {k: round(v.avg, 4) for k, v in self.training_losses_meter.items()} + # print(f"\n==============\n " + # f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. {loss_outputs}" + # f"\n==============\n") def validation_step(self, sample, batch_idx): """ diff --git a/utils/pl_utils.py b/utils/pl_utils.py index b69e76390..be54db67d 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1364,6 +1364,7 @@ def train(self): # RUN TNG EPOCH # ----------------- self.run_training_epoch() + print() # start a new line for the next epoch # update LR schedulers if self.lr_schedulers is not None: From f99442149b089a047a551e23731d64f8818493c3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 20 Mar 2023 10:45:44 +0800 Subject: [PATCH 088/475] Fix TensorBoard logging interval --- utils/pl_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/pl_utils.py b/utils/pl_utils.py index be54db67d..34ba72f46 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1415,13 +1415,13 @@ def run_training_epoch(self): self.run_evaluation(test=self.testing) # when logs should be saved - should_save_log = (batch_idx + 1) % self.log_save_interval == 0 or early_stop_epoch + should_save_log = (self.total_batch_idx + 1) % self.log_save_interval == 0 or early_stop_epoch if should_save_log: if self.proc_rank == 0 and self.logger is not None: self.logger.save() # when metrics should be logged - should_log_metrics = batch_idx % self.row_log_interval == 0 or early_stop_epoch + should_log_metrics = self.total_batch_idx % self.row_log_interval == 0 or early_stop_epoch if should_log_metrics: # logs user requested information to logger self.log_metrics(batch_step_metrics, grad_norm_dic) From 1f8ab826da5358f59a692639a0857a68e69d392e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 20 Mar 2023 12:35:03 +0800 Subject: [PATCH 089/475] Change logging epoch+1 to epoch --- utils/pl_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 34ba72f46..b2a67a285 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1354,7 +1354,7 @@ def train(self): # reset progress bar # .reset() doesn't work on disabled progress bar so we should check - desc = f'Epoch {epoch + 1}' if not self.is_iterable_train_dataloader else '' + desc = f'Epoch {epoch}' if not self.is_iterable_train_dataloader else '' self.main_progress_bar.set_description(desc) # changing gradient according accumulation_scheduler From 36febc2cf0cd2e8f78b41a0c750ac353643889e6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 20 Mar 2023 13:58:35 +0800 Subject: [PATCH 090/475] Adjust dictionary copying and loading logic --- basics/base_task.py | 36 ++++++++++++++-------- configs/acoustic.yaml | 2 +- docs/README-SVS-custom-phonemes.md | 2 +- preprocessing/acoustic_binarizer.py | 6 +++- utils/hparams.py | 19 +----------- 
utils/phoneme_utils.py | 46 +++++++++++++++++++++++------ 6 files changed, 69 insertions(+), 42 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 679f7cd65..efb80f7bc 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -1,5 +1,6 @@ -from datetime import datetime +import pathlib import shutil +from datetime import datetime import matplotlib @@ -13,6 +14,7 @@ import numpy as np import torch.distributed as dist from pytorch_lightning.loggers import TensorBoardLogger +from utils.phoneme_utils import locate_dictionary from utils.pl_utils import LatestModelCheckpoint, BaseTrainer, data_loader, DDP from torch import nn import torch.utils.data @@ -236,7 +238,7 @@ def start(cls): random.seed(hparams['seed']) np.random.seed(hparams['seed']) task = cls() - work_dir = hparams['work_dir'] + work_dir = pathlib.Path(hparams['work_dir']) trainer = BaseTrainer( checkpoint_callback=LatestModelCheckpoint( filepath=work_dir, @@ -250,7 +252,7 @@ def start(cls): period=1 if hparams['save_ckpt'] else 100000 ), logger=TensorBoardLogger( - save_dir=work_dir, + save_dir=str(work_dir), name='lightning_logs', version='lastest' ), @@ -266,17 +268,27 @@ def start(cls): copy_code = True # backup code every time if copy_code: t = datetime.now().strftime('%Y%m%d%H%M%S') - code_dir = f'{work_dir}/codes/{t}' - os.makedirs(code_dir, exist_ok=True) + code_dir = work_dir.joinpath('codes').joinpath(str(t)) + code_dir.mkdir(exist_ok=True, parents=True) for c in hparams['save_codes']: shutil.copytree(c, code_dir, dirs_exist_ok=True) - print(f"| Copied codes to {code_dir}.") - # Copy spk_map.json to work dir - spk_map = os.path.join(work_dir, 'spk_map.json') - spk_map_orig = os.path.join(hparams['binary_data_dir'], 'spk_map.json') - if not os.path.exists(spk_map) and os.path.exists(spk_map_orig): - shutil.copy(spk_map_orig, spk_map) - print(f"| Copied spk map to {spk_map}.") + print(f'| Copied codes to {code_dir}.') + # Copy spk_map.json and dictionary.txt to work dir + binary_dir = pathlib.Path(hparams['binary_data_dir']) + spk_map = work_dir.joinpath('spk_map.json') + spk_map_src = binary_dir.joinpath('spk_map.json') + if not spk_map.exists() and spk_map_src.exists(): + shutil.copy(spk_map_src, spk_map) + print(f'| Copied spk map to {spk_map}.') + dictionary = work_dir.joinpath('dictionary.txt') + dict_src = binary_dir.joinpath('dictionary.txt') + if not dictionary.exists(): + if dict_src.exists(): + shutil.copy(dict_src, dictionary) + else: + shutil.copy(locate_dictionary(), dictionary) + print(f'| Copied dictionary to {dictionary}.') + trainer.checkpoint_callback.task = task trainer.fit(task) else: diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index fefb0913a..f7487feca 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -43,7 +43,7 @@ binarization_args: raw_data_dir: 'data/opencpop/raw' binary_data_dir: 'data/opencpop/binary' binarizer_cls: preprocessing.acoustic_binarizer.AcousticBinarizer -g2p_dictionary: dictionaries/opencpop-extension.txt +dictionary: dictionaries/opencpop-extension.txt spec_min: [-5] spec_max: [0] keep_bins: 128 diff --git a/docs/README-SVS-custom-phonemes.md b/docs/README-SVS-custom-phonemes.md index f526688bc..c7809bf5f 100644 --- a/docs/README-SVS-custom-phonemes.md +++ b/docs/README-SVS-custom-phonemes.md @@ -58,7 +58,7 @@ It is reasonable for the dictionary to design a unique symbol for each pronuncia To preprocess your data with a customized dictionary, you should specify the dictionary path in the config file: ```yaml -g2p_dictionary: 
path/to/your/dictionary.txt
+dictionary: path/to/your/dictionary.txt
 ```
 If not specified, this hyperparameter will fall back to `dictionaries/opencpop.txt` for backward compatibility.
diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py
index 9266b5e6d..3c31a1d4d 100644
--- a/preprocessing/acoustic_binarizer.py
+++ b/preprocessing/acoustic_binarizer.py
@@ -11,6 +11,7 @@ import os
 import os.path
 import random
+import shutil
 from copy import deepcopy
 
 import matplotlib.pyplot as plt
@@ -25,7 +26,7 @@ from utils.hparams import hparams
 from utils.indexed_datasets import IndexedDatasetBuilder
 from utils.multiprocess_utils import chunked_multiprocess_run
-from utils.phoneme_utils import build_phoneme_list
+from utils.phoneme_utils import build_phoneme_list, locate_dictionary
 
 os.environ["OMP_NUM_THREADS"] = "1"
 ACOUSTIC_ITEM_ATTRIBUTES = ['spk_id', 'mel', 'tokens', 'mel2ph', 'f0', 'key_shift', 'speed']
@@ -143,6 +144,9 @@ def check_coverage(self):
                 f' (+) {sorted(unrecognizable_phones)}\n'
                 f' (-) {sorted(missing_phones)}')
 
+        # Copy dictionary to binary data dir
+        shutil.copy(locate_dictionary(), os.path.join(hparams['binary_data_dir'], 'dictionary.txt'))
+
     def process_data_split(self, prefix, num_workers=0, apply_augmentation=False):
         data_dir = hparams['binary_data_dir']
         args = []
diff --git a/utils/hparams.py b/utils/hparams.py
index e3bd34521..bd82807c4 100644
--- a/utils/hparams.py
+++ b/utils/hparams.py
@@ -1,6 +1,5 @@
 import argparse
 import os
-import shutil
 
 import yaml
 
@@ -102,30 +101,14 @@ def load_config(config_fn):  # deep first
             else:
                 hparams_[k] = type(hparams_[k])(v)
 
-    dictionary = hparams_.get('g2p_dictionary')
-    if dictionary is None:
-        dictionary = 'dictionaries/opencpop.txt'
-    ckpt_dictionary = os.path.join(hparams_['work_dir'], os.path.basename(dictionary))
     if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer:
         os.makedirs(hparams_['work_dir'], exist_ok=True)
         if is_main_process:
-            # Only the main process will save the config file and dictionary
+            # Only the main process will save the config file
             with open(ckpt_config_path, 'w', encoding='utf-8') as f:
                 hparams_non_recursive = hparams_.copy()
                 hparams_non_recursive['base_config'] = []
                 yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8')
-            if hparams_.get('reset_phone_dict') or not os.path.exists(ckpt_dictionary):
-                shutil.copy(dictionary, ckpt_dictionary)
-
-    ckpt_dictionary_exists = os.path.exists(ckpt_dictionary)
-    if not os.path.exists(dictionary) and not ckpt_dictionary_exists:
-        raise FileNotFoundError(f'G2P dictionary not found in either of the following paths:\n'
-                                f' - \'{dictionary}\'\n'
-                                f' - \'{ckpt_dictionary}\'')
-    hparams_['original_g2p_dictionary'] = dictionary
-    if ckpt_dictionary_exists:
-        dictionary = ckpt_dictionary
-    hparams_['g2p_dictionary'] = dictionary
 
     hparams_['infer'] = args.infer
     hparams_['debug'] = args.debug
diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py
index bec97a2cf..d9f2edd0d 100644
--- a/utils/phoneme_utils.py
+++ b/utils/phoneme_utils.py
@@ -1,26 +1,54 @@
+import pathlib
+
+from utils.hparams import hparams
 from utils.multiprocess_utils import main_process_print
 
 _initialized = False
 _ALL_CONSONANTS_SET = set()
 _ALL_VOWELS_SET = set()
-_g2p_dictionary = {
+_dictionary = {
     'AP': ['AP'],
     'SP': ['SP']
 }
 _phoneme_list: list
 
 
+def locate_dictionary():
+    """
+    Search and locate the dictionary file.
+    Order:
+    1. hparams['dictionary']
+    2. hparams['g2p_dictionary']
+    3. 'dictionary.txt' in hparams['work_dir']
+    4. file with same name as hparams['g2p_dictionary'] in hparams['work_dir']
+    :return: pathlib.Path of the dictionary file
+    """
+    assert 'dictionary' in hparams or 'g2p_dictionary' in hparams, \
+        'Please specify a dictionary file in your config.'
+    config_dict_path = pathlib.Path(hparams.get('dictionary', hparams.get('g2p_dictionary')))
+    if config_dict_path.exists():
+        return config_dict_path
+    work_dir = pathlib.Path(hparams['work_dir'])
+    ckpt_dict_path = work_dir.joinpath(config_dict_path.name)
+    if ckpt_dict_path.exists():
+        return ckpt_dict_path
+    ckpt_dict_path = work_dir.joinpath('dictionary.txt')
+    if ckpt_dict_path.exists():
+        return ckpt_dict_path
+    raise FileNotFoundError('Unable to locate the dictionary file. '
+                            'Please specify the right dictionary in your config.')
+
+
 def _build_dict_and_list():
-    from utils.hparams import hparams
-    global _g2p_dictionary, _phoneme_list
+    global _dictionary, _phoneme_list
     _set = set()
-    with open(hparams['g2p_dictionary'], 'r', encoding='utf8') as _df:
+    with open(locate_dictionary(), 'r', encoding='utf8') as _df:
         _lines = _df.readlines()
     for _line in _lines:
         _pinyin, _ph_str = _line.strip().split('\t')
-        _g2p_dictionary[_pinyin] = _ph_str.split()
-    for _list in _g2p_dictionary.values():
+        _dictionary[_pinyin] = _ph_str.split()
+    for _list in _dictionary.values():
         [_set.add(ph) for ph in _list]
     _phoneme_list = sorted(list(_set))
     main_process_print('| load phoneme set:', _phoneme_list)
@@ -28,7 +56,7 @@ def _build_dict_and_list():
 
 def _initialize_consonants_and_vowels():
     # Currently we only support two-part consonant-vowel phoneme systems.
-    for _ph_list in _g2p_dictionary.values():
+    for _ph_list in _dictionary.values():
         _ph_count = len(_ph_list)
         if _ph_count == 0 or _ph_list[0] in ['AP', 'SP']:
             continue
@@ -57,9 +85,9 @@ def get_all_vowels():
     return sorted(_ALL_VOWELS_SET)
 
 
-def build_g2p_dictionary() -> dict:
+def build_dictionary() -> dict:
     _initialize()
-    return _g2p_dictionary
+    return _dictionary
 
 
 def build_phoneme_list() -> list:

From 82c276a6638ee75ccc2d361d3c87a00aa197c798 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 20 Mar 2023 19:06:15 +0800
Subject: [PATCH 091/475] Support training without vocoder

---
 configs/acoustic.yaml     | 1 +
 training/acoustic_task.py | 7 +++++--
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml
index f7487feca..c697e4889 100644
--- a/configs/acoustic.yaml
+++ b/configs/acoustic.yaml
@@ -77,6 +77,7 @@ decay_steps: 50000
 gamma: 0.5
 max_tokens: 80000
 max_sentences: 48
+val_with_vocoder: true
 val_check_interval: 2000
 num_valid_plots: 10
 max_updates: 320000
diff --git a/training/acoustic_task.py b/training/acoustic_task.py
index fd453930a..388941e3f 100644
--- a/training/acoustic_task.py
+++ b/training/acoustic_task.py
@@ -76,8 +76,10 @@ class AcousticTask(BaseTask):
     def __init__(self):
         super().__init__()
         self.dataset_cls = AcousticDataset
-        self.vocoder: BaseVocoder = get_vocoder_cls(hparams)()
         self.phone_encoder = self.build_phone_encoder()
+        self.use_vocoder = hparams['infer'] or hparams.get('val_with_vocoder', True)
+        if self.use_vocoder:
+            self.vocoder: BaseVocoder = get_vocoder_cls(hparams)()
         self.saving_result_pool = None
         self.saving_results_futures = None
         self.stats = {}
@@ -179,7 +181,8 @@ def validation_step(self, sample, batch_idx):
 
         if batch_idx < hparams['num_valid_plots']:
             _, mel_pred = self.run_model(sample, return_output=True, infer=True)
-            self.plot_wav(batch_idx, sample['mel'], mel_pred, 
f0=sample['f0']) + if self.use_vocoder: + self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') return outputs From b851167790d287d15d148777069de71eaa46d340 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 20 Mar 2023 23:56:53 +0800 Subject: [PATCH 092/475] Make train_dataloader persistent --- basics/base_task.py | 6 ++++-- training/acoustic_task.py | 8 ++++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index efb80f7bc..d827338ec 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -75,6 +75,7 @@ def __init__(self, *args, **kwargs): hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences self.model = None + self.persistent_dataloader = None self.training_losses_meter = None ########### @@ -177,7 +178,7 @@ def configure_optimizers(self): return [optm] def build_dataloader(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, batch_by_size=True): + required_batch_size_multiple=-1, batch_by_size=True, persistent=False): devices_cnt = torch.cuda.device_count() if devices_cnt == 0: devices_cnt = 1 @@ -216,7 +217,8 @@ def shuffle_batches(batches): collate_fn=dataset.collater, batch_sampler=batches, num_workers=num_workers, - pin_memory=False) + pin_memory=False, + persistent_workers=persistent) def test_start(self): pass diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 388941e3f..88706b500 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -111,8 +111,12 @@ def build_scheduler(self, optimizer): @data_loader def train_dataloader(self): - train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) - return self.build_dataloader(train_dataset, True, self.max_tokens, self.max_sentences) + if self.persistent_dataloader is None: + train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) + self.persistent_dataloader = self.build_dataloader( + train_dataset, True, self.max_tokens, self.max_sentences, persistent=True + ) + return self.persistent_dataloader @data_loader def val_dataloader(self): From e229a656268b493c950b5bdb18fa07e44d94dae7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 21 Mar 2023 00:32:35 +0800 Subject: [PATCH 093/475] Add length logging --- inference/ds_acoustic.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 329287722..268015dbc 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -66,6 +66,8 @@ def preprocess_input(self, param): batch['mel2ph'] = mel2ph length = mel2ph.size(1) # => T + print(f'Length: {txt_tokens.size(1)} token(s), {length} frame(s), {length * self.timestep:.2f} second(s)') + if hparams['use_spk_id']: spk_mix_map = param.get('spk_mix') # { spk_name: value } or { spk_name: "value value value ..." 
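On the new log line in patch 093 above: the seconds figure follows from the mel frame timestep, assuming timestep is the hop length divided by the sampling rate (the usual definition for hop-based frames). A toy computation with illustrative values, not this config's:

hop_size = 512         # illustrative
sample_rate = 44100    # illustrative
timestep = hop_size / sample_rate  # seconds covered by one mel frame
num_tokens, num_frames = 42, 1000
print(f'Length: {num_tokens} token(s), {num_frames} frame(s), '
      f'{num_frames * timestep:.2f} second(s)')
# -> Length: 42 token(s), 1000 frame(s), 11.61 second(s)
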
} dynamic = False From 984ae5fe3cc848f97fbd077339346a7cbc215c2b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 21 Mar 2023 00:44:05 +0800 Subject: [PATCH 094/475] Use positional arguments in migrate.py --- basics/base_model.py | 2 +- scripts/migrate.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/basics/base_model.py b/basics/base_model.py index a933be613..3f2c2c113 100644 --- a/basics/base_model.py +++ b/basics/base_model.py @@ -11,7 +11,7 @@ def check_category(self, category): raise RuntimeError('Category is not specified in this checkpoint.\n' 'If this is a checkpoint in the old format, please consider ' 'migrating it to the new format via the following command:\n' - 'python scripts/migrate.py -i -o ') + 'python scripts/migrate.py ') elif category != self.category: raise RuntimeError('Category mismatches!\n' f'This checkpoint is of the category \'{category}\', ' diff --git a/scripts/migrate.py b/scripts/migrate.py index cec5fe50e..ebb15ce2a 100644 --- a/scripts/migrate.py +++ b/scripts/migrate.py @@ -6,8 +6,8 @@ parser = argparse.ArgumentParser(description='Migrate checkpoint files of MIDI-less acoustic models from old format') -parser.add_argument('-i', '--input', required=True, type=str, help='Path to the input file') -parser.add_argument('-o', '--output', required=True, type=str, help='Path to the output file') +parser.add_argument('input', type=str, help='Path to the input file') +parser.add_argument('output', type=str, help='Path to the output file') parser.add_argument('--overwrite', required=False, default=False, action='store_true', help='Overwrite the existing file') args = parser.parse_args() From 69d01b18c54e5e231b996c08bbdc7bdd984aec7f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 21 Mar 2023 00:45:05 +0800 Subject: [PATCH 095/475] Clip gender and velocity, forcing them to be within range --- deployment/export/export_acoustic.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index da1bfda9e..1b1badd84 100644 --- a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -89,6 +89,7 @@ def __init__(self, vocab_size): self.key_shift_embed = Linear(1, hparams['hidden_size']) if hparams.get('use_speed_embed', False): + self.speed_min, self.speed_max = hparams['augmentation_args']['random_time_stretching']['range'] self.speed_embed = Linear(1, hparams['hidden_size']) if hparams['use_spk_id']: @@ -121,12 +122,14 @@ def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=N if frozen_gender >= 0. else frozen_gender * abs(self.shift_min) key_shift_embed = self.key_shift_embed(key_shift[:, None, None]) else: + gender = torch.clip(gender, min=-1., max=1.) gender_mask = (gender < 0.).float() key_shift = gender * ((1. 
- gender_mask) * self.shift_max + gender_mask * abs(self.shift_min)) key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) condition += key_shift_embed if hparams.get('use_speed_embed', False): if velocity is not None: + velocity = torch.clip(velocity, min=self.speed_min, max=self.speed_max) speed_embed = self.speed_embed(velocity[:, :, None]) else: speed_embed = self.speed_embed(torch.FloatTensor([1.]).to(condition.device)[:, None, None]) From 7acf94f3fd2050653a8c5b58060b3ed4e062432e Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 22 Mar 2023 01:16:13 -0500 Subject: [PATCH 096/475] Initial attempt to refactor lightning code --- augmentation/spec_stretch.py | 10 +- basics/base_task.py | 329 ++++++++++------------------ configs/base.yaml | 2 +- modules/nsf_hifigan/models.py | 4 +- modules/vocoders/nsf_hifigan.py | 4 +- preprocessing/acoustic_binarizer.py | 15 +- scripts/train.py | 2 +- training/acoustic_task.py | 86 ++++---- utils/__init__.py | 4 +- utils/hparams.py | 9 +- utils/indexed_datasets.py | 65 ++++-- utils/multiprocess_utils.py | 3 +- utils/training_utils.py | 67 ++++++ 13 files changed, 301 insertions(+), 299 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 3a6244083..728c04d43 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -31,7 +31,7 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['wav_fn'], keyshift=key_shift, speed=speed ) - aug_item['mel'] = torch.from_numpy(mel) + aug_item['mel'] = mel if speed != 1. or hparams.get('use_speed_embed', False): aug_item['length'] = mel.shape[0] @@ -39,18 +39,18 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['seconds'] /= aug_item['speed'] aug_item['ph_dur'] /= aug_item['speed'] aug_item['mel2ph'] = get_mel2ph_torch( - self.lr, aug_item['ph_dur'], aug_item['length'], hparams, device=self.device - ) + self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], hparams, device=self.device + ).cpu().numpy() f0, f0_coarse, _ = get_pitch_parselmouth( wav, aug_item['length'], hparams, speed=speed, interp_uv=item['interp_uv'] ) aug_item['f0'], aug_item['f0_coarse'] = \ - torch.from_numpy(f0), torch.from_numpy(f0_coarse) + f0.astype(np.float32), f0_coarse if key_shift != 0. 
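On the clipping introduced in patch 095 above: out-of-range gender and velocity inputs are forced back into the supported intervals instead of letting the linear embeddings extrapolate. A toy check (the velocity bounds are placeholders standing in for the configured time-stretching range):

import torch

gender = torch.tensor([[-2.0, -0.5, 0.0, 0.7, 3.0]])
print(torch.clip(gender, min=-1., max=1.))
# -> [[-1.0, -0.5, 0.0, 0.7, 1.0]]

speed_min, speed_max = 0.5, 2.0  # placeholder augmentation range
velocity = torch.tensor([[0.1, 1.0, 5.0]])
print(torch.clip(velocity, min=speed_min, max=speed_max))
# -> [[0.5, 1.0, 2.0]]
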
or hparams.get('use_key_shift_embed', False): aug_item['key_shift'] = key_shift aug_item['f0'] *= 2 ** (key_shift / 12) - aug_item['f0_coarse'] = torch.from_numpy(f0_to_coarse(aug_item['f0'].numpy())) + aug_item['f0_coarse'] = f0_to_coarse(aug_item['f0']) if replace_spk_id is not None: aug_item['spk_id'] = replace_spk_id diff --git a/basics/base_task.py b/basics/base_task.py index d827338ec..809c70a1a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,13 +13,18 @@ import sys import numpy as np import torch.distributed as dist +import pytorch_lightning as pl +from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.loggers import TensorBoardLogger +from pytorch_lightning.strategies import DDPStrategy +from pytorch_lightning.utilities import grad_norm, rank_zero_only from utils.phoneme_utils import locate_dictionary -from utils.pl_utils import LatestModelCheckpoint, BaseTrainer, data_loader, DDP +from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength from torch import nn import torch.utils.data import utils import logging +from functools import partial import os torch.multiprocessing.set_sharing_strategy(os.getenv('TORCH_SHARE_STRATEGY', 'file_system')) @@ -29,7 +34,7 @@ format=log_format, datefmt='%m/%d %I:%M:%S %p') -class BaseTask(nn.Module): +class BaseTask(pl.LightningModule): ''' Base class for training tasks. 1. *load_ckpt*: @@ -48,21 +53,14 @@ class BaseTask(nn.Module): how to build the model, the optimizer and the training scheduler; 2. *_training_step*: one training step of the model; - 3. *validation_end* and *_validation_end*: + 3. *on_validation_end* and *_on_validation_end*: postprocess the validation output. ''' def __init__(self, *args, **kwargs): # dataset configs super(BaseTask, self).__init__(*args, **kwargs) - self.current_epoch = 0 - self.global_step = 0 self.loaded_optimizer_states_dict = {} - self.trainer = None - self.logger = None - self.on_gpu = False - self.use_dp = False - self.use_ddp = False self.example_input_array = None self.max_tokens = hparams['max_tokens'] @@ -74,17 +72,18 @@ def __init__(self, *args, **kwargs): if self.max_eval_sentences == -1: hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences - self.model = None - self.persistent_dataloader = None self.training_losses_meter = None + + self.model = None ########### # Training, validation and testing ########### + def build_model(self): raise NotImplementedError - def on_epoch_start(self): + def on_train_epoch_start(self): self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} def _training_step(self, sample, batch_idx, optimizer_idx): @@ -118,26 +117,35 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): pass # log_outputs['all_loss'] = total_loss.item() - progress_bar_log = log_outputs + progress_bar_log = log_outputs | {'step': self.global_step} tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} + self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False) + self.log_dict(tb_log) return { - 'loss': total_loss, - 'progress_bar': progress_bar_log, - 'log': tb_log + 'loss': total_loss } - def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): - optimizer.step() - optimizer.zero_grad() - if self.scheduler is not None: - self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) - - def on_epoch_end(self): + def on_train_epoch_end(self): pass # loss_outputs = {k: round(v.avg, 4) for k, v in 
self.training_losses_meter.items()} # print(f"\n==============\n " # f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. {loss_outputs}" # f"\n==============\n") + + def on_before_optimizer_step(self, optimizer): + self.log_dict(grad_norm(self, norm_type=2)) + + def on_validation_start(self): + self.validation_step_outputs = [] + + def _validation_step(self, sample, batch_idx): + """ + + :param sample: + :param batch_idx: + :return: output: dict + """ + raise NotImplementedError def validation_step(self, sample, batch_idx): """ @@ -146,9 +154,14 @@ def validation_step(self, sample, batch_idx): :param batch_idx: :return: output: dict """ - raise NotImplementedError + outputs = self._validation_step(sample, batch_idx) + self.validation_step_outputs.append(outputs) + + return { + 'val_loss': outputs['total_loss'] + } - def _validation_end(self, outputs): + def _on_validation_end(self, outputs): """ :param outputs: @@ -156,13 +169,13 @@ def _validation_end(self, outputs): """ raise NotImplementedError - def validation_end(self, outputs): - loss_output = self._validation_end(outputs) + def on_validation_epoch_end(self): + loss_output = self._on_validation_end(self.validation_step_outputs) print(f"\n==============\n " f"valid results: {loss_output}" f"\n==============\n") + self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True) return { - 'log': {f'val/{k}': v for k, v in loss_output.items()}, 'val_loss': loss_output['total_loss'] } @@ -174,60 +187,43 @@ def build_optimizer(self, model): def configure_optimizers(self): optm = self.build_optimizer(self.model) - self.scheduler = self.build_scheduler(optm) - return [optm] + scheduler = self.build_scheduler(optm) + return { + "optimizer": optm, + "lr_scheduler": { + "scheduler": scheduler, + "interval": "step", + "frequency": hparams['accumulate_grad_batches'], + } + } - def build_dataloader(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, batch_by_size=True, persistent=False): + def build_batch_sampler(self, dataset, shuffle, max_tokens=None, max_sentences=None, + required_batch_size_multiple=-1, batch_by_size=True): devices_cnt = torch.cuda.device_count() if devices_cnt == 0: devices_cnt = 1 if required_batch_size_multiple == -1: required_batch_size_multiple = devices_cnt - - def shuffle_batches(batches): - np.random.shuffle(batches) - return batches - - if max_tokens is not None: - max_tokens *= devices_cnt - if max_sentences is not None: - max_sentences *= devices_cnt - indices = dataset.ordered_indices() - if batch_by_size: - batch_sampler = utils.batch_by_size( - indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple, - ) + + batch_sampler_cls = partial(BatchSamplerSimilarLength, + max_tokens=max_tokens, max_sentences=max_sentences, + required_batch_size_multiple=required_batch_size_multiple, + batch_by_size=batch_by_size) + if self.trainer.distributed_sampler_kwargs: + sampler = DistributedBatchSamplerSimilarLength(dataset, batch_sampler_cls=batch_sampler_cls, + shuffle=shuffle, **self.trainer.distributed_sampler_kwargs) else: - batch_sampler = [] - for i in range(0, len(indices), max_sentences): - batch_sampler.append(indices[i:i + max_sentences]) + sampler = batch_sampler_cls(dataset=dataset, indices=dataset.ordered_indices(), shuffle=shuffle) + return sampler - if shuffle: - batches = shuffle_batches(list(batch_sampler)) - else: - batches = batch_sampler - num_workers = 
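For reference, the mapping returned by the refactored configure_optimizers above follows the Lightning optimizer/scheduler contract; a self-contained sketch with placeholder values (the stand-in module and hyperparameters are not this project's):

import torch

model = torch.nn.Linear(8, 8)  # stand-in module
optimizer = torch.optim.AdamW(model.parameters(), lr=4e-4)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=50000, gamma=0.5)
optim_config = {
    'optimizer': optimizer,
    'lr_scheduler': {
        'scheduler': scheduler,
        'interval': 'step',  # advance per optimizer step, not per epoch
        'frequency': 1,      # mirrors accumulate_grad_batches above
    },
}
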
dataset.num_workers - if self.trainer.use_ddp: - num_replicas = dist.get_world_size() - rank = dist.get_rank() - batches = [x[rank::num_replicas] for x in batches if len(x) % num_replicas == 0] - return torch.utils.data.DataLoader(dataset, - collate_fn=dataset.collater, - batch_sampler=batches, - num_workers=num_workers, - pin_memory=False, - persistent_workers=persistent) - - def test_start(self): - pass + def on_test_start(self): + self.on_validation_start() def test_step(self, sample, batch_idx): return self.validation_step(sample, batch_idx) - def test_end(self, outputs): - return self.validation_end(outputs) + def on_test_end(self): + return self.on_validation_end() ########### # Running configuration @@ -235,165 +231,74 @@ def test_end(self, outputs): @classmethod def start(cls): - set_hparams() - os.environ['MASTER_PORT'] = str(random.randint(15000, 30000)) random.seed(hparams['seed']) np.random.seed(hparams['seed']) task = cls() work_dir = pathlib.Path(hparams['work_dir']) - trainer = BaseTrainer( - checkpoint_callback=LatestModelCheckpoint( - filepath=work_dir, - verbose=True, - monitor='val_loss', - mode='min', - num_ckpt_keep=hparams['num_ckpt_keep'], - permanent_ckpt_start=hparams.get('permanent_ckpt_start', 0), - permanent_ckpt_interval=hparams.get('permanent_ckpt_interval', -1), - save_best=hparams['save_best'], - period=1 if hparams['save_ckpt'] else 100000 - ), + trainer = pl.Trainer( + accelerator='gpu', + devices=4, + strategy=DDPStrategy(find_unused_parameters=False, process_group_backend='gloo'), + precision="bf16", + callbacks=[ + ModelCheckpoint( + dirpath=work_dir, + filename='model_ckpt_steps_{step}.ckpt', + monitor='val_loss', + mode='min', + save_last=hparams['save_last'], + save_top_k=hparams['num_ckpt_keep'], + every_n_train_steps=hparams['val_check_interval'], + verbose=True + ) + ], logger=TensorBoardLogger( save_dir=str(work_dir), name='lightning_logs', version='lastest' ), + num_sanity_val_steps=0, gradient_clip_val=hparams['clip_grad_norm'], val_check_interval=hparams['val_check_interval'], - row_log_interval=hparams['log_interval'], - max_updates=hparams['max_updates'], - num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, + check_val_every_n_epoch=None, + log_every_n_steps=hparams['log_interval'], + max_steps=hparams['max_updates'], + use_distributed_sampler=False, + # num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train - # copy_code = input(f'{hparams["save_codes"]} code backup? 
y/n: ') == 'y' - copy_code = True # backup code every time - if copy_code: - t = datetime.now().strftime('%Y%m%d%H%M%S') - code_dir = work_dir.joinpath('codes').joinpath(str(t)) - code_dir.mkdir(exist_ok=True, parents=True) - for c in hparams['save_codes']: - shutil.copytree(c, code_dir, dirs_exist_ok=True) - print(f'| Copied codes to {code_dir}.') - # Copy spk_map.json and dictionary.txt to work dir - binary_dir = pathlib.Path(hparams['binary_data_dir']) - spk_map = work_dir.joinpath('spk_map.json') - spk_map_src = binary_dir.joinpath('spk_map.json') - if not spk_map.exists() and spk_map_src.exists(): - shutil.copy(spk_map_src, spk_map) - print(f'| Copied spk map to {spk_map}.') - dictionary = work_dir.joinpath('dictionary.txt') - dict_src = binary_dir.joinpath('dictionary.txt') - if not dictionary.exists(): - if dict_src.exists(): - shutil.copy(dict_src, dictionary) - else: - shutil.copy(locate_dictionary(), dictionary) - print(f'| Copied dictionary to {dictionary}.') - - trainer.checkpoint_callback.task = task + if trainer.local_rank == 0: + set_hparams(print_hparams=True, is_main_process=True) + # copy_code = input(f'{hparams["save_codes"]} code backup? y/n: ') == 'y' + copy_code = True # backup code every time + if copy_code: + code_dir = work_dir / 'codes' / datetime.now().strftime('%Y%m%d%H%M%S') + code_dir.mkdir(exist_ok=True, parents=True) + for c in hparams['save_codes']: + shutil.copytree(c, code_dir, dirs_exist_ok=True) + print(f'| Copied codes to {code_dir}.') + # Copy spk_map.json and dictionary.txt to work dir + binary_dir = pathlib.Path(hparams['binary_data_dir']) + spk_map = work_dir / 'spk_map.json' + spk_map_src = binary_dir / 'spk_map.json' + if not spk_map.exists() and spk_map_src.exists(): + shutil.copy(spk_map_src, spk_map) + print(f'| Copied spk map to {spk_map}.') + dictionary = work_dir / 'dictionary.txt' + dict_src = binary_dir / 'dictionary.txt' + if not dictionary.exists(): + if dict_src.exists(): + shutil.copy(dict_src, dictionary) + else: + shutil.copy(locate_dictionary(), dictionary) + print(f'| Copied dictionary to {dictionary}.') trainer.fit(task) + if trainer.local_rank == 0: + trainer.callbacks[0].on_validation_end(trainer, task) else: trainer.test(task) - def configure_ddp(self, model, device_ids): - model = DDP( - model, - device_ids=device_ids, - find_unused_parameters=True - ) - if dist.get_rank() != 0 and not hparams['debug']: - sys.stdout = open(os.devnull, "w") - sys.stderr = open(os.devnull, "w") - random.seed(hparams['seed']) - np.random.seed(hparams['seed']) - return model - - def training_end(self, *args, **kwargs): - return None - - def init_ddp_connection(self, proc_rank, world_size): - set_hparams(print_hparams=False) - # guarantees unique ports across jobs from same grid search - default_port = 12910 - # if user gave a port number, use that one instead - try: - default_port = os.environ['MASTER_PORT'] - except Exception: - os.environ['MASTER_PORT'] = str(default_port) - - # figure out the root node addr - root_node = '127.0.0.2' - root_node = self.trainer.resolve_root_node_address(root_node) - os.environ['MASTER_ADDR'] = root_node - dist.init_process_group('nccl', rank=proc_rank, world_size=world_size) - - @data_loader - def train_dataloader(self): - return None - - @data_loader - def test_dataloader(self): - return None - - @data_loader - def val_dataloader(self): - return None - - def on_load_checkpoint(self, checkpoint): - pass - def on_save_checkpoint(self, checkpoint): if isinstance(self.model, CategorizedModule): 
checkpoint['category'] = self.model.category - - def on_sanity_check_start(self): - pass - - def on_train_start(self): - pass - - def on_train_end(self): - pass - - def on_batch_start(self, batch): - pass - - def on_batch_end(self): - pass - - def on_pre_performance_check(self): - pass - - def on_post_performance_check(self): - pass - - def on_before_zero_grad(self, optimizer): - pass - - def on_after_backward(self): - pass - - def backward(self, loss, optimizer): - loss.backward() - - def grad_norm(self, norm_type): - results = {} - total_norm = 0 - for name, p in self.named_parameters(): - if p.requires_grad: - try: - param_norm = p.grad.data.norm(norm_type) - total_norm += param_norm ** norm_type - norm = param_norm ** (1 / norm_type) - - grad = round(norm.data.cpu().numpy().flatten()[0], 3) - results['grad_{}_norm_{}'.format(norm_type, name)] = grad - except Exception: - # this param had no grad - pass - - total_norm = total_norm ** (1. / norm_type) - grad = round(total_norm.data.cpu().numpy().flatten()[0], 3) - results['grad_{}_norm_total'.format(norm_type)] = grad - return results diff --git a/configs/base.yaml b/configs/base.yaml index 709a32630..3669ed347 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -64,7 +64,7 @@ dur_loss: mse # huber|mol # train and eval ########### save_ckpt: true -save_best: false +save_last: false num_ckpt_keep: 3 accumulate_grad_batches: 1 log_interval: 100 diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 1c3006d61..337284063 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -22,9 +22,9 @@ def load_model(model_path, device='cuda'): json_config = json.loads(data) h = AttrDict(json_config) - generator = Generator(h).to(device) + generator = Generator(h)#.to(device) - cp_dict = torch.load(model_path, map_location=device) + cp_dict = torch.load(model_path)#, map_location=device) generator.load_state_dict(cp_dict['generator']) generator.eval() generator.remove_weight_norm() diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index d232718a7..40fcb6a4e 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -70,12 +70,12 @@ def spec2wav(self, mel, **kwargs): if self.h.fmax != hparams['fmax']: print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') with torch.no_grad(): - c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(self.device) + c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1)#.to(self.device) # log10 to log mel c = 2.30259 * c f0 = kwargs.get('f0') if f0 is not None: - f0 = torch.FloatTensor(f0[None, :]).to(self.device) + f0 = torch.FloatTensor(f0[None, :])#.to(self.device) y = self.model(c, f0).view(-1) else: y = self.model(c).view(-1) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 3c31a1d4d..6d7510ff8 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -185,12 +185,13 @@ def postprocess(_item): # code for parallel processing for item in tqdm( chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))) + total=len(list(self.meta_data_iterator(prefix))), + ncols=80 ): postprocess(item) else: # code for single cpu processing - for a in tqdm(args): + for a in tqdm(args, ncols=80): item = self.process_item(*a) postprocess(item) @@ -219,9 +220,9 @@ def process_item(self, item_name, meta_data, binarization_args): 'spk_id': 
meta_data['spk_id'], 'seconds': seconds, 'length': length, - 'mel': torch.from_numpy(mel), - 'tokens': torch.LongTensor(self.phone_encoder.encode(meta_data['ph_seq'])), - 'ph_dur': torch.FloatTensor(meta_data['ph_dur']), + 'mel': mel, + 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), + 'ph_dur': np.array(meta_data['ph_dur']), 'interp_uv': self.binarization_args['interp_uv'], } @@ -231,10 +232,10 @@ def process_item(self, item_name, meta_data, binarization_args): ) if uv.all(): # All unvoiced raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') - processed_input['f0'] = torch.from_numpy(gt_f0).float() + processed_input['f0'] = gt_f0.astype(np.float32) # get ground truth dur - processed_input['mel2ph'] = get_mel2ph_torch(self.lr, processed_input['ph_dur'], length, hparams) + processed_input['mel2ph'] = get_mel2ph_torch(self.lr, torch.from_numpy(processed_input['ph_dur']), length, hparams).cpu().numpy() if hparams.get('use_key_shift_embed', False): processed_input['key_shift'] = 0. diff --git a/scripts/train.py b/scripts/train.py index bb2d51f47..98f12e27f 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -2,7 +2,7 @@ from utils.hparams import set_hparams, hparams -set_hparams(print_hparams=False) +set_hparams(is_main_process=False) def run_task(): assert hparams['task_cls'] != '' diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 88706b500..f10c29730 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -8,6 +8,7 @@ import torch.distributions import torch.optim import torch.utils.data +import pytorch_lightning as pl from tqdm import tqdm import utils @@ -35,13 +36,12 @@ def __init__(self, prefix, shuffle=False): self.data_dir = hparams['binary_data_dir'] self.prefix = prefix self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) - self.indexed_ds = None + self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) def __getitem__(self, index): - if self.indexed_ds is None: - self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) - sample = self.indexed_ds[index] - return sample + # if self.indexed_ds is None: + # self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) + return self.indexed_ds[index] def collater(self, samples): if len(samples) == 0: @@ -60,23 +60,21 @@ def collater(self, samples): # if hparams['use_energy_embed']: # batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) if hparams.get('use_key_shift_embed', False): - batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] + batch['key_shift'] = torch.FloatTensor([float(s['key_shift']) for s in samples])[:, None] if hparams.get('use_speed_embed', False): - batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] + batch['speed'] = torch.FloatTensor([float(s['speed']) for s in samples])[:, None] # if hparams['use_spk_embed']: # spk_embed = torch.stack([s['spk_embed'] for s in samples]) # batch['spk_embed'] = spk_embed if hparams['use_spk_id']: - spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) + spk_ids = torch.LongTensor([int(s['spk_id']) for s in samples]) batch['spk_ids'] = spk_ids return batch - class AcousticTask(BaseTask): def __init__(self): super().__init__() self.dataset_cls = AcousticDataset - self.phone_encoder = self.build_phone_encoder() self.use_vocoder = hparams['infer'] or hparams.get('val_with_vocoder', True) if self.use_vocoder: self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() @@ -84,6 +82,12 @@ def 
__init__(self): self.saving_results_futures = None self.stats = {} self.logged_gt_wav = set() + + def setup(self, stage): + self.phone_encoder = self.build_phone_encoder() + self.model = self.build_model() + self.train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) + self.valid_dataset = self.dataset_cls(hparams['valid_set_name'], shuffle=False) @staticmethod def build_phone_encoder(): @@ -91,15 +95,16 @@ def build_phone_encoder(): return TokenTextEncoder(vocab_list=phone_list) def build_model(self): - self.model = DiffSingerAcoustic( + model = DiffSingerAcoustic( vocab_size=len(self.phone_encoder), out_dims=hparams['audio_num_mel_bins'] ) - utils.print_arch(self.model) - return self.model + if self.trainer.local_rank == 0: + utils.print_arch(model) + return model def build_optimizer(self, model): - self.optimizer = optimizer = torch.optim.AdamW( + optimizer = torch.optim.AdamW( filter(lambda p: p.requires_grad, model.parameters()), lr=hparams['lr'], betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), @@ -108,33 +113,29 @@ def build_optimizer(self, model): def build_scheduler(self, optimizer): return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) - - @data_loader + def train_dataloader(self): - if self.persistent_dataloader is None: - train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) - self.persistent_dataloader = self.build_dataloader( - train_dataset, True, self.max_tokens, self.max_sentences, persistent=True - ) - return self.persistent_dataloader - - @data_loader + sampler = self.build_batch_sampler(self.train_dataset, True, self.max_tokens, self.max_sentences) + return torch.utils.data.DataLoader(self.train_dataset, + collate_fn=self.train_dataset.collater, + batch_sampler=sampler, + num_workers=self.train_dataset.num_workers, + prefetch_factor=4, + pin_memory=False, + persistent_workers=True) + def val_dataloader(self): - valid_dataset = self.dataset_cls(hparams['valid_set_name'], shuffle=False) - return self.build_dataloader(valid_dataset, False, self.max_eval_tokens, self.max_eval_sentences) + sampler = self.build_batch_sampler(self.valid_dataset, False, self.max_tokens, self.max_sentences) + return torch.utils.data.DataLoader(self.valid_dataset, + collate_fn=self.valid_dataset.collater, + batch_sampler=sampler, + num_workers=self.valid_dataset.num_workers, + prefetch_factor=4, + shuffle=False) - @data_loader def test_dataloader(self): return self.val_dataloader() - - def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): - if optimizer is None: - return - optimizer.step() - optimizer.zero_grad() - if self.scheduler is not None: - self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) - + def run_model(self, sample, return_output=False, infer=False): """ steps: @@ -171,15 +172,16 @@ def _training_step(self, sample, batch_idx, _): log_outputs = self.run_model(sample) total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) log_outputs['batch_size'] = sample['tokens'].size()[0] - log_outputs['lr'] = self.scheduler.get_lr()[0] + log_outputs['lr'] = self.lr_schedulers().get_lr()[0] return total_loss, log_outputs - def validation_step(self, sample, batch_idx): + def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, return_output=False, infer=False) total_loss = sum(losses.values()) outputs = { 'losses': losses, - 'total_loss': total_loss, 'size': sample['size'] + 
'total_loss': total_loss, + 'size': sample['size'] } outputs = utils.tensors_to_scalars(outputs) @@ -191,7 +193,7 @@ def validation_step(self, sample, batch_idx): return outputs - def _validation_end(self, outputs): + def _on_validation_end(self, outputs): all_losses_meter = { 'total_loss': utils.AvgrageMeter(), } @@ -230,7 +232,7 @@ def plot_mel(self, batch_idx, spec, spec_out, name=None): ############ # infer ############ - def test_start(self): + def on_test_start(self): self.saving_result_pool = Pool(8) self.saving_results_futures = [] self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() @@ -240,7 +242,7 @@ def test_step(self, sample, batch_idx): sample['outputs'] = mel_pred return self.after_infer(sample) - def test_end(self, outputs): + def on_test_end(self): self.saving_result_pool.close() [f.get() for f in tqdm(self.saving_results_futures)] self.saving_result_pool.join() diff --git a/utils/__init__.py b/utils/__init__.py index 2f41cf2f0..3d3fe6db3 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -45,7 +45,7 @@ def collate_nd(values, pad_value=0, max_len=None): Pad a list of Nd tensors on their first dimension and stack them into a (N+1)d tensor. """ size = ((max(v.size(0) for v in values) if max_len is None else max_len), *values[0].shape[1:]) - res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device) + res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype) for i, v in enumerate(values): res[i, :len(v), ...] = v @@ -64,7 +64,7 @@ def _is_batch_full(batch, num_tokens, max_tokens, max_sentences): def batch_by_size( indices, num_tokens_fn, max_tokens=None, max_sentences=None, - required_batch_size_multiple=1, distributed=False + required_batch_size_multiple=1 ): """ Yield mini-batches of indices bucketed by size. Batches may contain diff --git a/utils/hparams.py b/utils/hparams.py index bd82807c4..8be0a0663 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -3,8 +3,7 @@ import yaml -from utils.multiprocess_utils import is_main_process - +from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True hparams = {} @@ -23,7 +22,7 @@ def override_config(old_config: dict, new_config: dict): old_config[k] = v -def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True): +def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True, is_main_process=None): """ Load hparams from multiple sources: 1. config chain (i.e. 
first load base_config, then load config); @@ -47,6 +46,9 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob args = Args(config=config, exp_name=exp_name, hparams=hparams_str, infer=False, validate=False, reset=False, debug=False) + if is_main_process is None: + is_main_process = mp_is_main_process + args_work_dir = '' if args.exp_name != '': args.work_dir = args.exp_name @@ -117,6 +119,7 @@ def load_config(config_fn): # deep first if global_hparams: hparams.clear() hparams.update(hparams_) + hparams['is_main_process'] = is_main_process if is_main_process and print_hparams and global_print_hparams and global_hparams: print('| Hparams chains: ', config_chains) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 5c8f84581..c0482667c 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,6 +1,10 @@ import os.path +import pathlib import pickle +import multiprocessing from copy import deepcopy +import h5py +import torch import numpy as np @@ -8,20 +12,25 @@ class IndexedDataset: def __init__(self, path, prefix, num_cache=0): super().__init__() - self.path = path - self.data_file = None - self.data_offsets = np.load(os.path.join(path, f'{prefix}.idx')) - self.data_file = open(os.path.join(path, f'{prefix}.data'), 'rb', buffering=-1) + self.path = pathlib.Path(path) + # self.data_file = None + # self.data_offsets = np.load(self.path / f'{prefix}.idx')) + # self.data_file = open(self.path / f'{prefix}.data', 'rb', buffering=-1) + self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') self.cache = [] self.num_cache = num_cache def check_index(self, i): - if i < 0 or i >= len(self.data_offsets) - 1: + # if i < 0 or i >= len(self.data_offsets) - 1: + # raise IndexError('index out of range') + if i < 0 or i >= len(self.dset): raise IndexError('index out of range') def __del__(self): - if self.data_file: - self.data_file.close() + # if self.data_file: + # self.data_file.close() + if self.dset: + del self.dset def __getitem__(self, i): self.check_index(i) @@ -29,22 +38,27 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - self.data_file.seek(self.data_offsets[i]) - b = self.data_file.read(self.data_offsets[i + 1] - self.data_offsets[i]) - item = pickle.loads(b) + # self.data_file.seek(self.data_offsets[i]) + # b = self.data_file.read(self.data_offsets[i + 1] - self.data_offsets[i]) + # item = pickle.loads(b) + item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache = [(i, deepcopy(item))] + self.cache[:-1] return item def __len__(self): - return len(self.data_offsets) - 1 + # return len(self.data_offsets) - 1 + return len(self.dset) class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): - self.path = path + self.path = pathlib.Path(path) self.prefix = prefix - self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') - self.byte_offsets = [0] + # self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') + self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'w') + self.counter = 0 + self.lock = multiprocessing.Lock() + # self.byte_offsets = [0] if allowed_attr is not None: self.allowed_attr = set(allowed_attr) else: @@ -56,15 +70,24 @@ def add_item(self, item): k: item.get(k) for k in self.allowed_attr } - s = pickle.dumps(item) - n_bytes = self.out_file.write(s) - self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) + with self.lock: + item_no = self.counter + self.counter += 
1 + for k, v in item.items(): + if isinstance(v, np.ndarray): + self.dset.create_dataset(f'{item_no}/{k}', data=v, compression="gzip", compression_opts=4) + else: + self.dset.create_dataset(f'{item_no}/{k}', data=v) + # s = pickle.dumps(item) + # n_bytes = self.out_file.write(s) + # self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) def finalize(self): - self.out_file.close() - with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: - # noinspection PyTypeChecker - np.save(f, self.byte_offsets) + del self.dset + # self.out_file.close() + # with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: + # # noinspection PyTypeChecker + # np.save(f, self.byte_offsets) if __name__ == "__main__": diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 41eb13142..b54f99db7 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -8,7 +8,8 @@ def main_process_print(self, *args, sep=' ', end='\n', file=None): - if is_main_process: + from utils.hparams import hparams + if hparams['is_main_process']: print(self, *args, sep=sep, end=end, file=file) diff --git a/utils/training_utils.py b/utils/training_utils.py index 409b15388..63af4718e 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,5 +1,9 @@ +import utils from utils.hparams import hparams +import math +import numpy as np +from torch.utils.data.distributed import Sampler, DistributedSampler class RSQRTSchedule(object): def __init__(self, optimizer): @@ -25,3 +29,66 @@ def step(self, num_updates): def get_lr(self): return self.optimizer.param_groups[0]['lr'] + +class BatchSamplerSimilarLength(Sampler): + def __init__(self, dataset, indices=None, max_tokens=None, max_sentences=None, required_batch_size_multiple=-1, batch_by_size=True, shuffle=True): + self.shuffle = shuffle + + if batch_by_size: + self.batches = utils.batch_by_size( + indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, + required_batch_size_multiple=required_batch_size_multiple + ) + else: + self.batches = [indices[i:i + max_sentences] for i in range(0, len(indices), max_sentences)] + + def __iter__(self): + if self.shuffle: + np.random.shuffle(self.batches) + for batch in self.batches: + yield batch + + def __len__(self): + return len(self.batches) + +class DistributedBatchSamplerSimilarLength(DistributedSampler): + def __init__(self, dataset, num_replicas=None, + rank=None, shuffle=True, + seed=0, drop_last=False, batch_sampler_cls=None) -> None: + super().__init__(dataset=dataset, num_replicas=num_replicas, rank=rank, shuffle=shuffle, seed=seed, + drop_last=drop_last) + self.batch_sampler_cls = batch_sampler_cls + self.batch_sampler = None + + def __iter__(self): + if self.shuffle: + indices = np.random.RandomState(seed=self.seed).permutation(len(self.dataset)) + if self.dataset.sort_by_len: + indices = indices[np.argsort(np.array(self.dataset._sizes)[indices], kind='mergesort')] + else: + indices = np.arange(len(self.dataset)) + indices = indices.tolist() + + if not self.drop_last: + # add extra samples to make it evenly divisible + padding_size = self.total_size - len(indices) + if padding_size <= len(indices): + indices += indices[:padding_size] + else: + indices += (indices * math.ceil(padding_size / len(indices)))[:padding_size] + else: + # remove tail of data to make it evenly divisible. 
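The padding-and-subsample arithmetic of DistributedBatchSamplerSimilarLength (the truncation branch continues just below) can be checked in isolation; a standalone sketch with made-up sizes:

import math

indices = list(range(10))
num_replicas, rank = 4, 0
num_samples = math.ceil(len(indices) / num_replicas)
total_size = num_samples * num_replicas
# drop_last=False: replicate leading indices until the list divides evenly.
padding_size = total_size - len(indices)
if padding_size <= len(indices):
    indices += indices[:padding_size]
else:
    indices += (indices * math.ceil(padding_size / len(indices)))[:padding_size]
assert len(indices) == total_size
shard = indices[rank:total_size:num_replicas]  # interleaved per-rank subsample
assert len(shard) == num_samples
print(shard)  # -> [0, 4, 8] for rank 0
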
+ indices = indices[:self.total_size] + assert len(indices) == self.total_size + + # subsample + indices = indices[self.rank:self.total_size:self.num_replicas] + assert len(indices) == self.num_samples + + self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, shuffle=self.shuffle) + return iter(self.batch_sampler) + + def __len__(self) -> int: + if self.batch_sampler is None: + raise ValueError("BatchSampler is not initialized. Call __iter__ first.") + return len(self.batch_sampler) From a0c24453c1c87f7e4177afd0e91b077dc9a2e8cf Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 23 Mar 2023 01:20:25 -0500 Subject: [PATCH 097/475] Add hparams to yaml, successful checkpointing --- basics/base_task.py | 54 +- preparation/acoustic_preparation.ipynb | 24 +- training/acoustic_task.py | 1 - utils/pl_utils.py | 1672 +----------------------- 4 files changed, 93 insertions(+), 1658 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 809c70a1a..0f055f05a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -12,14 +12,12 @@ import random import sys import numpy as np -import torch.distributed as dist import pytorch_lightning as pl -from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.loggers import TensorBoardLogger -from pytorch_lightning.strategies import DDPStrategy -from pytorch_lightning.utilities import grad_norm, rank_zero_only +from pytorch_lightning.utilities import grad_norm from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength +from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj from torch import nn import torch.utils.data import utils @@ -119,8 +117,8 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): # log_outputs['all_loss'] = total_loss.item() progress_bar_log = log_outputs | {'step': self.global_step} tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False) - self.log_dict(tb_log) + self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False, rank_zero_only=True) + self.log_dict(tb_log, prog_bar=False, logger=True, on_step=True, on_epoch=False, rank_zero_only=True) return { 'loss': total_loss } @@ -133,7 +131,7 @@ def on_train_epoch_end(self): # f"\n==============\n") def on_before_optimizer_step(self, optimizer): - self.log_dict(grad_norm(self, norm_type=2)) + self.log_dict(grad_norm(self, norm_type=2), rank_zero_only=True) def on_validation_start(self): self.validation_step_outputs = [] @@ -156,10 +154,7 @@ def validation_step(self, sample, batch_idx): """ outputs = self._validation_step(sample, batch_idx) self.validation_step_outputs.append(outputs) - - return { - 'val_loss': outputs['total_loss'] - } + return outputs def _on_validation_end(self, outputs): """ @@ -171,13 +166,11 @@ def _on_validation_end(self, outputs): def on_validation_epoch_end(self): loss_output = self._on_validation_end(self.validation_step_outputs) - print(f"\n==============\n " - f"valid results: {loss_output}" - f"\n==============\n") - self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True) - return { - 'val_loss': loss_output['total_loss'] - } + # print(f"\n==============\n " + # f"valid results: {loss_output}" + # f"\n==============\n") + self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, 
sync_dist=True) + self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, prog_bar=False, logger=True, sync_dist=True) def build_scheduler(self, optimizer): raise NotImplementedError @@ -231,24 +224,24 @@ def on_test_end(self): @classmethod def start(cls): - random.seed(hparams['seed']) - np.random.seed(hparams['seed']) + pl.seed_everything(hparams['seed'], workers=True) task = cls() work_dir = pathlib.Path(hparams['work_dir']) trainer = pl.Trainer( - accelerator='gpu', - devices=4, - strategy=DDPStrategy(find_unused_parameters=False, process_group_backend='gloo'), - precision="bf16", + accelerator=hparams['pl_trainer_accelerator'], + devices=hparams['pl_trainer_devices'], + strategy=get_stategy_obj(hparams['pl_trainer_strategy']), + precision=hparams['pl_trainer_precision'], callbacks=[ - ModelCheckpoint( + DiffModelCheckpoint( dirpath=work_dir, - filename='model_ckpt_steps_{step}.ckpt', + filename='model_ckpt_steps_{step}', monitor='val_loss', mode='min', save_last=hparams['save_last'], save_top_k=hparams['num_ckpt_keep'], - every_n_train_steps=hparams['val_check_interval'], + save_on_train_epoch_end=True, + auto_insert_metric_name=False, verbose=True ) ], @@ -257,14 +250,13 @@ def start(cls): name='lightning_logs', version='lastest' ), - num_sanity_val_steps=0, gradient_clip_val=hparams['clip_grad_norm'], val_check_interval=hparams['val_check_interval'], check_val_every_n_epoch=None, log_every_n_steps=hparams['log_interval'], max_steps=hparams['max_updates'], use_distributed_sampler=False, - # num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, + num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train @@ -293,9 +285,7 @@ def start(cls): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') - trainer.fit(task) - if trainer.local_rank == 0: - trainer.callbacks[0].on_validation_end(trainer, task) + trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: trainer.test(task) diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index df686bacd..105059ad4 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1228,7 +1228,16 @@ " 'num_ckpt_keep': num_ckpt_keep,\n", " 'max_updates': max_updates,\n", " 'permanent_ckpt_start': permanent_ckpt_start,\n", - " 'permanent_ckpt_interval': permanent_ckpt_interval\n", + " 'permanent_ckpt_interval': permanent_ckpt_interval,\n", + " \n", + " ###########\n", + " # pytorch lightning\n", + " # Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values\n", + " ###########\n", + " 'pl_trainer_accelerator': 'auto',\n", + " 'pl_trainer_devices': 'auto',\n", + " 'pl_trainer_strategy': 'auto', # With extra choice 'ddp_gloo' in case that NCCL backend stucks on you\n", + " 'pl_trainer_precision': '32-true',\n", "}\n", "\n", "augmentation_args = {}\n", @@ -1254,6 +1263,8 @@ " configs['use_speed_embed'] = True\n", "configs['augmentation_args'] = augmentation_args\n", "\n", + "\n", + "\n", "with open(f'../data/{full_name}/config.yaml', 'w', encoding='utf8') as f:\n", " yaml.dump(configs, f, sort_keys=False, allow_unicode=True)\n", "\n", @@ -1536,7 +1547,16 @@ " 'num_ckpt_keep': num_ckpt_keep,\n", " 'max_updates': max_updates,\n", " 'permanent_ckpt_start': permanent_ckpt_start,\n", - " 
'permanent_ckpt_interval': permanent_ckpt_interval\n", + " 'permanent_ckpt_interval': permanent_ckpt_interval,\n", + " \n", + " ###########\n", + " # pytorch lightning\n", + " # Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values\n", + " ###########\n", + " 'pl_trainer_accelerator': 'auto',\n", + " 'pl_trainer_devices': 'auto',\n", + " 'pl_trainer_strategy': 'auto', # With extra choice 'ddp_gloo' in case that NCCL backend stucks on you\n", + " 'pl_trainer_precision': '32-true',\n", "}\n", "\n", "augmentation_args = {}\n", diff --git a/training/acoustic_task.py b/training/acoustic_task.py index f10c29730..7beb709ce 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -23,7 +23,6 @@ from utils.hparams import hparams from utils.indexed_datasets import IndexedDataset from utils.phoneme_utils import build_phoneme_list -from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder diff --git a/utils/pl_utils.py b/utils/pl_utils.py index b2a67a285..55db650f2 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1,1628 +1,54 @@ -import matplotlib -from torch.nn import DataParallel -from torch.nn.parallel import DistributedDataParallel - -from basics.base_model import CategorizedModule - -matplotlib.use('Agg') -import glob -import itertools -import threading -import traceback - -from pytorch_lightning.callbacks import GradientAccumulationScheduler -from pytorch_lightning.callbacks import ModelCheckpoint - -from functools import wraps -from torch.cuda._utils import _get_device_index -import numpy as np -import torch.optim -import torch.utils.data -import copy -import logging +from copy import deepcopy import os import re -import sys -import torch -import torch.distributed as dist -import torch.multiprocessing as mp -import tqdm -from torch.optim.optimizer import Optimizer - - -def get_a_var(obj): # pragma: no cover - if isinstance(obj, torch.Tensor): - return obj - - if isinstance(obj, list) or isinstance(obj, tuple): - for result in map(get_a_var, obj): - if isinstance(result, torch.Tensor): - return result - if isinstance(obj, dict): - for result in map(get_a_var, obj.items()): - if isinstance(result, torch.Tensor): - return result - return None - -def data_loader(fn): - """ - Decorator to make any fx with this use the lazy property - :param fn: - :return: - """ - - wraps(fn) - attr_name = '_lazy_' + fn.__name__ - - def _get_data_loader(self): - try: - value = getattr(self, attr_name) - except AttributeError: - try: - value = fn(self) # Lazy evaluation, done only once. - if ( - value is not None and - not isinstance(value, list) and - fn.__name__ in ['test_dataloader', 'val_dataloader'] - ): - value = [value] - except AttributeError as e: - # Guard against AttributeError suppression. (Issue #142) - traceback.print_exc() - error = f'{fn.__name__}: An AttributeError was encountered: ' + str(e) - raise RuntimeError(error) from e - setattr(self, attr_name, value) # Memoize evaluation. - return value - - return _get_data_loader - - -def parallel_apply(modules, inputs, kwargs_tup=None, devices=None): # pragma: no cover - r"""Applies each `module` in :attr:`modules` in parallel on arguments - contained in :attr:`inputs` (positional) and :attr:`kwargs_tup` (keyword) - on each of :attr:`devices`. 
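# Sketch (placement and values are illustrative): the four pl_trainer_* keys
# written by the notebook cells above feed pl.Trainer directly; this mirrors
# the base_task.py change in this same commit rather than adding new behavior.
import pytorch_lightning as pl
from utils.pl_utils import get_stategy_obj

hparams = {
    'pl_trainer_accelerator': 'auto',
    'pl_trainer_devices': 'auto',
    'pl_trainer_strategy': 'ddp_gloo',   # gloo fallback for a hanging NCCL backend
    'pl_trainer_precision': '32-true',
}
trainer = pl.Trainer(
    accelerator=hparams['pl_trainer_accelerator'],
    devices=hparams['pl_trainer_devices'],
    strategy=get_stategy_obj(hparams['pl_trainer_strategy']),  # DDPStrategy or 'auto'
    precision=hparams['pl_trainer_precision'],
)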
- - Args: - modules (Module): modules to be parallelized - inputs (tensor): inputs to the modules - devices (list of int or torch.device): CUDA devices - - :attr:`modules`, :attr:`inputs`, :attr:`kwargs_tup` (if given), and - :attr:`devices` (if given) should all have same length. Moreover, each - element of :attr:`inputs` can either be a single object as the only argument - to a module, or a collection of positional arguments. - """ - assert len(modules) == len(inputs) - if kwargs_tup is not None: - assert len(modules) == len(kwargs_tup) - else: - kwargs_tup = ({},) * len(modules) - if devices is not None: - assert len(modules) == len(devices) - else: - devices = [None] * len(modules) - devices = list(map(lambda x: _get_device_index(x, True), devices)) - lock = threading.Lock() - results = {} - grad_enabled = torch.is_grad_enabled() - - def _worker(i, module, input, kwargs, device=None): - torch.set_grad_enabled(grad_enabled) - if device is None: - device = get_a_var(input).get_device() - try: - with torch.cuda.device(device): - # this also avoids accidental slicing of `input` if it is a Tensor - if not isinstance(input, (list, tuple)): - input = (input,) - - # --------------- - # CHANGE - if module.training: - output = module.training_step(*input, **kwargs) - - elif module.testing: - output = module.test_step(*input, **kwargs) - - else: - output = module.validation_step(*input, **kwargs) - # --------------- - - with lock: - results[i] = output - except Exception as e: - with lock: - results[i] = e - - # make sure each module knows what training state it's in... - # fixes weird bug where copies are out of sync - root_m = modules[0] - for m in modules[1:]: - m.training = root_m.training - m.testing = root_m.testing - - if len(modules) > 1: - threads = [threading.Thread(target=_worker, - args=(i, module, input, kwargs, device)) - for i, (module, input, kwargs, device) in - enumerate(zip(modules, inputs, kwargs_tup, devices))] +import torch - for thread in threads: - thread.start() - for thread in threads: - thread.join() +import pytorch_lightning as pl +from pytorch_lightning.callbacks import ModelCheckpoint +from pytorch_lightning.strategies import DDPStrategy + +class DiffModelCheckpoint(ModelCheckpoint): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _monitor_candidates(self, trainer: "pl.Trainer"): + monitor_candidates = deepcopy(trainer.callback_metrics) + monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) + monitor_candidates["step"] = torch.tensor(trainer.global_step) + return monitor_candidates + + def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool: + from pytorch_lightning.trainer.states import RunningStage + return trainer.state.stage == RunningStage.TRAINING and super()._should_save_on_train_epoch_end(trainer) + + # @classmethod + # def _format_checkpoint_name(cls, filename, metrics, prefix = "", auto_insert_metric_name = True): + # # metrics = {k: v + 1 if k == 'step' or k == 'epoch' else v for k, v in metrics.items()} + # return super()._format_checkpoint_name(filename, metrics, prefix, auto_insert_metric_name) + +def get_latest_checkpoint_path(work_dir): + if not os.path.exists(work_dir): + return None + + last_steps = -1 + last_ckpt_name = None + + checkpoints = os.listdir(work_dir) + for name in checkpoints: + if '.ckpt' in name and not name.endswith('part'): + if 'steps_' in name: + steps = name.split('steps_')[1] + steps = int(re.sub('[^0-9]', '', steps)) + + if steps > last_steps: + last_steps = 
steps + last_ckpt_name = name + + return os.path.join(work_dir, last_ckpt_name) if last_ckpt_name is not None else None + +def get_stategy_obj(strategy): + if strategy == 'ddp_gloo': + return DDPStrategy(process_group_backend='gloo') else: - _worker(0, modules[0], inputs[0], kwargs_tup[0], devices[0]) - - outputs = [] - for i in range(len(inputs)): - output = results[i] - if isinstance(output, Exception): - raise output - outputs.append(output) - return outputs - - -def _find_tensors(obj): # pragma: no cover - r""" - Recursively find all tensors contained in the specified object. - """ - if isinstance(obj, torch.Tensor): - return [obj] - if isinstance(obj, (list, tuple)): - return itertools.chain(*map(_find_tensors, obj)) - if isinstance(obj, dict): - return itertools.chain(*map(_find_tensors, obj.values())) - return [] - - -class DDP(DistributedDataParallel): - """ - Override the forward call in lightning so it goes to training and validation step respectively - """ - - def parallel_apply(self, replicas, inputs, kwargs): - return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)]) - - def forward(self, *inputs, **kwargs): # pragma: no cover - self._sync_params() - if self.device_ids: - inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids) - if len(self.device_ids) == 1: - # -------------- - # LIGHTNING MOD - # -------------- - # normal - # output = self.module(*inputs[0], **kwargs[0]) - # lightning - if self.module.training: - output = self.module.training_step(*inputs[0], **kwargs[0]) - elif self.module.testing: - output = self.module.test_step(*inputs[0], **kwargs[0]) - else: - output = self.module.validation_step(*inputs[0], **kwargs[0]) - else: - outputs = self.parallel_apply(self._module_copies[:len(inputs)], inputs, kwargs) - output = self.gather(outputs, self.output_device) - else: - # normal - output = self.module(*inputs, **kwargs) - - if torch.is_grad_enabled(): - # We'll return the output object verbatim since it is a freeform - # object. We need to find any tensors in this object, though, - # because we need to figure out which parameters were used during - # this forward pass, to ensure we short circuit reduction for any - # unused parameters. Only if `find_unused_parameters` is set. 
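# Sketch (filenames are hypothetical): what the get_latest_checkpoint_path
# helper added above resolves. '*.part' files are skipped because they are
# unfinished atomic saves; among the rest, the highest step count wins.
import re
names = ['model_ckpt_steps_2000.ckpt', 'model_ckpt_steps_10000.ckpt',
         'model_ckpt_steps_12000.ckpt.part']
steps = {n: int(re.sub('[^0-9]', '', n.split('steps_')[1]))
         for n in names if '.ckpt' in n and not n.endswith('part') and 'steps_' in n}
assert max(steps, key=steps.get) == 'model_ckpt_steps_10000.ckpt'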
- if self.find_unused_parameters: - self.reducer.prepare_for_backward(list(_find_tensors(output))) - else: - self.reducer.prepare_for_backward([]) - return output - - -class DP(DataParallel): - """ - Override the forward call in lightning so it goes to training and validation step respectively - """ - - def forward(self, *inputs, **kwargs): - if not self.device_ids: - return self.module(*inputs, **kwargs) - - for t in itertools.chain(self.module.parameters(), self.module.buffers()): - if t.device != self.src_device_obj: - raise RuntimeError("module must have its parameters and buffers " - "on device {} (device_ids[0]) but found one of " - "them on device: {}".format(self.src_device_obj, t.device)) - - inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids) - if len(self.device_ids) == 1: - # lightning - if self.module.training: - return self.module.training_step(*inputs[0], **kwargs[0]) - elif self.module.testing: - return self.module.test_step(*inputs[0], **kwargs[0]) - else: - return self.module.validation_step(*inputs[0], **kwargs[0]) - - replicas = self.replicate(self.module, self.device_ids[:len(inputs)]) - outputs = self.parallel_apply(replicas, inputs, kwargs) - return self.gather(outputs, self.output_device) - - def parallel_apply(self, replicas, inputs, kwargs): - return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)]) - - -class GradientAccumulationScheduler: - def __init__(self, scheduling: dict): - if scheduling == {}: # empty dict error - raise TypeError("Empty dict cannot be interpreted correct") - - for key in scheduling.keys(): - if not isinstance(key, int) or not isinstance(scheduling[key], int): - raise TypeError("All epoches and accumulation factor must be integers") - - minimal_epoch = min(scheduling.keys()) - if minimal_epoch < 1: - msg = f"Epochs indexing from 1, epoch {minimal_epoch} cannot be interpreted correct" - raise IndexError(msg) - elif minimal_epoch != 1: # if user didnt define first epoch accumulation factor - scheduling.update({1: 1}) - - self.scheduling = scheduling - self.epochs = sorted(scheduling.keys()) - - def on_epoch_begin(self, epoch, trainer): - epoch += 1 # indexing epochs from 1 - for i in reversed(range(len(self.epochs))): - if epoch >= self.epochs[i]: - trainer.accumulate_grad_batches = self.scheduling.get(self.epochs[i]) - break - - -class LatestModelCheckpoint(ModelCheckpoint): - def __init__(self, filepath, monitor='val_loss', verbose=0, num_ckpt_keep=5, - permanent_ckpt_start=0, permanent_ckpt_interval=-1, save_weights_only=False, - mode='auto', period=1, prefix='model', save_best=True): - super(ModelCheckpoint, self).__init__() - self.monitor = monitor - self.verbose = verbose - self.filepath = filepath - os.makedirs(filepath, exist_ok=True) - self.num_ckpt_keep = num_ckpt_keep - self.permanent_ckpt_start = max(0, permanent_ckpt_start) - self.permanent_ckpt_interval = permanent_ckpt_interval - self.save_best = save_best - self.save_weights_only = save_weights_only - self.period = period - self.epochs_since_last_check = 0 - self.prefix = prefix - self.best_k_models = {} - # {filename: monitor} - self.kth_best_model = '' - self.save_top_k = 1 - self.task = None - if mode == 'min': - self.monitor_op = np.less - self.best = np.Inf - self.mode = 'min' - elif mode == 'max': - self.monitor_op = np.greater - self.best = -np.Inf - self.mode = 'max' - else: - if 'acc' in self.monitor or self.monitor.startswith('fmeasure'): - self.monitor_op = np.greater - self.best = -np.Inf - self.mode = 'max' - else: - 
self.monitor_op = np.less - self.best = np.Inf - self.mode = 'min' - if os.path.exists(f'{self.filepath}/best_valid.npy'): - self.best = np.load(f'{self.filepath}/best_valid.npy')[0] - - def get_all_ckpts(self): - return sorted(glob.glob(f'{self.filepath}/{self.prefix}_ckpt_steps_*.ckpt'), - key=lambda x: -int(re.findall(r'.*steps_(\d+)\.ckpt', x)[0])) - - def on_epoch_end(self, epoch, logs=None): - logs = logs or {} - self.epochs_since_last_check += 1 - best_filepath = f'{self.filepath}/{self.prefix}_ckpt_best.pt' - if self.epochs_since_last_check >= self.period: - self.epochs_since_last_check = 0 - filepath = f'{self.filepath}/{self.prefix}_ckpt_steps_{self.task.global_step}.ckpt' - if self.verbose > 0: - logging.info(f'Epoch {epoch:05d}@{self.task.global_step}: saving model to {filepath}') - self._save_model(filepath) - for old_ckpt in self.get_all_ckpts()[self.num_ckpt_keep:]: - if self.permanent_ckpt_interval > 0: - ckpt_steps_diff = int(re.findall(r'.*steps_(\d+)\.ckpt', old_ckpt)[0]) - self.permanent_ckpt_start - if ckpt_steps_diff >= 0 and ckpt_steps_diff % self.permanent_ckpt_interval == 0: - # Skip permanent checkpoints - continue - os.remove(old_ckpt) - if self.verbose > 0: - logging.info(f'Delete ckpt: {os.path.basename(old_ckpt)}') - current = logs.get(self.monitor) - if current is not None and self.save_best: - if self.monitor_op(current, self.best): - self.best = current - if self.verbose > 0: - logging.info( - f'Epoch {epoch:05d}@{self.task.global_step}: {self.monitor} reached' - f' {current:0.5f} (best {self.best:0.5f}), saving model to' - f' {best_filepath} as top 1') - self._save_model(best_filepath) - np.save(f'{self.filepath}/best_valid.npy', [self.best]) - - -class BaseTrainer: - def __init__( - self, - logger=True, - checkpoint_callback=True, - default_save_path=None, - gradient_clip_val=0, - process_position=0, - gpus=-1, - log_gpu_memory=None, - show_progress_bar=True, - track_grad_norm=-1, - check_val_every_n_epoch=1, - accumulate_grad_batches=1, - max_updates=1000, - min_epochs=1, - val_check_interval=1.0, - log_save_interval=100, - row_log_interval=10, - print_nan_grads=False, - weights_summary='full', - num_sanity_val_steps=5, - resume_from_checkpoint=None, - ): - self.log_gpu_memory = log_gpu_memory - self.gradient_clip_val = gradient_clip_val - self.check_val_every_n_epoch = check_val_every_n_epoch - self.track_grad_norm = track_grad_norm - self.on_gpu = True if (gpus and torch.cuda.is_available()) else False - self.process_position = process_position - self.weights_summary = weights_summary - self.max_updates = max_updates - self.min_epochs = min_epochs - self.num_sanity_val_steps = num_sanity_val_steps - self.print_nan_grads = print_nan_grads - self.resume_from_checkpoint = resume_from_checkpoint - self.default_save_path = default_save_path - - # training bookeeping - self.total_batch_idx = 0 - self.running_loss = [] - self.avg_loss = 0 - self.batch_idx = 0 - self.tqdm_metrics = {} - self.callback_metrics = {} - self.num_val_batches = 0 - self.num_training_batches = 0 - self.num_test_batches = 0 - self.get_train_dataloader = None - self.get_test_dataloaders = None - self.get_val_dataloaders = None - self.is_iterable_train_dataloader = False - - # training state - self.model = None - self.testing = False - self.disable_validation = False - self.lr_schedulers = [] - self.optimizers = None - self.global_step = 0 - self.current_epoch = 0 - self.total_batches = 0 - - # configure checkpoint callback - self.checkpoint_callback = checkpoint_callback - 
self.checkpoint_callback.save_function = self.save_checkpoint - self.weights_save_path = self.checkpoint_callback.filepath - - # accumulated grads - self.configure_accumulated_gradients(accumulate_grad_batches) - - # allow int, string and gpu list - self.data_parallel_device_ids = [ - int(x) for x in os.environ.get("CUDA_VISIBLE_DEVICES", "").split(",") if x != ''] - if len(self.data_parallel_device_ids) == 0: - self.root_gpu = None - self.on_gpu = False - else: - self.root_gpu = self.data_parallel_device_ids[0] - self.on_gpu = True - - # distributed backend choice - self.use_ddp = False - self.use_dp = False - self.single_gpu = False - self.distributed_backend = 'ddp' if self.num_gpus > 0 else 'dp' - self.set_distributed_mode(self.distributed_backend) - - self.proc_rank = 0 - self.world_size = 1 - self.node_rank = 0 - - # can't init progress bar here because starting a new process - # means the progress_bar won't survive pickling - self.show_progress_bar = show_progress_bar - - # logging - self.log_save_interval = log_save_interval - self.val_check_interval = val_check_interval - self.logger = logger - self.logger.rank = 0 - self.row_log_interval = row_log_interval - - @property - def num_gpus(self): - gpus = self.data_parallel_device_ids - if gpus is None: - return 0 - else: - return len(gpus) - - @property - def data_parallel(self): - return self.use_dp or self.use_ddp - - def get_model(self) -> CategorizedModule: - is_dp_module = isinstance(self.model, (DDP, DP)) - model = self.model.module if is_dp_module else self.model - return model - - # ----------------------------- - # MODEL TRAINING - # ----------------------------- - def fit(self, model): - if self.use_ddp: - mp.spawn(self.ddp_train, nprocs=self.num_gpus, args=(model,)) - else: - model.model = model.build_model() - if not self.testing: - self.optimizers, self.lr_schedulers = self.init_optimizers(model.configure_optimizers()) - if self.use_dp: - model.cuda(self.root_gpu) - model = DP(model, device_ids=self.data_parallel_device_ids) - elif self.single_gpu: - model.cuda(self.root_gpu) - self.run_pretrain_routine(model) - return 1 - - def init_optimizers(self, optimizers): - - # single optimizer - if isinstance(optimizers, Optimizer): - return [optimizers], [] - - # two lists - elif len(optimizers) == 2 and isinstance(optimizers[0], list): - optimizers, lr_schedulers = optimizers - return optimizers, lr_schedulers - - # single list or tuple - elif isinstance(optimizers, list) or isinstance(optimizers, tuple): - return optimizers, [] - - def run_pretrain_routine(self, model): - """Sanity check a few things before starting actual training. - - :param model: - """ - ref_model = model - if self.data_parallel: - ref_model = model.module - - # give model convenience properties - ref_model.trainer = self - - # set local properties on the model - self.copy_trainer_model_properties(ref_model) - - # link up experiment object - if self.logger is not None: - ref_model.logger = self.logger - self.logger.save() - - if self.use_ddp: - dist.barrier() - - # set up checkpoint callback - # self.configure_checkpoint_callback() - - # transfer data loaders from model - self.get_dataloaders(ref_model) - - # track model now. 
- # if cluster resets state, the model will update with the saved weights - self.model = model - - # restore training and model before hpc call - self.restore_weights(model) - - # when testing requested only run test and return - if self.testing: - self.run_evaluation(test=True) - return - - # check if we should run validation during training - self.disable_validation = self.num_val_batches == 0 - - # run tiny validation (if validation defined) - # to make sure program won't crash during val - ref_model.on_sanity_check_start() - ref_model.on_train_start() - if not self.disable_validation and self.num_sanity_val_steps > 0: - # init progress bars for validation sanity check - pbar = tqdm.tqdm(desc='Validation sanity check', - total=self.num_sanity_val_steps * len(self.get_val_dataloaders()), - leave=False, position=2 * self.process_position, - disable=not self.show_progress_bar, dynamic_ncols=True, unit='batch') - self.main_progress_bar = pbar - # dummy validation progress bar - self.val_progress_bar = tqdm.tqdm(disable=True) - - self.evaluate(model, self.get_val_dataloaders(), self.num_sanity_val_steps, self.testing) - - # close progress bars - self.main_progress_bar.close() - self.val_progress_bar.close() - - # init progress bar - pbar = tqdm.tqdm(leave=True, position=2 * self.process_position, - disable=not self.show_progress_bar, dynamic_ncols=True, unit='batch', - file=sys.stdout) - self.main_progress_bar = pbar - - # clear cache before training - if self.on_gpu: - torch.cuda.empty_cache() - - # CORE TRAINING LOOP - self.train() - - def test(self, model): - self.testing = True - self.fit(model) - - @property - def training_tqdm_dict(self): - tqdm_dict = { - 'step': '{}'.format(self.global_step), - } - tqdm_dict.update(self.tqdm_metrics) - return tqdm_dict - - # -------------------- - # restore ckpt - # -------------------- - def restore_weights(self, model): - """ - To restore weights we have two cases. - First, attempt to restore hpc weights. If successful, don't restore - other weights. 
- - Otherwise, try to restore actual weights - :param model: - :return: - """ - # clear cache before restore - if self.on_gpu: - torch.cuda.empty_cache() - - if self.resume_from_checkpoint is not None: - self.restore(self.resume_from_checkpoint, on_gpu=self.on_gpu) - else: - # restore weights if same exp version - self.restore_state_if_checkpoint_exists(model) - - # wait for all models to restore weights - if self.use_ddp: - # wait for all processes to catch up - dist.barrier() - - # clear cache after restore - if self.on_gpu: - torch.cuda.empty_cache() - - def restore_state_if_checkpoint_exists(self, model): - did_restore = False - - # do nothing if there's not dir or callback - no_ckpt_callback = (self.checkpoint_callback is None) or (not self.checkpoint_callback) - if no_ckpt_callback or not os.path.exists(self.checkpoint_callback.filepath): - return did_restore - - # restore trainer state and model if there is a weight for this experiment - last_steps = -1 - last_ckpt_name = None - - # find last epoch - checkpoints = os.listdir(self.checkpoint_callback.filepath) - for name in checkpoints: - if '.ckpt' in name and not name.endswith('part'): - if 'steps_' in name: - steps = name.split('steps_')[1] - steps = int(re.sub('[^0-9]', '', steps)) - - if steps > last_steps: - last_steps = steps - last_ckpt_name = name - - # restore last checkpoint - if last_ckpt_name is not None: - last_ckpt_path = os.path.join(self.checkpoint_callback.filepath, last_ckpt_name) - self.restore(last_ckpt_path, self.on_gpu) - logging.info(f'model and trainer restored from checkpoint: {last_ckpt_path}') - did_restore = True - - return did_restore - - def restore(self, checkpoint_path, on_gpu): - checkpoint = torch.load(checkpoint_path, map_location='cpu') - - # load model state - model = self.get_model() - - # load the state_dict on the model automatically - model.load_state_dict(checkpoint['state_dict'], strict=False) - if on_gpu: - model.cuda(self.root_gpu) - # load training state (affects trainer only) - self.restore_training_state(checkpoint) - model.global_step = self.global_step - del checkpoint - - try: - if dist.is_initialized() and dist.get_rank() > 0: - return - except Exception as e: - print(e) - return - - def restore_training_state(self, checkpoint): - """ - Restore trainer state. - Model will get its change to update - :param checkpoint: - :return: - """ - if self.checkpoint_callback is not None and self.checkpoint_callback is not False: - self.checkpoint_callback.best = checkpoint['checkpoint_callback_best'] - - self.global_step = checkpoint['global_step'] - self.current_epoch = checkpoint['epoch'] - - if self.testing: - return - - # restore the optimizers - optimizer_states = checkpoint['optimizer_states'] - for optimizer, opt_state in zip(self.optimizers, optimizer_states): - if optimizer is None: - return - optimizer.load_state_dict(opt_state) - - # move optimizer to GPU 1 weight at a time - # avoids OOM - if self.root_gpu is not None: - for state in optimizer.state.values(): - for k, v in state.items(): - if isinstance(v, torch.Tensor): - state[k] = v.cuda(self.root_gpu) - - # restore the lr schedulers - lr_schedulers = checkpoint['lr_schedulers'] - for scheduler, lrs_state in zip(self.lr_schedulers, lr_schedulers): - scheduler.load_state_dict(lrs_state) - - # -------------------- - # MODEL SAVE CHECKPOINT - # -------------------- - def _atomic_save(self, checkpoint, filepath): - """Saves a checkpoint atomically, avoiding the creation of incomplete checkpoints. 
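# Sketch (a restatement of the pattern, not project code): the atomic save
# implemented by _atomic_save here is also why the checkpoint scans above
# ignore '*.part' files. Writing to a temporary name and then renaming means
# a crash mid-save never leaves a truncated file under the final name;
# os.replace is an atomic rename on the same filesystem.
import os
import torch

def atomic_save(obj, filepath):
    tmp_path = str(filepath) + '.part'
    torch.save(obj, tmp_path)       # may crash here; the final name is untouched
    os.replace(tmp_path, filepath)  # single atomic rename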
- - This will create a temporary checkpoint with a suffix of ``.part``, then copy it to the final location once - saving is finished. - - Args: - checkpoint (object): The object to save. - Built to be used with the ``dump_checkpoint`` method, but can deal with anything which ``torch.save`` - accepts. - filepath (str|pathlib.Path): The path to which the checkpoint will be saved. - This points to the file that the checkpoint will be stored in. - """ - tmp_path = str(filepath) + ".part" - torch.save(checkpoint, tmp_path) - os.replace(tmp_path, filepath) - - def save_checkpoint(self, filepath): - checkpoint = self.dump_checkpoint() - self._atomic_save(checkpoint, filepath) - - def dump_checkpoint(self): - - checkpoint = { - 'epoch': self.current_epoch, - 'global_step': self.global_step - } - - if self.checkpoint_callback is not None and self.checkpoint_callback is not False: - checkpoint['checkpoint_callback_best'] = self.checkpoint_callback.best - - # save optimizers - optimizer_states = [] - for i, optimizer in enumerate(self.optimizers): - if optimizer is not None: - optimizer_states.append(optimizer.state_dict()) - - checkpoint['optimizer_states'] = optimizer_states - - # save lr schedulers - lr_schedulers = [] - for i, scheduler in enumerate(self.lr_schedulers): - lr_schedulers.append(scheduler.state_dict()) - - checkpoint['lr_schedulers'] = lr_schedulers - - # add the hparams and state_dict from the model - model = self.get_model() - checkpoint['state_dict'] = model.state_dict() - # give the model a chance to add a few things - model.on_save_checkpoint(checkpoint) - - return checkpoint - - def copy_trainer_model_properties(self, model): - if isinstance(model, DP): - ref_model = model.module - elif isinstance(model, DDP): - ref_model = model.module - else: - ref_model = model - - for m in [model, ref_model]: - m.trainer = self - m.on_gpu = self.on_gpu - m.use_dp = self.use_dp - m.use_ddp = self.use_ddp - m.testing = self.testing - m.single_gpu = self.single_gpu - - def transfer_batch_to_gpu(self, batch, gpu_id): - # base case: object can be directly moved using `cuda` or `to` - if callable(getattr(batch, 'cuda', None)): - return batch.cuda(gpu_id, non_blocking=True) - - elif callable(getattr(batch, 'to', None)): - return batch.to(torch.device('cuda', gpu_id), non_blocking=True) - - # when list - elif isinstance(batch, list): - for i, x in enumerate(batch): - batch[i] = self.transfer_batch_to_gpu(x, gpu_id) - return batch - - # when tuple - elif isinstance(batch, tuple): - batch = list(batch) - for i, x in enumerate(batch): - batch[i] = self.transfer_batch_to_gpu(x, gpu_id) - return tuple(batch) - - # when dict - elif isinstance(batch, dict): - for k, v in batch.items(): - batch[k] = self.transfer_batch_to_gpu(v, gpu_id) - - return batch - - # nothing matches, return the value as is without transform - return batch - - def set_distributed_mode(self, distributed_backend): - # skip for CPU - if self.num_gpus == 0: - return - - # single GPU case - # in single gpu case we allow ddp so we can train on multiple - # nodes, 1 gpu per node - elif self.num_gpus == 1: - self.single_gpu = True - self.use_dp = False - self.use_ddp = False - self.root_gpu = 0 - self.data_parallel_device_ids = [0] - else: - if distributed_backend is not None: - self.use_dp = distributed_backend == 'dp' - self.use_ddp = distributed_backend == 'ddp' - elif distributed_backend is None: - self.use_dp = True - self.use_ddp = False - - logging.info(f'gpu available: {torch.cuda.is_available()}, used: {self.on_gpu}') - - def 
ddp_train(self, gpu_idx, model): - """ - Entry point into a DP thread - :param gpu_idx: - :param model: - :param cluster_obj: - :return: - """ - # otherwise default to node rank 0 - self.node_rank = 0 - - # show progressbar only on progress_rank 0 - self.show_progress_bar = self.show_progress_bar and self.node_rank == 0 and gpu_idx == 0 - - # determine which process we are and world size - if self.use_ddp: - self.proc_rank = self.node_rank * self.num_gpus + gpu_idx - self.world_size = self.num_gpus - - # let the exp know the rank to avoid overwriting logs - if self.logger is not None: - self.logger.rank = self.proc_rank - - # set up server using proc 0's ip address - # try to init for 20 times at max in case ports are taken - # where to store ip_table - model.trainer = self - model.init_ddp_connection(self.proc_rank, self.world_size) - - # CHOOSE OPTIMIZER - # allow for lr schedulers as well - model.model = model.build_model() - if not self.testing: - self.optimizers, self.lr_schedulers = self.init_optimizers(model.configure_optimizers()) - - # MODEL - # copy model to each gpu - if self.distributed_backend == 'ddp': - torch.cuda.set_device(gpu_idx) - model.cuda(gpu_idx) - - # set model properties before going into wrapper - self.copy_trainer_model_properties(model) - - # override root GPU - self.root_gpu = gpu_idx - - if self.distributed_backend == 'ddp': - device_ids = [gpu_idx] - else: - device_ids = None - - # allow user to configure ddp - model = model.configure_ddp(model, device_ids) - - # continue training routine - self.run_pretrain_routine(model) - - def resolve_root_node_address(self, root_node): - if '[' in root_node: - name = root_node.split('[')[0] - number = root_node.split(',')[0] - if '-' in number: - number = number.split('-')[0] - - number = re.sub('[^0-9]', '', number) - root_node = name + number - - return root_node - - def log_metrics(self, metrics, grad_norm_dic, step=None): - """Logs the metric dict passed in. - - :param metrics: - :param grad_norm_dic: - """ - # added metrics by Lightning for convenience - metrics['epoch'] = self.current_epoch - - # add norms - metrics.update(grad_norm_dic) - - # turn all tensors to scalars - scalar_metrics = self.metrics_to_scalars(metrics) - - step = step if step is not None else self.global_step - # log actual metrics - if self.proc_rank == 0 and self.logger is not None: - self.logger.log_metrics(scalar_metrics, step=step) - self.logger.save() - - def add_tqdm_metrics(self, metrics): - for k, v in metrics.items(): - if type(v) is torch.Tensor: - v = v.item() - - self.tqdm_metrics[k] = v - - def metrics_to_scalars(self, metrics): - new_metrics = {} - for k, v in metrics.items(): - if isinstance(v, torch.Tensor): - v = v.item() - - if type(v) is dict: - v = self.metrics_to_scalars(v) - - new_metrics[k] = v - - return new_metrics - - def process_output(self, output, train=False): - """Reduces output according to the training mode. 
- - Separates loss from logging and tqdm metrics - :param output: - :return: - """ - # --------------- - # EXTRACT CALLBACK KEYS - # --------------- - # all keys not progress_bar or log are candidates for callbacks - callback_metrics = {} - for k, v in output.items(): - if k not in ['progress_bar', 'log', 'hiddens']: - callback_metrics[k] = v - - if train and self.use_dp: - num_gpus = self.num_gpus - callback_metrics = self.reduce_distributed_output(callback_metrics, num_gpus) - - for k, v in callback_metrics.items(): - if isinstance(v, torch.Tensor): - callback_metrics[k] = v.item() - - # --------------- - # EXTRACT PROGRESS BAR KEYS - # --------------- - try: - progress_output = output['progress_bar'] - - # reduce progress metrics for tqdm when using dp - if train and self.use_dp: - num_gpus = self.num_gpus - progress_output = self.reduce_distributed_output(progress_output, num_gpus) - - progress_bar_metrics = progress_output - except Exception: - progress_bar_metrics = {} - - # --------------- - # EXTRACT LOGGING KEYS - # --------------- - # extract metrics to log to experiment - try: - log_output = output['log'] - - # reduce progress metrics for tqdm when using dp - if train and self.use_dp: - num_gpus = self.num_gpus - log_output = self.reduce_distributed_output(log_output, num_gpus) - - log_metrics = log_output - except Exception: - log_metrics = {} - - # --------------- - # EXTRACT LOSS - # --------------- - # if output dict doesn't have the keyword loss - # then assume the output=loss if scalar - loss = None - if train: - try: - loss = output['loss'] - except Exception: - if type(output) is torch.Tensor: - loss = output - else: - raise RuntimeError( - 'No `loss` value in the dictionary returned from `model.training_step()`.' - ) - - # when using dp need to reduce the loss - if self.use_dp: - loss = self.reduce_distributed_output(loss, self.num_gpus) - - # --------------- - # EXTRACT HIDDEN - # --------------- - hiddens = output.get('hiddens') - - # use every metric passed in as a candidate for callback - callback_metrics.update(progress_bar_metrics) - callback_metrics.update(log_metrics) - - # convert tensors to numpy - for k, v in callback_metrics.items(): - if isinstance(v, torch.Tensor): - callback_metrics[k] = v.item() - - return loss, progress_bar_metrics, log_metrics, callback_metrics, hiddens - - def reduce_distributed_output(self, output, num_gpus): - if num_gpus <= 1: - return output - - # when using DP, we get one output per gpu - # average outputs and return - if type(output) is torch.Tensor: - return output.mean() - - for k, v in output.items(): - # recurse on nested dics - if isinstance(output[k], dict): - output[k] = self.reduce_distributed_output(output[k], num_gpus) - - # do nothing when there's a scalar - elif isinstance(output[k], torch.Tensor) and output[k].dim() == 0: - pass - - # reduce only metrics that have the same number of gpus - elif output[k].size(0) == num_gpus: - reduced = torch.mean(output[k]) - output[k] = reduced - return output - - def clip_gradients(self): - if self.gradient_clip_val > 0: - model = self.get_model() - torch.nn.utils.clip_grad_norm_(model.parameters(), self.gradient_clip_val) - - def print_nan_gradients(self): - model = self.get_model() - for param in model.parameters(): - if (param.grad is not None) and torch.isnan(param.grad.float()).any(): - logging.info(param, param.grad) - - def configure_accumulated_gradients(self, accumulate_grad_batches): - self.accumulate_grad_batches = None - - if isinstance(accumulate_grad_batches, dict): 
- self.accumulation_scheduler = GradientAccumulationScheduler(accumulate_grad_batches) - elif isinstance(accumulate_grad_batches, int): - schedule = {1: accumulate_grad_batches} - self.accumulation_scheduler = GradientAccumulationScheduler(schedule) - else: - raise TypeError("Gradient accumulation supports only int and dict types") - - def get_dataloaders(self, model): - if not self.testing: - self.init_train_dataloader(model) - self.init_val_dataloader(model) - else: - self.init_test_dataloader(model) - - if self.use_ddp: - dist.barrier() - if not self.testing: - self.get_train_dataloader() - self.get_val_dataloaders() - else: - self.get_test_dataloaders() - - def init_train_dataloader(self, model): - self.fisrt_epoch = True - self.get_train_dataloader = model.train_dataloader - if isinstance(self.get_train_dataloader(), torch.utils.data.DataLoader): - self.num_training_batches = len(self.get_train_dataloader()) - self.num_training_batches = int(self.num_training_batches) - else: - self.num_training_batches = float('inf') - self.is_iterable_train_dataloader = True - if isinstance(self.val_check_interval, int): - self.val_check_batch = self.val_check_interval - else: - self._percent_range_check('val_check_interval') - self.val_check_batch = int(self.num_training_batches * self.val_check_interval) - self.val_check_batch = max(1, self.val_check_batch) - - def init_val_dataloader(self, model): - self.get_val_dataloaders = model.val_dataloader - self.num_val_batches = 0 - if self.get_val_dataloaders() is not None: - if isinstance(self.get_val_dataloaders()[0], torch.utils.data.DataLoader): - self.num_val_batches = sum(len(dataloader) for dataloader in self.get_val_dataloaders()) - self.num_val_batches = int(self.num_val_batches) - else: - self.num_val_batches = float('inf') - - def init_test_dataloader(self, model): - self.get_test_dataloaders = model.test_dataloader - if self.get_test_dataloaders() is not None: - if isinstance(self.get_test_dataloaders()[0], torch.utils.data.DataLoader): - self.num_test_batches = sum(len(dataloader) for dataloader in self.get_test_dataloaders()) - self.num_test_batches = int(self.num_test_batches) - else: - self.num_test_batches = float('inf') - - def evaluate(self, model, dataloaders, max_batches, test=False): - """Run evaluation code. 
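# Sketch (toy numbers): the two val_check_interval modes handled by
# init_train_dataloader above, which pl.Trainer keeps with essentially the
# same semantics: an int is a fixed number of training batches, a float is
# a fraction of one epoch, clamped to at least one batch.
num_training_batches = 500
for val_check_interval in (2000, 0.25):
    if isinstance(val_check_interval, int):
        val_check_batch = val_check_interval                                   # every 2000 batches
    else:
        assert 0.0 <= val_check_interval <= 1.0
        val_check_batch = max(1, int(num_training_batches * val_check_interval))  # every 125 batches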
- - :param model: PT model - :param dataloaders: list of PT dataloaders - :param max_batches: Scalar - :param test: boolean - :return: - """ - # enable eval mode - model.zero_grad() - model.eval() - - # copy properties for forward overrides - self.copy_trainer_model_properties(model) - - # disable gradients to save memory - torch.set_grad_enabled(False) - - if test: - self.get_model().test_start() - # bookkeeping - outputs = [] - - # run training - for dataloader_idx, dataloader in enumerate(dataloaders): - dl_outputs = [] - for batch_idx, batch in enumerate(dataloader): - - if batch is None: # pragma: no cover - continue - - # stop short when on fast_dev_run (sets max_batch=1) - if batch_idx >= max_batches: - break - - # ----------------- - # RUN EVALUATION STEP - # ----------------- - output = self.evaluation_forward(model, - batch, - batch_idx, - dataloader_idx, - test) - - # track outputs for collation - dl_outputs.append(output) - - # batch done - if test: - self.test_progress_bar.update(1) - else: - self.val_progress_bar.update(1) - outputs.append(dl_outputs) - - # with a single dataloader don't pass an array - if len(dataloaders) == 1: - outputs = outputs[0] - - # give model a chance to do something with the outputs (and method defined) - model = self.get_model() - if test: - eval_results_ = model.test_end(outputs) - else: - eval_results_ = model.validation_end(outputs) - eval_results = eval_results_ - - # enable train mode again - model.train() - - # enable gradients to save memory - torch.set_grad_enabled(True) - - return eval_results - - def run_evaluation(self, test=False): - # when testing make sure user defined a test step - model = self.get_model() - model.on_pre_performance_check() - - # select dataloaders - if test: - dataloaders = self.get_test_dataloaders() - max_batches = self.num_test_batches - else: - # val - dataloaders = self.get_val_dataloaders() - max_batches = self.num_val_batches - - # init validation or test progress bar - # main progress bar will already be closed when testing so initial position is free - position = 2 * self.process_position + (not test) - desc = 'Testing' if test else 'Validating' - pbar = tqdm.tqdm(desc=desc, total=max_batches, leave=test, position=position, - disable=not self.show_progress_bar, dynamic_ncols=True, - unit='batch', file=sys.stdout) - setattr(self, f'{"test" if test else "val"}_progress_bar', pbar) - - # run evaluation - eval_results = self.evaluate(self.model, - dataloaders, - max_batches, - test) - if eval_results is not None: - _, prog_bar_metrics, log_metrics, callback_metrics, _ = self.process_output( - eval_results) - - # add metrics to prog bar - self.add_tqdm_metrics(prog_bar_metrics) - - # log metrics - self.log_metrics(log_metrics, {}) - - # track metrics for callbacks - self.callback_metrics.update(callback_metrics) - - # hook - model.on_post_performance_check() - - # add model specific metrics - tqdm_metrics = self.training_tqdm_dict - if not test: - self.main_progress_bar.set_postfix(**tqdm_metrics) - - # close progress bar - if test: - self.test_progress_bar.close() - else: - self.val_progress_bar.close() - - # model checkpointing - if self.proc_rank == 0 and self.checkpoint_callback is not None and not test: - self.checkpoint_callback.on_epoch_end(epoch=self.current_epoch, - logs=self.callback_metrics) - - def evaluation_forward(self, model, batch, batch_idx, dataloader_idx, test=False): - # make dataloader_idx arg in validation_step optional - args = [batch, batch_idx] - - if test and 
len(self.get_test_dataloaders()) > 1: - args.append(dataloader_idx) - - elif not test and len(self.get_val_dataloaders()) > 1: - args.append(dataloader_idx) - - # handle DP, DDP forward - if self.use_ddp or self.use_dp: - output = model(*args) - return output - - # single GPU - if self.single_gpu: - # for single GPU put inputs on gpu manually - root_gpu = 0 - if isinstance(self.data_parallel_device_ids, list): - root_gpu = self.data_parallel_device_ids[0] - batch = self.transfer_batch_to_gpu(batch, root_gpu) - args[0] = batch - - # CPU - if test: - output = model.test_step(*args) - else: - output = model.validation_step(*args) - - return output - - def train(self): - model = self.get_model() - # run all epochs - for epoch in range(self.current_epoch, 1000000): - # set seed for distributed sampler (enables shuffling for each epoch) - if self.use_ddp and hasattr(self.get_train_dataloader().sampler, 'set_epoch'): - self.get_train_dataloader().sampler.set_epoch(epoch) - - # get model - model = self.get_model() - - # update training progress in trainer and model - model.current_epoch = epoch - self.current_epoch = epoch - - total_val_batches = 0 - if not self.disable_validation: - # val can be checked multiple times in epoch - is_val_epoch = (self.current_epoch + 1) % self.check_val_every_n_epoch == 0 - val_checks_per_epoch = self.num_training_batches // self.val_check_batch - val_checks_per_epoch = val_checks_per_epoch if is_val_epoch else 0 - total_val_batches = self.num_val_batches * val_checks_per_epoch - - # total batches includes multiple val checks - self.total_batches = self.num_training_batches + total_val_batches - self.batch_loss_value = 0 # accumulated grads - - if self.is_iterable_train_dataloader: - # for iterable train loader, the progress bar never ends - num_iterations = None - else: - num_iterations = self.total_batches - - # reset progress bar - # .reset() doesn't work on disabled progress bar so we should check - desc = f'Epoch {epoch}' if not self.is_iterable_train_dataloader else '' - self.main_progress_bar.set_description(desc) - - # changing gradient according accumulation_scheduler - self.accumulation_scheduler.on_epoch_begin(epoch, self) - - # ----------------- - # RUN TNG EPOCH - # ----------------- - self.run_training_epoch() - print() # start a new line for the next epoch - - # update LR schedulers - if self.lr_schedulers is not None: - for lr_scheduler in self.lr_schedulers: - lr_scheduler.step(epoch=self.current_epoch) - - self.main_progress_bar.close() - - model.on_train_end() - - if self.logger is not None: - self.logger.finalize("success") - - def run_training_epoch(self): - # before epoch hook - if self.is_function_implemented('on_epoch_start'): - model = self.get_model() - model.on_epoch_start() - - # run epoch - for batch_idx, batch in enumerate(self.get_train_dataloader()): - # stop epoch if we limited the number of training batches - if batch_idx >= self.num_training_batches: - break - - self.batch_idx = batch_idx - - model = self.get_model() - model.global_step = self.global_step - - # --------------- - # RUN TRAIN STEP - # --------------- - output = self.run_training_batch(batch, batch_idx) - batch_result, grad_norm_dic, batch_step_metrics = output - - # when returning -1 from train_step, we end epoch early - early_stop_epoch = batch_result == -1 - - # --------------- - # RUN VAL STEP - # --------------- - should_check_val = ( - not self.disable_validation and self.global_step % self.val_check_batch == 0 and not self.fisrt_epoch) - self.fisrt_epoch = 
False - - if should_check_val: - self.run_evaluation(test=self.testing) - - # when logs should be saved - should_save_log = (self.total_batch_idx + 1) % self.log_save_interval == 0 or early_stop_epoch - if should_save_log: - if self.proc_rank == 0 and self.logger is not None: - self.logger.save() - - # when metrics should be logged - should_log_metrics = self.total_batch_idx % self.row_log_interval == 0 or early_stop_epoch - if should_log_metrics: - # logs user requested information to logger - self.log_metrics(batch_step_metrics, grad_norm_dic) - - self.global_step += 1 - self.total_batch_idx += 1 - - # end epoch early - # stop when the flag is changed or we've gone past the amount - # requested in the batches - if early_stop_epoch: - break - if self.global_step > self.max_updates: - print("| Training end..") - exit() - - # epoch end hook - if self.is_function_implemented('on_epoch_end'): - model = self.get_model() - model.on_epoch_end() - - def run_training_batch(self, batch, batch_idx): - # track grad norms - grad_norm_dic = {} - - # track all metrics for callbacks - all_callback_metrics = [] - - # track metrics to log - all_log_metrics = [] - - if batch is None: - return 0, grad_norm_dic, {} - - # hook - if self.is_function_implemented('on_batch_start'): - model_ref = self.get_model() - response = model_ref.on_batch_start(batch) - - if response == -1: - return -1, grad_norm_dic, {} - - splits = [batch] - self.hiddens = None - for split_idx, split_batch in enumerate(splits): - self.split_idx = split_idx - - # call training_step once per optimizer - for opt_idx, optimizer in enumerate(self.optimizers): - if optimizer is None: - continue - # make sure only the gradients of the current optimizer's paramaters are calculated - # in the training step to prevent dangling gradients in multiple-optimizer setup. 
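# Sketch (standalone toy, unrelated to project data): why run_training_batch
# wraps the forward/backward pass in optimizer_closure() below. Second-order
# optimizers such as LBFGS re-evaluate the loss and gradients several times
# per update, so both passes must live inside a callable handed to step().
import torch

model = torch.nn.Linear(4, 1)
opt = torch.optim.LBFGS(model.parameters(), max_iter=5)
x, y = torch.randn(8, 4), torch.randn(8, 1)

def closure():
    opt.zero_grad()
    loss = torch.nn.functional.mse_loss(model(x), y)
    loss.backward()
    return loss

opt.step(closure)  # LBFGS invokes closure() repeatedly within one step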
- if len(self.optimizers) > 1: - for param in self.get_model().parameters(): - param.requires_grad = False - for group in optimizer.param_groups: - for param in group['params']: - param.requires_grad = True - - # wrap the forward step in a closure so second order methods work - def optimizer_closure(): - # forward pass - output = self.training_forward( - split_batch, batch_idx, opt_idx, self.hiddens) - - closure_loss = output[0] - progress_bar_metrics = output[1] - log_metrics = output[2] - callback_metrics = output[3] - self.hiddens = output[4] - if closure_loss is None: - return None - - # accumulate loss - # (if accumulate_grad_batches = 1 no effect) - closure_loss = closure_loss / self.accumulate_grad_batches - - # backward pass - model_ref = self.get_model() - if closure_loss.requires_grad: - model_ref.backward(closure_loss, optimizer) - - # track metrics for callbacks - all_callback_metrics.append(callback_metrics) - - # track progress bar metrics - self.add_tqdm_metrics(progress_bar_metrics) - all_log_metrics.append(log_metrics) - - # insert after step hook - if self.is_function_implemented('on_after_backward'): - model_ref = self.get_model() - model_ref.on_after_backward() - - return closure_loss - - # calculate loss - loss = optimizer_closure() - if loss is None: - continue - - # nan grads - if self.print_nan_grads: - self.print_nan_gradients() - - # track total loss for logging (avoid mem leaks) - self.batch_loss_value += loss.item() - - # gradient update with accumulated gradients - if (self.batch_idx + 1) % self.accumulate_grad_batches == 0: - - # track gradient norms when requested - if batch_idx % self.row_log_interval == 0: - if self.track_grad_norm > 0: - model = self.get_model() - grad_norm_dic = model.grad_norm( - self.track_grad_norm) - - # clip gradients - self.clip_gradients() - - # calls .step(), .zero_grad() - # override function to modify this behavior - model = self.get_model() - model.optimizer_step(self.current_epoch, batch_idx, optimizer, opt_idx) - - # calculate running loss for display - self.running_loss.append(self.batch_loss_value) - self.batch_loss_value = 0 - self.avg_loss = np.mean(self.running_loss[-100:]) - - # activate batch end hook - if self.is_function_implemented('on_batch_end'): - model = self.get_model() - model.on_batch_end() - - # update progress bar - self.main_progress_bar.update(1) - self.main_progress_bar.set_postfix(**self.training_tqdm_dict) - - # collapse all metrics into one dict - all_log_metrics = {k: v for d in all_log_metrics for k, v in d.items()} - - # track all metrics for callbacks - self.callback_metrics.update({k: v for d in all_callback_metrics for k, v in d.items()}) - - return 0, grad_norm_dic, all_log_metrics - - def training_forward(self, batch, batch_idx, opt_idx, hiddens): - """ - Handle forward for each training case (distributed, single gpu, etc...) 
- :param batch: - :param batch_idx: - :return: - """ - # --------------- - # FORWARD - # --------------- - # enable not needing to add opt_idx to training_step - args = [batch, batch_idx, opt_idx] - - # distributed forward - if self.use_ddp or self.use_dp: - output = self.model(*args) - # single GPU forward - elif self.single_gpu: - gpu_id = 0 - if isinstance(self.data_parallel_device_ids, list): - gpu_id = self.data_parallel_device_ids[0] - batch = self.transfer_batch_to_gpu(copy.copy(batch), gpu_id) - args[0] = batch - output = self.model.training_step(*args) - # CPU forward - else: - output = self.model.training_step(*args) - - # allow any mode to define training_end - model_ref = self.get_model() - output_ = model_ref.training_end(output) - if output_ is not None: - output = output_ - - # format and reduce outputs accordingly - output = self.process_output(output, train=True) - - return output - - # --------------- - # Utils - # --------------- - def is_function_implemented(self, f_name): - model = self.get_model() - f_op = getattr(model, f_name, None) - return callable(f_op) - - def _percent_range_check(self, name): - value = getattr(self, name) - msg = f"`{name}` must lie in the range [0.0, 1.0], but got {value:.3f}." - if name == "val_check_interval": - msg += " If you want to disable validation set `val_percent_check` to 0.0 instead." - - if not 0. <= value <= 1.: - raise ValueError(msg) + return 'auto' From 1421ce2d4e62506a2426014e1021a7a2427c060b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 23 Mar 2023 20:21:32 +0800 Subject: [PATCH 098/475] Fix KeyError of gender and velocity --- inference/ds_acoustic.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 268015dbc..d0fdba70a 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -147,6 +147,7 @@ def preprocess_input(self, param): if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] + gender = param.get('gender', 0.) 
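# Sketch (toy dicts; condenses the intent of this hunk rather than its exact
# lines): read 'gender' defensively so a request without the key falls back
# to the neutral value 0. instead of raising KeyError, while string values
# still select the dynamic-curve path.
for param in ({}, {'gender': 0.25}, {'gender': '-0.2 0.0 0.3'}):
    gender = param.get('gender', 0.)
    if isinstance(gender, float):
        print(f'Using static gender value: {gender:.3f}')
    else:
        print(f'Using dynamic gender curve with {len(gender.split())} points')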
if isinstance(param['gender'], float): # static gender value gender = param['gender'] print(f'Using static gender value: {gender:.3f}') @@ -155,7 +156,7 @@ def preprocess_input(self, param): else: print('Using dynamic gender curve') gender_seq = resample_align_curve( - np.array(param['gender'].split(), np.float32), + np.array(gender.split(), np.float32), original_timestep=float(param['gender_timestep']), target_timestep=self.timestep, align_length=length @@ -168,7 +169,7 @@ def preprocess_input(self, param): ) if hparams.get('use_speed_embed', False): - if param['velocity'] is None: + if param.get('velocity') is None: print('Using default velocity curve') batch['speed'] = torch.FloatTensor([1.]).to(self.device)[:, None] # => [B=1, T=1] else: From bdb6cb9764a2284a5347bc6295a54f8df8bd3e04 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 23 Mar 2023 20:22:04 +0800 Subject: [PATCH 099/475] Remove unused modules --- modules/commons/common_layers.py | 132 ------------------------------- 1 file changed, 132 deletions(-) diff --git a/modules/commons/common_layers.py b/modules/commons/common_layers.py index 192997cee..cd9eb7fad 100644 --- a/modules/commons/common_layers.py +++ b/modules/commons/common_layers.py @@ -7,58 +7,6 @@ import utils -class Reshape(nn.Module): - def __init__(self, *args): - super(Reshape, self).__init__() - self.shape = args - - def forward(self, x): - return x.view(self.shape) - - -class Permute(nn.Module): - def __init__(self, *args): - super(Permute, self).__init__() - self.args = args - - def forward(self, x): - return x.permute(self.args) - - -class LinearNorm(torch.nn.Module): - def __init__(self, in_dim, out_dim, bias=True, w_init_gain='linear'): - super(LinearNorm, self).__init__() - self.linear_layer = torch.nn.Linear(in_dim, out_dim, bias=bias) - - torch.nn.init.xavier_uniform_( - self.linear_layer.weight, - gain=torch.nn.init.calculate_gain(w_init_gain)) - - def forward(self, x): - return self.linear_layer(x) - - -class ConvNorm(torch.nn.Module): - def __init__(self, in_channels, out_channels, kernel_size=1, stride=1, - padding=None, dilation=1, bias=True, w_init_gain='linear'): - super(ConvNorm, self).__init__() - if padding is None: - assert (kernel_size % 2 == 1) - padding = int(dilation * (kernel_size - 1) / 2) - - self.conv = torch.nn.Conv1d(in_channels, out_channels, - kernel_size=kernel_size, stride=stride, - padding=padding, dilation=dilation, - bias=bias) - - torch.nn.init.xavier_uniform_( - self.conv.weight, gain=torch.nn.init.calculate_gain(w_init_gain)) - - def forward(self, signal): - conv_signal = self.conv(signal) - return conv_signal - - def Embedding(num_embeddings, embedding_dim, padding_idx=None): m = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx) nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5) @@ -589,83 +537,3 @@ def forward(self, x, encoder_padding_mask=None, **kwargs): x = residual + x x = x * (1 - encoder_padding_mask.float()).transpose(0, 1)[..., None] return x - - -class DecSALayer(nn.Module): - def __init__(self, c, num_heads, dropout, attention_dropout=0.1, relu_dropout=0.1, kernel_size=9, act='gelu'): - super().__init__() - self.c = c - self.dropout = dropout - self.layer_norm1 = LayerNorm(c) - self.self_attn = MultiheadAttention( - c, num_heads, self_attention=True, dropout=attention_dropout, bias=False - ) - self.layer_norm2 = LayerNorm(c) - self.encoder_attn = MultiheadAttention( - c, num_heads, encoder_decoder_attention=True, dropout=attention_dropout, bias=False, - ) - self.layer_norm3 = 
LayerNorm(c) - self.ffn = TransformerFFNLayer( - c, 4 * c, padding='LEFT', kernel_size=kernel_size, dropout=relu_dropout, act=act) - - def forward( - self, - x, - encoder_out=None, - encoder_padding_mask=None, - incremental_state=None, - self_attn_mask=None, - self_attn_padding_mask=None, - attn_out=None, - reset_attn_weight=None, - **kwargs, - ): - layer_norm_training = kwargs.get('layer_norm_training', None) - if layer_norm_training is not None: - self.layer_norm1.training = layer_norm_training - self.layer_norm2.training = layer_norm_training - self.layer_norm3.training = layer_norm_training - residual = x - x = self.layer_norm1(x) - x, _ = self.self_attn( - query=x, - key=x, - value=x, - key_padding_mask=self_attn_padding_mask, - incremental_state=incremental_state, - attn_mask=self_attn_mask - ) - x = F.dropout(x, self.dropout, training=self.training) - x = residual + x - - residual = x - x = self.layer_norm2(x) - if encoder_out is not None: - x, attn = self.encoder_attn( - query=x, - key=encoder_out, - value=encoder_out, - key_padding_mask=encoder_padding_mask, - incremental_state=incremental_state, - static_kv=True, - enc_dec_attn_constraint_mask=None, #utils.get_incremental_state(self, incremental_state, 'enc_dec_attn_constraint_mask'), - reset_attn_weight=reset_attn_weight - ) - attn_logits = attn[1] - else: - assert attn_out is not None - x = self.encoder_attn.in_proj_v(attn_out.transpose(0, 1)) - attn_logits = None - x = F.dropout(x, self.dropout, training=self.training) - x = residual + x - - residual = x - x = self.layer_norm3(x) - x = self.ffn(x, incremental_state=incremental_state) - x = F.dropout(x, self.dropout, training=self.training) - x = residual + x - # if len(attn_logits.size()) > 3: - # indices = attn_logits.softmax(-1).max(-1).values.sum(-1).argmax(-1) - # attn_logits = attn_logits.gather(1, - # indices[:, None, None, None].repeat(1, 1, attn_logits.size(-2), attn_logits.size(-1))).squeeze(1) - return x, attn_logits From 20ae2f3d7567e0e4c42bd98bab6bc4b8f1644a80 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 23 Mar 2023 18:55:05 -0500 Subject: [PATCH 100/475] Fix batch sampler and lr_scheduler step freq --- basics/base_task.py | 38 +++++++------- configs/base.yaml | 1 + training/acoustic_task.py | 25 ++++++++-- utils/__init__.py | 10 ++-- utils/pl_utils.py | 7 +-- utils/training_utils.py | 102 +++++++++++++++++++++++--------------- 6 files changed, 109 insertions(+), 74 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 0f055f05a..f1fc09761 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,6 +13,7 @@ import sys import numpy as np import pytorch_lightning as pl +from pytorch_lightning.callbacks import RichProgressBar from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import grad_norm from utils.phoneme_utils import locate_dictionary @@ -71,6 +72,7 @@ def __init__(self, *args, **kwargs): hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences self.training_losses_meter = None + self.training_sampler = None self.model = None @@ -83,6 +85,8 @@ def build_model(self): def on_train_epoch_start(self): self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} + if self.training_sampler is not None: + self.training_sampler.set_epoch(self.current_epoch) def _training_step(self, sample, batch_idx, optimizer_idx): """ @@ -117,8 +121,8 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): # log_outputs['all_loss'] = total_loss.item() progress_bar_log = 
log_outputs | {'step': self.global_step} tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False, rank_zero_only=True) - self.log_dict(tb_log, prog_bar=False, logger=True, on_step=True, on_epoch=False, rank_zero_only=True) + self.log_dict(progress_bar_log, prog_bar=True, on_step=True, on_epoch=False) + self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return { 'loss': total_loss } @@ -131,7 +135,7 @@ def on_train_epoch_end(self): # f"\n==============\n") def on_before_optimizer_step(self, optimizer): - self.log_dict(grad_norm(self, norm_type=2), rank_zero_only=True) + self.log_dict(grad_norm(self, norm_type=2)) def on_validation_start(self): self.validation_step_outputs = [] @@ -186,27 +190,22 @@ def configure_optimizers(self): "lr_scheduler": { "scheduler": scheduler, "interval": "step", - "frequency": hparams['accumulate_grad_batches'], + "frequency": 1 } } - def build_batch_sampler(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, batch_by_size=True): - devices_cnt = torch.cuda.device_count() - if devices_cnt == 0: - devices_cnt = 1 - if required_batch_size_multiple == -1: - required_batch_size_multiple = devices_cnt - + def build_batch_sampler(self, dataset, max_tokens, max_sentences, batch_by_size=True, shuffle=False): batch_sampler_cls = partial(BatchSamplerSimilarLength, - max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple, + max_tokens=max_tokens, max_sentences=max_sentences, batch_by_size=batch_by_size) if self.trainer.distributed_sampler_kwargs: - sampler = DistributedBatchSamplerSimilarLength(dataset, batch_sampler_cls=batch_sampler_cls, - shuffle=shuffle, **self.trainer.distributed_sampler_kwargs) + sampler = DistributedBatchSamplerSimilarLength(dataset, + batch_sampler_cls=batch_sampler_cls, + seed=hparams['seed'], + shuffle=shuffle, + **self.trainer.distributed_sampler_kwargs) else: - sampler = batch_sampler_cls(dataset=dataset, indices=dataset.ordered_indices(), shuffle=shuffle) + sampler = batch_sampler_cls(dataset, seed=hparams['seed'], shuffle=shuffle) return sampler def on_test_start(self): @@ -242,8 +241,9 @@ def start(cls): save_top_k=hparams['num_ckpt_keep'], save_on_train_epoch_end=True, auto_insert_metric_name=False, - verbose=True - ) + # verbose=True + ), + # RichProgressBar() ], logger=TensorBoardLogger( save_dir=str(work_dir), diff --git a/configs/base.yaml b/configs/base.yaml index 3669ed347..b3a4dd46f 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -32,6 +32,7 @@ min_level_db: -100 num_spk: 1 mel_vmin: -6 mel_vmax: 1.5 +sampler_frame_count_grid: 200 ds_workers: 4 ######### diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 7beb709ce..3da211773 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -25,6 +25,7 @@ from utils.phoneme_utils import build_phoneme_list from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder +from utils.training_utils import WarmupCosineSchedule matplotlib.use('Agg') @@ -70,6 +71,15 @@ def collater(self, samples): batch['spk_ids'] = spk_ids return batch +class MyScheduler(torch.optim.lr_scheduler.StepLR): + def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=- 1, verbose=False): + super().__init__(optimizer, step_size, gamma, last_epoch, verbose) + + def get_lr(self): + ret = super().get_lr() + print("------GET_LR", self.last_epoch, 
self._step_count, ret) + return ret + class AcousticTask(BaseTask): def __init__(self): super().__init__() @@ -111,20 +121,29 @@ def build_optimizer(self, model): return optimizer def build_scheduler(self, optimizer): + # return WarmupCosineSchedule(optimizer, + # warmup_steps=hparams['warmup_updates'], + # t_total=hparams['max_updates'] // hparams['accumulate_grad_batches'], + # eta_min=0) return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) def train_dataloader(self): - sampler = self.build_batch_sampler(self.train_dataset, True, self.max_tokens, self.max_sentences) + self.training_sampler = self.build_batch_sampler(self.train_dataset, + max_tokens=self.max_tokens, + max_sentences=self.max_sentences, + shuffle=True) return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, - batch_sampler=sampler, + batch_sampler=self.training_sampler, num_workers=self.train_dataset.num_workers, prefetch_factor=4, pin_memory=False, persistent_workers=True) def val_dataloader(self): - sampler = self.build_batch_sampler(self.valid_dataset, False, self.max_tokens, self.max_sentences) + sampler = self.build_batch_sampler(self.valid_dataset, + max_tokens=self.max_tokens, + max_sentences=self.max_sentences) return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, diff --git a/utils/__init__.py b/utils/__init__.py index 3d3fe6db3..27aa595c6 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -63,7 +63,7 @@ def _is_batch_full(batch, num_tokens, max_tokens, max_sentences): def batch_by_size( - indices, num_tokens_fn, max_tokens=None, max_sentences=None, + indices, num_tokens_fn, max_tokens=80000, max_sentences=48, required_batch_size_multiple=1 ): """ @@ -75,14 +75,10 @@ def batch_by_size( num_tokens_fn (callable): function that returns the number of tokens at a given index max_tokens (int, optional): max number of tokens in each batch - (default: None). + (default: 80000). max_sentences (int, optional): max number of sentences in each - batch (default: None). - required_batch_size_multiple (int, optional): require batch size to - be a multiple of N (default: 1). + batch (default: 48). 
""" - max_tokens = max_tokens if max_tokens is not None else sys.maxsize - max_sentences = max_sentences if max_sentences is not None else sys.maxsize bsz_mult = required_batch_size_multiple if isinstance(indices, types.GeneratorType): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 55db650f2..323736925 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -22,11 +22,6 @@ def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool: from pytorch_lightning.trainer.states import RunningStage return trainer.state.stage == RunningStage.TRAINING and super()._should_save_on_train_epoch_end(trainer) - # @classmethod - # def _format_checkpoint_name(cls, filename, metrics, prefix = "", auto_insert_metric_name = True): - # # metrics = {k: v + 1 if k == 'step' or k == 'epoch' else v for k, v in metrics.items()} - # return super()._format_checkpoint_name(filename, metrics, prefix, auto_insert_metric_name) - def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None @@ -51,4 +46,4 @@ def get_stategy_obj(strategy): if strategy == 'ddp_gloo': return DDPStrategy(process_group_backend='gloo') else: - return 'auto' + return strategy diff --git a/utils/training_utils.py b/utils/training_utils.py index 63af4718e..9154d9d28 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,10 +1,12 @@ -import utils -from utils.hparams import hparams - import math + import numpy as np +from torch.optim.lr_scheduler import LambdaLR from torch.utils.data.distributed import Sampler, DistributedSampler +import utils +from utils.hparams import hparams + class RSQRTSchedule(object): def __init__(self, optimizer): super().__init__() @@ -30,27 +32,71 @@ def step(self, num_updates): def get_lr(self): return self.optimizer.param_groups[0]['lr'] +class WarmupCosineSchedule(LambdaLR): + """ Linear warmup and then cosine decay. + Linearly increases learning rate from 0 to 1 over `warmup_steps` training steps. + Decreases learning rate from 1. to 0. over remaining `t_total - warmup_steps` steps following a cosine curve. + If `cycles` (default=0.5) is different from default, learning rate follows cosine function after warmup. + `eta_min` (default=0.0) corresponds to the minimum learning rate reached by the scheduler. + """ + def __init__(self, optimizer, warmup_steps, t_total, eta_min=0.0, cycles=.5, last_epoch=-1): + self.warmup_steps = warmup_steps + self.t_total = t_total + self.eta_min = eta_min + self.cycles = cycles + super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch) + + def lr_lambda(self, step): + if step < self.warmup_steps: + return step / max(1.0, self.warmup_steps) + # progress after warmup + progress = (step - self.warmup_steps) / max(1, self.t_total - self.warmup_steps) + return max(self.eta_min, 0.5 * (1. 
+ math.cos(math.pi * self.cycles * 2.0 * progress))) + class BatchSamplerSimilarLength(Sampler): - def __init__(self, dataset, indices=None, max_tokens=None, max_sentences=None, required_batch_size_multiple=-1, batch_by_size=True, shuffle=True): + def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, seed=0, shuffle=True): + self.dataset = dataset + self.sub_indices = indices + self.max_tokens = max_tokens + self.max_sentences = max_sentences + self.batch_by_size = batch_by_size self.shuffle = shuffle - - if batch_by_size: - self.batches = utils.batch_by_size( - indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple - ) - else: - self.batches = [indices[i:i + max_sentences] for i in range(0, len(indices), max_sentences)] + self.seed = seed + self.epoch = 0 + self.batches = None def __iter__(self): if self.shuffle: - np.random.shuffle(self.batches) + rng = np.random.RandomState(self.seed + self.epoch) + if self.sub_indices is not None: + rng.shuffle(self.sub_indices) + indices = np.array(self.sub_indices) + else: + indices = rng.permutation(len(self.dataset)) + if self.dataset.sort_by_len: + grid = hparams.get('sampler_frame_count_grid', 100) + sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) + indices = indices[np.argsort(sizes, kind='mergesort')] + indices = indices.tolist() + else: + indices = self.sub_indices if self.sub_indices is not None else list(range(len(self.dataset))) + + if self.batch_by_size: + self.batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) + else: + self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] + for batch in self.batches: yield batch def __len__(self): + if self.batches is None: + raise RuntimeError("Batches are not initialized. Call __iter__ first.") return len(self.batches) + def set_epoch(self, epoch): + self.epoch = epoch + class DistributedBatchSamplerSimilarLength(DistributedSampler): def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, @@ -61,34 +107,12 @@ def __init__(self, dataset, num_replicas=None, self.batch_sampler = None def __iter__(self): - if self.shuffle: - indices = np.random.RandomState(seed=self.seed).permutation(len(self.dataset)) - if self.dataset.sort_by_len: - indices = indices[np.argsort(np.array(self.dataset._sizes)[indices], kind='mergesort')] - else: - indices = np.arange(len(self.dataset)) - indices = indices.tolist() - - if not self.drop_last: - # add extra samples to make it evenly divisible - padding_size = self.total_size - len(indices) - if padding_size <= len(indices): - indices += indices[:padding_size] - else: - indices += (indices * math.ceil(padding_size / len(indices)))[:padding_size] - else: - # remove tail of data to make it evenly divisible. 
- indices = indices[:self.total_size] - assert len(indices) == self.total_size - - # subsample - indices = indices[self.rank:self.total_size:self.num_replicas] - assert len(indices) == self.num_samples - - self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, shuffle=self.shuffle) + indices = list(super().__iter__()) + self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, seed=self.seed, shuffle=self.shuffle) + self.batch_sampler.set_epoch(self.epoch) return iter(self.batch_sampler) def __len__(self) -> int: if self.batch_sampler is None: - raise ValueError("BatchSampler is not initialized. Call __iter__ first.") + raise RuntimeError("BatchSampler is not initialized. Call __iter__ first.") return len(self.batch_sampler) From 219b6636871eeaa8280917ac72871329ff35cefe Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 00:11:37 -0500 Subject: [PATCH 101/475] Checkpointing done --- basics/base_task.py | 35 +++++++-- modules/diff/diffusion.py | 4 +- modules/vocoders/nsf_hifigan.py | 3 +- training/acoustic_task.py | 9 --- utils/multiprocess_utils.py | 3 +- utils/phoneme_utils.py | 5 +- utils/pl_utils.py | 134 ++++++++++++++++++++++++++++---- 7 files changed, 152 insertions(+), 41 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index f1fc09761..9b5a330ee 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,9 +13,10 @@ import sys import numpy as np import pytorch_lightning as pl -from pytorch_lightning.callbacks import RichProgressBar +from pytorch_lightning.callbacks import RichProgressBar, ModelSummary from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import grad_norm +from pytorch_lightning.utilities.rank_zero import rank_zero_debug from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj @@ -73,6 +74,8 @@ def __init__(self, *args, **kwargs): self.training_losses_meter = None self.training_sampler = None + self.skip_immediate_validation = False + self.skip_immediate_ckpt_save = False self.model = None @@ -156,6 +159,9 @@ def validation_step(self, sample, batch_idx): :param batch_idx: :return: output: dict """ + if self.skip_immediate_validation: + rank_zero_debug('In validation step, skip immediate validation!') + return {} outputs = self._validation_step(sample, batch_idx) self.validation_step_outputs.append(outputs) return outputs @@ -169,12 +175,16 @@ def _on_validation_end(self, outputs): raise NotImplementedError def on_validation_epoch_end(self): + if self.skip_immediate_validation: + self.skip_immediate_validation = False + self.skip_immediate_ckpt_save = True + return loss_output = self._on_validation_end(self.validation_step_outputs) # print(f"\n==============\n " # f"valid results: {loss_output}" # f"\n==============\n") - self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, sync_dist=True) - self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, prog_bar=False, logger=True, sync_dist=True) + self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, sync_dist=True) + self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) def build_scheduler(self, optimizer): raise NotImplementedError @@ -235,15 +245,17 @@ def start(cls): DiffModelCheckpoint( 
dirpath=work_dir, filename='model_ckpt_steps_{step}', - monitor='val_loss', - mode='min', + monitor='step', + mode='max', save_last=hparams['save_last'], save_top_k=hparams['num_ckpt_keep'], - save_on_train_epoch_end=True, - auto_insert_metric_name=False, + max_updates=hparams['max_updates'], + permanent_ckpt_start=hparams['permanent_ckpt_start'], + permanent_ckpt_interval=hparams['permanent_ckpt_interval'], # verbose=True ), - # RichProgressBar() + # RichProgressBar(), + # ModelSummary(max_depth=-1), ], logger=TensorBoardLogger( save_dir=str(work_dir), @@ -285,6 +297,7 @@ def start(cls): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') + hparams['disable_sample_tqdm'] = True trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: trainer.test(task) @@ -292,3 +305,9 @@ def start(cls): def on_save_checkpoint(self, checkpoint): if isinstance(self.model, CategorizedModule): checkpoint['category'] = self.model.category + checkpoint['trainer_stage'] = self.trainer.state.stage.value + + def on_load_checkpoint(self, checkpoint): + from pytorch_lightning.trainer.states import RunningStage + if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: + self.skip_immediate_validation = True diff --git a/modules/diff/diffusion.py b/modules/diff/diffusion.py index 04263139e..91e4b9718 100644 --- a/modules/diff/diffusion.py +++ b/modules/diff/diffusion.py @@ -274,7 +274,7 @@ def wrapped(x, t, **kwargs): dpm_solver = DPM_Solver(model_fn, noise_schedule) steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps) + self.bar = tqdm(desc="sample time step", total=steps, disable=hparams['disable_sample_tqdm']) x = dpm_solver.sample( x, steps=steps, @@ -284,7 +284,7 @@ def wrapped(x, t, **kwargs): ) self.bar.close() else: - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t): + for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=hparams['disable_sample_tqdm']): x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) x = x.squeeze(1).transpose(1, 2) # [B, T, M] return self.denorm_spec(x) diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 40fcb6a4e..7b4a0a531 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,6 +1,7 @@ import os import torch +from pytorch_lightning.utilities.rank_zero import rank_zero_info from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT @@ -17,7 +18,7 @@ def __init__(self, device=None): self.device = device model_path = hparams['vocoder_ckpt'] assert os.path.exists(model_path), 'HifiGAN model file is not found!' 
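
The replacement just below routes the vocoder loading message through Lightning's rank-zero helper, so DDP runs print it once rather than once per process. A minimal sketch, assuming pytorch_lightning is installed (the path string here is made up):

    from pytorch_lightning.utilities.rank_zero import rank_zero_info

    # Only global rank 0 emits the message; other ranks stay silent.
    rank_zero_info('| Load HifiGAN: checkpoints/nsf_hifigan/model')
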
- print('| Load HifiGAN: ', model_path) + rank_zero_info('| Load HifiGAN: ' + model_path) self.model, self.h = load_model(model_path, device=self.device) def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 3da211773..f42b1c301 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -71,15 +71,6 @@ def collater(self, samples): batch['spk_ids'] = spk_ids return batch -class MyScheduler(torch.optim.lr_scheduler.StepLR): - def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=- 1, verbose=False): - super().__init__(optimizer, step_size, gamma, last_epoch, verbose) - - def get_lr(self): - ret = super().get_lr() - print("------GET_LR", self.last_epoch, self._step_count, ret) - return ret - class AcousticTask(BaseTask): def __init__(self): super().__init__() diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index b54f99db7..41eb13142 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -8,8 +8,7 @@ def main_process_print(self, *args, sep=' ', end='\n', file=None): - from utils.hparams import hparams - if hparams['is_main_process']: + if is_main_process: print(self, *args, sep=sep, end=end, file=file) diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index d9f2edd0d..8bca6510e 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,7 +1,8 @@ import pathlib +from pytorch_lightning.utilities.rank_zero import rank_zero_info + from utils.hparams import hparams -from utils.multiprocess_utils import main_process_print _initialized = False _ALL_CONSONANTS_SET = set() @@ -51,7 +52,7 @@ def _build_dict_and_list(): for _list in _dictionary.values(): [_set.add(ph) for ph in _list] _phoneme_list = sorted(list(_set)) - main_process_print('| load phoneme set:', _phoneme_list) + rank_zero_info('| load phoneme set: ' + str(_phoneme_list)) def _initialize_consonants_and_vowels(): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 323736925..237c8024c 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1,46 +1,146 @@ from copy import deepcopy +from glob import glob import os +from pathlib import Path import re +import warnings import torch import pytorch_lightning as pl from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.strategies import DDPStrategy +from pytorch_lightning.trainer.states import RunningStage +from pytorch_lightning.utilities.rank_zero import rank_zero_info class DiffModelCheckpoint(ModelCheckpoint): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + dirpath, + filename, + monitor, + save_last, + save_top_k, + mode, + max_updates, + permanent_ckpt_start, + permanent_ckpt_interval, + verbose = False, + save_weights_only = False + ): + super().__init__( + dirpath=dirpath, + filename=filename, + monitor=monitor, + verbose=verbose, + save_last=save_last, + save_top_k=save_top_k, + save_weights_only=save_weights_only, + mode=mode, + auto_insert_metric_name=False + ) + self.max_updates = max_updates + self.permanent_ckpt_start = permanent_ckpt_start + self.permanent_ckpt_interval = permanent_ckpt_interval + self.last_permanent_step = 0 + def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + """Save a checkpoint at the end of the last interrupted training step.""" + if not self._should_skip_saving_checkpoint(trainer) and \ + trainer.state.stage == RunningStage.TRAINING and \ + trainer.global_step == 
self.max_updates: + monitor_candidates = self._monitor_candidates(trainer) + if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) + self._save_checkpoint(trainer, filepath) + self._save_last_checkpoint(trainer, monitor_candidates) + + def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + """Save a checkpoint at the end of the validation stage.""" + if trainer.lightning_module.skip_immediate_ckpt_save: + trainer.lightning_module.skip_immediate_ckpt_save = False + return + if not self._should_skip_saving_checkpoint(trainer): + monitor_candidates = self._monitor_candidates(trainer) + if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: + self._save_topk_checkpoint(trainer, monitor_candidates) + self._save_last_checkpoint(trainer, monitor_candidates) + + def state_dict(self): + ret = super().state_dict() + ret['last_permanent_step'] = self.last_permanent_step + return ret + + def load_state_dict(self, state_dict): + dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) + + if self.dirpath == dirpath_from_ckpt: + self.best_model_score = state_dict["best_model_score"] + self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) + self.kth_value = state_dict.get("kth_value", self.kth_value) + self.best_k_models = state_dict.get("best_k_models", self.best_k_models) + self.last_model_path = state_dict.get("last_model_path", self.last_model_path) + self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) + else: + warnings.warn( + f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," + " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_permanent_step`," + " `last_model_path` and `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." 
+ ) + self.best_model_path = state_dict["best_model_path"] + def _monitor_candidates(self, trainer: "pl.Trainer"): monitor_candidates = deepcopy(trainer.callback_metrics) monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) monitor_candidates["step"] = torch.tensor(trainer.global_step) return monitor_candidates - - def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool: - from pytorch_lightning.trainer.states import RunningStage - return trainer.state.stage == RunningStage.TRAINING and super()._should_save_on_train_epoch_end(trainer) + + def _save_monitor_checkpoint(self, trainer: "pl.Trainer", monitor_candidates): + assert self.monitor + current = monitor_candidates.get(self.monitor) + if self.check_monitor_top_k(trainer, current): + assert current is not None + self._update_best_and_save(current, trainer, monitor_candidates) + elif self.verbose: + epoch = monitor_candidates["epoch"] + step = monitor_candidates["step"] + rank_zero_info(f"Epoch {epoch:d}, global step {step:d}: {self.monitor!r} was not in top {self.save_top_k}") + if step >= self.last_permanent_step + self.permanent_ckpt_interval: + self.last_permanent_step = step + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) + self._save_checkpoint(trainer, filepath) + rank_zero_info(f"Epoch {epoch:d}, global step {step:d} is a permanent checkpoint, saved to {filepath}") + + def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: + """Calls the strategy to remove the checkpoint file.""" + if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: + search = re.search(r'steps_\d+', Path(filepath).stem) + if search: + step = int(search.group(0)[6:]) + if step >= self.permanent_ckpt_start and \ + (self.last_permanent_step is None or \ + step >= self.last_permanent_step + self.permanent_ckpt_interval): + self.last_permanent_step = step + return + trainer.strategy.remove_checkpoint(filepath) def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None - last_steps = -1 + last_step = -1 last_ckpt_name = None - checkpoints = os.listdir(work_dir) + checkpoints = glob(str(Path(work_dir) / '*.ckpt')) for name in checkpoints: - if '.ckpt' in name and not name.endswith('part'): - if 'steps_' in name: - steps = name.split('steps_')[1] - steps = int(re.sub('[^0-9]', '', steps)) - - if steps > last_steps: - last_steps = steps - last_ckpt_name = name + search = re.search(r'steps_\d+', name) + if search: + step = int(search.group(0)[6:]) + if step > last_step: + last_step = step + last_ckpt_name = name - return os.path.join(work_dir, last_ckpt_name) if last_ckpt_name is not None else None + return last_ckpt_name if last_ckpt_name is not None else None def get_stategy_obj(strategy): if strategy == 'ddp_gloo': From 7819e0b41900566912f63fa0b8041b58cc63282b Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 00:29:50 -0500 Subject: [PATCH 102/475] Use pl rankzero utils to discriminate main proc --- basics/base_task.py | 8 ++++---- scripts/infer.py | 2 ++ scripts/train.py | 3 ++- utils/hparams.py | 39 ++++++++++++++++++++------------------- 4 files changed, 28 insertions(+), 24 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 9b5a330ee..a15e384a4 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -16,7 +16,7 @@ from pytorch_lightning.callbacks import RichProgressBar, ModelSummary from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import 
grad_norm -from pytorch_lightning.utilities.rank_zero import rank_zero_debug +from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj @@ -272,8 +272,8 @@ def start(cls): accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train - if trainer.local_rank == 0: - set_hparams(print_hparams=True, is_main_process=True) + @rank_zero_only + def train_payload_copy(): # copy_code = input(f'{hparams["save_codes"]} code backup? y/n: ') == 'y' copy_code = True # backup code every time if copy_code: @@ -297,7 +297,7 @@ def start(cls): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') - hparams['disable_sample_tqdm'] = True + train_payload_copy() trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: trainer.test(task) diff --git a/scripts/infer.py b/scripts/infer.py index 9766613a3..ea61c7add 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -75,6 +75,8 @@ assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' set_hparams(print_hparams=False) +hparams['disable_sample_tqdm'] = False + if args.speedup > 0: hparams['pndm_speedup'] = args.speedup diff --git a/scripts/train.py b/scripts/train.py index 98f12e27f..f229100c0 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -2,7 +2,8 @@ from utils.hparams import set_hparams, hparams -set_hparams(is_main_process=False) +set_hparams() +hparams['disable_sample_tqdm'] = True def run_task(): assert hparams['task_cls'] != '' diff --git a/utils/hparams.py b/utils/hparams.py index 8be0a0663..28c2bcbb7 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -1,9 +1,8 @@ import argparse import os - import yaml -from utils.multiprocess_utils import is_main_process as mp_is_main_process +from pytorch_lightning.utilities.rank_zero import rank_zero_only global_print_hparams = True hparams = {} @@ -22,7 +21,7 @@ def override_config(old_config: dict, new_config: dict): old_config[k] = v -def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True, is_main_process=None): +def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True): """ Load hparams from multiple sources: 1. config chain (i.e. 
first load base_config, then load config); @@ -45,9 +44,6 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob else: args = Args(config=config, exp_name=exp_name, hparams=hparams_str, infer=False, validate=False, reset=False, debug=False) - - if is_main_process is None: - is_main_process = mp_is_main_process args_work_dir = '' if args.exp_name != '': @@ -103,31 +99,36 @@ def load_config(config_fn): # deep first else: hparams_[k] = type(hparams_[k])(v) - if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer: - os.makedirs(hparams_['work_dir'], exist_ok=True) - if is_main_process: + @rank_zero_only + def dump_hparams(): + if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer: + os.makedirs(hparams_['work_dir'], exist_ok=True) # Only the main process will save the config file with open(ckpt_config_path, 'w', encoding='utf-8') as f: hparams_non_recursive = hparams_.copy() hparams_non_recursive['base_config'] = [] yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8') + dump_hparams() hparams_['infer'] = args.infer hparams_['debug'] = args.debug hparams_['validate'] = args.validate - global global_print_hparams if global_hparams: hparams.clear() hparams.update(hparams_) - hparams['is_main_process'] = is_main_process - - if is_main_process and print_hparams and global_print_hparams and global_hparams: - print('| Hparams chains: ', config_chains) - print('| Hparams: ') - for i, (k, v) in enumerate(sorted(hparams_.items())): - print(f"\033[;33;m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") - print("") - global_print_hparams = False + + @rank_zero_only + def print_hparams(): + global global_print_hparams + if print_hparams and global_print_hparams and global_hparams: + print('| Hparams chains: ', config_chains) + print('| Hparams: ') + for i, (k, v) in enumerate(sorted(hparams_.items())): + print(f"\033[;33;m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") + print("") + global_print_hparams = False + print_hparams() + # print(hparams_.keys()) if hparams.get('exp_name') is None: hparams['exp_name'] = args.exp_name From 311653ae64fb21b320e6bf2b1facc8f029f12dab Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 00:32:44 -0500 Subject: [PATCH 103/475] use rank_zero_info --- modules/nsf_hifigan/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 337284063..f01265309 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -7,6 +7,7 @@ import torch.nn.functional as F from torch.nn import Conv1d, ConvTranspose1d from torch.nn.utils import weight_norm, remove_weight_norm +from pytorch_lightning.utilities.rank_zero import rank_zero_info from .env import AttrDict from .utils import init_weights, get_padding @@ -274,7 +275,7 @@ def forward(self, x, f0): return x def remove_weight_norm(self): - print('Removing weight norm...') + rank_zero_info('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for l in self.resblocks: From 9dda2eb318f0129498b6c843ffc6a3c12ab3e9db Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 01:04:41 -0500 Subject: [PATCH 104/475] Clean indexed ds, add main proc check back to hparam --- utils/hparams.py | 15 +++++++++------ utils/indexed_datasets.py | 24 ++---------------------- 2 files changed, 11 insertions(+), 28 deletions(-) diff --git a/utils/hparams.py b/utils/hparams.py 
index 28c2bcbb7..5b8ecc8fb 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -3,6 +3,8 @@ import yaml from pytorch_lightning.utilities.rank_zero import rank_zero_only + +from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True hparams = {} @@ -103,11 +105,12 @@ def load_config(config_fn): # deep first def dump_hparams(): if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer: os.makedirs(hparams_['work_dir'], exist_ok=True) - # Only the main process will save the config file - with open(ckpt_config_path, 'w', encoding='utf-8') as f: - hparams_non_recursive = hparams_.copy() - hparams_non_recursive['base_config'] = [] - yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8') + if mp_is_main_process: + # Only the main process will save the config file + with open(ckpt_config_path, 'w', encoding='utf-8') as f: + hparams_non_recursive = hparams_.copy() + hparams_non_recursive['base_config'] = [] + yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8') dump_hparams() hparams_['infer'] = args.infer @@ -120,7 +123,7 @@ def dump_hparams(): @rank_zero_only def print_hparams(): global global_print_hparams - if print_hparams and global_print_hparams and global_hparams: + if mp_is_main_process and print_hparams and global_print_hparams and global_hparams: print('| Hparams chains: ', config_chains) print('| Hparams: ') for i, (k, v) in enumerate(sorted(hparams_.items())): diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index c0482667c..ed4420994 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,6 +1,4 @@ -import os.path import pathlib -import pickle import multiprocessing from copy import deepcopy import h5py @@ -13,22 +11,15 @@ class IndexedDataset: def __init__(self, path, prefix, num_cache=0): super().__init__() self.path = pathlib.Path(path) - # self.data_file = None - # self.data_offsets = np.load(self.path / f'{prefix}.idx')) - # self.data_file = open(self.path / f'{prefix}.data', 'rb', buffering=-1) self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') self.cache = [] self.num_cache = num_cache def check_index(self, i): - # if i < 0 or i >= len(self.data_offsets) - 1: - # raise IndexError('index out of range') if i < 0 or i >= len(self.dset): raise IndexError('index out of range') def __del__(self): - # if self.data_file: - # self.data_file.close() if self.dset: del self.dset @@ -38,27 +29,21 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - # self.data_file.seek(self.data_offsets[i]) - # b = self.data_file.read(self.data_offsets[i + 1] - self.data_offsets[i]) - # item = pickle.loads(b) item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache = [(i, deepcopy(item))] + self.cache[:-1] return item def __len__(self): - # return len(self.data_offsets) - 1 return len(self.dset) class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): self.path = pathlib.Path(path) self.prefix = prefix - # self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'w') self.counter = 0 self.lock = multiprocessing.Lock() - # self.byte_offsets = [0] if allowed_attr is not None: self.allowed_attr = set(allowed_attr) else: @@ -74,20 +59,15 @@ def add_item(self, item): item_no = self.counter self.counter += 1 for k, v in item.items(): + if v is 
None: + continue if isinstance(v, np.ndarray): self.dset.create_dataset(f'{item_no}/{k}', data=v, compression="gzip", compression_opts=4) else: self.dset.create_dataset(f'{item_no}/{k}', data=v) - # s = pickle.dumps(item) - # n_bytes = self.out_file.write(s) - # self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) def finalize(self): del self.dset - # self.out_file.close() - # with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: - # # noinspection PyTypeChecker - # np.save(f, self.byte_offsets) if __name__ == "__main__": From b663b104c58427bf8822e5674d27701110ee6cd4 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 01:09:00 -0500 Subject: [PATCH 105/475] remove h5py compression and ncols in binarizer tqdm --- preprocessing/acoustic_binarizer.py | 5 ++--- utils/indexed_datasets.py | 5 +---- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 6d7510ff8..4c3ca8dbb 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -185,13 +185,12 @@ def postprocess(_item): # code for parallel processing for item in tqdm( chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))), - ncols=80 + total=len(list(self.meta_data_iterator(prefix))) ): postprocess(item) else: # code for single cpu processing - for a in tqdm(args, ncols=80): + for a in tqdm(args): item = self.process_item(*a) postprocess(item) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index ed4420994..e20f5f99f 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -61,10 +61,7 @@ def add_item(self, item): for k, v in item.items(): if v is None: continue - if isinstance(v, np.ndarray): - self.dset.create_dataset(f'{item_no}/{k}', data=v, compression="gzip", compression_opts=4) - else: - self.dset.create_dataset(f'{item_no}/{k}', data=v) + self.dset.create_dataset(f'{item_no}/{k}', data=v) def finalize(self): del self.dset From 5652c43bd3ac81968f3fd224dd3e13a1724d0c15 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 13:55:34 -0500 Subject: [PATCH 106/475] Format tqdm --- basics/base_task.py | 7 ++----- utils/pl_utils.py | 20 +++++++++++++++++++- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index a15e384a4..efb0aa9ae 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,13 +13,12 @@ import sys import numpy as np import pytorch_lightning as pl -from pytorch_lightning.callbacks import RichProgressBar, ModelSummary from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import grad_norm from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength -from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj +from utils.pl_utils import DiffModelCheckpoint, DiffTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj from torch import nn import torch.utils.data import utils @@ -252,10 +251,8 @@ def start(cls): max_updates=hparams['max_updates'], permanent_ckpt_start=hparams['permanent_ckpt_start'], permanent_ckpt_interval=hparams['permanent_ckpt_interval'], - # verbose=True ), - # RichProgressBar(), - # ModelSummary(max_depth=-1), + DiffTQDMProgressBar(), ], 
logger=TensorBoardLogger( save_dir=str(work_dir), diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 237c8024c..d288b4ffe 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -8,7 +8,7 @@ import torch import pytorch_lightning as pl -from pytorch_lightning.callbacks import ModelCheckpoint +from pytorch_lightning.callbacks import ModelCheckpoint, TQDMProgressBar from pytorch_lightning.strategies import DDPStrategy from pytorch_lightning.trainer.states import RunningStage from pytorch_lightning.utilities.rank_zero import rank_zero_info @@ -124,6 +124,7 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: return trainer.strategy.remove_checkpoint(filepath) + def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None @@ -142,6 +143,23 @@ def get_latest_checkpoint_path(work_dir): return last_ckpt_name if last_ckpt_name is not None else None + +class DiffTQDMProgressBar(TQDMProgressBar): + def __init__(self, refresh_rate: int = 1, process_position: int = 0): + super().__init__(refresh_rate, process_position) + + def get_metrics(self, trainer, model): + items = super().get_metrics(trainer, model) + for name in ['step', 'batch_size']: + if name in items: + items[name] = int(items[name]) + for k, v in items.items(): + if isinstance(v, float): + if 0.00001 <= v < 10: + items[k] = f"{v:.5f}" + return items + + def get_stategy_obj(strategy): if strategy == 'ddp_gloo': return DDPStrategy(process_group_backend='gloo') From ee67f8163f8938a8b8f74bd3e28942dfa7cdd4d8 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 20:14:30 -0500 Subject: [PATCH 107/475] Fix bug in val_check_interval, hide v_num --- basics/base_task.py | 2 +- utils/pl_utils.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/basics/base_task.py b/basics/base_task.py index efb0aa9ae..cf381e3ca 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -260,7 +260,7 @@ def start(cls): version='lastest' ), gradient_clip_val=hparams['clip_grad_norm'], - val_check_interval=hparams['val_check_interval'], + val_check_interval=hparams['val_check_interval'] * hparams['accumulate_grad_batches'], # so this is global_steps check_val_every_n_epoch=None, log_every_n_steps=hparams['log_interval'], max_steps=hparams['max_updates'], diff --git a/utils/pl_utils.py b/utils/pl_utils.py index d288b4ffe..5b09d4b14 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -157,6 +157,7 @@ def get_metrics(self, trainer, model): if isinstance(v, float): if 0.00001 <= v < 10: items[k] = f"{v:.5f}" + items.pop("v_num", None) return items From 7b2d23f987a4409b0826da534297fa69417c6773 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 21:09:00 -0500 Subject: [PATCH 108/475] Cleanup dataset codes, rename custom callbacks --- basics/base_dataset.py | 27 +++------------------------ basics/base_task.py | 22 +++++++++++----------- configs/base.yaml | 1 + training/acoustic_task.py | 20 +++++++++----------- utils/indexed_datasets.py | 8 ++++---- utils/pl_utils.py | 4 ++-- utils/training_utils.py | 9 +++++---- 7 files changed, 35 insertions(+), 56 deletions(-) diff --git a/basics/base_dataset.py b/basics/base_dataset.py index ac34ebd0d..7c1d0c59f 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -9,12 +9,9 @@ class BaseDataset(Dataset): ''' Base class for datasets. - 1. *ordered_indices*: - if self.shuffle == True, shuffle the indices; - if self.sort_by_len == True, sort data by length; - 2. *sizes*: + 1. 
*sizes*:
             clipped length if "max_frames" is set;
-        3. *num_tokens*:
+        2. *num_tokens*:
             unclipped length.
 
         Subclasses should define:
@@ -23,11 +20,9 @@ class BaseDataset(Dataset):
         2. *__getitem__*:
             the index function.
     '''
-    def __init__(self, shuffle):
+    def __init__(self):
         super().__init__()
         self.hparams = hparams
-        self.shuffle = shuffle
-        self.sort_by_len = hparams['sort_by_len']
         self.sizes = None
 
     @property
@@ -50,19 +45,3 @@ def size(self, index):
         """Return an example's size as a float or tuple. This value is used when
         filtering a dataset with ``--max-positions``."""
         return self._sizes[index]
-
-    def ordered_indices(self):
-        """Return an ordered list of indices. Batches will be constructed based
-        on this order."""
-        if self.shuffle:
-            indices = np.random.permutation(len(self))
-            if self.sort_by_len:
-                indices = indices[np.argsort(np.array(self._sizes)[indices], kind='mergesort')]
-            # Shuffle first, then stable-sort, so samples of equal length keep the order given by the random permutation (i.e., they stay shuffled within each length group).
-        else:
-            indices = np.arange(len(self))
-        return indices
-
-    @property
-    def num_workers(self):
-        return int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 0)))
diff --git a/basics/base_task.py b/basics/base_task.py
index cf381e3ca..1f1b406dc 100644
--- a/basics/base_task.py
+++ b/basics/base_task.py
@@ -17,8 +17,8 @@
 from pytorch_lightning.utilities import grad_norm
 from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only
 from utils.phoneme_utils import locate_dictionary
-from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength
-from utils.pl_utils import DiffModelCheckpoint, DiffTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj
+from utils.training_utils import DsBatchSampler, DsDistributedBatchSampler
+from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj
 from torch import nn
 import torch.utils.data
 import utils
@@ -204,15 +204,15 @@ def configure_optimizers(self):
         }
 
     def build_batch_sampler(self, dataset, max_tokens, max_sentences, batch_by_size=True, shuffle=False):
-        batch_sampler_cls = partial(BatchSamplerSimilarLength,
+        batch_sampler_cls = partial(DsBatchSampler,
                                     max_tokens=max_tokens, max_sentences=max_sentences,
-                                    batch_by_size=batch_by_size)
+                                    batch_by_size=batch_by_size, sort_by_similar_size=hparams['sort_by_len'])
         if self.trainer.distributed_sampler_kwargs:
-            sampler = DistributedBatchSamplerSimilarLength(dataset,
-                                                           batch_sampler_cls=batch_sampler_cls,
-                                                           seed=hparams['seed'],
-                                                           shuffle=shuffle,
-                                                           **self.trainer.distributed_sampler_kwargs)
+            sampler = DsDistributedBatchSampler(dataset,
+                                                batch_sampler_cls=batch_sampler_cls,
+                                                seed=hparams['seed'],
+                                                shuffle=shuffle,
+                                                **self.trainer.distributed_sampler_kwargs)
         else:
             sampler = batch_sampler_cls(dataset, seed=hparams['seed'], shuffle=shuffle)
         return sampler
@@ -241,7 +241,7 @@ def start(cls):
             strategy=get_stategy_obj(hparams['pl_trainer_strategy']),
             precision=hparams['pl_trainer_precision'],
             callbacks=[
-                DiffModelCheckpoint(
+                DsModelCheckpoint(
                     dirpath=work_dir,
                     filename='model_ckpt_steps_{step}',
                     monitor='step',
@@ -252,7 +252,7 @@ def start(cls):
                     permanent_ckpt_start=hparams['permanent_ckpt_start'],
                     permanent_ckpt_interval=hparams['permanent_ckpt_interval'],
                 ),
-                DiffTQDMProgressBar(),
+                DsTQDMProgressBar(),
             ],
             logger=TensorBoardLogger(
diff --git a/configs/base.yaml b/configs/base.yaml
index b3a4dd46f..0d10b7ad3 100644
--- a/configs/base.yaml
+++ b/configs/base.yaml
@@ -34,6 +34,7 @@ mel_vmin: -6
 mel_vmax: 1.5
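
The key added a few lines below, `dataloader_prefetch_factor`, is forwarded to `torch.utils.data.DataLoader(prefetch_factor=...)` by the task code. A minimal sketch of its effect (toy dataset; the real loaders use the project's batch samplers):

    import torch
    from torch.utils.data import DataLoader, TensorDataset

    ds = TensorDataset(torch.arange(16).float())
    # With num_workers=2 and prefetch_factor=2, up to 2 * 2 = 4 batches
    # are loaded ahead of consumption.
    dl = DataLoader(ds, batch_size=4, num_workers=2, prefetch_factor=2)
    for (batch,) in dl:
        print(batch.shape)  # torch.Size([4])
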
sampler_frame_count_grid: 200 ds_workers: 4 +dataloader_prefetch_factor: 2 ######### # model diff --git a/training/acoustic_task.py b/training/acoustic_task.py index f42b1c301..f00291083 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -31,16 +31,14 @@ class AcousticDataset(BaseDataset): - def __init__(self, prefix, shuffle=False): - super().__init__(shuffle) + def __init__(self, prefix): + super().__init__() self.data_dir = hparams['binary_data_dir'] self.prefix = prefix self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) def __getitem__(self, index): - # if self.indexed_ds is None: - # self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) return self.indexed_ds[index] def collater(self, samples): @@ -86,8 +84,8 @@ def __init__(self): def setup(self, stage): self.phone_encoder = self.build_phone_encoder() self.model = self.build_model() - self.train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) - self.valid_dataset = self.dataset_cls(hparams['valid_set_name'], shuffle=False) + self.train_dataset = self.dataset_cls(hparams['train_set_name']) + self.valid_dataset = self.dataset_cls(hparams['valid_set_name']) @staticmethod def build_phone_encoder(): @@ -126,9 +124,9 @@ def train_dataloader(self): return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, batch_sampler=self.training_sampler, - num_workers=self.train_dataset.num_workers, - prefetch_factor=4, - pin_memory=False, + num_workers=int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 1))), + prefetch_factor=hparams.get('dataloader_prefetch_factor', 2), + pin_memory=True, persistent_workers=True) def val_dataloader(self): @@ -138,8 +136,8 @@ def val_dataloader(self): return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, - num_workers=self.valid_dataset.num_workers, - prefetch_factor=4, + num_workers=int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 1))), + prefetch_factor=hparams.get('dataloader_prefetch_factor', 2), shuffle=False) def test_dataloader(self): diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index e20f5f99f..0357e45fd 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,9 +1,9 @@ import pathlib import multiprocessing -from copy import deepcopy +from collections import deque + import h5py import torch - import numpy as np @@ -12,7 +12,7 @@ def __init__(self, path, prefix, num_cache=0): super().__init__() self.path = pathlib.Path(path) self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') - self.cache = [] + self.cache = deque(maxlen=num_cache) self.num_cache = num_cache def check_index(self, i): @@ -31,7 +31,7 @@ def __getitem__(self, i): return c[1] item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: - self.cache = [(i, deepcopy(item))] + self.cache[:-1] + self.cache.appendleft((i, item)) return item def __len__(self): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 5b09d4b14..cca36f1b8 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -13,7 +13,7 @@ from pytorch_lightning.trainer.states import RunningStage from pytorch_lightning.utilities.rank_zero import rank_zero_info -class DiffModelCheckpoint(ModelCheckpoint): +class DsModelCheckpoint(ModelCheckpoint): def __init__( self, dirpath, @@ -144,7 +144,7 @@ def get_latest_checkpoint_path(work_dir): 
return last_ckpt_name if last_ckpt_name is not None else None -class DiffTQDMProgressBar(TQDMProgressBar): +class DsTQDMProgressBar(TQDMProgressBar): def __init__(self, refresh_rate: int = 1, process_position: int = 0): super().__init__(refresh_rate, process_position) diff --git a/utils/training_utils.py b/utils/training_utils.py index 9154d9d28..605633cc8 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -53,13 +53,14 @@ def lr_lambda(self, step): progress = (step - self.warmup_steps) / max(1, self.t_total - self.warmup_steps) return max(self.eta_min, 0.5 * (1. + math.cos(math.pi * self.cycles * 2.0 * progress))) -class BatchSamplerSimilarLength(Sampler): - def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, seed=0, shuffle=True): +class DsBatchSampler(Sampler): + def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, sort_by_similar_size=True, seed=0, shuffle=True): self.dataset = dataset self.sub_indices = indices self.max_tokens = max_tokens self.max_sentences = max_sentences self.batch_by_size = batch_by_size + self.sort_by_similar_size = sort_by_similar_size self.shuffle = shuffle self.seed = seed self.epoch = 0 @@ -73,7 +74,7 @@ def __iter__(self): indices = np.array(self.sub_indices) else: indices = rng.permutation(len(self.dataset)) - if self.dataset.sort_by_len: + if self.sort_by_similar_size: grid = hparams.get('sampler_frame_count_grid', 100) sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] @@ -97,7 +98,7 @@ def __len__(self): def set_epoch(self, epoch): self.epoch = epoch -class DistributedBatchSamplerSimilarLength(DistributedSampler): +class DsDistributedBatchSampler(DistributedSampler): def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, seed=0, drop_last=False, batch_sampler_cls=None) -> None: From 4798f68a3438e3090df1e15c6e1905fb921b0459 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 21:09:47 -0500 Subject: [PATCH 109/475] Bump requirement torch and pl version --- requirements.txt | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/requirements.txt b/requirements.txt index 56c1439a9..4984c69bc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ resemblyzer==0.1.1.dev0 -tensorboardX==2.5.1 +tensorboardX==2.6 h5py==3.7.0 future==0.18.2 g2p-en==2.1.0 @@ -21,17 +21,18 @@ matplotlib==3.6.2 torchcrepe==0.0.17 python-dateutil==2.8.2 python-Levenshtein==0.12.2 -pytorch-lightning==0.7.1 +pytorch-lightning==2.0.0 six==1.16.0 -tqdm==4.64.1 +tqdm==4.65.0 resampy==0.4.2 +rich==13.3.2 imageio==2.23.0 einops==0.6.0 pycwt==0.3.0a22 praat-parselmouth==0.4.3 scikit-image==0.19.3 pyloudnorm==0.1.0 -torchmetrics==0.5.0 +torchmetrics==0.11.4 tensorboard==2.11.0 tensorboard-plugin-wit==1.8.1 protobuf==3.13.0 @@ -40,6 +41,6 @@ pypinyin==0.39.0 # It is recommended to install PyTorch manually. 
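Stepping back to the `DsBatchSampler` changes above: the new `sort_by_similar_size` branch quantizes every sample's frame count to a coarse grid before a stable sort, so batches group items of similar length while the shuffled order survives inside each length bucket. A minimal standalone sketch of that trick (the array values and grid size are illustrative, not taken from the repo):

import numpy as np

rng = np.random.default_rng(0)
sizes = np.array([130, 480, 145, 510, 122, 498])  # frame counts per sample
indices = rng.permutation(len(sizes))             # shuffled, as in __iter__

grid = 200  # cf. the new sampler_frame_count_grid hparam
# quantize each length to the nearest multiple of `grid`, floored at `grid`
bucketed = (np.round(sizes[indices] / grid) * grid).clip(grid, None).astype(np.int64)
# mergesort is stable, so ties keep their shuffled order and batches of
# similar-length items still vary from epoch to epoch
indices = indices[np.argsort(bucketed, kind='mergesort')]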
# See instructions at https://pytorch.org/get-started/previous-versions/ -# torch==1.8.2 -# torchaudio==0.8.2 -# torchvision==0.9.2 +# torch==2.0.0 +# torchaudio==2.0.0 +# torchvision==0.15.0 From 27753f6c5f5e919315df956817a1ef3bca9360b3 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 22:35:43 -0500 Subject: [PATCH 110/475] binarizer joint aug bug fix --- preprocessing/acoustic_binarizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 4c3ca8dbb..d4c40bf9e 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -353,7 +353,7 @@ def arrange_data_augmentation(self, prefix): aug_list.append(aug_task) elif aug_type == 1: aug_task = deepcopy(aug_item) - aug_item['kwargs']['speed'] = speed + aug_task['kwargs']['speed'] = speed if aug_item['name'] in aug_map: aug_map[aug_item['name']].append(aug_task) else: From 1d7cd381a17dfe3776e644899f7d1e079aaa874c Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 23:26:34 -0500 Subject: [PATCH 111/475] revert torch version --- basics/base_task.py | 12 ++++++------ modules/nsf_hifigan/models.py | 2 +- modules/vocoders/nsf_hifigan.py | 2 +- requirements.txt | 10 +++++----- training/acoustic_task.py | 2 +- utils/hparams.py | 2 +- utils/phoneme_utils.py | 2 +- utils/pl_utils.py | 10 +++++----- 8 files changed, 21 insertions(+), 21 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 1f1b406dc..2915b679b 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -12,10 +12,10 @@ import random import sys import numpy as np -import pytorch_lightning as pl -from pytorch_lightning.loggers import TensorBoardLogger -from pytorch_lightning.utilities import grad_norm -from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only +import lightning.pytorch as pl +from lightning.pytorch.loggers import TensorBoardLogger +from lightning.pytorch.utilities import grad_norm +from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary from utils.training_utils import DsBatchSampler, DsDistributedBatchSampler from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj @@ -136,7 +136,7 @@ def on_train_epoch_end(self): # f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. 
{loss_outputs}" # f"\n==============\n") - def on_before_optimizer_step(self, optimizer): + def on_before_optimizer_step(self, *args, **kwargs): self.log_dict(grad_norm(self, norm_type=2)) def on_validation_start(self): @@ -305,6 +305,6 @@ def on_save_checkpoint(self, checkpoint): checkpoint['trainer_stage'] = self.trainer.state.stage.value def on_load_checkpoint(self, checkpoint): - from pytorch_lightning.trainer.states import RunningStage + from lightning.pytorch.trainer.states import RunningStage if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: self.skip_immediate_validation = True diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index f01265309..8c2ad2aa7 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -7,7 +7,7 @@ import torch.nn.functional as F from torch.nn import Conv1d, ConvTranspose1d from torch.nn.utils import weight_norm, remove_weight_norm -from pytorch_lightning.utilities.rank_zero import rank_zero_info +from lightning.pytorch.utilities.rank_zero import rank_zero_info from .env import AttrDict from .utils import init_weights, get_padding diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 7b4a0a531..0f98a7fc8 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,7 +1,7 @@ import os import torch -from pytorch_lightning.utilities.rank_zero import rank_zero_info +from lightning.pytorch.utilities.rank_zero import rank_zero_info from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT diff --git a/requirements.txt b/requirements.txt index 4984c69bc..6de58f06e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,7 +21,7 @@ matplotlib==3.6.2 torchcrepe==0.0.17 python-dateutil==2.8.2 python-Levenshtein==0.12.2 -pytorch-lightning==2.0.0 +lightning==2.0.0 six==1.16.0 tqdm==4.65.0 resampy==0.4.2 @@ -36,11 +36,11 @@ torchmetrics==0.11.4 tensorboard==2.11.0 tensorboard-plugin-wit==1.8.1 protobuf==3.13.0 -PyYAML==5.4 +PyYAML==6.0 pypinyin==0.39.0 # It is recommended to install PyTorch manually. 
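PATCH 111 switches every Lightning import from the standalone `pytorch_lightning` distribution to the `lightning.pytorch` namespace of the unified `lightning` package. For downstream code that must tolerate either installation, a small shim is a common pattern; this sketch is illustrative and not something the patch adds:

# minimal compatibility shim, assuming one of the two packages is installed
try:
    import lightning.pytorch as pl
    from lightning.pytorch.utilities.rank_zero import rank_zero_info
except ImportError:
    import pytorch_lightning as pl
    from pytorch_lightning.utilities.rank_zero import rank_zero_info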
# See instructions at https://pytorch.org/get-started/previous-versions/ -# torch==2.0.0 -# torchaudio==2.0.0 -# torchvision==0.15.0 +# torch==1.13.1 +# torchaudio==0.13.1 +# torchvision==0.14.1 diff --git a/training/acoustic_task.py b/training/acoustic_task.py index f00291083..bc46b4a4b 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -8,7 +8,7 @@ import torch.distributions import torch.optim import torch.utils.data -import pytorch_lightning as pl +import lightning.pytorch as pl from tqdm import tqdm import utils diff --git a/utils/hparams.py b/utils/hparams.py index 5b8ecc8fb..0448f5fc1 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -2,7 +2,7 @@ import os import yaml -from pytorch_lightning.utilities.rank_zero import rank_zero_only +from lightning.pytorch.utilities.rank_zero import rank_zero_only from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index 8bca6510e..086008bd8 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,6 +1,6 @@ import pathlib -from pytorch_lightning.utilities.rank_zero import rank_zero_info +from lightning.pytorch.utilities.rank_zero import rank_zero_info from utils.hparams import hparams diff --git a/utils/pl_utils.py b/utils/pl_utils.py index cca36f1b8..829c08f07 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -7,11 +7,11 @@ import torch -import pytorch_lightning as pl -from pytorch_lightning.callbacks import ModelCheckpoint, TQDMProgressBar -from pytorch_lightning.strategies import DDPStrategy -from pytorch_lightning.trainer.states import RunningStage -from pytorch_lightning.utilities.rank_zero import rank_zero_info +import lightning.pytorch as pl +from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar +from lightning.pytorch.strategies import DDPStrategy +from lightning.pytorch.trainer.states import RunningStage +from lightning.pytorch.utilities.rank_zero import rank_zero_info class DsModelCheckpoint(ModelCheckpoint): def __init__( From 97a52dbf1f863c8ade9c1f38ac430619edea6cfe Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 00:15:13 -0500 Subject: [PATCH 112/475] Remove py3.9 syntax --- basics/base_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 2915b679b..756de765e 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -121,9 +121,9 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): pass # log_outputs['all_loss'] = total_loss.item() - progress_bar_log = log_outputs | {'step': self.global_step} + log_outputs.update({'step': self.global_step}) tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(progress_bar_log, prog_bar=True, on_step=True, on_epoch=False) + self.log_dict(log_outputs, prog_bar=True, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return { 'loss': total_loss From 14c92e33421c4861d2483d06c08ffc171b3ce8c9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 25 Mar 2023 19:30:21 +0800 Subject: [PATCH 113/475] Fix unsorted labels in phoneme distribution --- preprocessing/acoustic_binarizer.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 3c31a1d4d..e6a63a54f 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -120,8 +120,8 @@ def 
check_coverage(self): # Draw graph. plt.figure(figsize=(int(len(ph_required) * 0.8), 10)) - x = list(phoneme_map.keys()) - values = list(phoneme_map.values()) + x = sorted(phoneme_map.keys()) + values = [phoneme_map[k] for k in x] plt.bar(x=x, height=values) plt.tick_params(labelsize=15) plt.xlim(-1, len(ph_required)) @@ -141,8 +141,8 @@ def check_coverage(self): unrecognizable_phones = ph_occurred.difference(ph_required) missing_phones = ph_required.difference(ph_occurred) raise BinarizationError('transcriptions and dictionary mismatch.\n' - f' (+) {sorted(unrecognizable_phones)}\n' - f' (-) {sorted(missing_phones)}') + f' (+) {sorted(unrecognizable_phones)}\n' + f' (-) {sorted(missing_phones)}') # Copy dictionary to binary data dir shutil.copy(locate_dictionary(), os.path.join(hparams['binary_data_dir'], 'dictionary.txt')) From 8bcb0ddfcb7c2e7350f4b5c0ae983910e60b7af5 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 25 Mar 2023 19:37:32 +0800 Subject: [PATCH 114/475] Remove unused energy and spk_embed --- configs/acoustic.yaml | 1 - configs/base.yaml | 2 -- preprocessing/acoustic_binarizer.py | 2 -- training/acoustic_task.py | 7 ------- 4 files changed, 12 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index c697e4889..87a246759 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -51,7 +51,6 @@ mel_vmin: -6. #-6. mel_vmax: 1.5 save_f0: true -use_spk_embed: false use_spk_id: false f0_embed_type: continuous use_key_shift_embed: false diff --git a/configs/base.yaml b/configs/base.yaml index 709a32630..1ff76a324 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -45,9 +45,7 @@ num_heads: 2 enc_ffn_kernel_size: 9 ffn_act: gelu ffn_padding: 'SAME' -use_energy_embed: false use_spk_id: false -use_spk_embed: false ########### # optimization diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index e6a63a54f..99d596b59 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -168,8 +168,6 @@ def postprocess(_item): nonlocal total_sec, total_raw_sec if _item is None: return - # item_['spk_embed'] = voice_encoder.embed_utterance(item_['wav']) \ - # if self.binarization_args['with_spk_embed'] else None builder.add_item(_item) lengths.append(_item['length']) total_sec += _item['seconds'] diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 88706b500..592cb68bb 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -57,15 +57,10 @@ def collater(self, samples): 'mel': mel, 'f0': f0, } - # if hparams['use_energy_embed']: - # batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) if hparams.get('use_key_shift_embed', False): batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] if hparams.get('use_speed_embed', False): batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] - # if hparams['use_spk_embed']: - # spk_embed = torch.stack([s['spk_embed'] for s in samples]) - # batch['spk_embed'] = spk_embed if hparams['use_spk_id']: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids @@ -150,8 +145,6 @@ def run_model(self, sample, return_output=False, infer=False): if hparams['use_spk_id']: spk_embed_id = sample['spk_ids'] - # elif hparams['use_spk_embed']: - # spk_embed = sample['spk_embed'] else: spk_embed_id = None output = self.model(txt_tokens, mel2ph=mel2ph, f0=f0, From 3452811b278f28e86329745594592b8c4df459ad Mon Sep 17 00:00:00 2001 From: yqzhishen 
Date: Sat, 25 Mar 2023 19:38:18 +0800 Subject: [PATCH 115/475] Use correct raise --- basics/base_vocoder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index 1c898b8d3..af2f400d3 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -6,7 +6,7 @@ def spec2wav(self, mel, **kwargs): :return: wav: [T'] """ - raise NotImplementedError + raise NotImplementedError() @staticmethod def wav2spec(wav_fn): @@ -15,4 +15,4 @@ def wav2spec(wav_fn): :param wav_fn: str :return: wav, mel: [T, 80] """ - raise NotImplementedError + raise NotImplementedError() From 3ac11f409ce455a990ccfaf7325d25d84317fdae Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 11:13:03 -0500 Subject: [PATCH 116/475] Add env for CUDNN API change, clean more codes --- basics/base_task.py | 34 ++++------------------------------ scripts/train.py | 2 ++ training/acoustic_task.py | 1 - 3 files changed, 6 insertions(+), 31 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 756de765e..4187d5d45 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -71,7 +71,6 @@ def __init__(self, *args, **kwargs): if self.max_eval_sentences == -1: hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences - self.training_losses_meter = None self.training_sampler = None self.skip_immediate_validation = False self.skip_immediate_ckpt_save = False @@ -86,7 +85,6 @@ def build_model(self): raise NotImplementedError def on_train_epoch_start(self): - self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} if self.training_sampler is not None: self.training_sampler.set_epoch(self.current_epoch) @@ -100,34 +98,13 @@ def _training_step(self, sample, batch_idx, optimizer_idx): raise NotImplementedError def training_step(self, sample, batch_idx, optimizer_idx=-1): - loss_ret = self._training_step(sample, batch_idx, optimizer_idx) - self.opt_idx = optimizer_idx - if loss_ret is None: - return {'loss': None} - total_loss, log_outputs = loss_ret - log_outputs = utils.tensors_to_scalars(log_outputs) - for k, v in log_outputs.items(): - if k not in self.training_losses_meter: - self.training_losses_meter[k] = utils.AvgrageMeter() - if not np.isnan(v): - self.training_losses_meter[k].update(v) - self.training_losses_meter['total_loss'].update(total_loss.item()) - - try: - log_outputs['lr'] = self.scheduler.get_lr() - if isinstance(log_outputs['lr'], list): - log_outputs['lr'] = log_outputs['lr'][0] - except: - pass - - # log_outputs['all_loss'] = total_loss.item() - log_outputs.update({'step': self.global_step}) + total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) + + log_outputs.update({'step': self.global_step, 'lr': self.lr_schedulers().get_lr()[0]}) tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} self.log_dict(log_outputs, prog_bar=True, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) - return { - 'loss': total_loss - } + return total_loss def on_train_epoch_end(self): pass @@ -179,9 +156,6 @@ def on_validation_epoch_end(self): self.skip_immediate_ckpt_save = True return loss_output = self._on_validation_end(self.validation_step_outputs) - # print(f"\n==============\n " - # f"valid results: {loss_output}" - # f"\n==============\n") self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, sync_dist=True) self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) 
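For context on PATCH 115 above: the two spellings behave identically, because `raise` given an exception class instantiates it automatically, so this is a consistency fix rather than a behavior change; the call form simply leaves an obvious slot for a message. A hypothetical illustration (the class name and message are made up for this example):

class DemoVocoder:
    def spec2wav(self, mel, **kwargs):
        # `raise NotImplementedError` and `raise NotImplementedError()` are
        # equivalent; the explicit call makes room for a diagnostic message
        raise NotImplementedError('spec2wav must be overridden by a subclass')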
diff --git a/scripts/train.py b/scripts/train.py index f229100c0..60f8d8f54 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -1,4 +1,6 @@ +import os import importlib +os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1' # Prevent unacceptable slowdowns when using 16 precision from utils.hparams import set_hparams, hparams diff --git a/training/acoustic_task.py b/training/acoustic_task.py index bc46b4a4b..c7dff035f 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -179,7 +179,6 @@ def _training_step(self, sample, batch_idx, _): log_outputs = self.run_model(sample) total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) log_outputs['batch_size'] = sample['tokens'].size()[0] - log_outputs['lr'] = self.lr_schedulers().get_lr()[0] return total_loss, log_outputs def _validation_step(self, sample, batch_idx): From ae2946c8fa6194a942f39addffea5be49616b5af Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 22 Mar 2023 01:16:13 -0500 Subject: [PATCH 117/475] Initial attempt to refactor lightning code --- augmentation/spec_stretch.py | 10 +- basics/base_task.py | 329 ++++++++++------------------ configs/base.yaml | 2 +- modules/nsf_hifigan/models.py | 4 +- modules/vocoders/nsf_hifigan.py | 4 +- preprocessing/acoustic_binarizer.py | 15 +- scripts/train.py | 2 +- training/acoustic_task.py | 84 +++---- utils/__init__.py | 4 +- utils/hparams.py | 9 +- utils/indexed_datasets.py | 65 ++++-- utils/multiprocess_utils.py | 3 +- utils/training_utils.py | 67 ++++++ 13 files changed, 300 insertions(+), 298 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 3a6244083..728c04d43 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -31,7 +31,7 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['wav_fn'], keyshift=key_shift, speed=speed ) - aug_item['mel'] = torch.from_numpy(mel) + aug_item['mel'] = mel if speed != 1. or hparams.get('use_speed_embed', False): aug_item['length'] = mel.shape[0] @@ -39,18 +39,18 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['seconds'] /= aug_item['speed'] aug_item['ph_dur'] /= aug_item['speed'] aug_item['mel2ph'] = get_mel2ph_torch( - self.lr, aug_item['ph_dur'], aug_item['length'], hparams, device=self.device - ) + self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], hparams, device=self.device + ).cpu().numpy() f0, f0_coarse, _ = get_pitch_parselmouth( wav, aug_item['length'], hparams, speed=speed, interp_uv=item['interp_uv'] ) aug_item['f0'], aug_item['f0_coarse'] = \ - torch.from_numpy(f0), torch.from_numpy(f0_coarse) + f0.astype(np.float32), f0_coarse if key_shift != 0. 
or hparams.get('use_key_shift_embed', False): aug_item['key_shift'] = key_shift aug_item['f0'] *= 2 ** (key_shift / 12) - aug_item['f0_coarse'] = torch.from_numpy(f0_to_coarse(aug_item['f0'].numpy())) + aug_item['f0_coarse'] = f0_to_coarse(aug_item['f0']) if replace_spk_id is not None: aug_item['spk_id'] = replace_spk_id diff --git a/basics/base_task.py b/basics/base_task.py index d827338ec..809c70a1a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,13 +13,18 @@ import sys import numpy as np import torch.distributed as dist +import pytorch_lightning as pl +from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.loggers import TensorBoardLogger +from pytorch_lightning.strategies import DDPStrategy +from pytorch_lightning.utilities import grad_norm, rank_zero_only from utils.phoneme_utils import locate_dictionary -from utils.pl_utils import LatestModelCheckpoint, BaseTrainer, data_loader, DDP +from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength from torch import nn import torch.utils.data import utils import logging +from functools import partial import os torch.multiprocessing.set_sharing_strategy(os.getenv('TORCH_SHARE_STRATEGY', 'file_system')) @@ -29,7 +34,7 @@ format=log_format, datefmt='%m/%d %I:%M:%S %p') -class BaseTask(nn.Module): +class BaseTask(pl.LightningModule): ''' Base class for training tasks. 1. *load_ckpt*: @@ -48,21 +53,14 @@ class BaseTask(nn.Module): how to build the model, the optimizer and the training scheduler; 2. *_training_step*: one training step of the model; - 3. *validation_end* and *_validation_end*: + 3. *on_validation_end* and *_on_validation_end*: postprocess the validation output. ''' def __init__(self, *args, **kwargs): # dataset configs super(BaseTask, self).__init__(*args, **kwargs) - self.current_epoch = 0 - self.global_step = 0 self.loaded_optimizer_states_dict = {} - self.trainer = None - self.logger = None - self.on_gpu = False - self.use_dp = False - self.use_ddp = False self.example_input_array = None self.max_tokens = hparams['max_tokens'] @@ -74,17 +72,18 @@ def __init__(self, *args, **kwargs): if self.max_eval_sentences == -1: hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences - self.model = None - self.persistent_dataloader = None self.training_losses_meter = None + + self.model = None ########### # Training, validation and testing ########### + def build_model(self): raise NotImplementedError - def on_epoch_start(self): + def on_train_epoch_start(self): self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} def _training_step(self, sample, batch_idx, optimizer_idx): @@ -118,26 +117,35 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): pass # log_outputs['all_loss'] = total_loss.item() - progress_bar_log = log_outputs + progress_bar_log = log_outputs | {'step': self.global_step} tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} + self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False) + self.log_dict(tb_log) return { - 'loss': total_loss, - 'progress_bar': progress_bar_log, - 'log': tb_log + 'loss': total_loss } - def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): - optimizer.step() - optimizer.zero_grad() - if self.scheduler is not None: - self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) - - def on_epoch_end(self): + def on_train_epoch_end(self): pass # loss_outputs = {k: round(v.avg, 4) for k, v in 
self.training_losses_meter.items()} # print(f"\n==============\n " # f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. {loss_outputs}" # f"\n==============\n") + + def on_before_optimizer_step(self, optimizer): + self.log_dict(grad_norm(self, norm_type=2)) + + def on_validation_start(self): + self.validation_step_outputs = [] + + def _validation_step(self, sample, batch_idx): + """ + + :param sample: + :param batch_idx: + :return: output: dict + """ + raise NotImplementedError def validation_step(self, sample, batch_idx): """ @@ -146,9 +154,14 @@ def validation_step(self, sample, batch_idx): :param batch_idx: :return: output: dict """ - raise NotImplementedError + outputs = self._validation_step(sample, batch_idx) + self.validation_step_outputs.append(outputs) + + return { + 'val_loss': outputs['total_loss'] + } - def _validation_end(self, outputs): + def _on_validation_end(self, outputs): """ :param outputs: @@ -156,13 +169,13 @@ def _validation_end(self, outputs): """ raise NotImplementedError - def validation_end(self, outputs): - loss_output = self._validation_end(outputs) + def on_validation_epoch_end(self): + loss_output = self._on_validation_end(self.validation_step_outputs) print(f"\n==============\n " f"valid results: {loss_output}" f"\n==============\n") + self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True) return { - 'log': {f'val/{k}': v for k, v in loss_output.items()}, 'val_loss': loss_output['total_loss'] } @@ -174,60 +187,43 @@ def build_optimizer(self, model): def configure_optimizers(self): optm = self.build_optimizer(self.model) - self.scheduler = self.build_scheduler(optm) - return [optm] + scheduler = self.build_scheduler(optm) + return { + "optimizer": optm, + "lr_scheduler": { + "scheduler": scheduler, + "interval": "step", + "frequency": hparams['accumulate_grad_batches'], + } + } - def build_dataloader(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, batch_by_size=True, persistent=False): + def build_batch_sampler(self, dataset, shuffle, max_tokens=None, max_sentences=None, + required_batch_size_multiple=-1, batch_by_size=True): devices_cnt = torch.cuda.device_count() if devices_cnt == 0: devices_cnt = 1 if required_batch_size_multiple == -1: required_batch_size_multiple = devices_cnt - - def shuffle_batches(batches): - np.random.shuffle(batches) - return batches - - if max_tokens is not None: - max_tokens *= devices_cnt - if max_sentences is not None: - max_sentences *= devices_cnt - indices = dataset.ordered_indices() - if batch_by_size: - batch_sampler = utils.batch_by_size( - indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple, - ) + + batch_sampler_cls = partial(BatchSamplerSimilarLength, + max_tokens=max_tokens, max_sentences=max_sentences, + required_batch_size_multiple=required_batch_size_multiple, + batch_by_size=batch_by_size) + if self.trainer.distributed_sampler_kwargs: + sampler = DistributedBatchSamplerSimilarLength(dataset, batch_sampler_cls=batch_sampler_cls, + shuffle=shuffle, **self.trainer.distributed_sampler_kwargs) else: - batch_sampler = [] - for i in range(0, len(indices), max_sentences): - batch_sampler.append(indices[i:i + max_sentences]) + sampler = batch_sampler_cls(dataset=dataset, indices=dataset.ordered_indices(), shuffle=shuffle) + return sampler - if shuffle: - batches = shuffle_batches(list(batch_sampler)) - else: - batches = batch_sampler - num_workers = 
dataset.num_workers - if self.trainer.use_ddp: - num_replicas = dist.get_world_size() - rank = dist.get_rank() - batches = [x[rank::num_replicas] for x in batches if len(x) % num_replicas == 0] - return torch.utils.data.DataLoader(dataset, - collate_fn=dataset.collater, - batch_sampler=batches, - num_workers=num_workers, - pin_memory=False, - persistent_workers=persistent) - - def test_start(self): - pass + def on_test_start(self): + self.on_validation_start() def test_step(self, sample, batch_idx): return self.validation_step(sample, batch_idx) - def test_end(self, outputs): - return self.validation_end(outputs) + def on_test_end(self): + return self.on_validation_end() ########### # Running configuration @@ -235,165 +231,74 @@ def test_end(self, outputs): @classmethod def start(cls): - set_hparams() - os.environ['MASTER_PORT'] = str(random.randint(15000, 30000)) random.seed(hparams['seed']) np.random.seed(hparams['seed']) task = cls() work_dir = pathlib.Path(hparams['work_dir']) - trainer = BaseTrainer( - checkpoint_callback=LatestModelCheckpoint( - filepath=work_dir, - verbose=True, - monitor='val_loss', - mode='min', - num_ckpt_keep=hparams['num_ckpt_keep'], - permanent_ckpt_start=hparams.get('permanent_ckpt_start', 0), - permanent_ckpt_interval=hparams.get('permanent_ckpt_interval', -1), - save_best=hparams['save_best'], - period=1 if hparams['save_ckpt'] else 100000 - ), + trainer = pl.Trainer( + accelerator='gpu', + devices=4, + strategy=DDPStrategy(find_unused_parameters=False, process_group_backend='gloo'), + precision="bf16", + callbacks=[ + ModelCheckpoint( + dirpath=work_dir, + filename='model_ckpt_steps_{step}.ckpt', + monitor='val_loss', + mode='min', + save_last=hparams['save_last'], + save_top_k=hparams['num_ckpt_keep'], + every_n_train_steps=hparams['val_check_interval'], + verbose=True + ) + ], logger=TensorBoardLogger( save_dir=str(work_dir), name='lightning_logs', version='lastest' ), + num_sanity_val_steps=0, gradient_clip_val=hparams['clip_grad_norm'], val_check_interval=hparams['val_check_interval'], - row_log_interval=hparams['log_interval'], - max_updates=hparams['max_updates'], - num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, + check_val_every_n_epoch=None, + log_every_n_steps=hparams['log_interval'], + max_steps=hparams['max_updates'], + use_distributed_sampler=False, + # num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train - # copy_code = input(f'{hparams["save_codes"]} code backup? 
y/n: ') == 'y' - copy_code = True # backup code every time - if copy_code: - t = datetime.now().strftime('%Y%m%d%H%M%S') - code_dir = work_dir.joinpath('codes').joinpath(str(t)) - code_dir.mkdir(exist_ok=True, parents=True) - for c in hparams['save_codes']: - shutil.copytree(c, code_dir, dirs_exist_ok=True) - print(f'| Copied codes to {code_dir}.') - # Copy spk_map.json and dictionary.txt to work dir - binary_dir = pathlib.Path(hparams['binary_data_dir']) - spk_map = work_dir.joinpath('spk_map.json') - spk_map_src = binary_dir.joinpath('spk_map.json') - if not spk_map.exists() and spk_map_src.exists(): - shutil.copy(spk_map_src, spk_map) - print(f'| Copied spk map to {spk_map}.') - dictionary = work_dir.joinpath('dictionary.txt') - dict_src = binary_dir.joinpath('dictionary.txt') - if not dictionary.exists(): - if dict_src.exists(): - shutil.copy(dict_src, dictionary) - else: - shutil.copy(locate_dictionary(), dictionary) - print(f'| Copied dictionary to {dictionary}.') - - trainer.checkpoint_callback.task = task + if trainer.local_rank == 0: + set_hparams(print_hparams=True, is_main_process=True) + # copy_code = input(f'{hparams["save_codes"]} code backup? y/n: ') == 'y' + copy_code = True # backup code every time + if copy_code: + code_dir = work_dir / 'codes' / datetime.now().strftime('%Y%m%d%H%M%S') + code_dir.mkdir(exist_ok=True, parents=True) + for c in hparams['save_codes']: + shutil.copytree(c, code_dir, dirs_exist_ok=True) + print(f'| Copied codes to {code_dir}.') + # Copy spk_map.json and dictionary.txt to work dir + binary_dir = pathlib.Path(hparams['binary_data_dir']) + spk_map = work_dir / 'spk_map.json' + spk_map_src = binary_dir / 'spk_map.json' + if not spk_map.exists() and spk_map_src.exists(): + shutil.copy(spk_map_src, spk_map) + print(f'| Copied spk map to {spk_map}.') + dictionary = work_dir / 'dictionary.txt' + dict_src = binary_dir / 'dictionary.txt' + if not dictionary.exists(): + if dict_src.exists(): + shutil.copy(dict_src, dictionary) + else: + shutil.copy(locate_dictionary(), dictionary) + print(f'| Copied dictionary to {dictionary}.') trainer.fit(task) + if trainer.local_rank == 0: + trainer.callbacks[0].on_validation_end(trainer, task) else: trainer.test(task) - def configure_ddp(self, model, device_ids): - model = DDP( - model, - device_ids=device_ids, - find_unused_parameters=True - ) - if dist.get_rank() != 0 and not hparams['debug']: - sys.stdout = open(os.devnull, "w") - sys.stderr = open(os.devnull, "w") - random.seed(hparams['seed']) - np.random.seed(hparams['seed']) - return model - - def training_end(self, *args, **kwargs): - return None - - def init_ddp_connection(self, proc_rank, world_size): - set_hparams(print_hparams=False) - # guarantees unique ports across jobs from same grid search - default_port = 12910 - # if user gave a port number, use that one instead - try: - default_port = os.environ['MASTER_PORT'] - except Exception: - os.environ['MASTER_PORT'] = str(default_port) - - # figure out the root node addr - root_node = '127.0.0.2' - root_node = self.trainer.resolve_root_node_address(root_node) - os.environ['MASTER_ADDR'] = root_node - dist.init_process_group('nccl', rank=proc_rank, world_size=world_size) - - @data_loader - def train_dataloader(self): - return None - - @data_loader - def test_dataloader(self): - return None - - @data_loader - def val_dataloader(self): - return None - - def on_load_checkpoint(self, checkpoint): - pass - def on_save_checkpoint(self, checkpoint): if isinstance(self.model, CategorizedModule): 
checkpoint['category'] = self.model.category - - def on_sanity_check_start(self): - pass - - def on_train_start(self): - pass - - def on_train_end(self): - pass - - def on_batch_start(self, batch): - pass - - def on_batch_end(self): - pass - - def on_pre_performance_check(self): - pass - - def on_post_performance_check(self): - pass - - def on_before_zero_grad(self, optimizer): - pass - - def on_after_backward(self): - pass - - def backward(self, loss, optimizer): - loss.backward() - - def grad_norm(self, norm_type): - results = {} - total_norm = 0 - for name, p in self.named_parameters(): - if p.requires_grad: - try: - param_norm = p.grad.data.norm(norm_type) - total_norm += param_norm ** norm_type - norm = param_norm ** (1 / norm_type) - - grad = round(norm.data.cpu().numpy().flatten()[0], 3) - results['grad_{}_norm_{}'.format(norm_type, name)] = grad - except Exception: - # this param had no grad - pass - - total_norm = total_norm ** (1. / norm_type) - grad = round(total_norm.data.cpu().numpy().flatten()[0], 3) - results['grad_{}_norm_total'.format(norm_type)] = grad - return results diff --git a/configs/base.yaml b/configs/base.yaml index 1ff76a324..da958b8cb 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -62,7 +62,7 @@ dur_loss: mse # huber|mol # train and eval ########### save_ckpt: true -save_best: false +save_last: false num_ckpt_keep: 3 accumulate_grad_batches: 1 log_interval: 100 diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 1c3006d61..337284063 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -22,9 +22,9 @@ def load_model(model_path, device='cuda'): json_config = json.loads(data) h = AttrDict(json_config) - generator = Generator(h).to(device) + generator = Generator(h)#.to(device) - cp_dict = torch.load(model_path, map_location=device) + cp_dict = torch.load(model_path)#, map_location=device) generator.load_state_dict(cp_dict['generator']) generator.eval() generator.remove_weight_norm() diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index d232718a7..40fcb6a4e 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -70,12 +70,12 @@ def spec2wav(self, mel, **kwargs): if self.h.fmax != hparams['fmax']: print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') with torch.no_grad(): - c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(self.device) + c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1)#.to(self.device) # log10 to log mel c = 2.30259 * c f0 = kwargs.get('f0') if f0 is not None: - f0 = torch.FloatTensor(f0[None, :]).to(self.device) + f0 = torch.FloatTensor(f0[None, :])#.to(self.device) y = self.model(c, f0).view(-1) else: y = self.model(c).view(-1) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 99d596b59..635a5b89d 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -183,12 +183,13 @@ def postprocess(_item): # code for parallel processing for item in tqdm( chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))) + total=len(list(self.meta_data_iterator(prefix))), + ncols=80 ): postprocess(item) else: # code for single cpu processing - for a in tqdm(args): + for a in tqdm(args, ncols=80): item = self.process_item(*a) postprocess(item) @@ -217,9 +218,9 @@ def process_item(self, item_name, meta_data, binarization_args): 'spk_id': 
meta_data['spk_id'], 'seconds': seconds, 'length': length, - 'mel': torch.from_numpy(mel), - 'tokens': torch.LongTensor(self.phone_encoder.encode(meta_data['ph_seq'])), - 'ph_dur': torch.FloatTensor(meta_data['ph_dur']), + 'mel': mel, + 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), + 'ph_dur': np.array(meta_data['ph_dur']), 'interp_uv': self.binarization_args['interp_uv'], } @@ -229,10 +230,10 @@ def process_item(self, item_name, meta_data, binarization_args): ) if uv.all(): # All unvoiced raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') - processed_input['f0'] = torch.from_numpy(gt_f0).float() + processed_input['f0'] = gt_f0.astype(np.float32) # get ground truth dur - processed_input['mel2ph'] = get_mel2ph_torch(self.lr, processed_input['ph_dur'], length, hparams) + processed_input['mel2ph'] = get_mel2ph_torch(self.lr, torch.from_numpy(processed_input['ph_dur']), length, hparams).cpu().numpy() if hparams.get('use_key_shift_embed', False): processed_input['key_shift'] = 0. diff --git a/scripts/train.py b/scripts/train.py index bb2d51f47..98f12e27f 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -2,7 +2,7 @@ from utils.hparams import set_hparams, hparams -set_hparams(print_hparams=False) +set_hparams(is_main_process=False) def run_task(): assert hparams['task_cls'] != '' diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 592cb68bb..3ae080dbd 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -8,6 +8,7 @@ import torch.distributions import torch.optim import torch.utils.data +import pytorch_lightning as pl from tqdm import tqdm import utils @@ -35,13 +36,12 @@ def __init__(self, prefix, shuffle=False): self.data_dir = hparams['binary_data_dir'] self.prefix = prefix self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) - self.indexed_ds = None + self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) def __getitem__(self, index): - if self.indexed_ds is None: - self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) - sample = self.indexed_ds[index] - return sample + # if self.indexed_ds is None: + # self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) + return self.indexed_ds[index] def collater(self, samples): if len(samples) == 0: @@ -58,20 +58,18 @@ def collater(self, samples): 'f0': f0, } if hparams.get('use_key_shift_embed', False): - batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] + batch['key_shift'] = torch.FloatTensor([float(s['key_shift']) for s in samples])[:, None] if hparams.get('use_speed_embed', False): batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] if hparams['use_spk_id']: - spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) + spk_ids = torch.LongTensor([int(s['spk_id']) for s in samples]) batch['spk_ids'] = spk_ids return batch - class AcousticTask(BaseTask): def __init__(self): super().__init__() self.dataset_cls = AcousticDataset - self.phone_encoder = self.build_phone_encoder() self.use_vocoder = hparams['infer'] or hparams.get('val_with_vocoder', True) if self.use_vocoder: self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() @@ -79,6 +77,12 @@ def __init__(self): self.saving_results_futures = None self.stats = {} self.logged_gt_wav = set() + + def setup(self, stage): + self.phone_encoder = self.build_phone_encoder() + self.model = self.build_model() + self.train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) + self.valid_dataset = 
self.dataset_cls(hparams['valid_set_name'], shuffle=False) @staticmethod def build_phone_encoder(): @@ -86,15 +90,16 @@ def build_phone_encoder(): return TokenTextEncoder(vocab_list=phone_list) def build_model(self): - self.model = DiffSingerAcoustic( + model = DiffSingerAcoustic( vocab_size=len(self.phone_encoder), out_dims=hparams['audio_num_mel_bins'] ) - utils.print_arch(self.model) - return self.model + if self.trainer.local_rank == 0: + utils.print_arch(model) + return model def build_optimizer(self, model): - self.optimizer = optimizer = torch.optim.AdamW( + optimizer = torch.optim.AdamW( filter(lambda p: p.requires_grad, model.parameters()), lr=hparams['lr'], betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), @@ -103,33 +108,29 @@ def build_optimizer(self, model): def build_scheduler(self, optimizer): return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) - - @data_loader + def train_dataloader(self): - if self.persistent_dataloader is None: - train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) - self.persistent_dataloader = self.build_dataloader( - train_dataset, True, self.max_tokens, self.max_sentences, persistent=True - ) - return self.persistent_dataloader - - @data_loader + sampler = self.build_batch_sampler(self.train_dataset, True, self.max_tokens, self.max_sentences) + return torch.utils.data.DataLoader(self.train_dataset, + collate_fn=self.train_dataset.collater, + batch_sampler=sampler, + num_workers=self.train_dataset.num_workers, + prefetch_factor=4, + pin_memory=False, + persistent_workers=True) + def val_dataloader(self): - valid_dataset = self.dataset_cls(hparams['valid_set_name'], shuffle=False) - return self.build_dataloader(valid_dataset, False, self.max_eval_tokens, self.max_eval_sentences) + sampler = self.build_batch_sampler(self.valid_dataset, False, self.max_tokens, self.max_sentences) + return torch.utils.data.DataLoader(self.valid_dataset, + collate_fn=self.valid_dataset.collater, + batch_sampler=sampler, + num_workers=self.valid_dataset.num_workers, + prefetch_factor=4, + shuffle=False) - @data_loader def test_dataloader(self): return self.val_dataloader() - - def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx): - if optimizer is None: - return - optimizer.step() - optimizer.zero_grad() - if self.scheduler is not None: - self.scheduler.step(self.global_step // hparams['accumulate_grad_batches']) - + def run_model(self, sample, return_output=False, infer=False): """ steps: @@ -164,15 +165,16 @@ def _training_step(self, sample, batch_idx, _): log_outputs = self.run_model(sample) total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) log_outputs['batch_size'] = sample['tokens'].size()[0] - log_outputs['lr'] = self.scheduler.get_lr()[0] + log_outputs['lr'] = self.lr_schedulers().get_lr()[0] return total_loss, log_outputs - def validation_step(self, sample, batch_idx): + def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, return_output=False, infer=False) total_loss = sum(losses.values()) outputs = { 'losses': losses, - 'total_loss': total_loss, 'size': sample['size'] + 'total_loss': total_loss, + 'size': sample['size'] } outputs = utils.tensors_to_scalars(outputs) @@ -184,7 +186,7 @@ def validation_step(self, sample, batch_idx): return outputs - def _validation_end(self, outputs): + def _on_validation_end(self, outputs): all_losses_meter = { 'total_loss': 
utils.AvgrageMeter(), } @@ -223,7 +225,7 @@ def plot_mel(self, batch_idx, spec, spec_out, name=None): ############ # infer ############ - def test_start(self): + def on_test_start(self): self.saving_result_pool = Pool(8) self.saving_results_futures = [] self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() @@ -233,7 +235,7 @@ def test_step(self, sample, batch_idx): sample['outputs'] = mel_pred return self.after_infer(sample) - def test_end(self, outputs): + def on_test_end(self): self.saving_result_pool.close() [f.get() for f in tqdm(self.saving_results_futures)] self.saving_result_pool.join() diff --git a/utils/__init__.py b/utils/__init__.py index 2f41cf2f0..3d3fe6db3 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -45,7 +45,7 @@ def collate_nd(values, pad_value=0, max_len=None): Pad a list of Nd tensors on their first dimension and stack them into a (N+1)d tensor. """ size = ((max(v.size(0) for v in values) if max_len is None else max_len), *values[0].shape[1:]) - res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device) + res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype) for i, v in enumerate(values): res[i, :len(v), ...] = v @@ -64,7 +64,7 @@ def _is_batch_full(batch, num_tokens, max_tokens, max_sentences): def batch_by_size( indices, num_tokens_fn, max_tokens=None, max_sentences=None, - required_batch_size_multiple=1, distributed=False + required_batch_size_multiple=1 ): """ Yield mini-batches of indices bucketed by size. Batches may contain diff --git a/utils/hparams.py b/utils/hparams.py index bd82807c4..8be0a0663 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -3,8 +3,7 @@ import yaml -from utils.multiprocess_utils import is_main_process - +from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True hparams = {} @@ -23,7 +22,7 @@ def override_config(old_config: dict, new_config: dict): old_config[k] = v -def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True): +def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True, is_main_process=None): """ Load hparams from multiple sources: 1. config chain (i.e. 
first load base_config, then load config); @@ -47,6 +46,9 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob args = Args(config=config, exp_name=exp_name, hparams=hparams_str, infer=False, validate=False, reset=False, debug=False) + if is_main_process is None: + is_main_process = mp_is_main_process + args_work_dir = '' if args.exp_name != '': args.work_dir = args.exp_name @@ -117,6 +119,7 @@ def load_config(config_fn): # deep first if global_hparams: hparams.clear() hparams.update(hparams_) + hparams['is_main_process'] = is_main_process if is_main_process and print_hparams and global_print_hparams and global_hparams: print('| Hparams chains: ', config_chains) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 5c8f84581..c0482667c 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,6 +1,10 @@ import os.path +import pathlib import pickle +import multiprocessing from copy import deepcopy +import h5py +import torch import numpy as np @@ -8,20 +12,25 @@ class IndexedDataset: def __init__(self, path, prefix, num_cache=0): super().__init__() - self.path = path - self.data_file = None - self.data_offsets = np.load(os.path.join(path, f'{prefix}.idx')) - self.data_file = open(os.path.join(path, f'{prefix}.data'), 'rb', buffering=-1) + self.path = pathlib.Path(path) + # self.data_file = None + # self.data_offsets = np.load(self.path / f'{prefix}.idx')) + # self.data_file = open(self.path / f'{prefix}.data', 'rb', buffering=-1) + self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') self.cache = [] self.num_cache = num_cache def check_index(self, i): - if i < 0 or i >= len(self.data_offsets) - 1: + # if i < 0 or i >= len(self.data_offsets) - 1: + # raise IndexError('index out of range') + if i < 0 or i >= len(self.dset): raise IndexError('index out of range') def __del__(self): - if self.data_file: - self.data_file.close() + # if self.data_file: + # self.data_file.close() + if self.dset: + del self.dset def __getitem__(self, i): self.check_index(i) @@ -29,22 +38,27 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - self.data_file.seek(self.data_offsets[i]) - b = self.data_file.read(self.data_offsets[i + 1] - self.data_offsets[i]) - item = pickle.loads(b) + # self.data_file.seek(self.data_offsets[i]) + # b = self.data_file.read(self.data_offsets[i + 1] - self.data_offsets[i]) + # item = pickle.loads(b) + item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache = [(i, deepcopy(item))] + self.cache[:-1] return item def __len__(self): - return len(self.data_offsets) - 1 + # return len(self.data_offsets) - 1 + return len(self.dset) class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): - self.path = path + self.path = pathlib.Path(path) self.prefix = prefix - self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') - self.byte_offsets = [0] + # self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') + self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'w') + self.counter = 0 + self.lock = multiprocessing.Lock() + # self.byte_offsets = [0] if allowed_attr is not None: self.allowed_attr = set(allowed_attr) else: @@ -56,15 +70,24 @@ def add_item(self, item): k: item.get(k) for k in self.allowed_attr } - s = pickle.dumps(item) - n_bytes = self.out_file.write(s) - self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) + with self.lock: + item_no = self.counter + self.counter += 
1 + for k, v in item.items(): + if isinstance(v, np.ndarray): + self.dset.create_dataset(f'{item_no}/{k}', data=v, compression="gzip", compression_opts=4) + else: + self.dset.create_dataset(f'{item_no}/{k}', data=v) + # s = pickle.dumps(item) + # n_bytes = self.out_file.write(s) + # self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) def finalize(self): - self.out_file.close() - with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: - # noinspection PyTypeChecker - np.save(f, self.byte_offsets) + del self.dset + # self.out_file.close() + # with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: + # # noinspection PyTypeChecker + # np.save(f, self.byte_offsets) if __name__ == "__main__": diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index 41eb13142..b54f99db7 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -8,7 +8,8 @@ def main_process_print(self, *args, sep=' ', end='\n', file=None): - if is_main_process: + from utils.hparams import hparams + if hparams['is_main_process']: print(self, *args, sep=sep, end=end, file=file) diff --git a/utils/training_utils.py b/utils/training_utils.py index 409b15388..63af4718e 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,5 +1,9 @@ +import utils from utils.hparams import hparams +import math +import numpy as np +from torch.utils.data.distributed import Sampler, DistributedSampler class RSQRTSchedule(object): def __init__(self, optimizer): @@ -25,3 +29,66 @@ def step(self, num_updates): def get_lr(self): return self.optimizer.param_groups[0]['lr'] + +class BatchSamplerSimilarLength(Sampler): + def __init__(self, dataset, indices=None, max_tokens=None, max_sentences=None, required_batch_size_multiple=-1, batch_by_size=True, shuffle=True): + self.shuffle = shuffle + + if batch_by_size: + self.batches = utils.batch_by_size( + indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, + required_batch_size_multiple=required_batch_size_multiple + ) + else: + self.batches = [indices[i:i + max_sentences] for i in range(0, len(indices), max_sentences)] + + def __iter__(self): + if self.shuffle: + np.random.shuffle(self.batches) + for batch in self.batches: + yield batch + + def __len__(self): + return len(self.batches) + +class DistributedBatchSamplerSimilarLength(DistributedSampler): + def __init__(self, dataset, num_replicas=None, + rank=None, shuffle=True, + seed=0, drop_last=False, batch_sampler_cls=None) -> None: + super().__init__(dataset=dataset, num_replicas=num_replicas, rank=rank, shuffle=shuffle, seed=seed, + drop_last=drop_last) + self.batch_sampler_cls = batch_sampler_cls + self.batch_sampler = None + + def __iter__(self): + if self.shuffle: + indices = np.random.RandomState(seed=self.seed).permutation(len(self.dataset)) + if self.dataset.sort_by_len: + indices = indices[np.argsort(np.array(self.dataset._sizes)[indices], kind='mergesort')] + else: + indices = np.arange(len(self.dataset)) + indices = indices.tolist() + + if not self.drop_last: + # add extra samples to make it evenly divisible + padding_size = self.total_size - len(indices) + if padding_size <= len(indices): + indices += indices[:padding_size] + else: + indices += (indices * math.ceil(padding_size / len(indices)))[:padding_size] + else: + # remove tail of data to make it evenly divisible. 
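# (illustrative arithmetic, assuming torch's stock DistributedSampler rules:
#  num_samples = ceil(len(dataset) / num_replicas) when drop_last is False,
#  else ceil((len(dataset) - num_replicas) / num_replicas),
#  with total_size = num_samples * num_replicas)
# e.g. len(dataset) = 10 with num_replicas = 4:
#   drop_last=False -> num_samples = 3, total_size = 12: repeat 2 indices as padding
#   drop_last=True  -> num_samples = 2, total_size = 8:  the 2 tail indices are cut below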
+ indices = indices[:self.total_size] + assert len(indices) == self.total_size + + # subsample + indices = indices[self.rank:self.total_size:self.num_replicas] + assert len(indices) == self.num_samples + + self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, shuffle=self.shuffle) + return iter(self.batch_sampler) + + def __len__(self) -> int: + if self.batch_sampler is None: + raise ValueError("BatchSampler is not initialized. Call __iter__ first.") + return len(self.batch_sampler) From 6b156650a2c19f3dfddbfaa7f09ec766088bce14 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 23 Mar 2023 01:20:25 -0500 Subject: [PATCH 118/475] Add hparams to yaml, successful checkpointing --- basics/base_task.py | 54 +- preparation/acoustic_preparation.ipynb | 24 +- training/acoustic_task.py | 1 - utils/pl_utils.py | 1672 +----------------------- 4 files changed, 93 insertions(+), 1658 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 809c70a1a..0f055f05a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -12,14 +12,12 @@ import random import sys import numpy as np -import torch.distributed as dist import pytorch_lightning as pl -from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.loggers import TensorBoardLogger -from pytorch_lightning.strategies import DDPStrategy -from pytorch_lightning.utilities import grad_norm, rank_zero_only +from pytorch_lightning.utilities import grad_norm from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength +from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj from torch import nn import torch.utils.data import utils @@ -119,8 +117,8 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): # log_outputs['all_loss'] = total_loss.item() progress_bar_log = log_outputs | {'step': self.global_step} tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False) - self.log_dict(tb_log) + self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False, rank_zero_only=True) + self.log_dict(tb_log, prog_bar=False, logger=True, on_step=True, on_epoch=False, rank_zero_only=True) return { 'loss': total_loss } @@ -133,7 +131,7 @@ def on_train_epoch_end(self): # f"\n==============\n") def on_before_optimizer_step(self, optimizer): - self.log_dict(grad_norm(self, norm_type=2)) + self.log_dict(grad_norm(self, norm_type=2), rank_zero_only=True) def on_validation_start(self): self.validation_step_outputs = [] @@ -156,10 +154,7 @@ def validation_step(self, sample, batch_idx): """ outputs = self._validation_step(sample, batch_idx) self.validation_step_outputs.append(outputs) - - return { - 'val_loss': outputs['total_loss'] - } + return outputs def _on_validation_end(self, outputs): """ @@ -171,13 +166,11 @@ def _on_validation_end(self, outputs): def on_validation_epoch_end(self): loss_output = self._on_validation_end(self.validation_step_outputs) - print(f"\n==============\n " - f"valid results: {loss_output}" - f"\n==============\n") - self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True) - return { - 'val_loss': loss_output['total_loss'] - } + # print(f"\n==============\n " + # f"valid results: {loss_output}" + # f"\n==============\n") + self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, 
sync_dist=True) + self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, prog_bar=False, logger=True, sync_dist=True) def build_scheduler(self, optimizer): raise NotImplementedError @@ -231,24 +224,24 @@ def on_test_end(self): @classmethod def start(cls): - random.seed(hparams['seed']) - np.random.seed(hparams['seed']) + pl.seed_everything(hparams['seed'], workers=True) task = cls() work_dir = pathlib.Path(hparams['work_dir']) trainer = pl.Trainer( - accelerator='gpu', - devices=4, - strategy=DDPStrategy(find_unused_parameters=False, process_group_backend='gloo'), - precision="bf16", + accelerator=hparams['pl_trainer_accelerator'], + devices=hparams['pl_trainer_devices'], + strategy=get_stategy_obj(hparams['pl_trainer_strategy']), + precision=hparams['pl_trainer_precision'], callbacks=[ - ModelCheckpoint( + DiffModelCheckpoint( dirpath=work_dir, - filename='model_ckpt_steps_{step}.ckpt', + filename='model_ckpt_steps_{step}', monitor='val_loss', mode='min', save_last=hparams['save_last'], save_top_k=hparams['num_ckpt_keep'], - every_n_train_steps=hparams['val_check_interval'], + save_on_train_epoch_end=True, + auto_insert_metric_name=False, verbose=True ) ], @@ -257,14 +250,13 @@ def start(cls): name='lightning_logs', version='lastest' ), - num_sanity_val_steps=0, gradient_clip_val=hparams['clip_grad_norm'], val_check_interval=hparams['val_check_interval'], check_val_every_n_epoch=None, log_every_n_steps=hparams['log_interval'], max_steps=hparams['max_updates'], use_distributed_sampler=False, - # num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, + num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train @@ -293,9 +285,7 @@ def start(cls): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') - trainer.fit(task) - if trainer.local_rank == 0: - trainer.callbacks[0].on_validation_end(trainer, task) + trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: trainer.test(task) diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index df686bacd..105059ad4 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1228,7 +1228,16 @@ " 'num_ckpt_keep': num_ckpt_keep,\n", " 'max_updates': max_updates,\n", " 'permanent_ckpt_start': permanent_ckpt_start,\n", - " 'permanent_ckpt_interval': permanent_ckpt_interval\n", + " 'permanent_ckpt_interval': permanent_ckpt_interval,\n", + " \n", + " ###########\n", + " # pytorch lightning\n", + " # Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values\n", + " ###########\n", + " 'pl_trainer_accelerator': 'auto',\n", + " 'pl_trainer_devices': 'auto',\n", + " 'pl_trainer_strategy': 'auto', # With extra choice 'ddp_gloo' in case that NCCL backend stucks on you\n", + " 'pl_trainer_precision': '32-true',\n", "}\n", "\n", "augmentation_args = {}\n", @@ -1254,6 +1263,8 @@ " configs['use_speed_embed'] = True\n", "configs['augmentation_args'] = augmentation_args\n", "\n", + "\n", + "\n", "with open(f'../data/{full_name}/config.yaml', 'w', encoding='utf8') as f:\n", " yaml.dump(configs, f, sort_keys=False, allow_unicode=True)\n", "\n", @@ -1536,7 +1547,16 @@ " 'num_ckpt_keep': num_ckpt_keep,\n", " 'max_updates': max_updates,\n", " 'permanent_ckpt_start': permanent_ckpt_start,\n", - " 
'permanent_ckpt_interval': permanent_ckpt_interval\n", + " 'permanent_ckpt_interval': permanent_ckpt_interval,\n", + " \n", + " ###########\n", + " # pytorch lightning\n", + " # Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values\n", + " ###########\n", + " 'pl_trainer_accelerator': 'auto',\n", + " 'pl_trainer_devices': 'auto',\n", + " 'pl_trainer_strategy': 'auto', # With extra choice 'ddp_gloo' in case that NCCL backend stucks on you\n", + " 'pl_trainer_precision': '32-true',\n", "}\n", "\n", "augmentation_args = {}\n", diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 3ae080dbd..46b4cae90 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -23,7 +23,6 @@ from utils.hparams import hparams from utils.indexed_datasets import IndexedDataset from utils.phoneme_utils import build_phoneme_list -from utils.pl_utils import data_loader from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder diff --git a/utils/pl_utils.py b/utils/pl_utils.py index b2a67a285..55db650f2 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1,1628 +1,54 @@ -import matplotlib -from torch.nn import DataParallel -from torch.nn.parallel import DistributedDataParallel - -from basics.base_model import CategorizedModule - -matplotlib.use('Agg') -import glob -import itertools -import threading -import traceback - -from pytorch_lightning.callbacks import GradientAccumulationScheduler -from pytorch_lightning.callbacks import ModelCheckpoint - -from functools import wraps -from torch.cuda._utils import _get_device_index -import numpy as np -import torch.optim -import torch.utils.data -import copy -import logging +from copy import deepcopy import os import re -import sys -import torch -import torch.distributed as dist -import torch.multiprocessing as mp -import tqdm -from torch.optim.optimizer import Optimizer - - -def get_a_var(obj): # pragma: no cover - if isinstance(obj, torch.Tensor): - return obj - - if isinstance(obj, list) or isinstance(obj, tuple): - for result in map(get_a_var, obj): - if isinstance(result, torch.Tensor): - return result - if isinstance(obj, dict): - for result in map(get_a_var, obj.items()): - if isinstance(result, torch.Tensor): - return result - return None - -def data_loader(fn): - """ - Decorator to make any fx with this use the lazy property - :param fn: - :return: - """ - - wraps(fn) - attr_name = '_lazy_' + fn.__name__ - - def _get_data_loader(self): - try: - value = getattr(self, attr_name) - except AttributeError: - try: - value = fn(self) # Lazy evaluation, done only once. - if ( - value is not None and - not isinstance(value, list) and - fn.__name__ in ['test_dataloader', 'val_dataloader'] - ): - value = [value] - except AttributeError as e: - # Guard against AttributeError suppression. (Issue #142) - traceback.print_exc() - error = f'{fn.__name__}: An AttributeError was encountered: ' + str(e) - raise RuntimeError(error) from e - setattr(self, attr_name, value) # Memoize evaluation. - return value - - return _get_data_loader - - -def parallel_apply(modules, inputs, kwargs_tup=None, devices=None): # pragma: no cover - r"""Applies each `module` in :attr:`modules` in parallel on arguments - contained in :attr:`inputs` (positional) and :attr:`kwargs_tup` (keyword) - on each of :attr:`devices`. 
- - Args: - modules (Module): modules to be parallelized - inputs (tensor): inputs to the modules - devices (list of int or torch.device): CUDA devices - - :attr:`modules`, :attr:`inputs`, :attr:`kwargs_tup` (if given), and - :attr:`devices` (if given) should all have same length. Moreover, each - element of :attr:`inputs` can either be a single object as the only argument - to a module, or a collection of positional arguments. - """ - assert len(modules) == len(inputs) - if kwargs_tup is not None: - assert len(modules) == len(kwargs_tup) - else: - kwargs_tup = ({},) * len(modules) - if devices is not None: - assert len(modules) == len(devices) - else: - devices = [None] * len(modules) - devices = list(map(lambda x: _get_device_index(x, True), devices)) - lock = threading.Lock() - results = {} - grad_enabled = torch.is_grad_enabled() - - def _worker(i, module, input, kwargs, device=None): - torch.set_grad_enabled(grad_enabled) - if device is None: - device = get_a_var(input).get_device() - try: - with torch.cuda.device(device): - # this also avoids accidental slicing of `input` if it is a Tensor - if not isinstance(input, (list, tuple)): - input = (input,) - - # --------------- - # CHANGE - if module.training: - output = module.training_step(*input, **kwargs) - - elif module.testing: - output = module.test_step(*input, **kwargs) - - else: - output = module.validation_step(*input, **kwargs) - # --------------- - - with lock: - results[i] = output - except Exception as e: - with lock: - results[i] = e - - # make sure each module knows what training state it's in... - # fixes weird bug where copies are out of sync - root_m = modules[0] - for m in modules[1:]: - m.training = root_m.training - m.testing = root_m.testing - - if len(modules) > 1: - threads = [threading.Thread(target=_worker, - args=(i, module, input, kwargs, device)) - for i, (module, input, kwargs, device) in - enumerate(zip(modules, inputs, kwargs_tup, devices))] +import torch - for thread in threads: - thread.start() - for thread in threads: - thread.join() +import pytorch_lightning as pl +from pytorch_lightning.callbacks import ModelCheckpoint +from pytorch_lightning.strategies import DDPStrategy + +class DiffModelCheckpoint(ModelCheckpoint): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _monitor_candidates(self, trainer: "pl.Trainer"): + monitor_candidates = deepcopy(trainer.callback_metrics) + monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) + monitor_candidates["step"] = torch.tensor(trainer.global_step) + return monitor_candidates + + def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool: + from pytorch_lightning.trainer.states import RunningStage + return trainer.state.stage == RunningStage.TRAINING and super()._should_save_on_train_epoch_end(trainer) + + # @classmethod + # def _format_checkpoint_name(cls, filename, metrics, prefix = "", auto_insert_metric_name = True): + # # metrics = {k: v + 1 if k == 'step' or k == 'epoch' else v for k, v in metrics.items()} + # return super()._format_checkpoint_name(filename, metrics, prefix, auto_insert_metric_name) + +def get_latest_checkpoint_path(work_dir): + if not os.path.exists(work_dir): + return None + + last_steps = -1 + last_ckpt_name = None + + checkpoints = os.listdir(work_dir) + for name in checkpoints: + if '.ckpt' in name and not name.endswith('part'): + if 'steps_' in name: + steps = name.split('steps_')[1] + steps = int(re.sub('[^0-9]', '', steps)) + + if steps > last_steps: + last_steps = 
steps + last_ckpt_name = name + + return os.path.join(work_dir, last_ckpt_name) if last_ckpt_name is not None else None + +def get_stategy_obj(strategy): + if strategy == 'ddp_gloo': + return DDPStrategy(process_group_backend='gloo') else: - _worker(0, modules[0], inputs[0], kwargs_tup[0], devices[0]) - - outputs = [] - for i in range(len(inputs)): - output = results[i] - if isinstance(output, Exception): - raise output - outputs.append(output) - return outputs - - -def _find_tensors(obj): # pragma: no cover - r""" - Recursively find all tensors contained in the specified object. - """ - if isinstance(obj, torch.Tensor): - return [obj] - if isinstance(obj, (list, tuple)): - return itertools.chain(*map(_find_tensors, obj)) - if isinstance(obj, dict): - return itertools.chain(*map(_find_tensors, obj.values())) - return [] - - -class DDP(DistributedDataParallel): - """ - Override the forward call in lightning so it goes to training and validation step respectively - """ - - def parallel_apply(self, replicas, inputs, kwargs): - return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)]) - - def forward(self, *inputs, **kwargs): # pragma: no cover - self._sync_params() - if self.device_ids: - inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids) - if len(self.device_ids) == 1: - # -------------- - # LIGHTNING MOD - # -------------- - # normal - # output = self.module(*inputs[0], **kwargs[0]) - # lightning - if self.module.training: - output = self.module.training_step(*inputs[0], **kwargs[0]) - elif self.module.testing: - output = self.module.test_step(*inputs[0], **kwargs[0]) - else: - output = self.module.validation_step(*inputs[0], **kwargs[0]) - else: - outputs = self.parallel_apply(self._module_copies[:len(inputs)], inputs, kwargs) - output = self.gather(outputs, self.output_device) - else: - # normal - output = self.module(*inputs, **kwargs) - - if torch.is_grad_enabled(): - # We'll return the output object verbatim since it is a freeform - # object. We need to find any tensors in this object, though, - # because we need to figure out which parameters were used during - # this forward pass, to ensure we short circuit reduction for any - # unused parameters. Only if `find_unused_parameters` is set. 
- if self.find_unused_parameters: - self.reducer.prepare_for_backward(list(_find_tensors(output))) - else: - self.reducer.prepare_for_backward([]) - return output - - -class DP(DataParallel): - """ - Override the forward call in lightning so it goes to training and validation step respectively - """ - - def forward(self, *inputs, **kwargs): - if not self.device_ids: - return self.module(*inputs, **kwargs) - - for t in itertools.chain(self.module.parameters(), self.module.buffers()): - if t.device != self.src_device_obj: - raise RuntimeError("module must have its parameters and buffers " - "on device {} (device_ids[0]) but found one of " - "them on device: {}".format(self.src_device_obj, t.device)) - - inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids) - if len(self.device_ids) == 1: - # lightning - if self.module.training: - return self.module.training_step(*inputs[0], **kwargs[0]) - elif self.module.testing: - return self.module.test_step(*inputs[0], **kwargs[0]) - else: - return self.module.validation_step(*inputs[0], **kwargs[0]) - - replicas = self.replicate(self.module, self.device_ids[:len(inputs)]) - outputs = self.parallel_apply(replicas, inputs, kwargs) - return self.gather(outputs, self.output_device) - - def parallel_apply(self, replicas, inputs, kwargs): - return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)]) - - -class GradientAccumulationScheduler: - def __init__(self, scheduling: dict): - if scheduling == {}: # empty dict error - raise TypeError("Empty dict cannot be interpreted correct") - - for key in scheduling.keys(): - if not isinstance(key, int) or not isinstance(scheduling[key], int): - raise TypeError("All epoches and accumulation factor must be integers") - - minimal_epoch = min(scheduling.keys()) - if minimal_epoch < 1: - msg = f"Epochs indexing from 1, epoch {minimal_epoch} cannot be interpreted correct" - raise IndexError(msg) - elif minimal_epoch != 1: # if user didnt define first epoch accumulation factor - scheduling.update({1: 1}) - - self.scheduling = scheduling - self.epochs = sorted(scheduling.keys()) - - def on_epoch_begin(self, epoch, trainer): - epoch += 1 # indexing epochs from 1 - for i in reversed(range(len(self.epochs))): - if epoch >= self.epochs[i]: - trainer.accumulate_grad_batches = self.scheduling.get(self.epochs[i]) - break - - -class LatestModelCheckpoint(ModelCheckpoint): - def __init__(self, filepath, monitor='val_loss', verbose=0, num_ckpt_keep=5, - permanent_ckpt_start=0, permanent_ckpt_interval=-1, save_weights_only=False, - mode='auto', period=1, prefix='model', save_best=True): - super(ModelCheckpoint, self).__init__() - self.monitor = monitor - self.verbose = verbose - self.filepath = filepath - os.makedirs(filepath, exist_ok=True) - self.num_ckpt_keep = num_ckpt_keep - self.permanent_ckpt_start = max(0, permanent_ckpt_start) - self.permanent_ckpt_interval = permanent_ckpt_interval - self.save_best = save_best - self.save_weights_only = save_weights_only - self.period = period - self.epochs_since_last_check = 0 - self.prefix = prefix - self.best_k_models = {} - # {filename: monitor} - self.kth_best_model = '' - self.save_top_k = 1 - self.task = None - if mode == 'min': - self.monitor_op = np.less - self.best = np.Inf - self.mode = 'min' - elif mode == 'max': - self.monitor_op = np.greater - self.best = -np.Inf - self.mode = 'max' - else: - if 'acc' in self.monitor or self.monitor.startswith('fmeasure'): - self.monitor_op = np.greater - self.best = -np.Inf - self.mode = 'max' - else: - 
self.monitor_op = np.less - self.best = np.Inf - self.mode = 'min' - if os.path.exists(f'{self.filepath}/best_valid.npy'): - self.best = np.load(f'{self.filepath}/best_valid.npy')[0] - - def get_all_ckpts(self): - return sorted(glob.glob(f'{self.filepath}/{self.prefix}_ckpt_steps_*.ckpt'), - key=lambda x: -int(re.findall(r'.*steps_(\d+)\.ckpt', x)[0])) - - def on_epoch_end(self, epoch, logs=None): - logs = logs or {} - self.epochs_since_last_check += 1 - best_filepath = f'{self.filepath}/{self.prefix}_ckpt_best.pt' - if self.epochs_since_last_check >= self.period: - self.epochs_since_last_check = 0 - filepath = f'{self.filepath}/{self.prefix}_ckpt_steps_{self.task.global_step}.ckpt' - if self.verbose > 0: - logging.info(f'Epoch {epoch:05d}@{self.task.global_step}: saving model to {filepath}') - self._save_model(filepath) - for old_ckpt in self.get_all_ckpts()[self.num_ckpt_keep:]: - if self.permanent_ckpt_interval > 0: - ckpt_steps_diff = int(re.findall(r'.*steps_(\d+)\.ckpt', old_ckpt)[0]) - self.permanent_ckpt_start - if ckpt_steps_diff >= 0 and ckpt_steps_diff % self.permanent_ckpt_interval == 0: - # Skip permanent checkpoints - continue - os.remove(old_ckpt) - if self.verbose > 0: - logging.info(f'Delete ckpt: {os.path.basename(old_ckpt)}') - current = logs.get(self.monitor) - if current is not None and self.save_best: - if self.monitor_op(current, self.best): - self.best = current - if self.verbose > 0: - logging.info( - f'Epoch {epoch:05d}@{self.task.global_step}: {self.monitor} reached' - f' {current:0.5f} (best {self.best:0.5f}), saving model to' - f' {best_filepath} as top 1') - self._save_model(best_filepath) - np.save(f'{self.filepath}/best_valid.npy', [self.best]) - - -class BaseTrainer: - def __init__( - self, - logger=True, - checkpoint_callback=True, - default_save_path=None, - gradient_clip_val=0, - process_position=0, - gpus=-1, - log_gpu_memory=None, - show_progress_bar=True, - track_grad_norm=-1, - check_val_every_n_epoch=1, - accumulate_grad_batches=1, - max_updates=1000, - min_epochs=1, - val_check_interval=1.0, - log_save_interval=100, - row_log_interval=10, - print_nan_grads=False, - weights_summary='full', - num_sanity_val_steps=5, - resume_from_checkpoint=None, - ): - self.log_gpu_memory = log_gpu_memory - self.gradient_clip_val = gradient_clip_val - self.check_val_every_n_epoch = check_val_every_n_epoch - self.track_grad_norm = track_grad_norm - self.on_gpu = True if (gpus and torch.cuda.is_available()) else False - self.process_position = process_position - self.weights_summary = weights_summary - self.max_updates = max_updates - self.min_epochs = min_epochs - self.num_sanity_val_steps = num_sanity_val_steps - self.print_nan_grads = print_nan_grads - self.resume_from_checkpoint = resume_from_checkpoint - self.default_save_path = default_save_path - - # training bookeeping - self.total_batch_idx = 0 - self.running_loss = [] - self.avg_loss = 0 - self.batch_idx = 0 - self.tqdm_metrics = {} - self.callback_metrics = {} - self.num_val_batches = 0 - self.num_training_batches = 0 - self.num_test_batches = 0 - self.get_train_dataloader = None - self.get_test_dataloaders = None - self.get_val_dataloaders = None - self.is_iterable_train_dataloader = False - - # training state - self.model = None - self.testing = False - self.disable_validation = False - self.lr_schedulers = [] - self.optimizers = None - self.global_step = 0 - self.current_epoch = 0 - self.total_batches = 0 - - # configure checkpoint callback - self.checkpoint_callback = checkpoint_callback - 
self.checkpoint_callback.save_function = self.save_checkpoint - self.weights_save_path = self.checkpoint_callback.filepath - - # accumulated grads - self.configure_accumulated_gradients(accumulate_grad_batches) - - # allow int, string and gpu list - self.data_parallel_device_ids = [ - int(x) for x in os.environ.get("CUDA_VISIBLE_DEVICES", "").split(",") if x != ''] - if len(self.data_parallel_device_ids) == 0: - self.root_gpu = None - self.on_gpu = False - else: - self.root_gpu = self.data_parallel_device_ids[0] - self.on_gpu = True - - # distributed backend choice - self.use_ddp = False - self.use_dp = False - self.single_gpu = False - self.distributed_backend = 'ddp' if self.num_gpus > 0 else 'dp' - self.set_distributed_mode(self.distributed_backend) - - self.proc_rank = 0 - self.world_size = 1 - self.node_rank = 0 - - # can't init progress bar here because starting a new process - # means the progress_bar won't survive pickling - self.show_progress_bar = show_progress_bar - - # logging - self.log_save_interval = log_save_interval - self.val_check_interval = val_check_interval - self.logger = logger - self.logger.rank = 0 - self.row_log_interval = row_log_interval - - @property - def num_gpus(self): - gpus = self.data_parallel_device_ids - if gpus is None: - return 0 - else: - return len(gpus) - - @property - def data_parallel(self): - return self.use_dp or self.use_ddp - - def get_model(self) -> CategorizedModule: - is_dp_module = isinstance(self.model, (DDP, DP)) - model = self.model.module if is_dp_module else self.model - return model - - # ----------------------------- - # MODEL TRAINING - # ----------------------------- - def fit(self, model): - if self.use_ddp: - mp.spawn(self.ddp_train, nprocs=self.num_gpus, args=(model,)) - else: - model.model = model.build_model() - if not self.testing: - self.optimizers, self.lr_schedulers = self.init_optimizers(model.configure_optimizers()) - if self.use_dp: - model.cuda(self.root_gpu) - model = DP(model, device_ids=self.data_parallel_device_ids) - elif self.single_gpu: - model.cuda(self.root_gpu) - self.run_pretrain_routine(model) - return 1 - - def init_optimizers(self, optimizers): - - # single optimizer - if isinstance(optimizers, Optimizer): - return [optimizers], [] - - # two lists - elif len(optimizers) == 2 and isinstance(optimizers[0], list): - optimizers, lr_schedulers = optimizers - return optimizers, lr_schedulers - - # single list or tuple - elif isinstance(optimizers, list) or isinstance(optimizers, tuple): - return optimizers, [] - - def run_pretrain_routine(self, model): - """Sanity check a few things before starting actual training. - - :param model: - """ - ref_model = model - if self.data_parallel: - ref_model = model.module - - # give model convenience properties - ref_model.trainer = self - - # set local properties on the model - self.copy_trainer_model_properties(ref_model) - - # link up experiment object - if self.logger is not None: - ref_model.logger = self.logger - self.logger.save() - - if self.use_ddp: - dist.barrier() - - # set up checkpoint callback - # self.configure_checkpoint_callback() - - # transfer data loaders from model - self.get_dataloaders(ref_model) - - # track model now. 
- # if cluster resets state, the model will update with the saved weights - self.model = model - - # restore training and model before hpc call - self.restore_weights(model) - - # when testing requested only run test and return - if self.testing: - self.run_evaluation(test=True) - return - - # check if we should run validation during training - self.disable_validation = self.num_val_batches == 0 - - # run tiny validation (if validation defined) - # to make sure program won't crash during val - ref_model.on_sanity_check_start() - ref_model.on_train_start() - if not self.disable_validation and self.num_sanity_val_steps > 0: - # init progress bars for validation sanity check - pbar = tqdm.tqdm(desc='Validation sanity check', - total=self.num_sanity_val_steps * len(self.get_val_dataloaders()), - leave=False, position=2 * self.process_position, - disable=not self.show_progress_bar, dynamic_ncols=True, unit='batch') - self.main_progress_bar = pbar - # dummy validation progress bar - self.val_progress_bar = tqdm.tqdm(disable=True) - - self.evaluate(model, self.get_val_dataloaders(), self.num_sanity_val_steps, self.testing) - - # close progress bars - self.main_progress_bar.close() - self.val_progress_bar.close() - - # init progress bar - pbar = tqdm.tqdm(leave=True, position=2 * self.process_position, - disable=not self.show_progress_bar, dynamic_ncols=True, unit='batch', - file=sys.stdout) - self.main_progress_bar = pbar - - # clear cache before training - if self.on_gpu: - torch.cuda.empty_cache() - - # CORE TRAINING LOOP - self.train() - - def test(self, model): - self.testing = True - self.fit(model) - - @property - def training_tqdm_dict(self): - tqdm_dict = { - 'step': '{}'.format(self.global_step), - } - tqdm_dict.update(self.tqdm_metrics) - return tqdm_dict - - # -------------------- - # restore ckpt - # -------------------- - def restore_weights(self, model): - """ - To restore weights we have two cases. - First, attempt to restore hpc weights. If successful, don't restore - other weights. 
- - Otherwise, try to restore actual weights - :param model: - :return: - """ - # clear cache before restore - if self.on_gpu: - torch.cuda.empty_cache() - - if self.resume_from_checkpoint is not None: - self.restore(self.resume_from_checkpoint, on_gpu=self.on_gpu) - else: - # restore weights if same exp version - self.restore_state_if_checkpoint_exists(model) - - # wait for all models to restore weights - if self.use_ddp: - # wait for all processes to catch up - dist.barrier() - - # clear cache after restore - if self.on_gpu: - torch.cuda.empty_cache() - - def restore_state_if_checkpoint_exists(self, model): - did_restore = False - - # do nothing if there's not dir or callback - no_ckpt_callback = (self.checkpoint_callback is None) or (not self.checkpoint_callback) - if no_ckpt_callback or not os.path.exists(self.checkpoint_callback.filepath): - return did_restore - - # restore trainer state and model if there is a weight for this experiment - last_steps = -1 - last_ckpt_name = None - - # find last epoch - checkpoints = os.listdir(self.checkpoint_callback.filepath) - for name in checkpoints: - if '.ckpt' in name and not name.endswith('part'): - if 'steps_' in name: - steps = name.split('steps_')[1] - steps = int(re.sub('[^0-9]', '', steps)) - - if steps > last_steps: - last_steps = steps - last_ckpt_name = name - - # restore last checkpoint - if last_ckpt_name is not None: - last_ckpt_path = os.path.join(self.checkpoint_callback.filepath, last_ckpt_name) - self.restore(last_ckpt_path, self.on_gpu) - logging.info(f'model and trainer restored from checkpoint: {last_ckpt_path}') - did_restore = True - - return did_restore - - def restore(self, checkpoint_path, on_gpu): - checkpoint = torch.load(checkpoint_path, map_location='cpu') - - # load model state - model = self.get_model() - - # load the state_dict on the model automatically - model.load_state_dict(checkpoint['state_dict'], strict=False) - if on_gpu: - model.cuda(self.root_gpu) - # load training state (affects trainer only) - self.restore_training_state(checkpoint) - model.global_step = self.global_step - del checkpoint - - try: - if dist.is_initialized() and dist.get_rank() > 0: - return - except Exception as e: - print(e) - return - - def restore_training_state(self, checkpoint): - """ - Restore trainer state. - Model will get its change to update - :param checkpoint: - :return: - """ - if self.checkpoint_callback is not None and self.checkpoint_callback is not False: - self.checkpoint_callback.best = checkpoint['checkpoint_callback_best'] - - self.global_step = checkpoint['global_step'] - self.current_epoch = checkpoint['epoch'] - - if self.testing: - return - - # restore the optimizers - optimizer_states = checkpoint['optimizer_states'] - for optimizer, opt_state in zip(self.optimizers, optimizer_states): - if optimizer is None: - return - optimizer.load_state_dict(opt_state) - - # move optimizer to GPU 1 weight at a time - # avoids OOM - if self.root_gpu is not None: - for state in optimizer.state.values(): - for k, v in state.items(): - if isinstance(v, torch.Tensor): - state[k] = v.cuda(self.root_gpu) - - # restore the lr schedulers - lr_schedulers = checkpoint['lr_schedulers'] - for scheduler, lrs_state in zip(self.lr_schedulers, lr_schedulers): - scheduler.load_state_dict(lrs_state) - - # -------------------- - # MODEL SAVE CHECKPOINT - # -------------------- - def _atomic_save(self, checkpoint, filepath): - """Saves a checkpoint atomically, avoiding the creation of incomplete checkpoints. 
- - This will create a temporary checkpoint with a suffix of ``.part``, then copy it to the final location once - saving is finished. - - Args: - checkpoint (object): The object to save. - Built to be used with the ``dump_checkpoint`` method, but can deal with anything which ``torch.save`` - accepts. - filepath (str|pathlib.Path): The path to which the checkpoint will be saved. - This points to the file that the checkpoint will be stored in. - """ - tmp_path = str(filepath) + ".part" - torch.save(checkpoint, tmp_path) - os.replace(tmp_path, filepath) - - def save_checkpoint(self, filepath): - checkpoint = self.dump_checkpoint() - self._atomic_save(checkpoint, filepath) - - def dump_checkpoint(self): - - checkpoint = { - 'epoch': self.current_epoch, - 'global_step': self.global_step - } - - if self.checkpoint_callback is not None and self.checkpoint_callback is not False: - checkpoint['checkpoint_callback_best'] = self.checkpoint_callback.best - - # save optimizers - optimizer_states = [] - for i, optimizer in enumerate(self.optimizers): - if optimizer is not None: - optimizer_states.append(optimizer.state_dict()) - - checkpoint['optimizer_states'] = optimizer_states - - # save lr schedulers - lr_schedulers = [] - for i, scheduler in enumerate(self.lr_schedulers): - lr_schedulers.append(scheduler.state_dict()) - - checkpoint['lr_schedulers'] = lr_schedulers - - # add the hparams and state_dict from the model - model = self.get_model() - checkpoint['state_dict'] = model.state_dict() - # give the model a chance to add a few things - model.on_save_checkpoint(checkpoint) - - return checkpoint - - def copy_trainer_model_properties(self, model): - if isinstance(model, DP): - ref_model = model.module - elif isinstance(model, DDP): - ref_model = model.module - else: - ref_model = model - - for m in [model, ref_model]: - m.trainer = self - m.on_gpu = self.on_gpu - m.use_dp = self.use_dp - m.use_ddp = self.use_ddp - m.testing = self.testing - m.single_gpu = self.single_gpu - - def transfer_batch_to_gpu(self, batch, gpu_id): - # base case: object can be directly moved using `cuda` or `to` - if callable(getattr(batch, 'cuda', None)): - return batch.cuda(gpu_id, non_blocking=True) - - elif callable(getattr(batch, 'to', None)): - return batch.to(torch.device('cuda', gpu_id), non_blocking=True) - - # when list - elif isinstance(batch, list): - for i, x in enumerate(batch): - batch[i] = self.transfer_batch_to_gpu(x, gpu_id) - return batch - - # when tuple - elif isinstance(batch, tuple): - batch = list(batch) - for i, x in enumerate(batch): - batch[i] = self.transfer_batch_to_gpu(x, gpu_id) - return tuple(batch) - - # when dict - elif isinstance(batch, dict): - for k, v in batch.items(): - batch[k] = self.transfer_batch_to_gpu(v, gpu_id) - - return batch - - # nothing matches, return the value as is without transform - return batch - - def set_distributed_mode(self, distributed_backend): - # skip for CPU - if self.num_gpus == 0: - return - - # single GPU case - # in single gpu case we allow ddp so we can train on multiple - # nodes, 1 gpu per node - elif self.num_gpus == 1: - self.single_gpu = True - self.use_dp = False - self.use_ddp = False - self.root_gpu = 0 - self.data_parallel_device_ids = [0] - else: - if distributed_backend is not None: - self.use_dp = distributed_backend == 'dp' - self.use_ddp = distributed_backend == 'ddp' - elif distributed_backend is None: - self.use_dp = True - self.use_ddp = False - - logging.info(f'gpu available: {torch.cuda.is_available()}, used: {self.on_gpu}') - - def 
ddp_train(self, gpu_idx, model): - """ - Entry point into a DP thread - :param gpu_idx: - :param model: - :param cluster_obj: - :return: - """ - # otherwise default to node rank 0 - self.node_rank = 0 - - # show progressbar only on progress_rank 0 - self.show_progress_bar = self.show_progress_bar and self.node_rank == 0 and gpu_idx == 0 - - # determine which process we are and world size - if self.use_ddp: - self.proc_rank = self.node_rank * self.num_gpus + gpu_idx - self.world_size = self.num_gpus - - # let the exp know the rank to avoid overwriting logs - if self.logger is not None: - self.logger.rank = self.proc_rank - - # set up server using proc 0's ip address - # try to init for 20 times at max in case ports are taken - # where to store ip_table - model.trainer = self - model.init_ddp_connection(self.proc_rank, self.world_size) - - # CHOOSE OPTIMIZER - # allow for lr schedulers as well - model.model = model.build_model() - if not self.testing: - self.optimizers, self.lr_schedulers = self.init_optimizers(model.configure_optimizers()) - - # MODEL - # copy model to each gpu - if self.distributed_backend == 'ddp': - torch.cuda.set_device(gpu_idx) - model.cuda(gpu_idx) - - # set model properties before going into wrapper - self.copy_trainer_model_properties(model) - - # override root GPU - self.root_gpu = gpu_idx - - if self.distributed_backend == 'ddp': - device_ids = [gpu_idx] - else: - device_ids = None - - # allow user to configure ddp - model = model.configure_ddp(model, device_ids) - - # continue training routine - self.run_pretrain_routine(model) - - def resolve_root_node_address(self, root_node): - if '[' in root_node: - name = root_node.split('[')[0] - number = root_node.split(',')[0] - if '-' in number: - number = number.split('-')[0] - - number = re.sub('[^0-9]', '', number) - root_node = name + number - - return root_node - - def log_metrics(self, metrics, grad_norm_dic, step=None): - """Logs the metric dict passed in. - - :param metrics: - :param grad_norm_dic: - """ - # added metrics by Lightning for convenience - metrics['epoch'] = self.current_epoch - - # add norms - metrics.update(grad_norm_dic) - - # turn all tensors to scalars - scalar_metrics = self.metrics_to_scalars(metrics) - - step = step if step is not None else self.global_step - # log actual metrics - if self.proc_rank == 0 and self.logger is not None: - self.logger.log_metrics(scalar_metrics, step=step) - self.logger.save() - - def add_tqdm_metrics(self, metrics): - for k, v in metrics.items(): - if type(v) is torch.Tensor: - v = v.item() - - self.tqdm_metrics[k] = v - - def metrics_to_scalars(self, metrics): - new_metrics = {} - for k, v in metrics.items(): - if isinstance(v, torch.Tensor): - v = v.item() - - if type(v) is dict: - v = self.metrics_to_scalars(v) - - new_metrics[k] = v - - return new_metrics - - def process_output(self, output, train=False): - """Reduces output according to the training mode. 
- - Separates loss from logging and tqdm metrics - :param output: - :return: - """ - # --------------- - # EXTRACT CALLBACK KEYS - # --------------- - # all keys not progress_bar or log are candidates for callbacks - callback_metrics = {} - for k, v in output.items(): - if k not in ['progress_bar', 'log', 'hiddens']: - callback_metrics[k] = v - - if train and self.use_dp: - num_gpus = self.num_gpus - callback_metrics = self.reduce_distributed_output(callback_metrics, num_gpus) - - for k, v in callback_metrics.items(): - if isinstance(v, torch.Tensor): - callback_metrics[k] = v.item() - - # --------------- - # EXTRACT PROGRESS BAR KEYS - # --------------- - try: - progress_output = output['progress_bar'] - - # reduce progress metrics for tqdm when using dp - if train and self.use_dp: - num_gpus = self.num_gpus - progress_output = self.reduce_distributed_output(progress_output, num_gpus) - - progress_bar_metrics = progress_output - except Exception: - progress_bar_metrics = {} - - # --------------- - # EXTRACT LOGGING KEYS - # --------------- - # extract metrics to log to experiment - try: - log_output = output['log'] - - # reduce progress metrics for tqdm when using dp - if train and self.use_dp: - num_gpus = self.num_gpus - log_output = self.reduce_distributed_output(log_output, num_gpus) - - log_metrics = log_output - except Exception: - log_metrics = {} - - # --------------- - # EXTRACT LOSS - # --------------- - # if output dict doesn't have the keyword loss - # then assume the output=loss if scalar - loss = None - if train: - try: - loss = output['loss'] - except Exception: - if type(output) is torch.Tensor: - loss = output - else: - raise RuntimeError( - 'No `loss` value in the dictionary returned from `model.training_step()`.' - ) - - # when using dp need to reduce the loss - if self.use_dp: - loss = self.reduce_distributed_output(loss, self.num_gpus) - - # --------------- - # EXTRACT HIDDEN - # --------------- - hiddens = output.get('hiddens') - - # use every metric passed in as a candidate for callback - callback_metrics.update(progress_bar_metrics) - callback_metrics.update(log_metrics) - - # convert tensors to numpy - for k, v in callback_metrics.items(): - if isinstance(v, torch.Tensor): - callback_metrics[k] = v.item() - - return loss, progress_bar_metrics, log_metrics, callback_metrics, hiddens - - def reduce_distributed_output(self, output, num_gpus): - if num_gpus <= 1: - return output - - # when using DP, we get one output per gpu - # average outputs and return - if type(output) is torch.Tensor: - return output.mean() - - for k, v in output.items(): - # recurse on nested dics - if isinstance(output[k], dict): - output[k] = self.reduce_distributed_output(output[k], num_gpus) - - # do nothing when there's a scalar - elif isinstance(output[k], torch.Tensor) and output[k].dim() == 0: - pass - - # reduce only metrics that have the same number of gpus - elif output[k].size(0) == num_gpus: - reduced = torch.mean(output[k]) - output[k] = reduced - return output - - def clip_gradients(self): - if self.gradient_clip_val > 0: - model = self.get_model() - torch.nn.utils.clip_grad_norm_(model.parameters(), self.gradient_clip_val) - - def print_nan_gradients(self): - model = self.get_model() - for param in model.parameters(): - if (param.grad is not None) and torch.isnan(param.grad.float()).any(): - logging.info(param, param.grad) - - def configure_accumulated_gradients(self, accumulate_grad_batches): - self.accumulate_grad_batches = None - - if isinstance(accumulate_grad_batches, dict): 
- self.accumulation_scheduler = GradientAccumulationScheduler(accumulate_grad_batches) - elif isinstance(accumulate_grad_batches, int): - schedule = {1: accumulate_grad_batches} - self.accumulation_scheduler = GradientAccumulationScheduler(schedule) - else: - raise TypeError("Gradient accumulation supports only int and dict types") - - def get_dataloaders(self, model): - if not self.testing: - self.init_train_dataloader(model) - self.init_val_dataloader(model) - else: - self.init_test_dataloader(model) - - if self.use_ddp: - dist.barrier() - if not self.testing: - self.get_train_dataloader() - self.get_val_dataloaders() - else: - self.get_test_dataloaders() - - def init_train_dataloader(self, model): - self.fisrt_epoch = True - self.get_train_dataloader = model.train_dataloader - if isinstance(self.get_train_dataloader(), torch.utils.data.DataLoader): - self.num_training_batches = len(self.get_train_dataloader()) - self.num_training_batches = int(self.num_training_batches) - else: - self.num_training_batches = float('inf') - self.is_iterable_train_dataloader = True - if isinstance(self.val_check_interval, int): - self.val_check_batch = self.val_check_interval - else: - self._percent_range_check('val_check_interval') - self.val_check_batch = int(self.num_training_batches * self.val_check_interval) - self.val_check_batch = max(1, self.val_check_batch) - - def init_val_dataloader(self, model): - self.get_val_dataloaders = model.val_dataloader - self.num_val_batches = 0 - if self.get_val_dataloaders() is not None: - if isinstance(self.get_val_dataloaders()[0], torch.utils.data.DataLoader): - self.num_val_batches = sum(len(dataloader) for dataloader in self.get_val_dataloaders()) - self.num_val_batches = int(self.num_val_batches) - else: - self.num_val_batches = float('inf') - - def init_test_dataloader(self, model): - self.get_test_dataloaders = model.test_dataloader - if self.get_test_dataloaders() is not None: - if isinstance(self.get_test_dataloaders()[0], torch.utils.data.DataLoader): - self.num_test_batches = sum(len(dataloader) for dataloader in self.get_test_dataloaders()) - self.num_test_batches = int(self.num_test_batches) - else: - self.num_test_batches = float('inf') - - def evaluate(self, model, dataloaders, max_batches, test=False): - """Run evaluation code. 
- - :param model: PT model - :param dataloaders: list of PT dataloaders - :param max_batches: Scalar - :param test: boolean - :return: - """ - # enable eval mode - model.zero_grad() - model.eval() - - # copy properties for forward overrides - self.copy_trainer_model_properties(model) - - # disable gradients to save memory - torch.set_grad_enabled(False) - - if test: - self.get_model().test_start() - # bookkeeping - outputs = [] - - # run training - for dataloader_idx, dataloader in enumerate(dataloaders): - dl_outputs = [] - for batch_idx, batch in enumerate(dataloader): - - if batch is None: # pragma: no cover - continue - - # stop short when on fast_dev_run (sets max_batch=1) - if batch_idx >= max_batches: - break - - # ----------------- - # RUN EVALUATION STEP - # ----------------- - output = self.evaluation_forward(model, - batch, - batch_idx, - dataloader_idx, - test) - - # track outputs for collation - dl_outputs.append(output) - - # batch done - if test: - self.test_progress_bar.update(1) - else: - self.val_progress_bar.update(1) - outputs.append(dl_outputs) - - # with a single dataloader don't pass an array - if len(dataloaders) == 1: - outputs = outputs[0] - - # give model a chance to do something with the outputs (and method defined) - model = self.get_model() - if test: - eval_results_ = model.test_end(outputs) - else: - eval_results_ = model.validation_end(outputs) - eval_results = eval_results_ - - # enable train mode again - model.train() - - # enable gradients to save memory - torch.set_grad_enabled(True) - - return eval_results - - def run_evaluation(self, test=False): - # when testing make sure user defined a test step - model = self.get_model() - model.on_pre_performance_check() - - # select dataloaders - if test: - dataloaders = self.get_test_dataloaders() - max_batches = self.num_test_batches - else: - # val - dataloaders = self.get_val_dataloaders() - max_batches = self.num_val_batches - - # init validation or test progress bar - # main progress bar will already be closed when testing so initial position is free - position = 2 * self.process_position + (not test) - desc = 'Testing' if test else 'Validating' - pbar = tqdm.tqdm(desc=desc, total=max_batches, leave=test, position=position, - disable=not self.show_progress_bar, dynamic_ncols=True, - unit='batch', file=sys.stdout) - setattr(self, f'{"test" if test else "val"}_progress_bar', pbar) - - # run evaluation - eval_results = self.evaluate(self.model, - dataloaders, - max_batches, - test) - if eval_results is not None: - _, prog_bar_metrics, log_metrics, callback_metrics, _ = self.process_output( - eval_results) - - # add metrics to prog bar - self.add_tqdm_metrics(prog_bar_metrics) - - # log metrics - self.log_metrics(log_metrics, {}) - - # track metrics for callbacks - self.callback_metrics.update(callback_metrics) - - # hook - model.on_post_performance_check() - - # add model specific metrics - tqdm_metrics = self.training_tqdm_dict - if not test: - self.main_progress_bar.set_postfix(**tqdm_metrics) - - # close progress bar - if test: - self.test_progress_bar.close() - else: - self.val_progress_bar.close() - - # model checkpointing - if self.proc_rank == 0 and self.checkpoint_callback is not None and not test: - self.checkpoint_callback.on_epoch_end(epoch=self.current_epoch, - logs=self.callback_metrics) - - def evaluation_forward(self, model, batch, batch_idx, dataloader_idx, test=False): - # make dataloader_idx arg in validation_step optional - args = [batch, batch_idx] - - if test and 
len(self.get_test_dataloaders()) > 1: - args.append(dataloader_idx) - - elif not test and len(self.get_val_dataloaders()) > 1: - args.append(dataloader_idx) - - # handle DP, DDP forward - if self.use_ddp or self.use_dp: - output = model(*args) - return output - - # single GPU - if self.single_gpu: - # for single GPU put inputs on gpu manually - root_gpu = 0 - if isinstance(self.data_parallel_device_ids, list): - root_gpu = self.data_parallel_device_ids[0] - batch = self.transfer_batch_to_gpu(batch, root_gpu) - args[0] = batch - - # CPU - if test: - output = model.test_step(*args) - else: - output = model.validation_step(*args) - - return output - - def train(self): - model = self.get_model() - # run all epochs - for epoch in range(self.current_epoch, 1000000): - # set seed for distributed sampler (enables shuffling for each epoch) - if self.use_ddp and hasattr(self.get_train_dataloader().sampler, 'set_epoch'): - self.get_train_dataloader().sampler.set_epoch(epoch) - - # get model - model = self.get_model() - - # update training progress in trainer and model - model.current_epoch = epoch - self.current_epoch = epoch - - total_val_batches = 0 - if not self.disable_validation: - # val can be checked multiple times in epoch - is_val_epoch = (self.current_epoch + 1) % self.check_val_every_n_epoch == 0 - val_checks_per_epoch = self.num_training_batches // self.val_check_batch - val_checks_per_epoch = val_checks_per_epoch if is_val_epoch else 0 - total_val_batches = self.num_val_batches * val_checks_per_epoch - - # total batches includes multiple val checks - self.total_batches = self.num_training_batches + total_val_batches - self.batch_loss_value = 0 # accumulated grads - - if self.is_iterable_train_dataloader: - # for iterable train loader, the progress bar never ends - num_iterations = None - else: - num_iterations = self.total_batches - - # reset progress bar - # .reset() doesn't work on disabled progress bar so we should check - desc = f'Epoch {epoch}' if not self.is_iterable_train_dataloader else '' - self.main_progress_bar.set_description(desc) - - # changing gradient according accumulation_scheduler - self.accumulation_scheduler.on_epoch_begin(epoch, self) - - # ----------------- - # RUN TNG EPOCH - # ----------------- - self.run_training_epoch() - print() # start a new line for the next epoch - - # update LR schedulers - if self.lr_schedulers is not None: - for lr_scheduler in self.lr_schedulers: - lr_scheduler.step(epoch=self.current_epoch) - - self.main_progress_bar.close() - - model.on_train_end() - - if self.logger is not None: - self.logger.finalize("success") - - def run_training_epoch(self): - # before epoch hook - if self.is_function_implemented('on_epoch_start'): - model = self.get_model() - model.on_epoch_start() - - # run epoch - for batch_idx, batch in enumerate(self.get_train_dataloader()): - # stop epoch if we limited the number of training batches - if batch_idx >= self.num_training_batches: - break - - self.batch_idx = batch_idx - - model = self.get_model() - model.global_step = self.global_step - - # --------------- - # RUN TRAIN STEP - # --------------- - output = self.run_training_batch(batch, batch_idx) - batch_result, grad_norm_dic, batch_step_metrics = output - - # when returning -1 from train_step, we end epoch early - early_stop_epoch = batch_result == -1 - - # --------------- - # RUN VAL STEP - # --------------- - should_check_val = ( - not self.disable_validation and self.global_step % self.val_check_batch == 0 and not self.fisrt_epoch) - self.fisrt_epoch = 
False - - if should_check_val: - self.run_evaluation(test=self.testing) - - # when logs should be saved - should_save_log = (self.total_batch_idx + 1) % self.log_save_interval == 0 or early_stop_epoch - if should_save_log: - if self.proc_rank == 0 and self.logger is not None: - self.logger.save() - - # when metrics should be logged - should_log_metrics = self.total_batch_idx % self.row_log_interval == 0 or early_stop_epoch - if should_log_metrics: - # logs user requested information to logger - self.log_metrics(batch_step_metrics, grad_norm_dic) - - self.global_step += 1 - self.total_batch_idx += 1 - - # end epoch early - # stop when the flag is changed or we've gone past the amount - # requested in the batches - if early_stop_epoch: - break - if self.global_step > self.max_updates: - print("| Training end..") - exit() - - # epoch end hook - if self.is_function_implemented('on_epoch_end'): - model = self.get_model() - model.on_epoch_end() - - def run_training_batch(self, batch, batch_idx): - # track grad norms - grad_norm_dic = {} - - # track all metrics for callbacks - all_callback_metrics = [] - - # track metrics to log - all_log_metrics = [] - - if batch is None: - return 0, grad_norm_dic, {} - - # hook - if self.is_function_implemented('on_batch_start'): - model_ref = self.get_model() - response = model_ref.on_batch_start(batch) - - if response == -1: - return -1, grad_norm_dic, {} - - splits = [batch] - self.hiddens = None - for split_idx, split_batch in enumerate(splits): - self.split_idx = split_idx - - # call training_step once per optimizer - for opt_idx, optimizer in enumerate(self.optimizers): - if optimizer is None: - continue - # make sure only the gradients of the current optimizer's paramaters are calculated - # in the training step to prevent dangling gradients in multiple-optimizer setup. 
- if len(self.optimizers) > 1: - for param in self.get_model().parameters(): - param.requires_grad = False - for group in optimizer.param_groups: - for param in group['params']: - param.requires_grad = True - - # wrap the forward step in a closure so second order methods work - def optimizer_closure(): - # forward pass - output = self.training_forward( - split_batch, batch_idx, opt_idx, self.hiddens) - - closure_loss = output[0] - progress_bar_metrics = output[1] - log_metrics = output[2] - callback_metrics = output[3] - self.hiddens = output[4] - if closure_loss is None: - return None - - # accumulate loss - # (if accumulate_grad_batches = 1 no effect) - closure_loss = closure_loss / self.accumulate_grad_batches - - # backward pass - model_ref = self.get_model() - if closure_loss.requires_grad: - model_ref.backward(closure_loss, optimizer) - - # track metrics for callbacks - all_callback_metrics.append(callback_metrics) - - # track progress bar metrics - self.add_tqdm_metrics(progress_bar_metrics) - all_log_metrics.append(log_metrics) - - # insert after step hook - if self.is_function_implemented('on_after_backward'): - model_ref = self.get_model() - model_ref.on_after_backward() - - return closure_loss - - # calculate loss - loss = optimizer_closure() - if loss is None: - continue - - # nan grads - if self.print_nan_grads: - self.print_nan_gradients() - - # track total loss for logging (avoid mem leaks) - self.batch_loss_value += loss.item() - - # gradient update with accumulated gradients - if (self.batch_idx + 1) % self.accumulate_grad_batches == 0: - - # track gradient norms when requested - if batch_idx % self.row_log_interval == 0: - if self.track_grad_norm > 0: - model = self.get_model() - grad_norm_dic = model.grad_norm( - self.track_grad_norm) - - # clip gradients - self.clip_gradients() - - # calls .step(), .zero_grad() - # override function to modify this behavior - model = self.get_model() - model.optimizer_step(self.current_epoch, batch_idx, optimizer, opt_idx) - - # calculate running loss for display - self.running_loss.append(self.batch_loss_value) - self.batch_loss_value = 0 - self.avg_loss = np.mean(self.running_loss[-100:]) - - # activate batch end hook - if self.is_function_implemented('on_batch_end'): - model = self.get_model() - model.on_batch_end() - - # update progress bar - self.main_progress_bar.update(1) - self.main_progress_bar.set_postfix(**self.training_tqdm_dict) - - # collapse all metrics into one dict - all_log_metrics = {k: v for d in all_log_metrics for k, v in d.items()} - - # track all metrics for callbacks - self.callback_metrics.update({k: v for d in all_callback_metrics for k, v in d.items()}) - - return 0, grad_norm_dic, all_log_metrics - - def training_forward(self, batch, batch_idx, opt_idx, hiddens): - """ - Handle forward for each training case (distributed, single gpu, etc...) 
- :param batch: - :param batch_idx: - :return: - """ - # --------------- - # FORWARD - # --------------- - # enable not needing to add opt_idx to training_step - args = [batch, batch_idx, opt_idx] - - # distributed forward - if self.use_ddp or self.use_dp: - output = self.model(*args) - # single GPU forward - elif self.single_gpu: - gpu_id = 0 - if isinstance(self.data_parallel_device_ids, list): - gpu_id = self.data_parallel_device_ids[0] - batch = self.transfer_batch_to_gpu(copy.copy(batch), gpu_id) - args[0] = batch - output = self.model.training_step(*args) - # CPU forward - else: - output = self.model.training_step(*args) - - # allow any mode to define training_end - model_ref = self.get_model() - output_ = model_ref.training_end(output) - if output_ is not None: - output = output_ - - # format and reduce outputs accordingly - output = self.process_output(output, train=True) - - return output - - # --------------- - # Utils - # --------------- - def is_function_implemented(self, f_name): - model = self.get_model() - f_op = getattr(model, f_name, None) - return callable(f_op) - - def _percent_range_check(self, name): - value = getattr(self, name) - msg = f"`{name}` must lie in the range [0.0, 1.0], but got {value:.3f}." - if name == "val_check_interval": - msg += " If you want to disable validation set `val_percent_check` to 0.0 instead." - - if not 0. <= value <= 1.: - raise ValueError(msg) + return 'auto' From 705265f074ed44f7ea7613f1210caa08f32f923e Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 23 Mar 2023 18:55:05 -0500 Subject: [PATCH 119/475] Fix batch sampler and lr_scheduler step freq --- basics/base_task.py | 38 +++++++------- configs/base.yaml | 1 + training/acoustic_task.py | 25 ++++++++-- utils/__init__.py | 10 ++-- utils/pl_utils.py | 7 +-- utils/training_utils.py | 102 +++++++++++++++++++++++--------------- 6 files changed, 109 insertions(+), 74 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 0f055f05a..f1fc09761 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,6 +13,7 @@ import sys import numpy as np import pytorch_lightning as pl +from pytorch_lightning.callbacks import RichProgressBar from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import grad_norm from utils.phoneme_utils import locate_dictionary @@ -71,6 +72,7 @@ def __init__(self, *args, **kwargs): hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences self.training_losses_meter = None + self.training_sampler = None self.model = None @@ -83,6 +85,8 @@ def build_model(self): def on_train_epoch_start(self): self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} + if self.training_sampler is not None: + self.training_sampler.set_epoch(self.current_epoch) def _training_step(self, sample, batch_idx, optimizer_idx): """ @@ -117,8 +121,8 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): # log_outputs['all_loss'] = total_loss.item() progress_bar_log = log_outputs | {'step': self.global_step} tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(progress_bar_log, prog_bar=True, logger=False, on_step=True, on_epoch=False, rank_zero_only=True) - self.log_dict(tb_log, prog_bar=False, logger=True, on_step=True, on_epoch=False, rank_zero_only=True) + self.log_dict(progress_bar_log, prog_bar=True, on_step=True, on_epoch=False) + self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return { 'loss': total_loss } @@ -131,7 +135,7 @@ def on_train_epoch_end(self): # 
f"\n==============\n") def on_before_optimizer_step(self, optimizer): - self.log_dict(grad_norm(self, norm_type=2), rank_zero_only=True) + self.log_dict(grad_norm(self, norm_type=2)) def on_validation_start(self): self.validation_step_outputs = [] @@ -186,27 +190,22 @@ def configure_optimizers(self): "lr_scheduler": { "scheduler": scheduler, "interval": "step", - "frequency": hparams['accumulate_grad_batches'], + "frequency": 1 } } - def build_batch_sampler(self, dataset, shuffle, max_tokens=None, max_sentences=None, - required_batch_size_multiple=-1, batch_by_size=True): - devices_cnt = torch.cuda.device_count() - if devices_cnt == 0: - devices_cnt = 1 - if required_batch_size_multiple == -1: - required_batch_size_multiple = devices_cnt - + def build_batch_sampler(self, dataset, max_tokens, max_sentences, batch_by_size=True, shuffle=False): batch_sampler_cls = partial(BatchSamplerSimilarLength, - max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple, + max_tokens=max_tokens, max_sentences=max_sentences, batch_by_size=batch_by_size) if self.trainer.distributed_sampler_kwargs: - sampler = DistributedBatchSamplerSimilarLength(dataset, batch_sampler_cls=batch_sampler_cls, - shuffle=shuffle, **self.trainer.distributed_sampler_kwargs) + sampler = DistributedBatchSamplerSimilarLength(dataset, + batch_sampler_cls=batch_sampler_cls, + seed=hparams['seed'], + shuffle=shuffle, + **self.trainer.distributed_sampler_kwargs) else: - sampler = batch_sampler_cls(dataset=dataset, indices=dataset.ordered_indices(), shuffle=shuffle) + sampler = batch_sampler_cls(dataset, seed=hparams['seed'], shuffle=shuffle) return sampler def on_test_start(self): @@ -242,8 +241,9 @@ def start(cls): save_top_k=hparams['num_ckpt_keep'], save_on_train_epoch_end=True, auto_insert_metric_name=False, - verbose=True - ) + # verbose=True + ), + # RichProgressBar() ], logger=TensorBoardLogger( save_dir=str(work_dir), diff --git a/configs/base.yaml b/configs/base.yaml index da958b8cb..341598c4a 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -32,6 +32,7 @@ min_level_db: -100 num_spk: 1 mel_vmin: -6 mel_vmax: 1.5 +sampler_frame_count_grid: 200 ds_workers: 4 ######### diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 46b4cae90..a546f1c6f 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -25,6 +25,7 @@ from utils.phoneme_utils import build_phoneme_list from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder +from utils.training_utils import WarmupCosineSchedule matplotlib.use('Agg') @@ -65,6 +66,15 @@ def collater(self, samples): batch['spk_ids'] = spk_ids return batch +class MyScheduler(torch.optim.lr_scheduler.StepLR): + def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=- 1, verbose=False): + super().__init__(optimizer, step_size, gamma, last_epoch, verbose) + + def get_lr(self): + ret = super().get_lr() + print("------GET_LR", self.last_epoch, self._step_count, ret) + return ret + class AcousticTask(BaseTask): def __init__(self): super().__init__() @@ -106,20 +116,29 @@ def build_optimizer(self, model): return optimizer def build_scheduler(self, optimizer): + # return WarmupCosineSchedule(optimizer, + # warmup_steps=hparams['warmup_updates'], + # t_total=hparams['max_updates'] // hparams['accumulate_grad_batches'], + # eta_min=0) return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) def train_dataloader(self): - sampler = 
self.build_batch_sampler(self.train_dataset, True, self.max_tokens, self.max_sentences) + self.training_sampler = self.build_batch_sampler(self.train_dataset, + max_tokens=self.max_tokens, + max_sentences=self.max_sentences, + shuffle=True) return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, - batch_sampler=sampler, + batch_sampler=self.training_sampler, num_workers=self.train_dataset.num_workers, prefetch_factor=4, pin_memory=False, persistent_workers=True) def val_dataloader(self): - sampler = self.build_batch_sampler(self.valid_dataset, False, self.max_tokens, self.max_sentences) + sampler = self.build_batch_sampler(self.valid_dataset, + max_tokens=self.max_tokens, + max_sentences=self.max_sentences) return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, diff --git a/utils/__init__.py b/utils/__init__.py index 3d3fe6db3..27aa595c6 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -63,7 +63,7 @@ def _is_batch_full(batch, num_tokens, max_tokens, max_sentences): def batch_by_size( - indices, num_tokens_fn, max_tokens=None, max_sentences=None, + indices, num_tokens_fn, max_tokens=80000, max_sentences=48, required_batch_size_multiple=1 ): """ @@ -75,14 +75,10 @@ def batch_by_size( num_tokens_fn (callable): function that returns the number of tokens at a given index max_tokens (int, optional): max number of tokens in each batch - (default: None). + (default: 80000). max_sentences (int, optional): max number of sentences in each - batch (default: None). - required_batch_size_multiple (int, optional): require batch size to - be a multiple of N (default: 1). + batch (default: 48). """ - max_tokens = max_tokens if max_tokens is not None else sys.maxsize - max_sentences = max_sentences if max_sentences is not None else sys.maxsize bsz_mult = required_batch_size_multiple if isinstance(indices, types.GeneratorType): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 55db650f2..323736925 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -22,11 +22,6 @@ def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool: from pytorch_lightning.trainer.states import RunningStage return trainer.state.stage == RunningStage.TRAINING and super()._should_save_on_train_epoch_end(trainer) - # @classmethod - # def _format_checkpoint_name(cls, filename, metrics, prefix = "", auto_insert_metric_name = True): - # # metrics = {k: v + 1 if k == 'step' or k == 'epoch' else v for k, v in metrics.items()} - # return super()._format_checkpoint_name(filename, metrics, prefix, auto_insert_metric_name) - def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None @@ -51,4 +46,4 @@ def get_stategy_obj(strategy): if strategy == 'ddp_gloo': return DDPStrategy(process_group_backend='gloo') else: - return 'auto' + return strategy diff --git a/utils/training_utils.py b/utils/training_utils.py index 63af4718e..9154d9d28 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,10 +1,12 @@ -import utils -from utils.hparams import hparams - import math + import numpy as np +from torch.optim.lr_scheduler import LambdaLR from torch.utils.data.distributed import Sampler, DistributedSampler +import utils +from utils.hparams import hparams + class RSQRTSchedule(object): def __init__(self, optimizer): super().__init__() @@ -30,27 +32,71 @@ def step(self, num_updates): def get_lr(self): return self.optimizer.param_groups[0]['lr'] +class 
WarmupCosineSchedule(LambdaLR): + """ Linear warmup and then cosine decay. + Linearly increases learning rate from 0 to 1 over `warmup_steps` training steps. + Decreases learning rate from 1. to 0. over remaining `t_total - warmup_steps` steps following a cosine curve. + If `cycles` (default=0.5) is different from default, learning rate follows cosine function after warmup. + `eta_min` (default=0.0) corresponds to the minimum learning rate reached by the scheduler. + """ + def __init__(self, optimizer, warmup_steps, t_total, eta_min=0.0, cycles=.5, last_epoch=-1): + self.warmup_steps = warmup_steps + self.t_total = t_total + self.eta_min = eta_min + self.cycles = cycles + super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch) + + def lr_lambda(self, step): + if step < self.warmup_steps: + return step / max(1.0, self.warmup_steps) + # progress after warmup + progress = (step - self.warmup_steps) / max(1, self.t_total - self.warmup_steps) + return max(self.eta_min, 0.5 * (1. + math.cos(math.pi * self.cycles * 2.0 * progress))) + class BatchSamplerSimilarLength(Sampler): - def __init__(self, dataset, indices=None, max_tokens=None, max_sentences=None, required_batch_size_multiple=-1, batch_by_size=True, shuffle=True): + def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, seed=0, shuffle=True): + self.dataset = dataset + self.sub_indices = indices + self.max_tokens = max_tokens + self.max_sentences = max_sentences + self.batch_by_size = batch_by_size self.shuffle = shuffle - - if batch_by_size: - self.batches = utils.batch_by_size( - indices, dataset.num_tokens, max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_size_multiple=required_batch_size_multiple - ) - else: - self.batches = [indices[i:i + max_sentences] for i in range(0, len(indices), max_sentences)] + self.seed = seed + self.epoch = 0 + self.batches = None def __iter__(self): if self.shuffle: - np.random.shuffle(self.batches) + rng = np.random.RandomState(self.seed + self.epoch) + if self.sub_indices is not None: + rng.shuffle(self.sub_indices) + indices = np.array(self.sub_indices) + else: + indices = rng.permutation(len(self.dataset)) + if self.dataset.sort_by_len: + grid = hparams.get('sampler_frame_count_grid', 100) + sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) + indices = indices[np.argsort(sizes, kind='mergesort')] + indices = indices.tolist() + else: + indices = self.sub_indices if self.sub_indices is not None else list(range(len(self.dataset))) + + if self.batch_by_size: + self.batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) + else: + self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] + for batch in self.batches: yield batch def __len__(self): + if self.batches is None: + raise RuntimeError("Batches are not initialized. 
Call __iter__ first.") return len(self.batches) + def set_epoch(self, epoch): + self.epoch = epoch + class DistributedBatchSamplerSimilarLength(DistributedSampler): def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, @@ -61,34 +107,12 @@ def __init__(self, dataset, num_replicas=None, self.batch_sampler = None def __iter__(self): - if self.shuffle: - indices = np.random.RandomState(seed=self.seed).permutation(len(self.dataset)) - if self.dataset.sort_by_len: - indices = indices[np.argsort(np.array(self.dataset._sizes)[indices], kind='mergesort')] - else: - indices = np.arange(len(self.dataset)) - indices = indices.tolist() - - if not self.drop_last: - # add extra samples to make it evenly divisible - padding_size = self.total_size - len(indices) - if padding_size <= len(indices): - indices += indices[:padding_size] - else: - indices += (indices * math.ceil(padding_size / len(indices)))[:padding_size] - else: - # remove tail of data to make it evenly divisible. - indices = indices[:self.total_size] - assert len(indices) == self.total_size - - # subsample - indices = indices[self.rank:self.total_size:self.num_replicas] - assert len(indices) == self.num_samples - - self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, shuffle=self.shuffle) + indices = list(super().__iter__()) + self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, seed=self.seed, shuffle=self.shuffle) + self.batch_sampler.set_epoch(self.epoch) return iter(self.batch_sampler) def __len__(self) -> int: if self.batch_sampler is None: - raise ValueError("BatchSampler is not initialized. Call __iter__ first.") + raise RuntimeError("BatchSampler is not initialized. Call __iter__ first.") return len(self.batch_sampler) From 6bc26ed1444b96019557416b0cc147c8cb9e3627 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 00:11:37 -0500 Subject: [PATCH 120/475] Checkpointing done --- basics/base_task.py | 35 +++++++-- modules/diff/diffusion.py | 4 +- modules/vocoders/nsf_hifigan.py | 3 +- training/acoustic_task.py | 9 --- utils/multiprocess_utils.py | 3 +- utils/phoneme_utils.py | 5 +- utils/pl_utils.py | 134 ++++++++++++++++++++++++++++---- 7 files changed, 152 insertions(+), 41 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index f1fc09761..9b5a330ee 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,9 +13,10 @@ import sys import numpy as np import pytorch_lightning as pl -from pytorch_lightning.callbacks import RichProgressBar +from pytorch_lightning.callbacks import RichProgressBar, ModelSummary from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import grad_norm +from pytorch_lightning.utilities.rank_zero import rank_zero_debug from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj @@ -73,6 +74,8 @@ def __init__(self, *args, **kwargs): self.training_losses_meter = None self.training_sampler = None + self.skip_immediate_validation = False + self.skip_immediate_ckpt_save = False self.model = None @@ -156,6 +159,9 @@ def validation_step(self, sample, batch_idx): :param batch_idx: :return: output: dict """ + if self.skip_immediate_validation: + rank_zero_debug('In validation step, skip immediate validation!') + return {} outputs = self._validation_step(sample, batch_idx) self.validation_step_outputs.append(outputs) 
return outputs @@ -169,12 +175,16 @@ def _on_validation_end(self, outputs): raise NotImplementedError def on_validation_epoch_end(self): + if self.skip_immediate_validation: + self.skip_immediate_validation = False + self.skip_immediate_ckpt_save = True + return loss_output = self._on_validation_end(self.validation_step_outputs) # print(f"\n==============\n " # f"valid results: {loss_output}" # f"\n==============\n") - self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, sync_dist=True) - self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, prog_bar=False, logger=True, sync_dist=True) + self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, sync_dist=True) + self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) def build_scheduler(self, optimizer): raise NotImplementedError @@ -235,15 +245,17 @@ def start(cls): DiffModelCheckpoint( dirpath=work_dir, filename='model_ckpt_steps_{step}', - monitor='val_loss', - mode='min', + monitor='step', + mode='max', save_last=hparams['save_last'], save_top_k=hparams['num_ckpt_keep'], - save_on_train_epoch_end=True, - auto_insert_metric_name=False, + max_updates=hparams['max_updates'], + permanent_ckpt_start=hparams['permanent_ckpt_start'], + permanent_ckpt_interval=hparams['permanent_ckpt_interval'], # verbose=True ), - # RichProgressBar() + # RichProgressBar(), + # ModelSummary(max_depth=-1), ], logger=TensorBoardLogger( save_dir=str(work_dir), @@ -285,6 +297,7 @@ def start(cls): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') + hparams['disable_sample_tqdm'] = True trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: trainer.test(task) @@ -292,3 +305,9 @@ def start(cls): def on_save_checkpoint(self, checkpoint): if isinstance(self.model, CategorizedModule): checkpoint['category'] = self.model.category + checkpoint['trainer_stage'] = self.trainer.state.stage.value + + def on_load_checkpoint(self, checkpoint): + from pytorch_lightning.trainer.states import RunningStage + if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: + self.skip_immediate_validation = True diff --git a/modules/diff/diffusion.py b/modules/diff/diffusion.py index 04263139e..91e4b9718 100644 --- a/modules/diff/diffusion.py +++ b/modules/diff/diffusion.py @@ -274,7 +274,7 @@ def wrapped(x, t, **kwargs): dpm_solver = DPM_Solver(model_fn, noise_schedule) steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps) + self.bar = tqdm(desc="sample time step", total=steps, disable=hparams['disable_sample_tqdm']) x = dpm_solver.sample( x, steps=steps, @@ -284,7 +284,7 @@ def wrapped(x, t, **kwargs): ) self.bar.close() else: - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t): + for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=hparams['disable_sample_tqdm']): x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) x = x.squeeze(1).transpose(1, 2) # [B, T, M] return self.denorm_spec(x) diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 40fcb6a4e..7b4a0a531 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,6 +1,7 @@ import os import torch +from pytorch_lightning.utilities.rank_zero import rank_zero_info from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import 
load_wav_to_torch, STFT @@ -17,7 +18,7 @@ def __init__(self, device=None): self.device = device model_path = hparams['vocoder_ckpt'] assert os.path.exists(model_path), 'HifiGAN model file is not found!' - print('| Load HifiGAN: ', model_path) + rank_zero_info('| Load HifiGAN: ' + model_path) self.model, self.h = load_model(model_path, device=self.device) def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] diff --git a/training/acoustic_task.py b/training/acoustic_task.py index a546f1c6f..ab119e240 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -66,15 +66,6 @@ def collater(self, samples): batch['spk_ids'] = spk_ids return batch -class MyScheduler(torch.optim.lr_scheduler.StepLR): - def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=- 1, verbose=False): - super().__init__(optimizer, step_size, gamma, last_epoch, verbose) - - def get_lr(self): - ret = super().get_lr() - print("------GET_LR", self.last_epoch, self._step_count, ret) - return ret - class AcousticTask(BaseTask): def __init__(self): super().__init__() diff --git a/utils/multiprocess_utils.py b/utils/multiprocess_utils.py index b54f99db7..41eb13142 100644 --- a/utils/multiprocess_utils.py +++ b/utils/multiprocess_utils.py @@ -8,8 +8,7 @@ def main_process_print(self, *args, sep=' ', end='\n', file=None): - from utils.hparams import hparams - if hparams['is_main_process']: + if is_main_process: print(self, *args, sep=sep, end=end, file=file) diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index d9f2edd0d..8bca6510e 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,7 +1,8 @@ import pathlib +from pytorch_lightning.utilities.rank_zero import rank_zero_info + from utils.hparams import hparams -from utils.multiprocess_utils import main_process_print _initialized = False _ALL_CONSONANTS_SET = set() @@ -51,7 +52,7 @@ def _build_dict_and_list(): for _list in _dictionary.values(): [_set.add(ph) for ph in _list] _phoneme_list = sorted(list(_set)) - main_process_print('| load phoneme set:', _phoneme_list) + rank_zero_info('| load phoneme set: ' + str(_phoneme_list)) def _initialize_consonants_and_vowels(): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 323736925..237c8024c 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -1,46 +1,146 @@ from copy import deepcopy +from glob import glob import os +from pathlib import Path import re +import warnings import torch import pytorch_lightning as pl from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.strategies import DDPStrategy +from pytorch_lightning.trainer.states import RunningStage +from pytorch_lightning.utilities.rank_zero import rank_zero_info class DiffModelCheckpoint(ModelCheckpoint): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + dirpath, + filename, + monitor, + save_last, + save_top_k, + mode, + max_updates, + permanent_ckpt_start, + permanent_ckpt_interval, + verbose = False, + save_weights_only = False + ): + super().__init__( + dirpath=dirpath, + filename=filename, + monitor=monitor, + verbose=verbose, + save_last=save_last, + save_top_k=save_top_k, + save_weights_only=save_weights_only, + mode=mode, + auto_insert_metric_name=False + ) + self.max_updates = max_updates + self.permanent_ckpt_start = permanent_ckpt_start + self.permanent_ckpt_interval = permanent_ckpt_interval + self.last_permanent_step = 0 + def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + """Save 
a checkpoint at the end of the last interrupted training step.""" + if not self._should_skip_saving_checkpoint(trainer) and \ + trainer.state.stage == RunningStage.TRAINING and \ + trainer.global_step == self.max_updates: + monitor_candidates = self._monitor_candidates(trainer) + if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) + self._save_checkpoint(trainer, filepath) + self._save_last_checkpoint(trainer, monitor_candidates) + + def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + """Save a checkpoint at the end of the validation stage.""" + if trainer.lightning_module.skip_immediate_ckpt_save: + trainer.lightning_module.skip_immediate_ckpt_save = False + return + if not self._should_skip_saving_checkpoint(trainer): + monitor_candidates = self._monitor_candidates(trainer) + if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: + self._save_topk_checkpoint(trainer, monitor_candidates) + self._save_last_checkpoint(trainer, monitor_candidates) + + def state_dict(self): + ret = super().state_dict() + ret['last_permanent_step'] = self.last_permanent_step + return ret + + def load_state_dict(self, state_dict): + dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) + + if self.dirpath == dirpath_from_ckpt: + self.best_model_score = state_dict["best_model_score"] + self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) + self.kth_value = state_dict.get("kth_value", self.kth_value) + self.best_k_models = state_dict.get("best_k_models", self.best_k_models) + self.last_model_path = state_dict.get("last_model_path", self.last_model_path) + self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) + else: + warnings.warn( + f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," + " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_permanent_step`," + " `last_model_path` and `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." 
+ ) + self.best_model_path = state_dict["best_model_path"] + def _monitor_candidates(self, trainer: "pl.Trainer"): monitor_candidates = deepcopy(trainer.callback_metrics) monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) monitor_candidates["step"] = torch.tensor(trainer.global_step) return monitor_candidates - - def _should_save_on_train_epoch_end(self, trainer: "pl.Trainer") -> bool: - from pytorch_lightning.trainer.states import RunningStage - return trainer.state.stage == RunningStage.TRAINING and super()._should_save_on_train_epoch_end(trainer) + + def _save_monitor_checkpoint(self, trainer: "pl.Trainer", monitor_candidates): + assert self.monitor + current = monitor_candidates.get(self.monitor) + if self.check_monitor_top_k(trainer, current): + assert current is not None + self._update_best_and_save(current, trainer, monitor_candidates) + elif self.verbose: + epoch = monitor_candidates["epoch"] + step = monitor_candidates["step"] + rank_zero_info(f"Epoch {epoch:d}, global step {step:d}: {self.monitor!r} was not in top {self.save_top_k}") + if step >= self.last_permanent_step + self.permanent_ckpt_interval: + self.last_permanent_step = step + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) + self._save_checkpoint(trainer, filepath) + rank_zero_info(f"Epoch {epoch:d}, global step {step:d} is a permanent checkpoint, saved to {filepath}") + + def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: + """Calls the strategy to remove the checkpoint file.""" + if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: + search = re.search(r'steps_\d+', Path(filepath).stem) + if search: + step = int(search.group(0)[6:]) + if step >= self.permanent_ckpt_start and \ + (self.last_permanent_step is None or \ + step >= self.last_permanent_step + self.permanent_ckpt_interval): + self.last_permanent_step = step + return + trainer.strategy.remove_checkpoint(filepath) def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None - last_steps = -1 + last_step = -1 last_ckpt_name = None - checkpoints = os.listdir(work_dir) + checkpoints = glob(str(Path(work_dir) / '*.ckpt')) for name in checkpoints: - if '.ckpt' in name and not name.endswith('part'): - if 'steps_' in name: - steps = name.split('steps_')[1] - steps = int(re.sub('[^0-9]', '', steps)) - - if steps > last_steps: - last_steps = steps - last_ckpt_name = name + search = re.search(r'steps_\d+', name) + if search: + step = int(search.group(0)[6:]) + if step > last_step: + last_step = step + last_ckpt_name = name - return os.path.join(work_dir, last_ckpt_name) if last_ckpt_name is not None else None + return last_ckpt_name if last_ckpt_name is not None else None def get_stategy_obj(strategy): if strategy == 'ddp_gloo': From 93e4627a902d4aed7539a1ada2601e4e5f8d4561 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 00:29:50 -0500 Subject: [PATCH 121/475] Use pl rankzero utils to discriminate main proc --- basics/base_task.py | 8 ++++---- scripts/infer.py | 2 ++ scripts/train.py | 3 ++- utils/hparams.py | 39 ++++++++++++++++++++------------------- 4 files changed, 28 insertions(+), 24 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 9b5a330ee..a15e384a4 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -16,7 +16,7 @@ from pytorch_lightning.callbacks import RichProgressBar, ModelSummary from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import 
grad_norm -from pytorch_lightning.utilities.rank_zero import rank_zero_debug +from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj @@ -272,8 +272,8 @@ def start(cls): accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train - if trainer.local_rank == 0: - set_hparams(print_hparams=True, is_main_process=True) + @rank_zero_only + def train_payload_copy(): # copy_code = input(f'{hparams["save_codes"]} code backup? y/n: ') == 'y' copy_code = True # backup code every time if copy_code: @@ -297,7 +297,7 @@ def start(cls): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') - hparams['disable_sample_tqdm'] = True + train_payload_copy() trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: trainer.test(task) diff --git a/scripts/infer.py b/scripts/infer.py index 9766613a3..ea61c7add 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -75,6 +75,8 @@ assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' set_hparams(print_hparams=False) +hparams['disable_sample_tqdm'] = False + if args.speedup > 0: hparams['pndm_speedup'] = args.speedup diff --git a/scripts/train.py b/scripts/train.py index 98f12e27f..f229100c0 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -2,7 +2,8 @@ from utils.hparams import set_hparams, hparams -set_hparams(is_main_process=False) +set_hparams() +hparams['disable_sample_tqdm'] = True def run_task(): assert hparams['task_cls'] != '' diff --git a/utils/hparams.py b/utils/hparams.py index 8be0a0663..28c2bcbb7 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -1,9 +1,8 @@ import argparse import os - import yaml -from utils.multiprocess_utils import is_main_process as mp_is_main_process +from pytorch_lightning.utilities.rank_zero import rank_zero_only global_print_hparams = True hparams = {} @@ -22,7 +21,7 @@ def override_config(old_config: dict, new_config: dict): old_config[k] = v -def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True, is_main_process=None): +def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, global_hparams=True): """ Load hparams from multiple sources: 1. config chain (i.e. 
first load base_config, then load config); @@ -45,9 +44,6 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob else: args = Args(config=config, exp_name=exp_name, hparams=hparams_str, infer=False, validate=False, reset=False, debug=False) - - if is_main_process is None: - is_main_process = mp_is_main_process args_work_dir = '' if args.exp_name != '': @@ -103,31 +99,36 @@ def load_config(config_fn): # deep first else: hparams_[k] = type(hparams_[k])(v) - if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer: - os.makedirs(hparams_['work_dir'], exist_ok=True) - if is_main_process: + @rank_zero_only + def dump_hparams(): + if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer: + os.makedirs(hparams_['work_dir'], exist_ok=True) # Only the main process will save the config file with open(ckpt_config_path, 'w', encoding='utf-8') as f: hparams_non_recursive = hparams_.copy() hparams_non_recursive['base_config'] = [] yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8') + dump_hparams() hparams_['infer'] = args.infer hparams_['debug'] = args.debug hparams_['validate'] = args.validate - global global_print_hparams if global_hparams: hparams.clear() hparams.update(hparams_) - hparams['is_main_process'] = is_main_process - - if is_main_process and print_hparams and global_print_hparams and global_hparams: - print('| Hparams chains: ', config_chains) - print('| Hparams: ') - for i, (k, v) in enumerate(sorted(hparams_.items())): - print(f"\033[;33;m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") - print("") - global_print_hparams = False + + @rank_zero_only + def print_hparams(): + global global_print_hparams + if print_hparams and global_print_hparams and global_hparams: + print('| Hparams chains: ', config_chains) + print('| Hparams: ') + for i, (k, v) in enumerate(sorted(hparams_.items())): + print(f"\033[;33;m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") + print("") + global_print_hparams = False + print_hparams() + # print(hparams_.keys()) if hparams.get('exp_name') is None: hparams['exp_name'] = args.exp_name From 6bb4cae99fee6c302d809312958b377abd34fec3 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 00:32:44 -0500 Subject: [PATCH 122/475] use rank_zero_info --- modules/nsf_hifigan/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 337284063..f01265309 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -7,6 +7,7 @@ import torch.nn.functional as F from torch.nn import Conv1d, ConvTranspose1d from torch.nn.utils import weight_norm, remove_weight_norm +from pytorch_lightning.utilities.rank_zero import rank_zero_info from .env import AttrDict from .utils import init_weights, get_padding @@ -274,7 +275,7 @@ def forward(self, x, f0): return x def remove_weight_norm(self): - print('Removing weight norm...') + rank_zero_info('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for l in self.resblocks: From 1dd369d1b2071e68527701e0be55e782cbf48d21 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 01:04:41 -0500 Subject: [PATCH 123/475] Clean indexed ds, add main proc check back to hparam --- utils/hparams.py | 15 +++++++++------ utils/indexed_datasets.py | 24 ++---------------------- 2 files changed, 11 insertions(+), 28 deletions(-) diff --git a/utils/hparams.py b/utils/hparams.py 
index 28c2bcbb7..5b8ecc8fb 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -3,6 +3,8 @@ import yaml from pytorch_lightning.utilities.rank_zero import rank_zero_only + +from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True hparams = {} @@ -103,11 +105,12 @@ def load_config(config_fn): # deep first def dump_hparams(): if args_work_dir != '' and (not os.path.exists(ckpt_config_path) or args.reset) and not args.infer: os.makedirs(hparams_['work_dir'], exist_ok=True) - # Only the main process will save the config file - with open(ckpt_config_path, 'w', encoding='utf-8') as f: - hparams_non_recursive = hparams_.copy() - hparams_non_recursive['base_config'] = [] - yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8') + if mp_is_main_process: + # Only the main process will save the config file + with open(ckpt_config_path, 'w', encoding='utf-8') as f: + hparams_non_recursive = hparams_.copy() + hparams_non_recursive['base_config'] = [] + yaml.safe_dump(hparams_non_recursive, f, allow_unicode=True, encoding='utf-8') dump_hparams() hparams_['infer'] = args.infer @@ -120,7 +123,7 @@ def dump_hparams(): @rank_zero_only def print_hparams(): global global_print_hparams - if print_hparams and global_print_hparams and global_hparams: + if mp_is_main_process and print_hparams and global_print_hparams and global_hparams: print('| Hparams chains: ', config_chains) print('| Hparams: ') for i, (k, v) in enumerate(sorted(hparams_.items())): diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index c0482667c..ed4420994 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,6 +1,4 @@ -import os.path import pathlib -import pickle import multiprocessing from copy import deepcopy import h5py @@ -13,22 +11,15 @@ class IndexedDataset: def __init__(self, path, prefix, num_cache=0): super().__init__() self.path = pathlib.Path(path) - # self.data_file = None - # self.data_offsets = np.load(self.path / f'{prefix}.idx')) - # self.data_file = open(self.path / f'{prefix}.data', 'rb', buffering=-1) self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') self.cache = [] self.num_cache = num_cache def check_index(self, i): - # if i < 0 or i >= len(self.data_offsets) - 1: - # raise IndexError('index out of range') if i < 0 or i >= len(self.dset): raise IndexError('index out of range') def __del__(self): - # if self.data_file: - # self.data_file.close() if self.dset: del self.dset @@ -38,27 +29,21 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - # self.data_file.seek(self.data_offsets[i]) - # b = self.data_file.read(self.data_offsets[i + 1] - self.data_offsets[i]) - # item = pickle.loads(b) item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache = [(i, deepcopy(item))] + self.cache[:-1] return item def __len__(self): - # return len(self.data_offsets) - 1 return len(self.dset) class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): self.path = pathlib.Path(path) self.prefix = prefix - # self.out_file = open(os.path.join(path, f'{prefix}.data'), 'wb') self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'w') self.counter = 0 self.lock = multiprocessing.Lock() - # self.byte_offsets = [0] if allowed_attr is not None: self.allowed_attr = set(allowed_attr) else: @@ -74,20 +59,15 @@ def add_item(self, item): item_no = self.counter self.counter += 1 for k, v in item.items(): + if v is 
None: + continue if isinstance(v, np.ndarray): self.dset.create_dataset(f'{item_no}/{k}', data=v, compression="gzip", compression_opts=4) else: self.dset.create_dataset(f'{item_no}/{k}', data=v) - # s = pickle.dumps(item) - # n_bytes = self.out_file.write(s) - # self.byte_offsets.append(self.byte_offsets[-1] + n_bytes) def finalize(self): del self.dset - # self.out_file.close() - # with open(os.path.join(self.path, f'{self.prefix}.idx'), 'wb') as f: - # # noinspection PyTypeChecker - # np.save(f, self.byte_offsets) if __name__ == "__main__": From f17263aae263fa96c3a8bafb9d9a15ab7f69bc5c Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 01:09:00 -0500 Subject: [PATCH 124/475] remove h5py compression and ncols in binarizer tqdm --- preprocessing/acoustic_binarizer.py | 5 ++--- utils/indexed_datasets.py | 5 +---- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 635a5b89d..baaadb615 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -183,13 +183,12 @@ def postprocess(_item): # code for parallel processing for item in tqdm( chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))), - ncols=80 + total=len(list(self.meta_data_iterator(prefix))) ): postprocess(item) else: # code for single cpu processing - for a in tqdm(args, ncols=80): + for a in tqdm(args): item = self.process_item(*a) postprocess(item) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index ed4420994..e20f5f99f 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -61,10 +61,7 @@ def add_item(self, item): for k, v in item.items(): if v is None: continue - if isinstance(v, np.ndarray): - self.dset.create_dataset(f'{item_no}/{k}', data=v, compression="gzip", compression_opts=4) - else: - self.dset.create_dataset(f'{item_no}/{k}', data=v) + self.dset.create_dataset(f'{item_no}/{k}', data=v) def finalize(self): del self.dset From 49e62713bddc5c213e43ffe142e9aab7d014921d Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 13:55:34 -0500 Subject: [PATCH 125/475] Format tqdm --- basics/base_task.py | 7 ++----- utils/pl_utils.py | 20 +++++++++++++++++++- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index a15e384a4..efb0aa9ae 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -13,13 +13,12 @@ import sys import numpy as np import pytorch_lightning as pl -from pytorch_lightning.callbacks import RichProgressBar, ModelSummary from pytorch_lightning.loggers import TensorBoardLogger from pytorch_lightning.utilities import grad_norm from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength -from utils.pl_utils import DiffModelCheckpoint, get_latest_checkpoint_path, get_stategy_obj +from utils.pl_utils import DiffModelCheckpoint, DiffTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj from torch import nn import torch.utils.data import utils @@ -252,10 +251,8 @@ def start(cls): max_updates=hparams['max_updates'], permanent_ckpt_start=hparams['permanent_ckpt_start'], permanent_ckpt_interval=hparams['permanent_ckpt_interval'], - # verbose=True ), - # RichProgressBar(), - # ModelSummary(max_depth=-1), + DiffTQDMProgressBar(), ], 
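                # The checkpoint callback above monitors 'step' with mode='max',
                # so its top-k set always keeps the newest checkpoints, while
                # the permanent_ckpt_* options shield periodic snapshots from
                # deletion (both behaviors are implemented in utils/pl_utils.py).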
logger=TensorBoardLogger( save_dir=str(work_dir), diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 237c8024c..d288b4ffe 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -8,7 +8,7 @@ import torch import pytorch_lightning as pl -from pytorch_lightning.callbacks import ModelCheckpoint +from pytorch_lightning.callbacks import ModelCheckpoint, TQDMProgressBar from pytorch_lightning.strategies import DDPStrategy from pytorch_lightning.trainer.states import RunningStage from pytorch_lightning.utilities.rank_zero import rank_zero_info @@ -124,6 +124,7 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: return trainer.strategy.remove_checkpoint(filepath) + def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None @@ -142,6 +143,23 @@ def get_latest_checkpoint_path(work_dir): return last_ckpt_name if last_ckpt_name is not None else None + +class DiffTQDMProgressBar(TQDMProgressBar): + def __init__(self, refresh_rate: int = 1, process_position: int = 0): + super().__init__(refresh_rate, process_position) + + def get_metrics(self, trainer, model): + items = super().get_metrics(trainer, model) + for name in ['step', 'batch_size']: + if name in items: + items[name] = int(items[name]) + for k, v in items.items(): + if isinstance(v, float): + if 0.00001 <= v < 10: + items[k] = f"{v:.5f}" + return items + + def get_stategy_obj(strategy): if strategy == 'ddp_gloo': return DDPStrategy(process_group_backend='gloo') From 5eb3d74718a99719066a891f8cfe9e26bb8e5033 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 20:14:30 -0500 Subject: [PATCH 126/475] Fix bug in val_check_interval, hide v_num --- basics/base_task.py | 2 +- utils/pl_utils.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/basics/base_task.py b/basics/base_task.py index efb0aa9ae..cf381e3ca 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -260,7 +260,7 @@ def start(cls): version='lastest' ), gradient_clip_val=hparams['clip_grad_norm'], - val_check_interval=hparams['val_check_interval'], + val_check_interval=hparams['val_check_interval'] * hparams['accumulate_grad_batches'], # so this is global_steps check_val_every_n_epoch=None, log_every_n_steps=hparams['log_interval'], max_steps=hparams['max_updates'], diff --git a/utils/pl_utils.py b/utils/pl_utils.py index d288b4ffe..5b09d4b14 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -157,6 +157,7 @@ def get_metrics(self, trainer, model): if isinstance(v, float): if 0.00001 <= v < 10: items[k] = f"{v:.5f}" + items.pop("v_num", None) return items From b2aa7894a25e5c80a0878729e85b0047baa242d7 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 21:09:00 -0500 Subject: [PATCH 127/475] Cleanup dataset codes, rename custom callbacks --- basics/base_dataset.py | 27 +++------------------------ basics/base_task.py | 22 +++++++++++----------- configs/base.yaml | 1 + training/acoustic_task.py | 20 +++++++++----------- utils/indexed_datasets.py | 8 ++++---- utils/pl_utils.py | 4 ++-- utils/training_utils.py | 9 +++++---- 7 files changed, 35 insertions(+), 56 deletions(-) diff --git a/basics/base_dataset.py b/basics/base_dataset.py index ac34ebd0d..7c1d0c59f 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -9,12 +9,9 @@ class BaseDataset(Dataset): ''' Base class for datasets. - 1. *ordered_indices*: - if self.shuffle == True, shuffle the indices; - if self.sort_by_len == True, sort data by length; - 2. *sizes*: + 1. 
*sizes*: clipped length if "max_frames" is set; - 3. *num_tokens*: + 2. *num_tokens*: unclipped length. Subclasses should define: @@ -23,11 +20,9 @@ class BaseDataset(Dataset): 2. *__getitem__*: the index function. ''' - def __init__(self, shuffle): + def __init__(self): super().__init__() self.hparams = hparams - self.shuffle = shuffle - self.sort_by_len = hparams['sort_by_len'] self.sizes = None @property @@ -50,19 +45,3 @@ def size(self, index): """Return an example's size as a float or tuple. This value is used when filtering a dataset with ``--max-positions``.""" return self._sizes[index] - - def ordered_indices(self): - """Return an ordered list of indices. Batches will be constructed based - on this order.""" - if self.shuffle: - indices = np.random.permutation(len(self)) - if self.sort_by_len: - indices = indices[np.argsort(np.array(self._sizes)[indices], kind='mergesort')] - # 先random, 然后稳定排序, 保证排序后同长度的数据顺序是依照random permutation的 (被其随机打乱). - else: - indices = np.arange(len(self)) - return indices - - @property - def num_workers(self): - return int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 0))) diff --git a/basics/base_task.py b/basics/base_task.py index cf381e3ca..1f1b406dc 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -17,8 +17,8 @@ from pytorch_lightning.utilities import grad_norm from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary -from utils.training_utils import BatchSamplerSimilarLength, DistributedBatchSamplerSimilarLength -from utils.pl_utils import DiffModelCheckpoint, DiffTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj +from utils.training_utils import DsBatchSampler, DsDistributedBatchSampler +from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj from torch import nn import torch.utils.data import utils @@ -204,15 +204,15 @@ def configure_optimizers(self): } def build_batch_sampler(self, dataset, max_tokens, max_sentences, batch_by_size=True, shuffle=False): - batch_sampler_cls = partial(BatchSamplerSimilarLength, + batch_sampler_cls = partial(DsBatchSampler, max_tokens=max_tokens, max_sentences=max_sentences, - batch_by_size=batch_by_size) + batch_by_size=batch_by_size, sort_by_similar_size=hparams['sort_by_len']) if self.trainer.distributed_sampler_kwargs: - sampler = DistributedBatchSamplerSimilarLength(dataset, - batch_sampler_cls=batch_sampler_cls, - seed=hparams['seed'], - shuffle=shuffle, - **self.trainer.distributed_sampler_kwargs) + sampler = DsDistributedBatchSampler(dataset, + batch_sampler_cls=batch_sampler_cls, + seed=hparams['seed'], + shuffle=shuffle, + **self.trainer.distributed_sampler_kwargs) else: sampler = batch_sampler_cls(dataset, seed=hparams['seed'], shuffle=shuffle) return sampler @@ -241,7 +241,7 @@ def start(cls): strategy=get_stategy_obj(hparams['pl_trainer_strategy']), precision=hparams['pl_trainer_precision'], callbacks=[ - DiffModelCheckpoint( + DsModelCheckpoint( dirpath=work_dir, filename='model_ckpt_steps_{step}', monitor='step', @@ -252,7 +252,7 @@ def start(cls): permanent_ckpt_start=hparams['permanent_ckpt_start'], permanent_ckpt_interval=hparams['permanent_ckpt_interval'], ), - DiffTQDMProgressBar(), + DsTQDMProgressBar(), ], logger=TensorBoardLogger( save_dir=str(work_dir), diff --git a/configs/base.yaml b/configs/base.yaml index 341598c4a..b6fa0c4f5 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -34,6 +34,7 @@ mel_vmin: -6 mel_vmax: 1.5 
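# How this grid is used (see DsBatchSampler in utils/training_utils.py): each
# sample's frame count is rounded to the nearest multiple of this value, with
# the value itself as a floor, before a stable sort -- so samples falling into
# the same bucket keep their shuffled order. For example, with a grid of 200,
# sizes [130, 310, 170, 420] bucket to [200, 400, 200, 400]. A larger grid
# means coarser buckets and more length variance within a batch.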
sampler_frame_count_grid: 200 ds_workers: 4 +dataloader_prefetch_factor: 2 ######### # model diff --git a/training/acoustic_task.py b/training/acoustic_task.py index ab119e240..b95d9dcaf 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -31,16 +31,14 @@ class AcousticDataset(BaseDataset): - def __init__(self, prefix, shuffle=False): - super().__init__(shuffle) + def __init__(self, prefix): + super().__init__() self.data_dir = hparams['binary_data_dir'] self.prefix = prefix self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) def __getitem__(self, index): - # if self.indexed_ds is None: - # self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) return self.indexed_ds[index] def collater(self, samples): @@ -81,8 +79,8 @@ def __init__(self): def setup(self, stage): self.phone_encoder = self.build_phone_encoder() self.model = self.build_model() - self.train_dataset = self.dataset_cls(hparams['train_set_name'], shuffle=True) - self.valid_dataset = self.dataset_cls(hparams['valid_set_name'], shuffle=False) + self.train_dataset = self.dataset_cls(hparams['train_set_name']) + self.valid_dataset = self.dataset_cls(hparams['valid_set_name']) @staticmethod def build_phone_encoder(): @@ -121,9 +119,9 @@ def train_dataloader(self): return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, batch_sampler=self.training_sampler, - num_workers=self.train_dataset.num_workers, - prefetch_factor=4, - pin_memory=False, + num_workers=int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 1))), + prefetch_factor=hparams.get('dataloader_prefetch_factor', 2), + pin_memory=True, persistent_workers=True) def val_dataloader(self): @@ -133,8 +131,8 @@ def val_dataloader(self): return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, - num_workers=self.valid_dataset.num_workers, - prefetch_factor=4, + num_workers=int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 1))), + prefetch_factor=hparams.get('dataloader_prefetch_factor', 2), shuffle=False) def test_dataloader(self): diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index e20f5f99f..0357e45fd 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -1,9 +1,9 @@ import pathlib import multiprocessing -from copy import deepcopy +from collections import deque + import h5py import torch - import numpy as np @@ -12,7 +12,7 @@ def __init__(self, path, prefix, num_cache=0): super().__init__() self.path = pathlib.Path(path) self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') - self.cache = [] + self.cache = deque(maxlen=num_cache) self.num_cache = num_cache def check_index(self, i): @@ -31,7 +31,7 @@ def __getitem__(self, i): return c[1] item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: - self.cache = [(i, deepcopy(item))] + self.cache[:-1] + self.cache.appendleft((i, item)) return item def __len__(self): diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 5b09d4b14..cca36f1b8 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -13,7 +13,7 @@ from pytorch_lightning.trainer.states import RunningStage from pytorch_lightning.utilities.rank_zero import rank_zero_info -class DiffModelCheckpoint(ModelCheckpoint): +class DsModelCheckpoint(ModelCheckpoint): def __init__( self, dirpath, @@ -144,7 +144,7 @@ def get_latest_checkpoint_path(work_dir): 
return last_ckpt_name if last_ckpt_name is not None else None -class DiffTQDMProgressBar(TQDMProgressBar): +class DsTQDMProgressBar(TQDMProgressBar): def __init__(self, refresh_rate: int = 1, process_position: int = 0): super().__init__(refresh_rate, process_position) diff --git a/utils/training_utils.py b/utils/training_utils.py index 9154d9d28..605633cc8 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -53,13 +53,14 @@ def lr_lambda(self, step): progress = (step - self.warmup_steps) / max(1, self.t_total - self.warmup_steps) return max(self.eta_min, 0.5 * (1. + math.cos(math.pi * self.cycles * 2.0 * progress))) -class BatchSamplerSimilarLength(Sampler): - def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, seed=0, shuffle=True): +class DsBatchSampler(Sampler): + def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, sort_by_similar_size=True, seed=0, shuffle=True): self.dataset = dataset self.sub_indices = indices self.max_tokens = max_tokens self.max_sentences = max_sentences self.batch_by_size = batch_by_size + self.sort_by_similar_size = sort_by_similar_size self.shuffle = shuffle self.seed = seed self.epoch = 0 @@ -73,7 +74,7 @@ def __iter__(self): indices = np.array(self.sub_indices) else: indices = rng.permutation(len(self.dataset)) - if self.dataset.sort_by_len: + if self.sort_by_similar_size: grid = hparams.get('sampler_frame_count_grid', 100) sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] @@ -97,7 +98,7 @@ def __len__(self): def set_epoch(self, epoch): self.epoch = epoch -class DistributedBatchSamplerSimilarLength(DistributedSampler): +class DsDistributedBatchSampler(DistributedSampler): def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, seed=0, drop_last=False, batch_sampler_cls=None) -> None: From bb3c637f93851aacfe4bce1335874f5df3202d94 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 21:09:47 -0500 Subject: [PATCH 128/475] Bump requirement torch and pl version --- requirements.txt | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/requirements.txt b/requirements.txt index 56c1439a9..4984c69bc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ resemblyzer==0.1.1.dev0 -tensorboardX==2.5.1 +tensorboardX==2.6 h5py==3.7.0 future==0.18.2 g2p-en==2.1.0 @@ -21,17 +21,18 @@ matplotlib==3.6.2 torchcrepe==0.0.17 python-dateutil==2.8.2 python-Levenshtein==0.12.2 -pytorch-lightning==0.7.1 +pytorch-lightning==2.0.0 six==1.16.0 -tqdm==4.64.1 +tqdm==4.65.0 resampy==0.4.2 +rich==13.3.2 imageio==2.23.0 einops==0.6.0 pycwt==0.3.0a22 praat-parselmouth==0.4.3 scikit-image==0.19.3 pyloudnorm==0.1.0 -torchmetrics==0.5.0 +torchmetrics==0.11.4 tensorboard==2.11.0 tensorboard-plugin-wit==1.8.1 protobuf==3.13.0 @@ -40,6 +41,6 @@ pypinyin==0.39.0 # It is recommended to install PyTorch manually. 
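# One illustrative command matching the pins suggested below (the cu117 build
# tag is an assumption -- choose the index URL for your own CUDA version):
#   pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu117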
# See instructions at https://pytorch.org/get-started/previous-versions/ -# torch==1.8.2 -# torchaudio==0.8.2 -# torchvision==0.9.2 +# torch==2.0.0 +# torchaudio==2.0.0 +# torchvision==0.15.0 From a39a9faa671b6a8665e3c276db8236b740f39a6d Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 22:35:43 -0500 Subject: [PATCH 129/475] binarizer joint aug bug fix --- preprocessing/acoustic_binarizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index baaadb615..2f63cce62 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -351,7 +351,7 @@ def arrange_data_augmentation(self, prefix): aug_list.append(aug_task) elif aug_type == 1: aug_task = deepcopy(aug_item) - aug_item['kwargs']['speed'] = speed + aug_task['kwargs']['speed'] = speed if aug_item['name'] in aug_map: aug_map[aug_item['name']].append(aug_task) else: From f745a6e471d95db320aa4dcf075aa63c332b57ba Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 24 Mar 2023 23:26:34 -0500 Subject: [PATCH 130/475] revert torch version --- basics/base_task.py | 12 ++++++------ modules/nsf_hifigan/models.py | 2 +- modules/vocoders/nsf_hifigan.py | 2 +- requirements.txt | 10 +++++----- training/acoustic_task.py | 2 +- utils/hparams.py | 2 +- utils/phoneme_utils.py | 2 +- utils/pl_utils.py | 10 +++++----- 8 files changed, 21 insertions(+), 21 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 1f1b406dc..2915b679b 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -12,10 +12,10 @@ import random import sys import numpy as np -import pytorch_lightning as pl -from pytorch_lightning.loggers import TensorBoardLogger -from pytorch_lightning.utilities import grad_norm -from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_only +import lightning.pytorch as pl +from lightning.pytorch.loggers import TensorBoardLogger +from lightning.pytorch.utilities import grad_norm +from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only from utils.phoneme_utils import locate_dictionary from utils.training_utils import DsBatchSampler, DsDistributedBatchSampler from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj @@ -136,7 +136,7 @@ def on_train_epoch_end(self): # f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. 
{loss_outputs}" # f"\n==============\n") - def on_before_optimizer_step(self, optimizer): + def on_before_optimizer_step(self, *args, **kwargs): self.log_dict(grad_norm(self, norm_type=2)) def on_validation_start(self): @@ -305,6 +305,6 @@ def on_save_checkpoint(self, checkpoint): checkpoint['trainer_stage'] = self.trainer.state.stage.value def on_load_checkpoint(self, checkpoint): - from pytorch_lightning.trainer.states import RunningStage + from lightning.pytorch.trainer.states import RunningStage if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: self.skip_immediate_validation = True diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index f01265309..8c2ad2aa7 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -7,7 +7,7 @@ import torch.nn.functional as F from torch.nn import Conv1d, ConvTranspose1d from torch.nn.utils import weight_norm, remove_weight_norm -from pytorch_lightning.utilities.rank_zero import rank_zero_info +from lightning.pytorch.utilities.rank_zero import rank_zero_info from .env import AttrDict from .utils import init_weights, get_padding diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 7b4a0a531..0f98a7fc8 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,7 +1,7 @@ import os import torch -from pytorch_lightning.utilities.rank_zero import rank_zero_info +from lightning.pytorch.utilities.rank_zero import rank_zero_info from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT diff --git a/requirements.txt b/requirements.txt index 4984c69bc..6de58f06e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,7 +21,7 @@ matplotlib==3.6.2 torchcrepe==0.0.17 python-dateutil==2.8.2 python-Levenshtein==0.12.2 -pytorch-lightning==2.0.0 +lightning==2.0.0 six==1.16.0 tqdm==4.65.0 resampy==0.4.2 @@ -36,11 +36,11 @@ torchmetrics==0.11.4 tensorboard==2.11.0 tensorboard-plugin-wit==1.8.1 protobuf==3.13.0 -PyYAML==5.4 +PyYAML==6.0 pypinyin==0.39.0 # It is recommended to install PyTorch manually. 
# See instructions at https://pytorch.org/get-started/previous-versions/ -# torch==2.0.0 -# torchaudio==2.0.0 -# torchvision==0.15.0 +# torch==1.13.1 +# torchaudio==0.13.1 +# torchvision==0.14.1 diff --git a/training/acoustic_task.py b/training/acoustic_task.py index b95d9dcaf..5cd5d3dcd 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -8,7 +8,7 @@ import torch.distributions import torch.optim import torch.utils.data -import pytorch_lightning as pl +import lightning.pytorch as pl from tqdm import tqdm import utils diff --git a/utils/hparams.py b/utils/hparams.py index 5b8ecc8fb..0448f5fc1 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -2,7 +2,7 @@ import os import yaml -from pytorch_lightning.utilities.rank_zero import rank_zero_only +from lightning.pytorch.utilities.rank_zero import rank_zero_only from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index 8bca6510e..086008bd8 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,6 +1,6 @@ import pathlib -from pytorch_lightning.utilities.rank_zero import rank_zero_info +from lightning.pytorch.utilities.rank_zero import rank_zero_info from utils.hparams import hparams diff --git a/utils/pl_utils.py b/utils/pl_utils.py index cca36f1b8..829c08f07 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -7,11 +7,11 @@ import torch -import pytorch_lightning as pl -from pytorch_lightning.callbacks import ModelCheckpoint, TQDMProgressBar -from pytorch_lightning.strategies import DDPStrategy -from pytorch_lightning.trainer.states import RunningStage -from pytorch_lightning.utilities.rank_zero import rank_zero_info +import lightning.pytorch as pl +from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar +from lightning.pytorch.strategies import DDPStrategy +from lightning.pytorch.trainer.states import RunningStage +from lightning.pytorch.utilities.rank_zero import rank_zero_info class DsModelCheckpoint(ModelCheckpoint): def __init__( From 7dae5aa525a7ddaf58e10b72dd295cca0077fdd4 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 00:15:13 -0500 Subject: [PATCH 131/475] Remove py3.9 syntax --- basics/base_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 2915b679b..756de765e 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -121,9 +121,9 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): pass # log_outputs['all_loss'] = total_loss.item() - progress_bar_log = log_outputs | {'step': self.global_step} + log_outputs.update({'step': self.global_step}) tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(progress_bar_log, prog_bar=True, on_step=True, on_epoch=False) + self.log_dict(log_outputs, prog_bar=True, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return { 'loss': total_loss From 2bbc42b3b04693681250b64f34cca4f24cd8cea2 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 11:13:03 -0500 Subject: [PATCH 132/475] Add env for CUDNN API change, clean more codes --- basics/base_task.py | 34 ++++------------------------------ scripts/train.py | 2 ++ training/acoustic_task.py | 1 - 3 files changed, 6 insertions(+), 31 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 756de765e..4187d5d45 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -71,7 +71,6 @@ def 
__init__(self, *args, **kwargs): if self.max_eval_sentences == -1: hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences - self.training_losses_meter = None self.training_sampler = None self.skip_immediate_validation = False self.skip_immediate_ckpt_save = False @@ -86,7 +85,6 @@ def build_model(self): raise NotImplementedError def on_train_epoch_start(self): - self.training_losses_meter = {'total_loss': utils.AvgrageMeter()} if self.training_sampler is not None: self.training_sampler.set_epoch(self.current_epoch) @@ -100,34 +98,13 @@ def _training_step(self, sample, batch_idx, optimizer_idx): raise NotImplementedError def training_step(self, sample, batch_idx, optimizer_idx=-1): - loss_ret = self._training_step(sample, batch_idx, optimizer_idx) - self.opt_idx = optimizer_idx - if loss_ret is None: - return {'loss': None} - total_loss, log_outputs = loss_ret - log_outputs = utils.tensors_to_scalars(log_outputs) - for k, v in log_outputs.items(): - if k not in self.training_losses_meter: - self.training_losses_meter[k] = utils.AvgrageMeter() - if not np.isnan(v): - self.training_losses_meter[k].update(v) - self.training_losses_meter['total_loss'].update(total_loss.item()) - - try: - log_outputs['lr'] = self.scheduler.get_lr() - if isinstance(log_outputs['lr'], list): - log_outputs['lr'] = log_outputs['lr'][0] - except: - pass - - # log_outputs['all_loss'] = total_loss.item() - log_outputs.update({'step': self.global_step}) + total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) + + log_outputs.update({'step': self.global_step, 'lr': self.lr_schedulers().get_lr()[0]}) tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} self.log_dict(log_outputs, prog_bar=True, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) - return { - 'loss': total_loss - } + return total_loss def on_train_epoch_end(self): pass @@ -179,9 +156,6 @@ def on_validation_epoch_end(self): self.skip_immediate_ckpt_save = True return loss_output = self._on_validation_end(self.validation_step_outputs) - # print(f"\n==============\n " - # f"valid results: {loss_output}" - # f"\n==============\n") self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, sync_dist=True) self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) diff --git a/scripts/train.py b/scripts/train.py index f229100c0..60f8d8f54 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -1,4 +1,6 @@ +import os import importlib +os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1' # Prevent unacceptable slowdowns when using 16 precision from utils.hparams import set_hparams, hparams diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 5cd5d3dcd..c08f3857c 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -172,7 +172,6 @@ def _training_step(self, sample, batch_idx, _): log_outputs = self.run_model(sample) total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) log_outputs['batch_size'] = sample['tokens'].size()[0] - log_outputs['lr'] = self.lr_schedulers().get_lr()[0] return total_loss, log_outputs def _validation_step(self, sample, batch_idx): From 287d31c6fb415f482b3f1310ca98257b85e40b8e Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 11:26:31 -0500 Subject: [PATCH 133/475] commit to pl rank_zero util --- training/acoustic_task.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git 
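The `TORCH_CUDNN_V8_API_ENABLED` flag introduced here is an environment variable, so it is presumably only honored if exported before torch initializes cuDNN; scripts/train.py therefore sets it at the very top, ahead of the hparams import that pulls in torch indirectly. A minimal sketch of that ordering:

import os
# Set before the first torch import, as in scripts/train.py above.
os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1'

import torch  # noqa: E402  (deliberately imported after the env tweak)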
a/training/acoustic_task.py b/training/acoustic_task.py index c08f3857c..620c9fa9d 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -8,7 +8,7 @@ import torch.distributions import torch.optim import torch.utils.data -import lightning.pytorch as pl +from lightning.pytorch.utilities.rank_zero import rank_zero_only from tqdm import tqdm import utils @@ -92,8 +92,10 @@ def build_model(self): vocab_size=len(self.phone_encoder), out_dims=hparams['audio_num_mel_bins'] ) - if self.trainer.local_rank == 0: + @rank_zero_only + def print_arch(): utils.print_arch(model) + print_arch() return model def build_optimizer(self, model): From 7f983d9a5349b3a9ca5e3d8026423c9bfd6aa04b Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 11:34:11 -0500 Subject: [PATCH 134/475] Resolve merged changes --- training/acoustic_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 620c9fa9d..ff3e2f577 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -56,11 +56,11 @@ def collater(self, samples): 'f0': f0, } if hparams.get('use_key_shift_embed', False): - batch['key_shift'] = torch.FloatTensor([float(s['key_shift']) for s in samples])[:, None] + batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] if hparams.get('use_speed_embed', False): batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] if hparams['use_spk_id']: - spk_ids = torch.LongTensor([int(s['spk_id']) for s in samples]) + spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids return batch From 0726128749df152fb713c0bc7ccd8b70c02d06f4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 26 Mar 2023 02:18:00 +0800 Subject: [PATCH 135/475] Show warning instead of throwing error when f0 is empty --- preprocessing/acoustic_binarizer.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 99d596b59..24607a395 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -228,7 +228,8 @@ def process_item(self, item_name, meta_data, binarization_args): wav, length, hparams, interp_uv=self.binarization_args['interp_uv'] ) if uv.all(): # All unvoiced - raise BinarizationError(f'Empty gt f0 in \'{item_name}\'.') + print(f'Skipped \'{item_name}\': empty gt f0') + return None processed_input['f0'] = torch.from_numpy(gt_f0).float() # get ground truth dur From 0543914f9d3febbe01874a0ec379d9ae62202ca8 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 22:36:44 -0500 Subject: [PATCH 136/475] Auto strategy choose, gloo backend by default --- basics/base_task.py | 42 ++++++++++++---------- configs/base.yaml | 4 +++ preparation/acoustic_preparation.ipynb | 2 -- scripts/train.py | 2 ++ utils/pl_utils.py | 48 +++++++++++++++++++++++--- 5 files changed, 73 insertions(+), 25 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 4187d5d45..79f9921c3 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -1,30 +1,25 @@ +from datetime import datetime +from functools import partial +import logging +import os import pathlib import shutil -from datetime import datetime +import sys import matplotlib - -from basics.base_model import CategorizedModule - matplotlib.use('Agg') -from utils.hparams import hparams, set_hparams -import random -import sys -import numpy as np +import torch.utils.data import 
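`rank_zero_only` replaces the manual `self.trainer.local_rank == 0` guard: the decorated function runs on the process with global rank 0 and becomes a no-op on every other worker. Note the semantics tighten slightly as well, since a local-rank check fires once per node while the decorator fires once per job. A standalone sketch:

import torch
from lightning.pytorch.utilities.rank_zero import rank_zero_only

@rank_zero_only
def print_arch_once(model) -> None:
    # Executes on global rank 0 only; other ranks silently skip it.
    print(model)

print_arch_once(torch.nn.Linear(4, 1))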
lightning.pytorch as pl from lightning.pytorch.loggers import TensorBoardLogger from lightning.pytorch.utilities import grad_norm from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only -from utils.phoneme_utils import locate_dictionary + +from basics.base_model import CategorizedModule +from utils.hparams import hparams from utils.training_utils import DsBatchSampler, DsDistributedBatchSampler -from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy_obj -from torch import nn -import torch.utils.data -import utils -import logging -from functools import partial -import os +from utils.phoneme_utils import locate_dictionary +from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy torch.multiprocessing.set_sharing_strategy(os.getenv('TORCH_SHARE_STRATEGY', 'file_system')) @@ -102,7 +97,7 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): log_outputs.update({'step': self.global_step, 'lr': self.lr_schedulers().get_lr()[0]}) tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(log_outputs, prog_bar=True, on_step=True, on_epoch=False) + self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return total_loss @@ -156,7 +151,7 @@ def on_validation_epoch_end(self): self.skip_immediate_ckpt_save = True return loss_output = self._on_validation_end(self.validation_step_outputs) - self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, sync_dist=True) + self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, sync_dist=True) self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) def build_scheduler(self, optimizer): @@ -168,6 +163,8 @@ def build_optimizer(self, model): def configure_optimizers(self): optm = self.build_optimizer(self.model) scheduler = self.build_scheduler(optm) + if scheduler is None: + return optm return { "optimizer": optm, "lr_scheduler": { @@ -212,7 +209,14 @@ def start(cls): trainer = pl.Trainer( accelerator=hparams['pl_trainer_accelerator'], devices=hparams['pl_trainer_devices'], - strategy=get_stategy_obj(hparams['pl_trainer_strategy']), + num_nodes=hparams['pl_trainer_num_nodes'], + strategy=get_stategy( + accelerator=hparams['pl_trainer_accelerator'], + devices=hparams['pl_trainer_devices'], + num_nodes=hparams['pl_trainer_num_nodes'], + strategy=hparams['pl_trainer_strategy'], + backend=hparams['ddp_backend'] + ), precision=hparams['pl_trainer_precision'], callbacks=[ DsModelCheckpoint( diff --git a/configs/base.yaml b/configs/base.yaml index b6fa0c4f5..49bced42f 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -84,3 +84,7 @@ save_gt: false save_f0: false gen_dir_name: '' num_valid_plots: 5 + +pl_trainer_num_nodes: 1 +pl_trainer_strategy: 'auto' +ddp_backend: 'gloo' # choose from 'gloo', 'nccl', 'nccl_no_p2p' diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index 105059ad4..728208f22 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1236,7 +1236,6 @@ " ###########\n", " 'pl_trainer_accelerator': 'auto',\n", " 'pl_trainer_devices': 'auto',\n", - " 'pl_trainer_strategy': 'auto', # With extra choice 'ddp_gloo' in case that NCCL backend stucks on you\n", " 'pl_trainer_precision': '32-true',\n", "}\n", "\n", @@ -1555,7 
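`configure_optimizers` above now degrades gracefully when `build_scheduler` returns None. Below is a self-contained sketch of the same pattern; the `use_scheduler` switch and the `'interval': 'step'` entry are illustrative assumptions, not quoted from the patch.

import torch
import lightning.pytorch as pl

class TinyTask(pl.LightningModule):
    def __init__(self, use_scheduler: bool = False):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)
        self.use_scheduler = use_scheduler

    def configure_optimizers(self):
        optimizer = torch.optim.AdamW(self.parameters(), lr=1e-3)
        if not self.use_scheduler:
            # Lightning accepts a bare optimizer when there is no scheduler.
            return optimizer
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1000)
        return {
            'optimizer': optimizer,
            'lr_scheduler': {'scheduler': scheduler, 'interval': 'step'},  # assumed interval
        }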
+1554,6 @@ " ###########\n", " 'pl_trainer_accelerator': 'auto',\n", " 'pl_trainer_devices': 'auto',\n", - " 'pl_trainer_strategy': 'auto', # With extra choice 'ddp_gloo' in case that NCCL backend stucks on you\n", " 'pl_trainer_precision': '32-true',\n", "}\n", "\n", diff --git a/scripts/train.py b/scripts/train.py index 60f8d8f54..f922b8a81 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -6,6 +6,8 @@ set_hparams() hparams['disable_sample_tqdm'] = True +if hparams['ddp_backend'] == 'nccl_no_p2p': + os.environ['NCCL_P2P_DISABLE'] = '1' def run_task(): assert hparams['task_cls'] != '' diff --git a/utils/pl_utils.py b/utils/pl_utils.py index 829c08f07..87c7c95d6 100644 --- a/utils/pl_utils.py +++ b/utils/pl_utils.py @@ -161,8 +161,48 @@ def get_metrics(self, trainer, model): return items -def get_stategy_obj(strategy): - if strategy == 'ddp_gloo': - return DDPStrategy(process_group_backend='gloo') - else: +def get_stategy(accelerator, devices, num_nodes, strategy, backend): + if accelerator != 'auto' and accelerator != 'gpu': return strategy + + from lightning_fabric.utilities.imports import _IS_INTERACTIVE + from lightning.pytorch.accelerators import AcceleratorRegistry + from lightning.pytorch.accelerators.cuda import CUDAAccelerator + from lightning.pytorch.accelerators.mps import MPSAccelerator + from pytorch_lightning.utilities.exceptions import MisconfigurationException + + def _choose_gpu_accelerator_backend(): + if MPSAccelerator.is_available(): + return "mps" + if CUDAAccelerator.is_available(): + return "cuda" + raise MisconfigurationException("No supported gpu backend found!") + _accelerator_flag = _choose_gpu_accelerator_backend() + + _num_nodes_flag = int(num_nodes) if num_nodes is not None else 1 + _devices_flag = devices + + accelerator = AcceleratorRegistry.get(_accelerator_flag) + accelerator_cls = accelerator.__class__ + + if _devices_flag == "auto": + _devices_flag = accelerator.auto_device_count() + + _devices_flag = accelerator_cls.parse_devices(_devices_flag) + _parallel_devices = accelerator_cls.get_parallel_devices(_devices_flag) + + def get_ddp_strategy(_backend): + if _backend == 'gloo': + return DDPStrategy(process_group_backend='gloo') + elif _backend == 'nccl' or _backend == 'nccl_no_p2p': + return DDPStrategy(process_group_backend='nccl') + else: + raise ValueError(f'backend {_backend} is not valid.') + + if _num_nodes_flag > 1: + return get_ddp_strategy(backend) + if len(_parallel_devices) <= 1: + return strategy + if len(_parallel_devices) > 1 and _IS_INTERACTIVE: + return strategy + return get_ddp_strategy(backend) From 1a2f2c9a0a973ec66adf546a90563b94daec06c4 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 23:39:04 -0500 Subject: [PATCH 137/475] Fix for reviews --- modules/diff/diffusion.py | 4 ++-- requirements.txt | 6 +++--- scripts/infer.py | 1 - scripts/train.py | 5 +++-- utils/hparams.py | 16 +++++++++++----- utils/indexed_datasets.py | 6 ++++-- 6 files changed, 23 insertions(+), 15 deletions(-) diff --git a/modules/diff/diffusion.py b/modules/diff/diffusion.py index 91e4b9718..849b5aad8 100644 --- a/modules/diff/diffusion.py +++ b/modules/diff/diffusion.py @@ -274,7 +274,7 @@ def wrapped(x, t, **kwargs): dpm_solver = DPM_Solver(model_fn, noise_schedule) steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps, disable=hparams['disable_sample_tqdm']) + self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer']) x = dpm_solver.sample( x, steps=steps, @@ -284,7 +284,7 @@ 
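The backend handling inside `get_stategy` reduces to a small mapping, condensed below. Note that 'nccl_no_p2p' deliberately maps to a plain NCCL process group, because the peer-to-peer part is disabled separately through the NCCL_P2P_DISABLE environment variable set in scripts/train.py.

from lightning.pytorch.strategies import DDPStrategy

def get_ddp_strategy(backend: str) -> DDPStrategy:
    # Mirrors the inner helper of get_stategy above.
    if backend == 'gloo':
        return DDPStrategy(process_group_backend='gloo')
    if backend in ('nccl', 'nccl_no_p2p'):
        return DDPStrategy(process_group_backend='nccl')
    raise ValueError(f'backend {backend} is not valid.')

print(get_ddp_strategy('gloo').process_group_backend)  # gloo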
def wrapped(x, t, **kwargs): ) self.bar.close() else: - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=hparams['disable_sample_tqdm']): + for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=not hparams['infer']): x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) x = x.squeeze(1).transpose(1, 2) # [B, T, M] return self.denorm_spec(x) diff --git a/requirements.txt b/requirements.txt index 6de58f06e..8592f9f90 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,6 +41,6 @@ pypinyin==0.39.0 # It is recommended to install PyTorch manually. # See instructions at https://pytorch.org/get-started/previous-versions/ -# torch==1.13.1 -# torchaudio==0.13.1 -# torchvision==0.14.1 +# torch==2.0.0 +# torchaudio==2.0.0 +# torchvision==0.15.0 diff --git a/scripts/infer.py b/scripts/infer.py index ea61c7add..b2bcd9b8c 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -75,7 +75,6 @@ assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' set_hparams(print_hparams=False) -hparams['disable_sample_tqdm'] = False if args.speedup > 0: hparams['pndm_speedup'] = args.speedup diff --git a/scripts/train.py b/scripts/train.py index f922b8a81..f07a8ad8d 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -1,12 +1,13 @@ -import os import importlib +import os +import sys os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1' # Prevent unacceptable slowdowns when using 16 precision from utils.hparams import set_hparams, hparams set_hparams() -hparams['disable_sample_tqdm'] = True if hparams['ddp_backend'] == 'nccl_no_p2p': + print("Disabling NCCL P2P") os.environ['NCCL_P2P_DISABLE'] = '1' def run_task(): diff --git a/utils/hparams.py b/utils/hparams.py index 0448f5fc1..170b8c67f 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -43,6 +43,10 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob parser.add_argument('--reset', action='store_true', help='reset hparams') parser.add_argument('--debug', action='store_true', help='debug') args, unknown = parser.parse_known_args() + + tmp_args_hparams = args.hparams.split(',') if args.hparams.strip() != '' else [] + tmp_args_hparams.extend(hparams_str.split(',') if hparams_str.strip() != '' else []) + args.hparams = ','.join(tmp_args_hparams) else: args = Args(config=config, exp_name=exp_name, hparams=hparams_str, infer=False, validate=False, reset=False, debug=False) @@ -93,6 +97,8 @@ def load_config(config_fn): # deep first if args.hparams != "": for new_hparam in args.hparams.split(","): + if new_hparam.strip() == "": + continue k, v = new_hparam.split("=") if k not in hparams_: hparams_[k] = eval(v) @@ -120,6 +126,11 @@ def dump_hparams(): hparams.clear() hparams.update(hparams_) + if hparams.get('exp_name') is None: + hparams['exp_name'] = args.exp_name + if hparams_.get('exp_name') is None: + hparams_['exp_name'] = args.exp_name + @rank_zero_only def print_hparams(): global global_print_hparams @@ -132,9 +143,4 @@ def print_hparams(): global_print_hparams = False print_hparams() - # print(hparams_.keys()) - if hparams.get('exp_name') is None: - hparams['exp_name'] = args.exp_name - if hparams_.get('exp_name') is None: - hparams_['exp_name'] = args.exp_name return hparams_ diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 0357e45fd..d7ee2b1d8 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -10,8 +10,10 @@ class IndexedDataset: def __init__(self, path, prefix, num_cache=0): super().__init__() 
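The override string merged above follows a `key=value,key=value` grammar. A sketch of the parsing; the `else` branch that coerces a value to the existing entry's type falls outside the quoted hunk and is an assumption here.

hparams_ = {'lr': 0.0004}
overrides = 'lr=0.0001,new_flag=True,'  # trailing comma yields an empty token

for new_hparam in overrides.split(','):
    if new_hparam.strip() == '':
        continue  # the guard added above
    k, v = new_hparam.split('=')
    if k not in hparams_:
        hparams_[k] = eval(v)  # new key: evaluate as a Python literal
    else:
        hparams_[k] = type(hparams_[k])(v)  # assumed: keep the existing type

print(hparams_)  # {'lr': 0.0001, 'new_flag': True}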
- self.path = pathlib.Path(path) - self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'r') + self.path = pathlib.Path(path) / f'{prefix}.data' + if not self.path.exists(): + raise FileNotFoundError(f'IndexedDataset not found: {self.path}') + self.dset = h5py.File(self.path, 'r') self.cache = deque(maxlen=num_cache) self.num_cache = num_cache From 736013a1fb25455dd4775daabc772a332d700ed3 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 23:54:43 -0500 Subject: [PATCH 138/475] Temporary disable lock in indexed dataset builder --- utils/indexed_datasets.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index d7ee2b1d8..909b5a95c 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -54,12 +54,12 @@ def __init__(self, path, prefix, allowed_attr=None): def add_item(self, item): if self.allowed_attr is not None: item = { - k: item.get(k) + k: item[k] for k in self.allowed_attr + if k in item } - with self.lock: - item_no = self.counter - self.counter += 1 + item_no = self.counter + self.counter += 1 for k, v in item.items(): if v is None: continue From b648b74264d9a291d83ba08a60b4fef0b30429ca Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 25 Mar 2023 23:59:41 -0500 Subject: [PATCH 139/475] Default nccl backend --- configs/base.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/base.yaml b/configs/base.yaml index 49bced42f..3a36c19d8 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -87,4 +87,4 @@ num_valid_plots: 5 pl_trainer_num_nodes: 1 pl_trainer_strategy: 'auto' -ddp_backend: 'gloo' # choose from 'gloo', 'nccl', 'nccl_no_p2p' +ddp_backend: 'nccl' # choose from 'gloo', 'nccl', 'nccl_no_p2p' From 1977f53efef5dfd8f35427f1c04f4b229fa4199a Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 26 Mar 2023 00:05:32 -0500 Subject: [PATCH 140/475] Indexed dataset builder ext to .data --- utils/indexed_datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 909b5a95c..7c7be58ae 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -43,7 +43,7 @@ class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): self.path = pathlib.Path(path) self.prefix = prefix - self.dset = h5py.File(self.path / f'{prefix}.hdf5', 'w') + self.dset = h5py.File(self.path / f'{prefix}.data', 'w') self.counter = 0 self.lock = multiprocessing.Lock() if allowed_attr is not None: From c1ab92af686e2a649896a62f86fc7d2d793f45db Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 26 Mar 2023 00:11:44 -0500 Subject: [PATCH 141/475] Revert back some small changes for diff --- scripts/infer.py | 1 - utils/__init__.py | 2 +- utils/hparams.py | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/infer.py b/scripts/infer.py index b2bcd9b8c..9766613a3 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -75,7 +75,6 @@ assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' set_hparams(print_hparams=False) - if args.speedup > 0: hparams['pndm_speedup'] = args.speedup diff --git a/utils/__init__.py b/utils/__init__.py index 27aa595c6..a5ab3b5d6 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -45,7 +45,7 @@ def collate_nd(values, pad_value=0, max_len=None): Pad a list of Nd tensors on their first dimension and stack them into a (N+1)d tensor. 
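The reader half of the `.data` convention, condensed into a standalone sketch: every item lives in an HDF5 group named by its integer index, scalar datasets come back as raw values and array datasets are wrapped as torch tensors. `load_item` and its arguments are hypothetical names for illustration.

import pathlib

import h5py
import torch

def load_item(path: str, prefix: str, index: int) -> dict:
    data_file = pathlib.Path(path) / f'{prefix}.data'
    if not data_file.exists():
        raise FileNotFoundError(f'IndexedDataset not found: {data_file}')
    with h5py.File(data_file, 'r') as dset:
        return {
            k: v[()] if v.shape == () else torch.from_numpy(v[()])
            for k, v in dset[str(index)].items()
        }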
""" size = ((max(v.size(0) for v in values) if max_len is None else max_len), *values[0].shape[1:]) - res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype) + res = torch.full((len(values), *size), fill_value=pad_value, dtype=values[0].dtype, device=values[0].device) for i, v in enumerate(values): res[i, :len(v), ...] = v diff --git a/utils/hparams.py b/utils/hparams.py index 170b8c67f..95b35dac3 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -50,7 +50,7 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob else: args = Args(config=config, exp_name=exp_name, hparams=hparams_str, infer=False, validate=False, reset=False, debug=False) - + args_work_dir = '' if args.exp_name != '': args.work_dir = args.exp_name From 8888fe7517d4f3905f53cb6def0b49fc2a3d41b9 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 26 Mar 2023 00:30:03 -0500 Subject: [PATCH 142/475] Convert type to builtin --- utils/indexed_datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 7c7be58ae..a2839ab5e 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -31,7 +31,7 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - item = {k: v[()] if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} + item = {k: v[()].item() if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache.appendleft((i, item)) return item From d287cbe81f9547e6e8ada2b9072651bdee3351c3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 26 Mar 2023 13:46:03 +0800 Subject: [PATCH 143/475] Avoid potential KeyError with gender --- inference/ds_acoustic.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index d0fdba70a..dc4af962c 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -148,8 +148,7 @@ def preprocess_input(self, param): if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] gender = param.get('gender', 0.) 
- if isinstance(param['gender'], float): # static gender value - gender = param['gender'] + if isinstance(gender, float): # static gender value print(f'Using static gender value: {gender:.3f}') key_shift_value = gender * shift_max if gender >= 0 else gender * abs(shift_min) batch['key_shift'] = torch.FloatTensor([key_shift_value]).to(self.device)[:, None] # => [B=1, T=1] From b7dbe558bb9baaaa8cfd901b8264e959371c3bdd Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 26 Mar 2023 01:07:43 -0500 Subject: [PATCH 144/475] patch potential device mismatch for hifigan --- modules/nsf_hifigan/models.py | 4 ++-- modules/vocoders/nsf_hifigan.py | 13 +++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 8c2ad2aa7..6ab05789c 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -23,9 +23,9 @@ def load_model(model_path, device='cuda'): json_config = json.loads(data) h = AttrDict(json_config) - generator = Generator(h)#.to(device) + generator = Generator(h) - cp_dict = torch.load(model_path)#, map_location=device) + cp_dict = torch.load(model_path, map_location='cpu') generator.load_state_dict(cp_dict['generator']) generator.eval() generator.remove_weight_norm() diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 0f98a7fc8..a96e73aee 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -13,13 +13,14 @@ @register_vocoder class NsfHifiGAN(BaseVocoder): def __init__(self, device=None): - if device is None: - device = 'cuda' if torch.cuda.is_available() else 'cpu' - self.device = device model_path = hparams['vocoder_ckpt'] assert os.path.exists(model_path), 'HifiGAN model file is not found!' rank_zero_info('| Load HifiGAN: ' + model_path) - self.model, self.h = load_model(model_path, device=self.device) + self.model, self.h = load_model(model_path) + + @property + def device(self): + return next(self.model.parameters()).device def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] if self.h.sampling_rate != hparams['audio_sample_rate']: @@ -71,12 +72,12 @@ def spec2wav(self, mel, **kwargs): if self.h.fmax != hparams['fmax']: print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.h.fmax, '(vocoder)') with torch.no_grad(): - c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1)#.to(self.device) + c = torch.FloatTensor(mel).unsqueeze(0).transpose(2, 1).to(self.device) # log10 to log mel c = 2.30259 * c f0 = kwargs.get('f0') if f0 is not None: - f0 = torch.FloatTensor(f0[None, :])#.to(self.device) + f0 = torch.FloatTensor(f0[None, :]).to(self.device) y = self.model(c, f0).view(-1) else: y = self.model(c).view(-1) From f0b03e41d1df9e7d5fc100f382d57271f1f90a96 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 26 Mar 2023 20:50:08 +0800 Subject: [PATCH 145/475] Fix dtype mismatch of gender and velocity --- inference/ds_acoustic.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index dc4af962c..ac7001210 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -148,7 +148,7 @@ def preprocess_input(self, param): if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] gender = param.get('gender', 0.) 
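The static-gender branch shown here maps a gender value in [-1, 1] linearly onto the configured key-shift range: positive values scale toward shift_max, negative ones toward -|shift_min|. As a standalone function (the name and the example range are illustrative):

def gender_to_key_shift(gender: float, shift_min: float, shift_max: float) -> float:
    # Same expression as in ds_acoustic.py above.
    return gender * shift_max if gender >= 0 else gender * abs(shift_min)

assert gender_to_key_shift(1.0, -5.0, 5.0) == 5.0
assert gender_to_key_shift(-0.5, -5.0, 5.0) == -2.5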
- if isinstance(gender, float): # static gender value + if isinstance(gender, (int, float, bool)): # static gender value print(f'Using static gender value: {gender:.3f}') key_shift_value = gender * shift_max if gender >= 0 else gender * abs(shift_min) batch['key_shift'] = torch.FloatTensor([key_shift_value]).to(self.device)[:, None] # => [B=1, T=1] @@ -163,13 +163,13 @@ def preprocess_input(self, param): gender_mask = gender_seq >= 0 key_shift_seq = gender_seq * (gender_mask * shift_max + (1 - gender_mask) * abs(shift_min)) batch['key_shift'] = torch.clip( - torch.from_numpy(key_shift_seq).to(self.device)[None], # => [B=1, T] + torch.from_numpy(key_shift_seq.astype(np.float32)).to(self.device)[None], # => [B=1, T] min=shift_min, max=shift_max ) if hparams.get('use_speed_embed', False): if param.get('velocity') is None: - print('Using default velocity curve') + print('Using default velocity value') batch['speed'] = torch.FloatTensor([1.]).to(self.device)[:, None] # => [B=1, T=1] else: print('Using manual velocity curve') @@ -181,7 +181,7 @@ def preprocess_input(self, param): align_length=length ) batch['speed'] = torch.clip( - torch.from_numpy(speed_seq).to(self.device)[None], # => [B=1, T] + torch.from_numpy(speed_seq.astype(np.float32)).to(self.device)[None], # => [B=1, T] min=speed_min, max=speed_max ) From f1bef04d3e2cf540b54db60b7f0c03aec4b37ddf Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 27 Mar 2023 00:43:15 +0800 Subject: [PATCH 146/475] Fix torch.load error on pure-CPU machines --- scripts/migrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/migrate.py b/scripts/migrate.py index ebb15ce2a..4f10d6291 100644 --- a/scripts/migrate.py +++ b/scripts/migrate.py @@ -21,7 +21,7 @@ 'and you may be at risk of losing your model.\n' \ 'If you are sure to OVERWRITE the existing file, please re-run this script with the \'--overwrite\' argument.' 
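The `.astype(np.float32)` casts added here matter because numpy arithmetic defaults to float64 and `torch.from_numpy` preserves that dtype, producing tensors that mismatch a model's float32 parameters. A minimal reproduction:

import numpy as np
import torch

curve = np.linspace(-1.0, 1.0, 5)                        # float64 by default
print(torch.from_numpy(curve).dtype)                     # torch.float64
print(torch.from_numpy(curve.astype(np.float32)).dtype)  # torch.float32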
-ckpt_loaded = torch.load(input_ckpt) +ckpt_loaded = torch.load(input_ckpt, map_location='cpu') if 'category' in ckpt_loaded: print('This checkpoint file is already in the new format.') exit(0) From a25cfffb356cfc223026a292c093b84ac7b1e7cc Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 26 Mar 2023 18:13:29 -0500 Subject: [PATCH 147/475] Fix for reviews --- basics/base_task.py | 19 ++- configs/base.yaml | 2 - training/acoustic_task.py | 1 + utils/pl_utils.py | 208 -------------------------------- utils/training_utils.py | 243 ++++++++++++++++++++++++++++++++++++-- 5 files changed, 245 insertions(+), 228 deletions(-) delete mode 100644 utils/pl_utils.py diff --git a/basics/base_task.py b/basics/base_task.py index 79f9921c3..ce3cbe573 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -17,9 +17,12 @@ from basics.base_model import CategorizedModule from utils.hparams import hparams -from utils.training_utils import DsBatchSampler, DsDistributedBatchSampler +from utils.training_utils import ( + DsBatchSampler, DsDistributedBatchSampler, + DsModelCheckpoint, DsTQDMProgressBar, + get_latest_checkpoint_path, get_stategy +) from utils.phoneme_utils import locate_dictionary -from utils.pl_utils import DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy torch.multiprocessing.set_sharing_strategy(os.getenv('TORCH_SHARE_STRATEGY', 'file_system')) @@ -100,13 +103,6 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return total_loss - - def on_train_epoch_end(self): - pass - # loss_outputs = {k: round(v.avg, 4) for k, v in self.training_losses_meter.items()} - # print(f"\n==============\n " - # f"Epoch {self.current_epoch} ended. Steps: {self.global_step}. 
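The one-line `map_location` fix above reflects a general rule: `torch.load` restores tensors onto the device they were saved from, so a checkpoint written on a GPU machine fails to open on a CPU-only one unless remapped. A runnable sketch (the temp path is illustrative):

import torch

torch.save({'weight': torch.ones(2, 2)}, '/tmp/demo.ckpt')
# map_location='cpu' keeps the load working even for tensors that were
# saved from CUDA on another machine.
state = torch.load('/tmp/demo.ckpt', map_location='cpu')
print(state['weight'].device)  # cpu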
{loss_outputs}" - # f"\n==============\n") def on_before_optimizer_step(self, *args, **kwargs): self.log_dict(grad_norm(self, norm_type=2)) @@ -174,9 +170,10 @@ def configure_optimizers(self): } } - def build_batch_sampler(self, dataset, max_tokens, max_sentences, batch_by_size=True, shuffle=False): + def build_batch_sampler(self, dataset, max_tokens, max_sentences, required_batch_count_multiple=1, batch_by_size=True, shuffle=False): batch_sampler_cls = partial(DsBatchSampler, max_tokens=max_tokens, max_sentences=max_sentences, + required_batch_count_multiple=required_batch_count_multiple, batch_by_size=batch_by_size, sort_by_similar_size=hparams['sort_by_len']) if self.trainer.distributed_sampler_kwargs: sampler = DsDistributedBatchSampler(dataset, @@ -224,7 +221,7 @@ def start(cls): filename='model_ckpt_steps_{step}', monitor='step', mode='max', - save_last=hparams['save_last'], + save_last=False, save_top_k=hparams['num_ckpt_keep'], max_updates=hparams['max_updates'], permanent_ckpt_start=hparams['permanent_ckpt_start'], diff --git a/configs/base.yaml b/configs/base.yaml index 3a36c19d8..c2d19eb87 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -63,8 +63,6 @@ dur_loss: mse # huber|mol ########### # train and eval ########### -save_ckpt: true -save_last: false num_ckpt_keep: 3 accumulate_grad_batches: 1 log_interval: 100 diff --git a/training/acoustic_task.py b/training/acoustic_task.py index ff3e2f577..1d9bb8162 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -117,6 +117,7 @@ def train_dataloader(self): self.training_sampler = self.build_batch_sampler(self.train_dataset, max_tokens=self.max_tokens, max_sentences=self.max_sentences, + required_batch_count_multiple=hparams['accumulate_grad_batches'], shuffle=True) return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, diff --git a/utils/pl_utils.py b/utils/pl_utils.py deleted file mode 100644 index 87c7c95d6..000000000 --- a/utils/pl_utils.py +++ /dev/null @@ -1,208 +0,0 @@ -from copy import deepcopy -from glob import glob -import os -from pathlib import Path -import re -import warnings - -import torch - -import lightning.pytorch as pl -from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar -from lightning.pytorch.strategies import DDPStrategy -from lightning.pytorch.trainer.states import RunningStage -from lightning.pytorch.utilities.rank_zero import rank_zero_info - -class DsModelCheckpoint(ModelCheckpoint): - def __init__( - self, - dirpath, - filename, - monitor, - save_last, - save_top_k, - mode, - max_updates, - permanent_ckpt_start, - permanent_ckpt_interval, - verbose = False, - save_weights_only = False - ): - super().__init__( - dirpath=dirpath, - filename=filename, - monitor=monitor, - verbose=verbose, - save_last=save_last, - save_top_k=save_top_k, - save_weights_only=save_weights_only, - mode=mode, - auto_insert_metric_name=False - ) - self.max_updates = max_updates - self.permanent_ckpt_start = permanent_ckpt_start - self.permanent_ckpt_interval = permanent_ckpt_interval - self.last_permanent_step = 0 - - def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: - """Save a checkpoint at the end of the last interrupted training step.""" - if not self._should_skip_saving_checkpoint(trainer) and \ - trainer.state.stage == RunningStage.TRAINING and \ - trainer.global_step == self.max_updates: - monitor_candidates = self._monitor_candidates(trainer) - if self._every_n_epochs >= 1 and 
(trainer.current_epoch + 1) % self._every_n_epochs == 0: - filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) - self._save_checkpoint(trainer, filepath) - self._save_last_checkpoint(trainer, monitor_candidates) - - def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: - """Save a checkpoint at the end of the validation stage.""" - if trainer.lightning_module.skip_immediate_ckpt_save: - trainer.lightning_module.skip_immediate_ckpt_save = False - return - if not self._should_skip_saving_checkpoint(trainer): - monitor_candidates = self._monitor_candidates(trainer) - if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: - self._save_topk_checkpoint(trainer, monitor_candidates) - self._save_last_checkpoint(trainer, monitor_candidates) - - def state_dict(self): - ret = super().state_dict() - ret['last_permanent_step'] = self.last_permanent_step - return ret - - def load_state_dict(self, state_dict): - dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) - - if self.dirpath == dirpath_from_ckpt: - self.best_model_score = state_dict["best_model_score"] - self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) - self.kth_value = state_dict.get("kth_value", self.kth_value) - self.best_k_models = state_dict.get("best_k_models", self.best_k_models) - self.last_model_path = state_dict.get("last_model_path", self.last_model_path) - self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) - else: - warnings.warn( - f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," - " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_permanent_step`," - " `last_model_path` and `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." 
- ) - self.best_model_path = state_dict["best_model_path"] - - def _monitor_candidates(self, trainer: "pl.Trainer"): - monitor_candidates = deepcopy(trainer.callback_metrics) - monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) - monitor_candidates["step"] = torch.tensor(trainer.global_step) - return monitor_candidates - - def _save_monitor_checkpoint(self, trainer: "pl.Trainer", monitor_candidates): - assert self.monitor - current = monitor_candidates.get(self.monitor) - if self.check_monitor_top_k(trainer, current): - assert current is not None - self._update_best_and_save(current, trainer, monitor_candidates) - elif self.verbose: - epoch = monitor_candidates["epoch"] - step = monitor_candidates["step"] - rank_zero_info(f"Epoch {epoch:d}, global step {step:d}: {self.monitor!r} was not in top {self.save_top_k}") - if step >= self.last_permanent_step + self.permanent_ckpt_interval: - self.last_permanent_step = step - filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) - self._save_checkpoint(trainer, filepath) - rank_zero_info(f"Epoch {epoch:d}, global step {step:d} is a permanent checkpoint, saved to {filepath}") - - def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: - """Calls the strategy to remove the checkpoint file.""" - if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: - search = re.search(r'steps_\d+', Path(filepath).stem) - if search: - step = int(search.group(0)[6:]) - if step >= self.permanent_ckpt_start and \ - (self.last_permanent_step is None or \ - step >= self.last_permanent_step + self.permanent_ckpt_interval): - self.last_permanent_step = step - return - trainer.strategy.remove_checkpoint(filepath) - - -def get_latest_checkpoint_path(work_dir): - if not os.path.exists(work_dir): - return None - - last_step = -1 - last_ckpt_name = None - - checkpoints = glob(str(Path(work_dir) / '*.ckpt')) - for name in checkpoints: - search = re.search(r'steps_\d+', name) - if search: - step = int(search.group(0)[6:]) - if step > last_step: - last_step = step - last_ckpt_name = name - - return last_ckpt_name if last_ckpt_name is not None else None - - -class DsTQDMProgressBar(TQDMProgressBar): - def __init__(self, refresh_rate: int = 1, process_position: int = 0): - super().__init__(refresh_rate, process_position) - - def get_metrics(self, trainer, model): - items = super().get_metrics(trainer, model) - for name in ['step', 'batch_size']: - if name in items: - items[name] = int(items[name]) - for k, v in items.items(): - if isinstance(v, float): - if 0.00001 <= v < 10: - items[k] = f"{v:.5f}" - items.pop("v_num", None) - return items - - -def get_stategy(accelerator, devices, num_nodes, strategy, backend): - if accelerator != 'auto' and accelerator != 'gpu': - return strategy - - from lightning_fabric.utilities.imports import _IS_INTERACTIVE - from lightning.pytorch.accelerators import AcceleratorRegistry - from lightning.pytorch.accelerators.cuda import CUDAAccelerator - from lightning.pytorch.accelerators.mps import MPSAccelerator - from pytorch_lightning.utilities.exceptions import MisconfigurationException - - def _choose_gpu_accelerator_backend(): - if MPSAccelerator.is_available(): - return "mps" - if CUDAAccelerator.is_available(): - return "cuda" - raise MisconfigurationException("No supported gpu backend found!") - _accelerator_flag = _choose_gpu_accelerator_backend() - - _num_nodes_flag = int(num_nodes) if num_nodes is not None else 1 - _devices_flag = devices - - 
accelerator = AcceleratorRegistry.get(_accelerator_flag) - accelerator_cls = accelerator.__class__ - - if _devices_flag == "auto": - _devices_flag = accelerator.auto_device_count() - - _devices_flag = accelerator_cls.parse_devices(_devices_flag) - _parallel_devices = accelerator_cls.get_parallel_devices(_devices_flag) - - def get_ddp_strategy(_backend): - if _backend == 'gloo': - return DDPStrategy(process_group_backend='gloo') - elif _backend == 'nccl' or _backend == 'nccl_no_p2p': - return DDPStrategy(process_group_backend='nccl') - else: - raise ValueError(f'backend {_backend} is not valid.') - - if _num_nodes_flag > 1: - return get_ddp_strategy(backend) - if len(_parallel_devices) <= 1: - return strategy - if len(_parallel_devices) > 1 and _IS_INTERACTIVE: - return strategy - return get_ddp_strategy(backend) diff --git a/utils/training_utils.py b/utils/training_utils.py index 605633cc8..01fd23da2 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,12 +1,27 @@ +from copy import deepcopy +from glob import glob import math +import os +from pathlib import Path +import re +import warnings import numpy as np +import torch from torch.optim.lr_scheduler import LambdaLR from torch.utils.data.distributed import Sampler, DistributedSampler +import lightning.pytorch as pl +from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar +from lightning.pytorch.strategies import DDPStrategy +from lightning.pytorch.trainer.states import RunningStage +from lightning.pytorch.utilities.rank_zero import rank_zero_info + import utils from utils.hparams import hparams +#==========LR schedulers========== + class RSQRTSchedule(object): def __init__(self, optimizer): super().__init__() @@ -53,29 +68,36 @@ def lr_lambda(self, step): progress = (step - self.warmup_steps) / max(1, self.t_total - self.warmup_steps) return max(self.eta_min, 0.5 * (1. 
+ math.cos(math.pi * self.cycles * 2.0 * progress))) +#==========Torch samplers========== + class DsBatchSampler(Sampler): - def __init__(self, dataset, max_tokens, max_sentences, indices=None, batch_by_size=True, sort_by_similar_size=True, seed=0, shuffle=True): + def __init__(self, + dataset, max_tokens, max_sentences, indices=None, + required_batch_count_multiple=1, batch_by_size=True, sort_by_similar_size=True, + seed=0, shuffle=True): self.dataset = dataset self.sub_indices = indices self.max_tokens = max_tokens self.max_sentences = max_sentences + self.required_batch_count_multiple = required_batch_count_multiple self.batch_by_size = batch_by_size self.sort_by_similar_size = sort_by_similar_size self.shuffle = shuffle self.seed = seed self.epoch = 0 self.batches = None + self.to_be_dropped = None def __iter__(self): + rng = np.random.RandomState(self.seed + self.epoch) if self.shuffle: - rng = np.random.RandomState(self.seed + self.epoch) if self.sub_indices is not None: rng.shuffle(self.sub_indices) indices = np.array(self.sub_indices) else: indices = rng.permutation(len(self.dataset)) if self.sort_by_similar_size: - grid = hparams.get('sampler_frame_count_grid', 100) + grid = hparams.get('sampler_frame_count_grid', 200) sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] indices = indices.tolist() @@ -86,14 +108,25 @@ def __iter__(self): self.batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) else: self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] - - for batch in self.batches: + + if self.shuffle: + rng.shuffle(self.batches) + + # TODO: correctly handle the indices outside of samplers + self.to_be_dropped = set() + if self.required_batch_count_multiple > 1: + num_batches_to_remove = len(self.batches) % self.required_batch_count_multiple + self.to_be_dropped = set(rng.choice(len(self.batches), num_batches_to_remove, replace=False)) + + for i, batch in enumerate(self.batches): + if i in self.to_be_dropped: + continue yield batch def __len__(self): - if self.batches is None: + if self.batches is None or self.to_be_dropped is None: raise RuntimeError("Batches are not initialized. Call __iter__ first.") - return len(self.batches) + return len(self.batches) - len(self.to_be_dropped) def set_epoch(self, epoch): self.epoch = epoch @@ -117,3 +150,199 @@ def __len__(self) -> int: if self.batch_sampler is None: raise RuntimeError("BatchSampler is not initialized. 
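The size-bucketed shuffle above leans on the stability of mergesort: sizes are rounded to a grid (now 200 frames by default), so items within one bucket compare equal and keep their shuffled order while the buckets themselves are sorted. Condensed:

import numpy as np

grid = 200
sizes = np.array([130, 970, 415, 390, 1020, 55])
rng = np.random.RandomState(0)
indices = rng.permutation(len(sizes))
bucketed = (np.round(sizes[indices] / grid) * grid).clip(grid, None).astype(np.int64)
indices = indices[np.argsort(bucketed, kind='mergesort')]
print([int(sizes[i]) for i in indices])  # short clips first, ties keep shuffled order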
Call __iter__ first.") return len(self.batch_sampler) + +#==========PL related========== + +class DsModelCheckpoint(ModelCheckpoint): + def __init__( + self, + dirpath, + filename, + monitor, + save_last, + save_top_k, + mode, + max_updates, + permanent_ckpt_start, + permanent_ckpt_interval, + verbose = False, + save_weights_only = False + ): + super().__init__( + dirpath=dirpath, + filename=filename, + monitor=monitor, + verbose=verbose, + save_last=save_last, + save_top_k=save_top_k, + save_weights_only=save_weights_only, + mode=mode, + auto_insert_metric_name=False + ) + self.max_updates = max_updates + self.permanent_ckpt_start = permanent_ckpt_start + self.permanent_ckpt_interval = permanent_ckpt_interval + self.last_permanent_step = 0 + + def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + """Save a checkpoint at the end of the last interrupted training step.""" + if not self._should_skip_saving_checkpoint(trainer) and \ + trainer.state.stage == RunningStage.TRAINING and \ + trainer.global_step == self.max_updates: + monitor_candidates = self._monitor_candidates(trainer) + if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) + self._save_checkpoint(trainer, filepath) + self._save_last_checkpoint(trainer, monitor_candidates) + + def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + """Save a checkpoint at the end of the validation stage.""" + if trainer.lightning_module.skip_immediate_ckpt_save: + trainer.lightning_module.skip_immediate_ckpt_save = False + return + if not self._should_skip_saving_checkpoint(trainer): + monitor_candidates = self._monitor_candidates(trainer) + if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: + self._save_topk_checkpoint(trainer, monitor_candidates) + self._save_last_checkpoint(trainer, monitor_candidates) + + def state_dict(self): + ret = super().state_dict() + ret['last_permanent_step'] = self.last_permanent_step + return ret + + def load_state_dict(self, state_dict): + dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) + + if self.dirpath == dirpath_from_ckpt: + self.best_model_score = state_dict["best_model_score"] + self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) + self.kth_value = state_dict.get("kth_value", self.kth_value) + self.best_k_models = state_dict.get("best_k_models", self.best_k_models) + self.last_model_path = state_dict.get("last_model_path", self.last_model_path) + self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) + else: + warnings.warn( + f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," + " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_permanent_step`," + " `last_model_path` and `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." 
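The `to_be_dropped` bookkeeping above exists because gradient accumulation needs the per-epoch batch count to be a multiple of the accumulation factor; the remainder is dropped at random with an epoch-seeded RNG so different batches are sacrificed each epoch. In isolation:

import numpy as np

batches = [[0, 1], [2], [3, 4], [5], [6]]
k = 2  # required_batch_count_multiple
rng = np.random.RandomState(42)  # seed + epoch in the sampler above
to_be_dropped = set(rng.choice(len(batches), len(batches) % k, replace=False))
kept = [b for i, b in enumerate(batches) if i not in to_be_dropped]
print(len(kept))  # 4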
+ ) + self.best_model_path = state_dict["best_model_path"] + + def _monitor_candidates(self, trainer: "pl.Trainer"): + monitor_candidates = deepcopy(trainer.callback_metrics) + monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) + monitor_candidates["step"] = torch.tensor(trainer.global_step) + return monitor_candidates + + def _save_monitor_checkpoint(self, trainer: "pl.Trainer", monitor_candidates): + assert self.monitor + current = monitor_candidates.get(self.monitor) + if self.check_monitor_top_k(trainer, current): + assert current is not None + self._update_best_and_save(current, trainer, monitor_candidates) + elif self.verbose: + epoch = monitor_candidates["epoch"] + step = monitor_candidates["step"] + rank_zero_info(f"Epoch {epoch:d}, global step {step:d}: {self.monitor!r} was not in top {self.save_top_k}") + if step >= self.last_permanent_step + self.permanent_ckpt_interval: + self.last_permanent_step = step + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) + self._save_checkpoint(trainer, filepath) + rank_zero_info(f"Epoch {epoch:d}, global step {step:d} is a permanent checkpoint, saved to {filepath}") + + def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: + """Calls the strategy to remove the checkpoint file.""" + if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: + search = re.search(r'steps_\d+', Path(filepath).stem) + if search: + step = int(search.group(0)[6:]) + if step >= self.permanent_ckpt_start and \ + (self.last_permanent_step is None or \ + step >= self.last_permanent_step + self.permanent_ckpt_interval): + self.last_permanent_step = step + return + trainer.strategy.remove_checkpoint(filepath) + + +def get_latest_checkpoint_path(work_dir): + if not os.path.exists(work_dir): + return None + + last_step = -1 + last_ckpt_name = None + + checkpoints = glob(str(Path(work_dir) / '*.ckpt')) + for name in checkpoints: + search = re.search(r'steps_\d+', name) + if search: + step = int(search.group(0)[6:]) + if step > last_step: + last_step = step + last_ckpt_name = name + + return last_ckpt_name if last_ckpt_name is not None else None + + +class DsTQDMProgressBar(TQDMProgressBar): + def __init__(self, refresh_rate: int = 1, process_position: int = 0): + super().__init__(refresh_rate, process_position) + + def get_metrics(self, trainer, model): + items = super().get_metrics(trainer, model) + for name in ['step', 'batch_size']: + if name in items: + items[name] = int(items[name]) + for k, v in items.items(): + if isinstance(v, float): + if 0.00001 <= v < 10: + items[k] = f"{v:.5f}" + items.pop("v_num", None) + return items + + +def get_stategy(accelerator, devices, num_nodes, strategy, backend): + if accelerator != 'auto' and accelerator != 'gpu': + return strategy + + from lightning_fabric.utilities.imports import _IS_INTERACTIVE + from lightning.pytorch.accelerators import AcceleratorRegistry + from lightning.pytorch.accelerators.cuda import CUDAAccelerator + from lightning.pytorch.accelerators.mps import MPSAccelerator + from pytorch_lightning.utilities.exceptions import MisconfigurationException + + def _choose_gpu_accelerator_backend(): + if MPSAccelerator.is_available(): + return "mps" + if CUDAAccelerator.is_available(): + return "cuda" + raise MisconfigurationException("No supported gpu backend found!") + _accelerator_flag = _choose_gpu_accelerator_backend() + + _num_nodes_flag = int(num_nodes) if num_nodes is not None else 1 + _devices_flag = devices + + 
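`get_latest_checkpoint_path` and `_remove_checkpoint` both recover the step count from the file name with the same regex; since 'steps_' is six characters, slicing the match at [6:] leaves the bare integer. In isolation:

import re
from pathlib import Path

name = 'model_ckpt_steps_124000.ckpt'
search = re.search(r'steps_\d+', Path(name).stem)
step = int(search.group(0)[6:]) if search else -1
print(step)  # 124000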
accelerator = AcceleratorRegistry.get(_accelerator_flag) + accelerator_cls = accelerator.__class__ + + if _devices_flag == "auto": + _devices_flag = accelerator.auto_device_count() + + _devices_flag = accelerator_cls.parse_devices(_devices_flag) + _parallel_devices = accelerator_cls.get_parallel_devices(_devices_flag) + + def get_ddp_strategy(_backend): + if _backend == 'gloo': + return DDPStrategy(process_group_backend='gloo') + elif _backend == 'nccl' or _backend == 'nccl_no_p2p': + return DDPStrategy(process_group_backend='nccl') + else: + raise ValueError(f'backend {_backend} is not valid.') + + if _num_nodes_flag > 1: + return get_ddp_strategy(backend) + if len(_parallel_devices) <= 1: + return strategy + if len(_parallel_devices) > 1 and _IS_INTERACTIVE: + return strategy + return get_ddp_strategy(backend) From f0e3e9c05dc7290cbc4ae19b86317609cba115ca Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 27 Mar 2023 14:02:44 +0800 Subject: [PATCH 148/475] Drop 'interp_uv' and 'f0_coarse' from item --- augmentation/spec_stretch.py | 8 +++----- configs/acoustic.yaml | 2 +- preprocessing/acoustic_binarizer.py | 3 +-- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 3a6244083..663a86130 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -41,16 +41,14 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['mel2ph'] = get_mel2ph_torch( self.lr, aug_item['ph_dur'], aug_item['length'], hparams, device=self.device ) - f0, f0_coarse, _ = get_pitch_parselmouth( - wav, aug_item['length'], hparams, speed=speed, interp_uv=item['interp_uv'] + f0, _, _ = get_pitch_parselmouth( + wav, aug_item['length'], hparams, speed=speed, interp_uv=hparams['interp_uv'] ) - aug_item['f0'], aug_item['f0_coarse'] = \ - torch.from_numpy(f0), torch.from_numpy(f0_coarse) + aug_item['f0'] = torch.from_numpy(f0) if key_shift != 0. or hparams.get('use_key_shift_embed', False): aug_item['key_shift'] = key_shift aug_item['f0'] *= 2 ** (key_shift / 12) - aug_item['f0_coarse'] = torch.from_numpy(f0_to_coarse(aug_item['f0'].numpy())) if replace_spk_id is not None: aug_item['spk_id'] = replace_spk_id diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 87a246759..36b16ffc7 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -27,7 +27,6 @@ min_level_db: -120 binarization_args: shuffle: true num_workers: 0 - interp_uv: true #augmentation_args: # random_pitch_shifting: # range: [-5., 5.] @@ -49,6 +48,7 @@ spec_max: [0] keep_bins: 128 mel_vmin: -6. #-6. 
mel_vmax: 1.5 +interp_uv: true save_f0: true use_spk_id: false diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 24607a395..19c0c67db 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -220,12 +220,11 @@ def process_item(self, item_name, meta_data, binarization_args): 'mel': torch.from_numpy(mel), 'tokens': torch.LongTensor(self.phone_encoder.encode(meta_data['ph_seq'])), 'ph_dur': torch.FloatTensor(meta_data['ph_dur']), - 'interp_uv': self.binarization_args['interp_uv'], } # get ground truth f0 gt_f0, _, uv = get_pitch_parselmouth( - wav, length, hparams, interp_uv=self.binarization_args['interp_uv'] + wav, length, hparams, interp_uv=hparams['interp_uv'] ) if uv.all(): # All unvoiced print(f'Skipped \'{item_name}\': empty gt f0') From ff6fc36139537952550e61539256f72a85ee9e60 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 27 Mar 2023 14:42:55 +0800 Subject: [PATCH 149/475] Require item keys to be the same before and after augmentation --- augmentation/spec_stretch.py | 6 +++--- basics/base_augmentation.py | 11 +++++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 663a86130..2d4e19d04 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -3,12 +3,11 @@ import numpy as np import torch -from basics.base_augmentation import BaseAugmentation -from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch +from basics.base_augmentation import BaseAugmentation, require_same_keys from modules.fastspeech.tts_modules import LengthRegulator from modules.vocoders.registry import VOCODERS +from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from utils.hparams import hparams -from utils.pitch_utils import f0_to_coarse class SpectrogramStretchAugmentation(BaseAugmentation): @@ -20,6 +19,7 @@ def __init__(self, data_dirs: list, augmentation_args: dict): self.device = 'cuda' if torch.cuda.is_available() else 'cpu' self.lr = LengthRegulator().to(self.device) + @require_same_keys def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) -> dict: aug_item = deepcopy(item) if hparams['vocoder'] in VOCODERS: diff --git a/basics/base_augmentation.py b/basics/base_augmentation.py index ac55de3df..193546d95 100644 --- a/basics/base_augmentation.py +++ b/basics/base_augmentation.py @@ -11,3 +11,14 @@ def __init__(self, data_dirs: list, augmentation_args: dict): def process_item(self, item: dict, **kwargs) -> dict: raise NotImplementedError() + + +def require_same_keys(func): + def run(*args, **kwargs): + item: dict = args[1] + res: dict = func(*args, **kwargs) + assert set(item.keys()) == set(res.keys()), 'Item keys mismatch after augmentation.\n' \ + f'Before: {sorted(item.keys())}\n' \ + f'After: {sorted(res.keys())}' + return res + return run From f1dedec4b6f85d5150221869604cf211b0e8c5b3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 27 Mar 2023 14:46:36 +0800 Subject: [PATCH 150/475] Remove redundant comments and adjust checks --- basics/base_binarizer.py | 7 +++++-- preprocessing/acoustic_binarizer.py | 4 ---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 18679c864..776934459 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -12,6 +12,7 @@ class BinarizationError(Exception): pass + class BaseBinarizer: """ Base class for data processing. 
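The decorator above turns a silent data-corruption risk, an augmentation adding or dropping item keys, into an immediate assertion failure. A usage sketch against a toy augmentation; the class, the item, and the shortened assertion message are illustrative:

from copy import deepcopy

def require_same_keys(func):
    def run(*args, **kwargs):
        item: dict = args[1]  # args[0] is self on a bound method
        res: dict = func(*args, **kwargs)
        assert set(item.keys()) == set(res.keys()), 'Item keys mismatch after augmentation.'
        return res
    return run

class ToyAugmentation:
    @require_same_keys
    def process_item(self, item: dict) -> dict:
        out = deepcopy(item)
        out['f0'] = out['f0'] * 2 ** (1 / 12)  # shift up one semitone
        return out

print(ToyAugmentation().process_item({'f0': 440.0})['f0'])  # ~466.16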
@@ -33,6 +34,7 @@ class BaseBinarizer: 3. load_ph_set: the phoneme set. """ + def __init__(self, data_dir=None): if data_dir is None: data_dir = hparams['raw_data_dir'] @@ -42,8 +44,9 @@ def __init__(self, data_dir=None): assert len(speakers) == len(set(speakers)), 'Speakers cannot contain duplicate names' self.raw_data_dirs = data_dir if isinstance(data_dir, list) else [data_dir] - assert len(speakers) == len(self.raw_data_dirs), \ - 'Number of raw data dirs must equal number of speaker names!' + if hparams['use_spk_id']: + assert len(speakers) == len(self.raw_data_dirs), \ + 'Number of raw data dirs must equal number of speaker names!' self.binarization_args = hparams['binarization_args'] self.augmentation_args = hparams.get('augmentation_args', {}) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 19c0c67db..ef4e68e92 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -155,10 +155,6 @@ def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): total_sec = 0 total_raw_sec = 0 - # if self.binarization_args['with_spk_embed']: - # from resemblyzer import VoiceEncoder - # voice_encoder = VoiceEncoder().cuda() - for item_name, meta_data in self.meta_data_iterator(prefix): args.append([item_name, meta_data, self.binarization_args]) From dbb6c057f611086549fffb56d62049d6877895fb Mon Sep 17 00:00:00 2001 From: hrukalive Date: Mon, 27 Mar 2023 12:24:50 -0500 Subject: [PATCH 151/475] Fix checkpointing --- basics/base_task.py | 16 ++---- training/acoustic_task.py | 1 - utils/training_utils.py | 109 +++++--------------------------------- 3 files changed, 16 insertions(+), 110 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index ce3cbe573..9fdce26c9 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -11,6 +11,7 @@ import torch.utils.data import lightning.pytorch as pl +from lightning.pytorch.callbacks import LearningRateMonitor from lightning.pytorch.loggers import TensorBoardLogger from lightning.pytorch.utilities import grad_norm from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only @@ -70,9 +71,6 @@ def __init__(self, *args, **kwargs): hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences self.training_sampler = None - self.skip_immediate_validation = False - self.skip_immediate_ckpt_save = False - self.model = None ########### @@ -98,7 +96,6 @@ def _training_step(self, sample, batch_idx, optimizer_idx): def training_step(self, sample, batch_idx, optimizer_idx=-1): total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) - log_outputs.update({'step': self.global_step, 'lr': self.lr_schedulers().get_lr()[0]}) tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) @@ -126,9 +123,6 @@ def validation_step(self, sample, batch_idx): :param batch_idx: :return: output: dict """ - if self.skip_immediate_validation: - rank_zero_debug('In validation step, skip immediate validation!') - return {} outputs = self._validation_step(sample, batch_idx) self.validation_step_outputs.append(outputs) return outputs @@ -142,10 +136,6 @@ def _on_validation_end(self, outputs): raise NotImplementedError def on_validation_epoch_end(self): - if self.skip_immediate_validation: - self.skip_immediate_validation = False - self.skip_immediate_ckpt_save = True - return 
loss_output = self._on_validation_end(self.validation_step_outputs) self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, sync_dist=True) self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) @@ -219,14 +209,16 @@ def start(cls): DsModelCheckpoint( dirpath=work_dir, filename='model_ckpt_steps_{step}', + auto_insert_metric_name=False, monitor='step', mode='max', save_last=False, + every_n_train_steps=hparams['val_check_interval'], save_top_k=hparams['num_ckpt_keep'], - max_updates=hparams['max_updates'], permanent_ckpt_start=hparams['permanent_ckpt_start'], permanent_ckpt_interval=hparams['permanent_ckpt_interval'], ), + LearningRateMonitor(logging_interval='step'), DsTQDMProgressBar(), ], logger=TensorBoardLogger( diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 1d9bb8162..ff3e2f577 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -117,7 +117,6 @@ def train_dataloader(self): self.training_sampler = self.build_batch_sampler(self.train_dataset, max_tokens=self.max_tokens, max_sentences=self.max_sentences, - required_batch_count_multiple=hparams['accumulate_grad_batches'], shuffle=True) return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, diff --git a/utils/training_utils.py b/utils/training_utils.py index 01fd23da2..aefd90637 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -112,7 +112,6 @@ def __iter__(self): if self.shuffle: rng.shuffle(self.batches) - # TODO: correctly handle the indices outside of samplers self.to_be_dropped = set() if self.required_batch_count_multiple > 1: num_batches_to_remove = len(self.batches) % self.required_batch_count_multiple @@ -156,103 +155,17 @@ def __len__(self) -> int: class DsModelCheckpoint(ModelCheckpoint): def __init__( self, - dirpath, - filename, - monitor, - save_last, - save_top_k, - mode, - max_updates, + *args, permanent_ckpt_start, permanent_ckpt_interval, - verbose = False, - save_weights_only = False + **kwargs ): - super().__init__( - dirpath=dirpath, - filename=filename, - monitor=monitor, - verbose=verbose, - save_last=save_last, - save_top_k=save_top_k, - save_weights_only=save_weights_only, - mode=mode, - auto_insert_metric_name=False - ) - self.max_updates = max_updates + super().__init__(*args, **kwargs) self.permanent_ckpt_start = permanent_ckpt_start self.permanent_ckpt_interval = permanent_ckpt_interval - self.last_permanent_step = 0 + self.last_permanent_step = None - def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: - """Save a checkpoint at the end of the last interrupted training step.""" - if not self._should_skip_saving_checkpoint(trainer) and \ - trainer.state.stage == RunningStage.TRAINING and \ - trainer.global_step == self.max_updates: - monitor_candidates = self._monitor_candidates(trainer) - if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: - filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) - self._save_checkpoint(trainer, filepath) - self._save_last_checkpoint(trainer, monitor_candidates) - - def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: - """Save a checkpoint at the end of the validation stage.""" - if trainer.lightning_module.skip_immediate_ckpt_save: - trainer.lightning_module.skip_immediate_ckpt_save = False - return - if not 
self._should_skip_saving_checkpoint(trainer): - monitor_candidates = self._monitor_candidates(trainer) - if self._every_n_epochs >= 1 and (trainer.current_epoch + 1) % self._every_n_epochs == 0: - self._save_topk_checkpoint(trainer, monitor_candidates) - self._save_last_checkpoint(trainer, monitor_candidates) - - def state_dict(self): - ret = super().state_dict() - ret['last_permanent_step'] = self.last_permanent_step - return ret - - def load_state_dict(self, state_dict): - dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) - - if self.dirpath == dirpath_from_ckpt: - self.best_model_score = state_dict["best_model_score"] - self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) - self.kth_value = state_dict.get("kth_value", self.kth_value) - self.best_k_models = state_dict.get("best_k_models", self.best_k_models) - self.last_model_path = state_dict.get("last_model_path", self.last_model_path) - self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) - else: - warnings.warn( - f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," - " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_permanent_step`," - " `last_model_path` and `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." - ) - self.best_model_path = state_dict["best_model_path"] - - def _monitor_candidates(self, trainer: "pl.Trainer"): - monitor_candidates = deepcopy(trainer.callback_metrics) - monitor_candidates["epoch"] = torch.tensor(trainer.current_epoch) - monitor_candidates["step"] = torch.tensor(trainer.global_step) - return monitor_candidates - - def _save_monitor_checkpoint(self, trainer: "pl.Trainer", monitor_candidates): - assert self.monitor - current = monitor_candidates.get(self.monitor) - if self.check_monitor_top_k(trainer, current): - assert current is not None - self._update_best_and_save(current, trainer, monitor_candidates) - elif self.verbose: - epoch = monitor_candidates["epoch"] - step = monitor_candidates["step"] - rank_zero_info(f"Epoch {epoch:d}, global step {step:d}: {self.monitor!r} was not in top {self.save_top_k}") - if step >= self.last_permanent_step + self.permanent_ckpt_interval: - self.last_permanent_step = step - filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer) - self._save_checkpoint(trainer, filepath) - rank_zero_info(f"Epoch {epoch:d}, global step {step:d} is a permanent checkpoint, saved to {filepath}") - - def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: - """Calls the strategy to remove the checkpoint file.""" + def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: search = re.search(r'steps_\d+', Path(filepath).stem) if search: @@ -262,7 +175,7 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: step >= self.last_permanent_step + self.permanent_ckpt_interval): self.last_permanent_step = step return - trainer.strategy.remove_checkpoint(filepath) + super()._remove_checkpoint(trainer, filepath) def get_latest_checkpoint_path(work_dir): @@ -285,14 +198,16 @@ def get_latest_checkpoint_path(work_dir): class DsTQDMProgressBar(TQDMProgressBar): - def __init__(self, refresh_rate: int = 1, process_position: int = 0): + def __init__(self, refresh_rate: int = 1, process_position: int = 0, show_steps: bool = True): super().__init__(refresh_rate, process_position) + 
self.show_steps = show_steps def get_metrics(self, trainer, model): items = super().get_metrics(trainer, model) - for name in ['step', 'batch_size']: - if name in items: - items[name] = int(items[name]) + if 'batch_size' in items: + items['batch_size'] = int(items['batch_size']) + if self.show_steps: + items['steps'] = trainer.global_step for k, v in items.items(): if isinstance(v, float): if 0.00001 <= v < 10: From 0cc0fd657c25c0388976a7571521357d098a5c55 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 00:54:15 -0500 Subject: [PATCH 152/475] Default PL configs to base.yaml --- configs/base.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/configs/base.yaml b/configs/base.yaml index c2d19eb87..c21767555 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -83,6 +83,13 @@ save_f0: false gen_dir_name: '' num_valid_plots: 5 +########### +# pytorch lightning +# Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values +########### +pl_trainer_accelerator: 'auto' +pl_trainer_devices: 'auto' +pl_trainer_precision: '32-true' pl_trainer_num_nodes: 1 pl_trainer_strategy: 'auto' ddp_backend: 'nccl' # choose from 'gloo', 'nccl', 'nccl_no_p2p' From 5222dca3b5df22d4110f8d995e5f027efa9efb08 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 11:56:54 -0500 Subject: [PATCH 153/475] Fix validation dataloader --- augmentation/spec_stretch.py | 2 +- training/acoustic_task.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 2ddf451cb..306948569 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -42,7 +42,7 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], hparams, device=self.device ).cpu().numpy() f0, _, _ = get_pitch_parselmouth( - wav, aug_item['length'], hparams, speed=speed, interp_uv=item['interp_uv'] + wav, aug_item['length'], hparams, speed=speed, interp_uv=hparams['interp_uv'] ) aug_item['f0'] = f0.astype(np.float32) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index ff3e2f577..63fc527ed 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -129,7 +129,7 @@ def train_dataloader(self): def val_dataloader(self): sampler = self.build_batch_sampler(self.valid_dataset, max_tokens=self.max_tokens, - max_sentences=self.max_sentences) + max_sentences=self.max_eval_sentences) return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, From c2ab4119fa973e5ac36f2920acff50a375f0950d Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 12:12:59 -0500 Subject: [PATCH 154/475] Show num params once --- utils/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/__init__.py b/utils/__init__.py index a5ab3b5d6..4a454d623 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -224,7 +224,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def print_arch(model, model_name='model'): print(f"| {model_name} Arch: ", model) - num_params(model, model_name=model_name) + # num_params(model, model_name=model_name) def num_params(model, print_out=True, model_name="model"): From a3ec41c5322cc89b4b3a13b166e53cfc860c2291 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 20:16:57 -0500 Subject: [PATCH 155/475] Support csv in multi speaker preparation --- 
preparation/acoustic_preparation.ipynb | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index 728208f22..db6794c32 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -65,6 +65,7 @@ }, "outputs": [], "source": [ + "import csv\n", "import glob\n", "import json\n", "import os\n", @@ -1479,8 +1480,9 @@ "\n", "training_cases = []\n", "for raw_data_dir in raw_data_dirs:\n", - " with open(os.path.join('..', raw_data_dir, 'transcriptions.txt'), 'r', encoding='utf8') as f:\n", - " training_cases.append([line.split('|')[0] for line in f.readlines()])\n", + " with open(os.path.join('..', raw_data_dir, 'transcriptions.csv'), 'r', encoding='utf8') as f:\n", + " reader = csv.DictReader(f)\n", + " training_cases.append([row['name'] for row in reader])\n", "valid_test_cases = []\n", "if len(test_prefixes) > 0:\n", " for prefix in deepcopy(test_prefixes):\n", From 27411c888cab7944130bf4e6b587cd045e56cf55 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 20:59:47 -0500 Subject: [PATCH 156/475] Multispeaker compatible with old txt --- preparation/acoustic_preparation.ipynb | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index db6794c32..d76bc080e 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1381,7 +1381,7 @@ "for selection in datasets:\n", " assert selection['dataset'] in os.listdir('../data/'), f'Dataset \\'{selection[\"dataset\"]}\\' not found.'\n", " assert os.path.exists(f'../data/{selection[\"dataset\"]}/raw/wavs'), f'Wave directory not found in dataset \\'{selection[\"dataset\"]}\\''\n", - " assert os.path.exists(f'../data/{selection[\"dataset\"]}/raw/transcriptions.txt'), f'Transcriptions not found in dataset \\'{selection[\"dataset\"]}\\''\n", + " assert os.path.exists(f'../data/{selection[\"dataset\"]}/raw/transcriptions.txt') or os.path.exists(f'../data/{selection[\"dataset\"]}/raw/transcriptions.csv'), f'Transcriptions not found in dataset \\'{selection[\"dataset\"]}\\''\n", " assert re.search(r'[^0-9A-Za-z_-]', selection['speaker']) is None, 'Speaker name contains invalid characters.'\n", " speakers.append(selection['speaker'])\n", " raw_data_dirs.append(f'data/{selection[\"dataset\"]}/raw')\n", @@ -1480,9 +1480,13 @@ "\n", "training_cases = []\n", "for raw_data_dir in raw_data_dirs:\n", - " with open(os.path.join('..', raw_data_dir, 'transcriptions.csv'), 'r', encoding='utf8') as f:\n", - " reader = csv.DictReader(f)\n", - " training_cases.append([row['name'] for row in reader])\n", + " if os.path.exists(os.path.join('..', raw_data_dir, 'transcriptions.txt')):\n", + " with open(os.path.join('..', raw_data_dir, 'transcriptions.txt'), 'r', encoding='utf8') as f:\n", + " training_cases.append([line.split('|')[0] for line in f.readlines()])\n", + " else:\n", + " with open(os.path.join('..', raw_data_dir, 'transcriptions.csv'), 'r', encoding='utf8') as f:\n", + " reader = csv.DictReader(f)\n", + " training_cases.append([row['name'] for row in reader])\n", "valid_test_cases = []\n", "if len(test_prefixes) > 0:\n", " for prefix in deepcopy(test_prefixes):\n", From b0f17127a8fb317f5abb70f094f2a93edc391b96 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 21:10:08 -0500 Subject: [PATCH 157/475] auto strategy fix for auto --- utils/training_utils.py | 29 
+++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index aefd90637..c7eaed9ab 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -223,16 +223,41 @@ def get_stategy(accelerator, devices, num_nodes, strategy, backend): from lightning_fabric.utilities.imports import _IS_INTERACTIVE from lightning.pytorch.accelerators import AcceleratorRegistry from lightning.pytorch.accelerators.cuda import CUDAAccelerator + from lightning.pytorch.accelerators.hpu import HPUAccelerator + from lightning.pytorch.accelerators.ipu import IPUAccelerator from lightning.pytorch.accelerators.mps import MPSAccelerator - from pytorch_lightning.utilities.exceptions import MisconfigurationException + from lightning.pytorch.accelerators.tpu import TPUAccelerator + from lightning.pytorch.utilities.exceptions import MisconfigurationException + def _choose_auto_accelerator(): + if TPUAccelerator.is_available(): + return "tpu" + if IPUAccelerator.is_available(): + return "ipu" + if HPUAccelerator.is_available(): + return "hpu" + if MPSAccelerator.is_available(): + return "mps" + if CUDAAccelerator.is_available(): + return "cuda" + return "cpu" + def _choose_gpu_accelerator_backend(): if MPSAccelerator.is_available(): return "mps" if CUDAAccelerator.is_available(): return "cuda" raise MisconfigurationException("No supported gpu backend found!") - _accelerator_flag = _choose_gpu_accelerator_backend() + + if accelerator == "auto": + _accelerator_flag = _choose_auto_accelerator() + elif accelerator == "gpu": + _accelerator_flag = _choose_gpu_accelerator_backend() + else: + return strategy + + if _accelerator_flag != "mps" and _accelerator_flag != "cuda": + return strategy _num_nodes_flag = int(num_nodes) if num_nodes is not None else 1 _devices_flag = devices From 406279dca27114751630f6b560214c0f263c0dee Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 28 Mar 2023 23:50:22 -0500 Subject: [PATCH 158/475] Not logging grad norm --- basics/base_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 9fdce26c9..7c6f89c2a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -101,8 +101,8 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return total_loss - def on_before_optimizer_step(self, *args, **kwargs): - self.log_dict(grad_norm(self, norm_type=2)) + # def on_before_optimizer_step(self, *args, **kwargs): + # self.log_dict(grad_norm(self, norm_type=2)) def on_validation_start(self): self.validation_step_outputs = [] From caaf6e78ffc0fba30a7fd427b0fa9f8bc220c53c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 29 Mar 2023 21:31:13 +0800 Subject: [PATCH 159/475] Fix failed checks on fixed pitch shifting augmentation --- augmentation/spec_stretch.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 2d4e19d04..2a69174d4 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -47,10 +47,10 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['f0'] = torch.from_numpy(f0) if key_shift != 0. 
or hparams.get('use_key_shift_embed', False): - aug_item['key_shift'] = key_shift + if replace_spk_id is None: + aug_item['key_shift'] = key_shift + else: + aug_item['spk_id'] = replace_spk_id aug_item['f0'] *= 2 ** (key_shift / 12) - if replace_spk_id is not None: - aug_item['spk_id'] = replace_spk_id - return aug_item From 254a9a0b6c6c3aff195646001a9b896197873466 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 29 Mar 2023 09:31:37 -0500 Subject: [PATCH 160/475] Show lr in prog bar --- basics/base_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/basics/base_task.py b/basics/base_task.py index 7c6f89c2a..d32676098 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -95,7 +95,7 @@ def _training_step(self, sample, batch_idx, optimizer_idx): def training_step(self, sample, batch_idx, optimizer_idx=-1): total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) - + log_outputs['lr'] = self.lr_schedulers().get_lr()[0] tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) From 048b459bfb6c843bb3ff734e00bef43d55f2c3a8 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 29 Mar 2023 13:40:35 -0500 Subject: [PATCH 161/475] Remove dependency on lightning during deployment --- utils/hparams.py | 6 +++++- utils/phoneme_utils.py | 5 ++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/utils/hparams.py b/utils/hparams.py index 95b35dac3..99a63bed9 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -2,7 +2,11 @@ import os import yaml -from lightning.pytorch.utilities.rank_zero import rank_zero_only +try: + from lightning.pytorch.utilities.rank_zero import rank_zero_only +except ModuleNotFoundError: + def rank_zero_only(f): + return f from utils.multiprocess_utils import is_main_process as mp_is_main_process global_print_hparams = True diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index 086008bd8..8ae198296 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ -1,6 +1,9 @@ import pathlib -from lightning.pytorch.utilities.rank_zero import rank_zero_info +try: + from lightning.pytorch.utilities.rank_zero import rank_zero_info +except ModuleNotFoundError: + rank_zero_info = print from utils.hparams import hparams From fc43aff767d1bebf3dc0224636aebd828f76b281 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 29 Mar 2023 17:59:45 -0500 Subject: [PATCH 162/475] Guard the sampler shuffle grid size --- utils/training_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index c7eaed9ab..b551d4b1f 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -97,7 +97,8 @@ def __iter__(self): else: indices = rng.permutation(len(self.dataset)) if self.sort_by_similar_size: - grid = hparams.get('sampler_frame_count_grid', 200) + grid = int(hparams.get('sampler_frame_count_grid', 200)) + assert grid > 0 sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] indices = indices.tolist() From 49331dca89836b65ac855de3b65150ce8ad542dc Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 29 Mar 2023 20:53:41 -0500 Subject: [PATCH 163/475] More meaningful grid size --- configs/base.yaml | 2 +- training/acoustic_task.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff 
--git a/configs/base.yaml b/configs/base.yaml index c21767555..021520e5e 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -32,7 +32,7 @@ min_level_db: -100 num_spk: 1 mel_vmin: -6 mel_vmax: 1.5 -sampler_frame_count_grid: 200 +sampler_frame_count_grid: 6 ds_workers: 4 dataloader_prefetch_factor: 2 diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 63fc527ed..9171a01bd 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -109,7 +109,7 @@ def build_optimizer(self, model): def build_scheduler(self, optimizer): # return WarmupCosineSchedule(optimizer, # warmup_steps=hparams['warmup_updates'], - # t_total=hparams['max_updates'] // hparams['accumulate_grad_batches'], + # t_total=hparams['max_updates'], # eta_min=0) return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) From b1222e1ed28cdf909c62881f6dae78673e90b0cc Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 29 Mar 2023 20:56:27 -0500 Subject: [PATCH 164/475] Separate log of lr on prog bar --- basics/base_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/basics/base_task.py b/basics/base_task.py index d32676098..9cb349f78 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -95,9 +95,9 @@ def _training_step(self, sample, batch_idx, optimizer_idx): def training_step(self, sample, batch_idx, optimizer_idx=-1): total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) - log_outputs['lr'] = self.lr_schedulers().get_lr()[0] tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) + self.log('lr', self.lr_schedulers().get_lr()[0], prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) return total_loss From e065812803c4b31dd54db6657f41d7999899742d Mon Sep 17 00:00:00 2001 From: hrukalive Date: Wed, 29 Mar 2023 21:09:39 -0500 Subject: [PATCH 165/475] Prevent step in prog bar scientific notation --- utils/training_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index b551d4b1f..621759104 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -208,7 +208,7 @@ def get_metrics(self, trainer, model): if 'batch_size' in items: items['batch_size'] = int(items['batch_size']) if self.show_steps: - items['steps'] = trainer.global_step + items['steps'] = str(trainer.global_step) for k, v in items.items(): if isinstance(v, float): if 0.00001 <= v < 10: From 312c810ebcbc540ab0e594ae714cd9dc543fe71c Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 30 Mar 2023 01:22:15 -0500 Subject: [PATCH 166/475] Prevent distributed sampler padding --- utils/training_utils.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index 621759104..de248e154 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -133,15 +133,20 @@ def set_epoch(self, epoch): class DsDistributedBatchSampler(DistributedSampler): def __init__(self, dataset, num_replicas=None, - rank=None, shuffle=True, - seed=0, drop_last=False, batch_sampler_cls=None) -> None: - super().__init__(dataset=dataset, num_replicas=num_replicas, rank=rank, shuffle=shuffle, seed=seed, - drop_last=drop_last) + rank=None, shuffle=True, seed=0, + drop_last=False, batch_sampler_cls=None) -> None: + super().__init__(dataset=dataset, 
num_replicas=num_replicas, rank=rank, + shuffle=shuffle, seed=seed, drop_last=drop_last) + self.total_size = len(self.dataset) self.batch_sampler_cls = batch_sampler_cls self.batch_sampler = None def __iter__(self): - indices = list(super().__iter__()) + # Always shuffle to distribute to batch samplers, sorting is done in batch sampler + g = torch.Generator() + g.manual_seed(self.seed + self.epoch) + indices = torch.randperm(len(self.dataset), generator=g).tolist() + indices = indices[self.rank:self.total_size:self.num_replicas] self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, seed=self.seed, shuffle=self.shuffle) self.batch_sampler.set_epoch(self.epoch) return iter(self.batch_sampler) From 2db8751bca33c9ef3babb8a4009061afc586ad71 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 30 Mar 2023 16:56:11 +0800 Subject: [PATCH 167/475] Re-organize infer_utils --- configs/acoustic.yaml | 1 - configs/base.yaml | 1 - deployment/export/export_acoustic.py | 2 +- inference/vocoder/val_nsf_hifigan.py | 2 +- scripts/infer.py | 7 +-- scripts/vocode.py | 3 +- training/acoustic_task.py | 6 +-- utils/audio.py | 54 --------------------- utils/infer_utils.py | 72 +++++++++++++++++++++++++++- utils/slur_utils.py | 22 --------- utils/spk_utils.py | 34 ------------- 11 files changed, 78 insertions(+), 126 deletions(-) delete mode 100644 utils/audio.py delete mode 100644 utils/slur_utils.py delete mode 100644 utils/spk_utils.py diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 36b16ffc7..325f7db76 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -22,7 +22,6 @@ fft_size: 2048 # FFT size. win_size: 2048 # FFT size. fmin: 40 fmax: 16000 -min_level_db: -120 binarization_args: shuffle: true diff --git a/configs/base.yaml b/configs/base.yaml index 1ff76a324..4a79f854d 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -28,7 +28,6 @@ win_size: 1024 # For 22050Hz, 1100 ~= 50 ms (If None, win_size: fft_size) (0.05 fmin: 80 # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) fmax: 7600 # To be increased/reduced depending on data. 
fft_size: 1024 # Extra window size is filled with 0 paddings to match this parameter -min_level_db: -100 num_spk: 1 mel_vmin: -6 mel_vmax: 1.5 diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index 1b1badd84..4d9d8edc3 100644 --- a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -31,7 +31,7 @@ from utils import load_ckpt from utils.hparams import hparams, set_hparams from utils.phoneme_utils import build_phoneme_list -from utils.spk_utils import parse_commandline_spk_mix +from utils.infer_utils import parse_commandline_spk_mix from utils.text_encoder import TokenTextEncoder, PAD_INDEX diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/vocoder/val_nsf_hifigan.py index d649ff535..b51e5de0f 100644 --- a/inference/vocoder/val_nsf_hifigan.py +++ b/inference/vocoder/val_nsf_hifigan.py @@ -9,7 +9,7 @@ from utils.binarizer_utils import get_pitch_parselmouth from modules.vocoders.nsf_hifigan import NsfHifiGAN -from utils.audio import save_wav +from utils.infer_utils import save_wav from utils.hparams import set_hparams, hparams sys.argv = [ diff --git a/scripts/infer.py b/scripts/infer.py index 9766613a3..168a728e3 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -13,11 +13,8 @@ import torch from inference.ds_acoustic import DiffSingerAcousticInfer -from utils.audio import save_wav from utils.hparams import set_hparams, hparams -from utils.infer_utils import cross_fade, trans_key -from utils.slur_utils import merge_slurs -from utils.spk_utils import parse_commandline_spk_mix +from utils.infer_utils import merge_slurs, cross_fade, trans_key, parse_commandline_spk_mix, save_wav parser = argparse.ArgumentParser(description='Run DiffSinger inference') parser.add_argument('proj', type=str, help='Path to the input file') @@ -69,7 +66,7 @@ key_suffix = '%+dkey' % args.key if not args.title: name += key_suffix - print(f'音调基于原音频{key_suffix}') + print(f'| key transition: {args.key:+d}') if args.gender is not None: assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' 
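A note on the key transposition that scripts/infer.py now pulls from utils.infer_utils: in the utils/infer_utils.py hunk further below, trans_key is reworked to shift each note name by converting it to a MIDI number and back with librosa, replacing the old move_key lookup (left commented out in that hunk). A minimal standalone sketch of that round trip, assuming only that librosa is installed; the helper name transpose_note is illustrative and not part of the patch:

import librosa

def transpose_note(note: str, key: int) -> str:
    # 'rest' carries no pitch and passes through unchanged,
    # matching the rest-handling branch in trans_key
    if note == 'rest':
        return note
    # note name -> MIDI number, shift by `key` semitones, back to a note name
    # (newer librosa versions may render accidentals with a unicode sharp sign)
    return librosa.midi_to_note(librosa.note_to_midi(note) + key)

# e.g. transpose_note('C4', 2) -> 'D4'; transpose_note('A3', -2) -> 'G3'
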
diff --git a/scripts/vocode.py b/scripts/vocode.py index 80d667e18..d171c68d5 100644 --- a/scripts/vocode.py +++ b/scripts/vocode.py @@ -13,8 +13,7 @@ import tqdm from basics.base_svs_infer import BaseSVSInfer -from utils.infer_utils import cross_fade -from utils.audio import save_wav +from utils.infer_utils import cross_fade, save_wav from utils.hparams import set_hparams, hparams parser = argparse.ArgumentParser(description='Run DiffSinger vocoder') diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 592cb68bb..46ca39452 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -11,13 +11,13 @@ from tqdm import tqdm import utils +import utils.infer_utils from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder from modules.fastspeech.tts_modules import mel2ph_to_dur from modules.toplevel.acoustic_model import DiffSingerAcoustic from modules.vocoders.registry import get_vocoder_cls -from utils import audio from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams from utils.indexed_datasets import IndexedDataset @@ -327,8 +327,8 @@ def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, me base_fn += text base_fn += ('-' + hparams['exp_name']) np.save(os.path.join(hparams['work_dir'], f'{prefix}_mels_npy', item_name), mel) - audio.save_wav(wav_out, f'{gen_dir}/wavs/{base_fn}.wav', hparams['audio_sample_rate'], - norm=hparams['out_wav_norm']) + utils.infer_utils.save_wav(wav_out, f'{gen_dir}/wavs/{base_fn}.wav', hparams['audio_sample_rate'], + norm=hparams['out_wav_norm']) fig = plt.figure(figsize=(14, 10)) spec_vmin = hparams['mel_vmin'] spec_vmax = hparams['mel_vmax'] diff --git a/utils/audio.py b/utils/audio.py deleted file mode 100644 index 460358bb3..000000000 --- a/utils/audio.py +++ /dev/null @@ -1,54 +0,0 @@ -import matplotlib - -matplotlib.use('Agg') -import librosa -import librosa.filters -import numpy as np -from scipy.io import wavfile - - -def save_wav(wav, path, sr, norm=False): - if norm: - wav = wav / np.abs(wav).max() - wav *= 32767 - # proposed by @dsmiller - wavfile.write(path, sr, wav.astype(np.int16)) - - -def get_hop_size(hparams): - hop_size = hparams['hop_size'] - if hop_size is None: - assert hparams['frame_shift_ms'] is not None - hop_size = int(hparams['frame_shift_ms'] / 1000 * hparams['audio_sample_rate']) - return hop_size - - -########################################################################################### -def _stft(y, hparams): - return librosa.stft(y=y, n_fft=hparams['fft_size'], hop_length=get_hop_size(hparams), - win_length=hparams['win_size'], pad_mode='constant') - - -def _istft(y, hparams): - return librosa.istft(y, hop_length=get_hop_size(hparams), win_length=hparams['win_size']) - - -def librosa_pad_lr(x, fsize, fshift, pad_sides=1): - '''compute right padding (final frame) or both sides padding (first and final frames) - ''' - assert pad_sides in (1, 2) - # return int(fsize // 2) - pad = (x.shape[0] // fshift + 1) * fshift - x.shape[0] - if pad_sides == 1: - return 0, pad - else: - return pad // 2, pad // 2 + pad % 2 - - -# Conversions -def amp_to_db(x): - return 20 * np.log10(np.maximum(1e-5, x)) - - -def normalize(S, hparams): - return (S - hparams['min_level_db']) / -hparams['min_level_db'] diff --git a/utils/infer_utils.py b/utils/infer_utils.py index 3a05e4daf..a8ec041ab 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -1,10 +1,36 @@ -import json +import re 
+import librosa import numpy as np +from scipy.io import wavfile head_list = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"] +def merge_slurs(param): + ph_seq = param['ph_seq'].split() + note_seq = param['note_seq'].split() + note_dur_seq = param['note_dur_seq'].split() + is_slur_seq = [int(s) for s in param['is_slur_seq'].split()] + ph_dur = [float(d) for d in param['ph_dur'].split()] + i = 0 + while i < len(ph_seq): + if is_slur_seq[i]: + ph_dur[i - 1] += ph_dur[i] + ph_seq.pop(i) + note_seq.pop(i) + note_dur_seq.pop(i) + is_slur_seq.pop(i) + ph_dur.pop(i) + else: + i += 1 + param['ph_seq'] = ' '.join(ph_seq) + param['note_seq'] = ' '.join(note_seq) + param['note_dur_seq'] = ' '.join(note_dur_seq) + param['is_slur_seq'] = ' '.join([str(s) for s in is_slur_seq]) + param['ph_dur'] = ' '.join([str(d) for d in ph_dur]) + + def cross_fade(a: np.ndarray, b: np.ndarray, idx: int): result = np.zeros(idx + b.shape[0]) fade_len = a.shape[0] - idx @@ -41,7 +67,8 @@ def trans_key(raw_data, key): new_note_seq_list = [] for note_seq in note_seq_list: if note_seq != "rest": - new_note_seq = move_key(note_seq, key) + new_note_seq = librosa.midi_to_note(librosa.note_to_midi(note_seq) + key) + # new_note_seq = move_key(note_seq, key) new_note_seq_list.append(new_note_seq) else: new_note_seq_list.append(note_seq) @@ -74,3 +101,44 @@ def resample_align_curve(points: np.ndarray, original_timestep: float, target_ti elif delta_l > 0: curve_interp = np.concatenate((curve_interp, np.full(delta_l, fill_value=curve_interp[-1])), axis=0) return curve_interp + + +def parse_commandline_spk_mix(mix: str) -> dict: + """ + Parse speaker mix info from commandline + :param mix: Input like "opencpop" or "opencpop|qixuan" or "opencpop:0.5|qixuan:0.5" + :return: A dict whose keys are speaker names and values are proportions + """ + name_pattern = r'[0-9A-Za-z_-]+' + proportion_pattern = r'\d+(\.\d+)?' + single_pattern = rf'{name_pattern}(:{proportion_pattern})?' + assert re.fullmatch(rf'{single_pattern}(\|{single_pattern})*', mix) is not None, f'Invalid mix pattern: {mix}' + without_proportion = set() + proportion_map = {} + for component in mix.split('|'): + # If already exists + name_and_proportion = component.split(':') + assert name_and_proportion[0] not in without_proportion and name_and_proportion[0] not in proportion_map, \ + f'Duplicate speaker name: {name_and_proportion[0]}' + if ':' in component: + proportion_map[name_and_proportion[0]] = float(name_and_proportion[1]) + else: + without_proportion.add(name_and_proportion[0]) + sum_given_proportions = sum(proportion_map.values()) + assert sum_given_proportions < 1 or len(without_proportion) == 0, \ + 'Proportion of all speakers should be specified if the sum of all given proportions are larger than 1.' + for name in without_proportion: + proportion_map[name] = (1 - sum_given_proportions) / len(without_proportion) + sum_all_proportions = sum(proportion_map.values()) + assert sum_all_proportions > 0, 'Sum of all proportions should be positive.' 
+ for name in proportion_map: + proportion_map[name] /= sum_all_proportions + return proportion_map + + +def save_wav(wav, path, sr, norm=False): + if norm: + wav = wav / np.abs(wav).max() + wav *= 32767 + # proposed by @dsmiller + wavfile.write(path, sr, wav.astype(np.int16)) diff --git a/utils/slur_utils.py b/utils/slur_utils.py deleted file mode 100644 index 539a18148..000000000 --- a/utils/slur_utils.py +++ /dev/null @@ -1,22 +0,0 @@ -def merge_slurs(param): - ph_seq = param['ph_seq'].split() - note_seq = param['note_seq'].split() - note_dur_seq = param['note_dur_seq'].split() - is_slur_seq = [int(s) for s in param['is_slur_seq'].split()] - ph_dur = [float(d) for d in param['ph_dur'].split()] - i = 0 - while i < len(ph_seq): - if is_slur_seq[i]: - ph_dur[i - 1] += ph_dur[i] - ph_seq.pop(i) - note_seq.pop(i) - note_dur_seq.pop(i) - is_slur_seq.pop(i) - ph_dur.pop(i) - else: - i += 1 - param['ph_seq'] = ' '.join(ph_seq) - param['note_seq'] = ' '.join(note_seq) - param['note_dur_seq'] = ' '.join(note_dur_seq) - param['is_slur_seq'] = ' '.join([str(s) for s in is_slur_seq]) - param['ph_dur'] = ' '.join([str(d) for d in ph_dur]) diff --git a/utils/spk_utils.py b/utils/spk_utils.py deleted file mode 100644 index 3b93dc08d..000000000 --- a/utils/spk_utils.py +++ /dev/null @@ -1,34 +0,0 @@ -import re - - -def parse_commandline_spk_mix(mix: str) -> dict: - """ - Parse speaker mix info from commandline - :param mix: Input like "opencpop" or "opencpop|qixuan" or "opencpop:0.5|qixuan:0.5" - :return: A dict whose keys are speaker names and values are proportions - """ - name_pattern = r'[0-9A-Za-z_-]+' - proportion_pattern = r'\d+(\.\d+)?' - single_pattern = rf'{name_pattern}(:{proportion_pattern})?' - assert re.fullmatch(rf'{single_pattern}(\|{single_pattern})*', mix) is not None, f'Invalid mix pattern: {mix}' - without_proportion = set() - proportion_map = {} - for component in mix.split('|'): - # If already exists - name_and_proportion = component.split(':') - assert name_and_proportion[0] not in without_proportion and name_and_proportion[0] not in proportion_map, \ - f'Duplicate speaker name: {name_and_proportion[0]}' - if ':' in component: - proportion_map[name_and_proportion[0]] = float(name_and_proportion[1]) - else: - without_proportion.add(name_and_proportion[0]) - sum_given_proportions = sum(proportion_map.values()) - assert sum_given_proportions < 1 or len(without_proportion) == 0, \ - 'Proportion of all speakers should be specified if the sum of all given proportions are larger than 1.' - for name in without_proportion: - proportion_map[name] = (1 - sum_given_proportions) / len(without_proportion) - sum_all_proportions = sum(proportion_map.values()) - assert sum_all_proportions > 0, 'Sum of all proportions should be positive.' - for name in proportion_map: - proportion_map[name] /= sum_all_proportions - return proportion_map From 331d1a29775dd645e29826d3ce5387538b61adf0 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 30 Mar 2023 20:57:57 +0800 Subject: [PATCH 168/475] Bump dependencies and simplify requirements --- requirements.txt | 59 +++++++++++++----------------------------------- 1 file changed, 16 insertions(+), 43 deletions(-) diff --git a/requirements.txt b/requirements.txt index 8592f9f90..2580172e8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,46 +1,19 @@ -resemblyzer==0.1.1.dev0 -tensorboardX==2.6 -h5py==3.7.0 -future==0.18.2 -g2p-en==2.1.0 -g2pM==0.1.2.5 -et-xmlfile==1.0.1 -absl-py==1.3.0 +# It is recommended to install PyTorch manually. 
+# PyTorch >= 2.0 is recommended, but 1.12 and 1.13 is compatible. +# See instructions at https://pytorch.org/get-started/locally/ -altgraph==0.17.3 -appdirs==1.4.4 -async-timeout==4.0.2 -audioread==3.0.0 -backcall==0.2.0 -blinker==1.4 -brotlipy==0.7.0 -scipy==1.9.3 +h5py +librosa>=0.10.0,<1.0.0 +lightning>=2.0.0 +matplotlib numpy==1.23.5 -librosa==0.9.1 -matplotlib==3.6.2 -torchcrepe==0.0.17 -python-dateutil==2.8.2 -python-Levenshtein==0.12.2 -lightning==2.0.0 -six==1.16.0 -tqdm==4.65.0 -resampy==0.4.2 -rich==13.3.2 -imageio==2.23.0 -einops==0.6.0 -pycwt==0.3.0a22 +onnx==1.13.1 +onnxsim==0.4.19 praat-parselmouth==0.4.3 -scikit-image==0.19.3 -pyloudnorm==0.1.0 -torchmetrics==0.11.4 -tensorboard==2.11.0 -tensorboard-plugin-wit==1.8.1 -protobuf==3.13.0 -PyYAML==6.0 -pypinyin==0.39.0 - -# It is recommended to install PyTorch manually. -# See instructions at https://pytorch.org/get-started/previous-versions/ -# torch==2.0.0 -# torchaudio==2.0.0 -# torchvision==0.15.0 +protobuf==3.20.3 +PyYAML +resampy +scipy +tensorboard +tensorboardX +tqdm From 8aaaacdcc4fcab8bf260a0e862874151d298ec63 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 30 Mar 2023 21:35:47 +0800 Subject: [PATCH 169/475] Downgrade librosa version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2580172e8..54df8a5e9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ # See instructions at https://pytorch.org/get-started/locally/ h5py -librosa>=0.10.0,<1.0.0 +librosa<0.10.0 lightning>=2.0.0 matplotlib numpy==1.23.5 From 77e9a04909c26e4eb44c482046a56ff48bea9a50 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 30 Mar 2023 23:19:56 -0500 Subject: [PATCH 170/475] Correct validation logging, sampler revamp, fix hparam print bug --- basics/base_task.py | 63 +++++++++---------- basics/base_vocoder.py | 7 +++ modules/vocoders/ddsp.py | 3 + modules/vocoders/nsf_hifigan.py | 8 ++- training/acoustic_task.py | 60 +++++++++--------- utils/__init__.py | 16 ----- utils/hparams.py | 4 +- utils/training_utils.py | 105 +++++++++++++++++++------------- 8 files changed, 139 insertions(+), 127 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 9cb349f78..827c061aa 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -9,7 +9,9 @@ import matplotlib matplotlib.use('Agg') +from torch import nn import torch.utils.data +from torchmetrics import MeanMetric import lightning.pytorch as pl from lightning.pytorch.callbacks import LearningRateMonitor from lightning.pytorch.loggers import TensorBoardLogger @@ -19,7 +21,6 @@ from basics.base_model import CategorizedModule from utils.hparams import hparams from utils.training_utils import ( - DsBatchSampler, DsDistributedBatchSampler, DsModelCheckpoint, DsTQDMProgressBar, get_latest_checkpoint_path, get_stategy ) @@ -72,6 +73,10 @@ def __init__(self, *args, **kwargs): self.training_sampler = None self.model = None + + self.valid_metrics = nn.ModuleDict({ + 'total_loss': MeanMetric() + }) ########### # Training, validation and testing @@ -89,30 +94,39 @@ def _training_step(self, sample, batch_idx, optimizer_idx): :param sample: :param batch_idx: - :return: total loss: torch.Tensor, loss_log: dict + :return: total loss: torch.Tensor, loss_log: dict, other_log: dict """ raise NotImplementedError def training_step(self, sample, batch_idx, optimizer_idx=-1): total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) - tb_log = {f'tr/{k}': v for k, v in 
log_outputs.items()} + + # logs to progress bar self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log('lr', self.lr_schedulers().get_lr()[0], prog_bar=True, logger=False, on_step=True, on_epoch=False) + # logs to tensorboard + tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) + return total_loss # def on_before_optimizer_step(self, *args, **kwargs): # self.log_dict(grad_norm(self, norm_type=2)) + def _on_validation_start(self): + pass + def on_validation_start(self): - self.validation_step_outputs = [] + self._on_validation_start() + for metric in self.valid_metrics.values(): + metric.reset() def _validation_step(self, sample, batch_idx): """ :param sample: :param batch_idx: - :return: output: dict + :return: loss_log: dict, weight: int """ raise NotImplementedError @@ -121,24 +135,18 @@ def validation_step(self, sample, batch_idx): :param sample: :param batch_idx: - :return: output: dict - """ - outputs = self._validation_step(sample, batch_idx) - self.validation_step_outputs.append(outputs) - return outputs - - def _on_validation_end(self, outputs): """ - - :param outputs: - :return: loss_output: dict - """ - raise NotImplementedError + outputs, weight = self._validation_step(sample, batch_idx) + for k, v in outputs.items(): + if isinstance(self.valid_metrics[k], MeanMetric): + self.valid_metrics[k].update(v, weight=weight) def on_validation_epoch_end(self): - loss_output = self._on_validation_end(self.validation_step_outputs) - self.log('val_loss', loss_output['total_loss'], on_epoch=True, prog_bar=True, logger=False, sync_dist=True) - self.log_dict({f'val/{k}': v for k, v in loss_output.items()}, on_epoch=True, logger=True, sync_dist=True) + metric_vals = {k: v.compute() for k, v in self.valid_metrics.items()} + self.log('val_loss', metric_vals['total_loss'], on_epoch=True, prog_bar=True, logger=False) + self.log_dict({f'val/{k}': v for k, v in metric_vals.items()}, on_epoch=True, logger=True) + for metric in self.valid_metrics.values(): + metric.reset() def build_scheduler(self, optimizer): raise NotImplementedError @@ -160,21 +168,6 @@ def configure_optimizers(self): } } - def build_batch_sampler(self, dataset, max_tokens, max_sentences, required_batch_count_multiple=1, batch_by_size=True, shuffle=False): - batch_sampler_cls = partial(DsBatchSampler, - max_tokens=max_tokens, max_sentences=max_sentences, - required_batch_count_multiple=required_batch_count_multiple, - batch_by_size=batch_by_size, sort_by_similar_size=hparams['sort_by_len']) - if self.trainer.distributed_sampler_kwargs: - sampler = DsDistributedBatchSampler(dataset, - batch_sampler_cls=batch_sampler_cls, - seed=hparams['seed'], - shuffle=shuffle, - **self.trainer.distributed_sampler_kwargs) - else: - sampler = batch_sampler_cls(dataset, seed=hparams['seed'], shuffle=shuffle) - return sampler - def on_test_start(self): self.on_validation_start() diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index af2f400d3..519a4012a 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -1,4 +1,11 @@ class BaseVocoder: + def to(self, device): + """ + + :param device: torch.device or str + """ + raise NotImplementedError() + def spec2wav(self, mel, **kwargs): """ diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index 29243c46b..41ce1b374 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -114,6 +114,9 @@ def __init__(self, device='cpu'): model_path = 
hparams['vocoder_ckpt'] assert os.path.exists(model_path), 'DDSP model file is not found!' self.model, self.args = load_model(model_path, device=self.device) + + def to(self, device): + pass def spec2wav_torch(self, mel, f0): # mel: [B, T, bins] f0: [B, T] if self.args.data.sampling_rate != hparams['audio_sample_rate']: diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index a96e73aee..ffccf137b 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,7 +1,10 @@ import os import torch -from lightning.pytorch.utilities.rank_zero import rank_zero_info +try: + from lightning.pytorch.utilities.rank_zero import rank_zero_info +except ModuleNotFoundError: + rank_zero_info = print from modules.nsf_hifigan.models import load_model from modules.nsf_hifigan.nvSTFT import load_wav_to_torch, STFT @@ -21,6 +24,9 @@ def __init__(self, device=None): @property def device(self): return next(self.model.parameters()).device + + def to(self, device): + self.model.to(device) def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] if self.h.sampling_rate != hparams['audio_sample_rate']: diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 9171a01bd..70c1147d8 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -25,7 +25,7 @@ from utils.phoneme_utils import build_phoneme_list from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder -from utils.training_utils import WarmupCosineSchedule +from utils.training_utils import DsBatchSampler, DsEvalBatchSampler, WarmupCosineSchedule matplotlib.use('Agg') @@ -114,10 +114,17 @@ def build_scheduler(self, optimizer): return torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) def train_dataloader(self): - self.training_sampler = self.build_batch_sampler(self.train_dataset, - max_tokens=self.max_tokens, - max_sentences=self.max_sentences, - shuffle=True) + self.training_sampler = DsBatchSampler( + self.train_dataset, + max_tokens=self.max_tokens, + max_sentences=self.max_sentences, + num_replicas=(self.trainer.distributed_sampler_kwargs or {}).get('num_replicas', 1), + rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), + sort_by_similar_size=hparams['sort_by_len'], + shuffle_sample=True, + shuffle_batch=False, + seed=hparams['seed'] + ) return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, batch_sampler=self.training_sampler, @@ -127,9 +134,13 @@ def train_dataloader(self): persistent_workers=True) def val_dataloader(self): - sampler = self.build_batch_sampler(self.valid_dataset, - max_tokens=self.max_tokens, - max_sentences=self.max_eval_sentences) + sampler = DsEvalBatchSampler( + self.valid_dataset, + max_tokens=self.max_tokens, + max_sentences=self.max_eval_sentences, + rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), + batch_by_size=False + ) return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, @@ -171,41 +182,28 @@ def run_model(self, sample, return_output=False, infer=False): return losses, output def _training_step(self, sample, batch_idx, _): - log_outputs = self.run_model(sample) - total_loss = sum([v for v in log_outputs.values() if isinstance(v, torch.Tensor) and v.requires_grad]) - log_outputs['batch_size'] = sample['tokens'].size()[0] - return total_loss, log_outputs + losses = self.run_model(sample) + total_loss = sum([v for v in 
losses.values() if isinstance(v, torch.Tensor) and v.requires_grad]) + return total_loss, {**losses, 'batch_size': sample['tokens'].size()[0]} + def _on_validation_start(self): + if self.use_vocoder: + self.vocoder.to(next(self.model.parameters()).device) + def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, return_output=False, infer=False) total_loss = sum(losses.values()) outputs = { - 'losses': losses, - 'total_loss': total_loss, - 'size': sample['size'] + 'total_loss': total_loss } - outputs = utils.tensors_to_scalars(outputs) - if batch_idx < hparams['num_valid_plots']: + if batch_idx < hparams['num_valid_plots'] and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: _, mel_pred = self.run_model(sample, return_output=True, infer=True) if self.use_vocoder: self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') - return outputs - - def _on_validation_end(self, outputs): - all_losses_meter = { - 'total_loss': utils.AvgrageMeter(), - } - for output in outputs: - n = output['size'] - for k, v in output['losses'].items(): - if k not in all_losses_meter: - all_losses_meter[k] = utils.AvgrageMeter() - all_losses_meter[k].update(v, n) - all_losses_meter['total_loss'].update(output['total_loss'], n) - return {k: round(v.avg, 4) for k, v in all_losses_meter.items()} + return outputs, sample['size'] ############ # validation plots diff --git a/utils/__init__.py b/utils/__init__.py index 4a454d623..33cef296c 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -24,22 +24,6 @@ def tensors_to_scalars(metrics): return new_metrics -class AvgrageMeter(object): - - def __init__(self): - self.reset() - - def reset(self): - self.avg = 0 - self.sum = 0 - self.cnt = 0 - - def update(self, val, n=1): - self.sum += val * n - self.cnt += n - self.avg = self.sum / self.cnt - - def collate_nd(values, pad_value=0, max_len=None): """ Pad a list of Nd tensors on their first dimension and stack them into a (N+1)d tensor. 
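A note on the AvgrageMeter removed above: its role, a running average in which each update is weighted by a sample count, is taken over by torchmetrics.MeanMetric, which this patch registers under valid_metrics in basics/base_task.py. MeanMetric keeps the same weighted mean and additionally synchronizes across processes in distributed runs. A minimal sketch of the correspondence, assuming only that torchmetrics is installed; the numbers are illustrative:

from torchmetrics import MeanMetric

metric = MeanMetric()
# mirrors the old AvgrageMeter.update(val, n): a value weighted by sample count
metric.update(0.5, weight=8)   # e.g. mean loss 0.5 over a batch of 8 samples
metric.update(0.3, weight=4)   # e.g. mean loss 0.3 over a batch of 4 samples
print(metric.compute())        # tensor(0.4333): (0.5 * 8 + 0.3 * 4) / 12
metric.reset()                 # cleared before the next validation round
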
diff --git a/utils/hparams.py b/utils/hparams.py index 99a63bed9..796ba8e6b 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -136,7 +136,7 @@ def dump_hparams(): hparams_['exp_name'] = args.exp_name @rank_zero_only - def print_hparams(): + def print_out_hparams(): global global_print_hparams if mp_is_main_process and print_hparams and global_print_hparams and global_hparams: print('| Hparams chains: ', config_chains) @@ -145,6 +145,6 @@ def print_hparams(): print(f"\033[;33;m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") print("") global_print_hparams = False - print_hparams() + print_out_hparams() return hparams_ diff --git a/utils/training_utils.py b/utils/training_utils.py index de248e154..2ff94b66e 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -71,90 +71,111 @@ def lr_lambda(self, step): #==========Torch samplers========== class DsBatchSampler(Sampler): - def __init__(self, - dataset, max_tokens, max_sentences, indices=None, + def __init__(self, dataset, max_tokens, max_sentences, sub_indices=None, + num_replicas=None, rank=None, required_batch_count_multiple=1, batch_by_size=True, sort_by_similar_size=True, - seed=0, shuffle=True): + shuffle_sample=False, shuffle_batch=False, seed=0, drop_last=False) -> None: self.dataset = dataset - self.sub_indices = indices self.max_tokens = max_tokens self.max_sentences = max_sentences + self.sub_indices = sub_indices + self.num_replicas = num_replicas + self.rank = rank self.required_batch_count_multiple = required_batch_count_multiple self.batch_by_size = batch_by_size self.sort_by_similar_size = sort_by_similar_size - self.shuffle = shuffle + self.shuffle_sample = shuffle_sample + self.shuffle_batch = shuffle_batch self.seed = seed + self.drop_last = drop_last self.epoch = 0 self.batches = None - self.to_be_dropped = None def __iter__(self): - rng = np.random.RandomState(self.seed + self.epoch) - if self.shuffle: + rng = np.random.default_rng(self.seed + self.epoch) + if self.shuffle_sample: if self.sub_indices is not None: rng.shuffle(self.sub_indices) indices = np.array(self.sub_indices) else: indices = rng.permutation(len(self.dataset)) + if self.sort_by_similar_size: grid = int(hparams.get('sampler_frame_count_grid', 200)) assert grid > 0 sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] + indices = indices.tolist() else: indices = self.sub_indices if self.sub_indices is not None else list(range(len(self.dataset))) if self.batch_by_size: - self.batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) + batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) else: - self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] + batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] - if self.shuffle: - rng.shuffle(self.batches) + if self.shuffle_batch: + rng.shuffle(batches) - self.to_be_dropped = set() + floored_total_count = (len(batches) // self.num_replicas) * self.num_replicas + if self.drop_last and len(batches) > floored_total_count: + batches = batches[:floored_total_count] + leftovers = [] + else: + leftovers = (rng.permutation(len(batches) - floored_total_count) + floored_total_count).tolist() + + batch_assignment = 
rng.permuted(np.arange(floored_total_count).reshape(-1, self.num_replicas).transpose(), axis=0)[self.rank].tolist() + floored_batch_count = len(batch_assignment) + if self.rank < len(leftovers): + batch_assignment.append(leftovers[self.rank]) if self.required_batch_count_multiple > 1: - num_batches_to_remove = len(self.batches) % self.required_batch_count_multiple - self.to_be_dropped = set(rng.choice(len(self.batches), num_batches_to_remove, replace=False)) + batch_assignment = batch_assignment[:((floored_batch_count // self.required_batch_count_multiple) * self.required_batch_count_multiple)] + + self.batches = [deepcopy(batches[i]) for i in batch_assignment] - for i, batch in enumerate(self.batches): - if i in self.to_be_dropped: - continue + del indices + del batches + del batch_assignment + + for batch in self.batches: yield batch - + def __len__(self): - if self.batches is None or self.to_be_dropped is None: + if self.batches is None: raise RuntimeError("Batches are not initialized. Call __iter__ first.") - return len(self.batches) - len(self.to_be_dropped) + return len(self.batches) def set_epoch(self, epoch): self.epoch = epoch -class DsDistributedBatchSampler(DistributedSampler): - def __init__(self, dataset, num_replicas=None, - rank=None, shuffle=True, seed=0, - drop_last=False, batch_sampler_cls=None) -> None: - super().__init__(dataset=dataset, num_replicas=num_replicas, rank=rank, - shuffle=shuffle, seed=seed, drop_last=drop_last) - self.total_size = len(self.dataset) - self.batch_sampler_cls = batch_sampler_cls - self.batch_sampler = None + +class DsEvalBatchSampler(Sampler): + def __init__(self, dataset, max_tokens, max_sentences, rank=None, batch_by_size=True) -> None: + self.dataset = dataset + self.max_tokens = max_tokens + self.max_sentences = max_sentences + self.rank = rank + self.batch_by_size = batch_by_size + self.batches = None def __iter__(self): - # Always shuffle to distribute to batch samplers, sorting is done in batch sampler - g = torch.Generator() - g.manual_seed(self.seed + self.epoch) - indices = torch.randperm(len(self.dataset), generator=g).tolist() - indices = indices[self.rank:self.total_size:self.num_replicas] - self.batch_sampler = self.batch_sampler_cls(self.dataset, indices=indices, seed=self.seed, shuffle=self.shuffle) - self.batch_sampler.set_epoch(self.epoch) - return iter(self.batch_sampler) + if self.rank == 0: + indices = list(range(len(self.dataset))) + if self.batch_by_size: + self.batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) + else: + self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] + else: + self.batches = [[0]] + + for batch in self.batches: + yield batch - def __len__(self) -> int: - if self.batch_sampler is None: - raise RuntimeError("BatchSampler is not initialized. Call __iter__ first.") - return len(self.batch_sampler) + def __len__(self): + if self.batches is None: + raise RuntimeError("Batches are not initialized. 
Call __iter__ first.") + return len(self.batches) #==========PL related========== From 5f64a4183ca3de7db70cc0d5a45f39c2eef2b2cc Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 01:01:42 -0500 Subject: [PATCH 171/475] Batch shuffle order and fix bf16 type in vocoder --- modules/vocoders/ddsp.py | 2 +- modules/vocoders/nsf_hifigan.py | 2 +- training/acoustic_task.py | 2 +- utils/training_utils.py | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index 41ce1b374..77d4b18d9 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -167,7 +167,7 @@ def spec2wav(self, mel, f0): f0 = torch.FloatTensor(f0).unsqueeze(0).unsqueeze(-1).to(self.device) signal, _, (s_h, s_n) = self.model(mel.to(self.device), f0.to(self.device)) signal = signal.view(-1) - wav_out = signal.cpu().numpy() + wav_out = signal.cpu().float().numpy() return wav_out @staticmethod diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index ffccf137b..605eb45d2 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -87,7 +87,7 @@ def spec2wav(self, mel, **kwargs): y = self.model(c, f0).view(-1) else: y = self.model(c).view(-1) - wav_out = y.cpu().numpy() + wav_out = y.cpu().float().numpy() return wav_out @staticmethod diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 70c1147d8..ad095bc32 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -122,7 +122,7 @@ def train_dataloader(self): rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), sort_by_similar_size=hparams['sort_by_len'], shuffle_sample=True, - shuffle_batch=False, + shuffle_batch=True, seed=hparams['seed'] ) return torch.utils.data.DataLoader(self.train_dataset, diff --git a/utils/training_utils.py b/utils/training_utils.py index 2ff94b66e..92763f129 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -115,9 +115,6 @@ def __iter__(self): else: batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] - if self.shuffle_batch: - rng.shuffle(batches) - floored_total_count = (len(batches) // self.num_replicas) * self.num_replicas if self.drop_last and len(batches) > floored_total_count: batches = batches[:floored_total_count] @@ -134,6 +131,9 @@ def __iter__(self): self.batches = [deepcopy(batches[i]) for i in batch_assignment] + if self.shuffle_batch: + rng.shuffle(self.batches) + del indices del batches del batch_assignment From 947f841328b0d4c2af6fc5f8032622b7bad6b4f0 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 09:53:22 -0500 Subject: [PATCH 172/475] Fix h5py pickle on windows --- utils/indexed_datasets.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index a2839ab5e..64ba1bdf4 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -13,7 +13,7 @@ def __init__(self, path, prefix, num_cache=0): self.path = pathlib.Path(path) / f'{prefix}.data' if not self.path.exists(): raise FileNotFoundError(f'IndexedDataset not found: {self.path}') - self.dset = h5py.File(self.path, 'r') + self.dset = None self.cache = deque(maxlen=num_cache) self.num_cache = num_cache @@ -23,9 +23,11 @@ def check_index(self, i): def __del__(self): if self.dset: - del self.dset + self.dset.close() def __getitem__(self, i): + if self.dset is None: + self.dset = h5py.File(self.path, 'r') self.check_index(i) 
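        # Why the open is deferred (see `if self.dset is None` above): an open
        # h5py.File handle cannot be pickled, and Windows starts DataLoader
        # workers via spawn, which pickles the dataset object; opening lazily
        # lets each worker process create its own handle on first access.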
if self.num_cache > 0: for c in self.cache: @@ -41,9 +43,9 @@ def __len__(self): class IndexedDatasetBuilder: def __init__(self, path, prefix, allowed_attr=None): - self.path = pathlib.Path(path) + self.path = pathlib.Path(path) / f'{prefix}.data' self.prefix = prefix - self.dset = h5py.File(self.path / f'{prefix}.data', 'w') + self.dset = None self.counter = 0 self.lock = multiprocessing.Lock() if allowed_attr is not None: @@ -52,6 +54,8 @@ def __init__(self, path, prefix, allowed_attr=None): self.allowed_attr = None def add_item(self, item): + if self.dset is None: + self.dset = h5py.File(self.path, 'w') if self.allowed_attr is not None: item = { k: item[k] @@ -66,7 +70,8 @@ def add_item(self, item): self.dset.create_dataset(f'{item_no}/{k}', data=v) def finalize(self): - del self.dset + if self.dset is not None: + self.dset.close() if __name__ == "__main__": From 78c279cf6af05a4ffe1eadab99db5865a534743c Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 11:31:19 -0500 Subject: [PATCH 173/475] Make sure validation is in fp32 --- basics/base_task.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/basics/base_task.py b/basics/base_task.py index 827c061aa..95c3f3a3a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -136,7 +136,8 @@ def validation_step(self, sample, batch_idx): :param sample: :param batch_idx: """ - outputs, weight = self._validation_step(sample, batch_idx) + with torch.autocast('cuda' if next(self.model.parameters()).is_cuda else 'cpu', dtype=torch.float32): + outputs, weight = self._validation_step(sample, batch_idx) for k, v in outputs.items(): if isinstance(self.valid_metrics[k], MeanMetric): self.valid_metrics[k].update(v, weight=weight) From 77e1d3aa5296eb846fe7cfe08e729d66e0eeadb5 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 11:33:00 -0500 Subject: [PATCH 174/475] Remove unnecessary cast to float in vocoder --- modules/vocoders/ddsp.py | 2 +- modules/vocoders/nsf_hifigan.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index 77d4b18d9..41ce1b374 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -167,7 +167,7 @@ def spec2wav(self, mel, f0): f0 = torch.FloatTensor(f0).unsqueeze(0).unsqueeze(-1).to(self.device) signal, _, (s_h, s_n) = self.model(mel.to(self.device), f0.to(self.device)) signal = signal.view(-1) - wav_out = signal.cpu().float().numpy() + wav_out = signal.cpu().numpy() return wav_out @staticmethod diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 605eb45d2..ffccf137b 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -87,7 +87,7 @@ def spec2wav(self, mel, **kwargs): y = self.model(c, f0).view(-1) else: y = self.model(c).view(-1) - wav_out = y.cpu().float().numpy() + wav_out = y.cpu().numpy() return wav_out @staticmethod From 78b2298baef66be46c16ec8c0843d2bfa9f352ac Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 11:59:21 -0500 Subject: [PATCH 175/475] Metric manually to device --- basics/base_task.py | 5 +++-- basics/base_vocoder.py | 2 +- modules/vocoders/ddsp.py | 2 +- modules/vocoders/nsf_hifigan.py | 2 +- training/acoustic_task.py | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 95c3f3a3a..82ace4c00 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -74,9 +74,9 @@ def __init__(self, *args, **kwargs): self.training_sampler = None 
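        # Note on the hunk just below: a plain dict (replacing nn.ModuleDict)
        # means the torchmetrics objects are no longer registered submodules,
        # so they are not moved to the device or saved in state_dict together
        # with the model; presumably that is why on_validation_start now calls
        # metric.to(self.device) by hand.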
self.model = None - self.valid_metrics = nn.ModuleDict({ + self.valid_metrics = { 'total_loss': MeanMetric() - }) + } ########### # Training, validation and testing @@ -119,6 +119,7 @@ def _on_validation_start(self): def on_validation_start(self): self._on_validation_start() for metric in self.valid_metrics.values(): + metric.to(self.device) metric.reset() def _validation_step(self, sample, batch_idx): diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index 519a4012a..9613b7b8a 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -1,5 +1,5 @@ class BaseVocoder: - def to(self, device): + def to_device(self, device): """ :param device: torch.device or str diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index 41ce1b374..5e8c52ed7 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -115,7 +115,7 @@ def __init__(self, device='cpu'): assert os.path.exists(model_path), 'DDSP model file is not found!' self.model, self.args = load_model(model_path, device=self.device) - def to(self, device): + def to_device(self, device): pass def spec2wav_torch(self, mel, f0): # mel: [B, T, bins] f0: [B, T] diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index ffccf137b..b540ef3ec 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -25,7 +25,7 @@ def __init__(self, device=None): def device(self): return next(self.model.parameters()).device - def to(self, device): + def to_device(self, device): self.model.to(device) def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] diff --git a/training/acoustic_task.py b/training/acoustic_task.py index ad095bc32..195c1177a 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -188,7 +188,7 @@ def _training_step(self, sample, batch_idx, _): def _on_validation_start(self): if self.use_vocoder: - self.vocoder.to(next(self.model.parameters()).device) + self.vocoder.to_device(self.device) def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, return_output=False, infer=False) From 5c0263e9652bbc71f33eb5fc2e52b83346f98531 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 21:56:09 -0500 Subject: [PATCH 176/475] Sampler padding and default not shuffle batch --- training/acoustic_task.py | 2 +- utils/training_utils.py | 12 +++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 195c1177a..02c86456d 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -122,7 +122,7 @@ def train_dataloader(self): rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), sort_by_similar_size=hparams['sort_by_len'], shuffle_sample=True, - shuffle_batch=True, + shuffle_batch=False, seed=hparams['seed'] ) return torch.utils.data.DataLoader(self.train_dataset, diff --git a/utils/training_utils.py b/utils/training_utils.py index 92763f129..7c825aa07 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -115,17 +115,19 @@ def __iter__(self): else: batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] - floored_total_count = (len(batches) // self.num_replicas) * self.num_replicas - if self.drop_last and len(batches) > floored_total_count: - batches = batches[:floored_total_count] + floored_total_batch_count = (len(batches) // self.num_replicas) * self.num_replicas + if self.drop_last and len(batches) > floored_total_batch_count: + batches = 
batches[:floored_total_batch_count] leftovers = [] else: - leftovers = (rng.permutation(len(batches) - floored_total_count) + floored_total_count).tolist() + leftovers = (rng.permutation(len(batches) - floored_total_batch_count) + floored_total_batch_count).tolist() - batch_assignment = rng.permuted(np.arange(floored_total_count).reshape(-1, self.num_replicas).transpose(), axis=0)[self.rank].tolist() + batch_assignment = rng.permuted(np.arange(floored_total_batch_count).reshape(-1, self.num_replicas).transpose(), axis=0)[self.rank].tolist() floored_batch_count = len(batch_assignment) if self.rank < len(leftovers): batch_assignment.append(leftovers[self.rank]) + elif len(leftovers) > 0: + batch_assignment.append(batch_assignment[self.epoch % floored_batch_count]) if self.required_batch_count_multiple > 1: batch_assignment = batch_assignment[:((floored_batch_count // self.required_batch_count_multiple) * self.required_batch_count_multiple)] From 3c51a8350839159d6be5539690a16f9beb31864a Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 31 Mar 2023 22:19:23 -0500 Subject: [PATCH 177/475] Verbose checkpointing --- basics/base_task.py | 1 + utils/training_utils.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 82ace4c00..a6e36386c 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -212,6 +212,7 @@ def start(cls): save_top_k=hparams['num_ckpt_keep'], permanent_ckpt_start=hparams['permanent_ckpt_start'], permanent_ckpt_interval=hparams['permanent_ckpt_interval'], + verbose=True ), LearningRateMonitor(logging_interval='step'), DsTQDMProgressBar(), diff --git a/utils/training_utils.py b/utils/training_utils.py index 7c825aa07..0e13202f9 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -12,7 +12,7 @@ from torch.utils.data.distributed import Sampler, DistributedSampler import lightning.pytorch as pl -from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar +from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar, RichProgressBar from lightning.pytorch.strategies import DDPStrategy from lightning.pytorch.trainer.states import RunningStage from lightning.pytorch.utilities.rank_zero import rank_zero_info @@ -193,18 +193,31 @@ def __init__( self.permanent_ckpt_start = permanent_ckpt_start self.permanent_ckpt_interval = permanent_ckpt_interval self.last_permanent_step = None + self._verbose = self.verbose + self.verbose = False + + def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: + super()._save_checkpoint(trainer, filepath) + relative_path = Path(filepath).relative_to(Path('.').resolve()) + if self._verbose: + rank_zero_info(f'Checkpoint {relative_path} saved.') def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): + relative_path = Path(filepath).relative_to(Path('.').resolve()) if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: - search = re.search(r'steps_\d+', Path(filepath).stem) + search = re.search(r'steps_\d+', relative_path.stem) if search: step = int(search.group(0)[6:]) if step >= self.permanent_ckpt_start and \ (self.last_permanent_step is None or \ step >= self.last_permanent_step + self.permanent_ckpt_interval): self.last_permanent_step = step + if self._verbose: + rank_zero_info(f'Checkpoint {relative_path} is permanent now.') return super()._remove_checkpoint(trainer, filepath) + if self._verbose: + rank_zero_info(f'Removed checkpoint {relative_path}.') def 
get_latest_checkpoint_path(work_dir): From 6851b7f0861fd82a55d9fecf32c95c8e47984df7 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 1 Apr 2023 00:07:36 -0500 Subject: [PATCH 178/475] Fix permanent checkpointing --- utils/training_utils.py | 39 ++++++++++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index 0e13202f9..d55499912 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -4,6 +4,7 @@ import os from pathlib import Path import re +from typing import Optional import warnings import numpy as np @@ -193,17 +194,26 @@ def __init__( self.permanent_ckpt_start = permanent_ckpt_start self.permanent_ckpt_interval = permanent_ckpt_interval self.last_permanent_step = None + self.permanent_steps = set() self._verbose = self.verbose self.verbose = False - - def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: - super()._save_checkpoint(trainer, filepath) - relative_path = Path(filepath).relative_to(Path('.').resolve()) - if self._verbose: - rank_zero_info(f'Checkpoint {relative_path} saved.') - def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): + def state_dict(self): + ret = super().state_dict() + ret['last_permanent_step'] = self.last_permanent_step + ret['permanent_steps'] = list(self.permanent_steps) + return ret + + def load_state_dict(self, state_dict) -> None: + super().load_state_dict(state_dict) + self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) + self.permanent_steps = set(state_dict.get("permanent_steps", self.permanent_steps)) + print(f'permanent_steps: {self.permanent_steps}') + print(f'last_permanent_step: {self.last_permanent_step}') + + def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: relative_path = Path(filepath).relative_to(Path('.').resolve()) + is_permament = False if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: search = re.search(r'steps_\d+', relative_path.stem) if search: @@ -212,8 +222,19 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): (self.last_permanent_step is None or \ step >= self.last_permanent_step + self.permanent_ckpt_interval): self.last_permanent_step = step - if self._verbose: - rank_zero_info(f'Checkpoint {relative_path} is permanent now.') + self.permanent_steps.add(step) + is_permament = True + super()._save_checkpoint(trainer, filepath) + if self._verbose: + rank_zero_info(f'{"Permanent checkpoint" if is_permament else "Checkpoint"} {relative_path} saved.') + + def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): + relative_path = Path(filepath).relative_to(Path('.').resolve()) + if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: + search = re.search(r'steps_\d+', relative_path.stem) + if search: + step = int(search.group(0)[6:]) + if step in self.permanent_steps: return super()._remove_checkpoint(trainer, filepath) if self._verbose: From 9d2fdac001e1f94b8b73893752ce8e07b05c7b3a Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 1 Apr 2023 00:16:56 -0500 Subject: [PATCH 179/475] Remove printing --- utils/training_utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index d55499912..1bc71acfe 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -208,8 +208,6 @@ def load_state_dict(self, state_dict) -> None: super().load_state_dict(state_dict) 
self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) self.permanent_steps = set(state_dict.get("permanent_steps", self.permanent_steps)) - print(f'permanent_steps: {self.permanent_steps}') - print(f'last_permanent_step: {self.last_permanent_step}') def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: relative_path = Path(filepath).relative_to(Path('.').resolve()) From 441c622584624367f54b6a2220e270ef02d73b39 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 1 Apr 2023 01:10:03 -0500 Subject: [PATCH 180/475] Fix checkpoint top-k config change behavior --- utils/training_utils.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index 1bc71acfe..89db9d52c 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -200,12 +200,29 @@ def __init__( def state_dict(self): ret = super().state_dict() + ret['save_top_k'] = self.save_top_k ret['last_permanent_step'] = self.last_permanent_step ret['permanent_steps'] = list(self.permanent_steps) return ret def load_state_dict(self, state_dict) -> None: - super().load_state_dict(state_dict) + dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) + + if self.dirpath == dirpath_from_ckpt: + self.best_model_score = state_dict["best_model_score"] + if self.save_top_k >= state_dict.get('save_top_k', self.save_top_k): + self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) + self.kth_value = state_dict.get("kth_value", self.kth_value) + self.best_k_models = state_dict.get("best_k_models", self.best_k_models) + self.last_model_path = state_dict.get("last_model_path", self.last_model_path) + else: + warnings.warn( + f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," + " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_model_path` and" + " `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." 
+ ) + + self.best_model_path = state_dict["best_model_path"] self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) self.permanent_steps = set(state_dict.get("permanent_steps", self.permanent_steps)) From d457ace09d56b3c5158df7f6132e38b8df668460 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sat, 1 Apr 2023 09:20:15 -0500 Subject: [PATCH 181/475] Various fix for checkpointing --- utils/training_utils.py | 48 ++++++++++++++++++++++++----------------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index 89db9d52c..877b052c7 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -4,7 +4,7 @@ import os from pathlib import Path import re -from typing import Optional +from typing import Optional, Dict import warnings import numpy as np @@ -200,32 +200,40 @@ def __init__( def state_dict(self): ret = super().state_dict() - ret['save_top_k'] = self.save_top_k ret['last_permanent_step'] = self.last_permanent_step ret['permanent_steps'] = list(self.permanent_steps) return ret def load_state_dict(self, state_dict) -> None: - dirpath_from_ckpt = state_dict.get("dirpath", self.dirpath) - - if self.dirpath == dirpath_from_ckpt: - self.best_model_score = state_dict["best_model_score"] - if self.save_top_k >= state_dict.get('save_top_k', self.save_top_k): - self.kth_best_model_path = state_dict.get("kth_best_model_path", self.kth_best_model_path) - self.kth_value = state_dict.get("kth_value", self.kth_value) - self.best_k_models = state_dict.get("best_k_models", self.best_k_models) - self.last_model_path = state_dict.get("last_model_path", self.last_model_path) - else: - warnings.warn( - f"The dirpath has changed from {dirpath_from_ckpt!r} to {self.dirpath!r}," - " therefore `best_model_score`, `kth_best_model_path`, `kth_value`, `last_model_path` and" - " `best_k_models` won't be reloaded. Only `best_model_path` will be reloaded." 
- ) - - self.best_model_path = state_dict["best_model_path"] + super().load_state_dict(state_dict) self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) + if self.last_permanent_step is not None: + self.last_permanent_step = self.permanent_ckpt_start + self.permanent_ckpt_interval * ((self.last_permanent_step - self.permanent_ckpt_start) // self.permanent_ckpt_interval) self.permanent_steps = set(state_dict.get("permanent_steps", self.permanent_steps)) + def _update_best_and_save( + self, current: torch.Tensor, trainer: "pl.Trainer", monitor_candidates: Dict[str, torch.Tensor] + ) -> None: + k = len(self.best_k_models) + 1 if self.save_top_k == -1 else self.save_top_k + + _op = max if self.mode == "min" else min + while len(self.best_k_models) > k and k > 0: + self.kth_best_model_path = _op(self.best_k_models, key=self.best_k_models.get) # type: ignore[arg-type] + self.kth_value = self.best_k_models[self.kth_best_model_path] + + del_filepath = self.kth_best_model_path + self.best_k_models.pop(del_filepath) + filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer, del_filepath) + if del_filepath is not None and filepath != del_filepath: + rank_zero_info(f"Deleting {Path(del_filepath).relative_to(Path('.').resolve())} as it is not in top {k} checkpoints.") + self._remove_checkpoint(trainer, del_filepath) + + if len(self.best_k_models) == k: + self.kth_best_model_path = _op(self.best_k_models, key=self.best_k_models.get) # type: ignore[arg-type] + self.kth_value = self.best_k_models[self.kth_best_model_path] + + super()._update_best_and_save(current, trainer, monitor_candidates) + def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: relative_path = Path(filepath).relative_to(Path('.').resolve()) is_permament = False @@ -241,7 +249,7 @@ def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: is_permament = True super()._save_checkpoint(trainer, filepath) if self._verbose: - rank_zero_info(f'{"Permanent checkpoint" if is_permament else "Checkpoint"} {relative_path} saved.') + rank_zero_info(f'Checkpoint {relative_path} saved.{" (Permanent)" if is_permament else ""}') def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): relative_path = Path(filepath).relative_to(Path('.').resolve()) From 2db2510b83563a34c85a8c522092f71b3268f140 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 3 Apr 2023 11:54:33 +0800 Subject: [PATCH 182/475] Extract method --- modules/fastspeech/acoustic_encoder.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 69daa2b33..a8ec0c05d 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -36,6 +36,7 @@ def forward(self, txt_tokens, dur_embed): x = super()._forward(x, encoder_padding_mask) return x + class FastSpeech2Acoustic(nn.Module): def __init__(self, vocab_size): super().__init__() @@ -62,17 +63,20 @@ def __init__(self, vocab_size): if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) - + def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, spk_embed_id=None, **kwargs): - B, T = txt_tokens.shape - dur = mel2ph_to_dur(mel2ph, T).float() + dur = mel2ph_to_dur(mel2ph, txt_tokens.shape[1]).float() dur_embed = self.dur_embed(dur[:, :, None]) encoder_out = self.encoder(txt_tokens, dur_embed) - + encoder_out = F.pad(encoder_out, [0, 0, 1, 0]) 
mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]]) condition = torch.gather(encoder_out, 1, mel2ph_) + return self.forward_variance_embedding( + condition, f0=f0, key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, **kwargs + ) + def forward_variance_embedding(self, condition, f0, key_shift=None, speed=None, spk_embed_id=None, **kwargs): if self.f0_embed_type == 'discrete': pitch = f0_to_coarse(f0) pitch_embed = self.pitch_embed(pitch) From 76ef3c6a8ddce524e812012034c12bd33f69cef3 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Mon, 3 Apr 2023 01:19:06 -0500 Subject: [PATCH 183/475] Permanent checkpointing --- utils/training_utils.py | 50 +++++++++++++++-------------------------- 1 file changed, 18 insertions(+), 32 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index 877b052c7..fc787a956 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -191,31 +191,27 @@ def __init__( **kwargs ): super().__init__(*args, **kwargs) - self.permanent_ckpt_start = permanent_ckpt_start - self.permanent_ckpt_interval = permanent_ckpt_interval - self.last_permanent_step = None - self.permanent_steps = set() + self.permanent_ckpt_start = permanent_ckpt_start or 0 + self.permanent_ckpt_interval = permanent_ckpt_interval or 0 + self.enable_permanent_ckpt = self.permanent_ckpt_start > 0 and self.permanent_ckpt_interval > 9 + self._verbose = self.verbose self.verbose = False def state_dict(self): ret = super().state_dict() - ret['last_permanent_step'] = self.last_permanent_step - ret['permanent_steps'] = list(self.permanent_steps) + ret.pop('dirpath') return ret def load_state_dict(self, state_dict) -> None: super().load_state_dict(state_dict) - self.last_permanent_step = state_dict.get("last_permanent_step", self.last_permanent_step) - if self.last_permanent_step is not None: - self.last_permanent_step = self.permanent_ckpt_start + self.permanent_ckpt_interval * ((self.last_permanent_step - self.permanent_ckpt_start) // self.permanent_ckpt_interval) - self.permanent_steps = set(state_dict.get("permanent_steps", self.permanent_steps)) - + def _update_best_and_save( self, current: torch.Tensor, trainer: "pl.Trainer", monitor_candidates: Dict[str, torch.Tensor] ) -> None: k = len(self.best_k_models) + 1 if self.save_top_k == -1 else self.save_top_k + del_filepath = None _op = max if self.mode == "min" else min while len(self.best_k_models) > k and k > 0: self.kth_best_model_path = _op(self.best_k_models, key=self.best_k_models.get) # type: ignore[arg-type] @@ -225,40 +221,30 @@ def _update_best_and_save( self.best_k_models.pop(del_filepath) filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer, del_filepath) if del_filepath is not None and filepath != del_filepath: - rank_zero_info(f"Deleting {Path(del_filepath).relative_to(Path('.').resolve())} as it is not in top {k} checkpoints.") self._remove_checkpoint(trainer, del_filepath) - if len(self.best_k_models) == k: + if len(self.best_k_models) == k and k > 0: self.kth_best_model_path = _op(self.best_k_models, key=self.best_k_models.get) # type: ignore[arg-type] self.kth_value = self.best_k_models[self.kth_best_model_path] super()._update_best_and_save(current, trainer, monitor_candidates) def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: - relative_path = Path(filepath).relative_to(Path('.').resolve()) - is_permament = False - if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: - search = re.search(r'steps_\d+', 
relative_path.stem) - if search: - step = int(search.group(0)[6:]) - if step >= self.permanent_ckpt_start and \ - (self.last_permanent_step is None or \ - step >= self.last_permanent_step + self.permanent_ckpt_interval): - self.last_permanent_step = step - self.permanent_steps.add(step) - is_permament = True super()._save_checkpoint(trainer, filepath) if self._verbose: - rank_zero_info(f'Checkpoint {relative_path} saved.{" (Permanent)" if is_permament else ""}') + relative_path = Path(filepath).relative_to(Path('.').resolve()) + rank_zero_info(f'Checkpoint {relative_path} saved.') def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): relative_path = Path(filepath).relative_to(Path('.').resolve()) - if (self.permanent_ckpt_start or 0) > 0 and (self.permanent_ckpt_interval or 0) > 0: - search = re.search(r'steps_\d+', relative_path.stem) - if search: - step = int(search.group(0)[6:]) - if step in self.permanent_steps: - return + search = re.search(r'steps_\d+', relative_path.stem) + if search: + step = int(search.group(0)[6:]) + if self.enable_permanent_ckpt and \ + step >= self.permanent_ckpt_start and \ + (step - self.permanent_ckpt_start) % self.permanent_ckpt_interval == 0: + rank_zero_info(f'Checkpoint {relative_path} is now permanent.') + return super()._remove_checkpoint(trainer, filepath) if self._verbose: rank_zero_info(f'Removed checkpoint {relative_path}.') From 12aab1596797a65f84238906e889d6eb8bb38d89 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 3 Apr 2023 21:19:20 +0800 Subject: [PATCH 184/475] Simplify nn modules --- deployment/export/export_acoustic.py | 111 +------ modules/commons/common_layers.py | 406 +++---------------------- modules/diff/wavenet.py | 95 +++--- modules/fastspeech/acoustic_encoder.py | 5 +- modules/fastspeech/tts_modules.py | 6 +- 5 files changed, 80 insertions(+), 543 deletions(-) diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index 1b1badd84..f955b09dc 100644 --- a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -22,12 +22,9 @@ import torch.nn.functional as F from torch.nn import Linear, Embedding - from basics.base_model import CategorizedModule -from modules.commons.common_layers import Mish +from modules.diff.diffusion import beta_schedule, DIFF_DENOISERS from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder -from modules.diff.diffusion import beta_schedule -from modules.diff.wavenet import AttrDict from utils import load_ckpt from utils.hparams import hparams, set_hparams from utils.phoneme_utils import build_phoneme_list @@ -147,106 +144,6 @@ def extract(a, t): return a[t].reshape((1, 1, 1, 1)) -class SinusoidalPosEmb(nn.Module): - def __init__(self, dim): - super().__init__() - self.dim = dim - half_dim = dim // 2 - emb = math.log(10000) / (half_dim - 1) - self.register_buffer('emb', torch.exp(torch.arange(half_dim) * torch.tensor(-emb)).unsqueeze(0)) - - def forward(self, x): - emb = self.emb * x - emb = torch.cat((emb.sin(), emb.cos()), dim=-1) - return emb - - -class ResidualBlock(nn.Module): - def __init__(self, encoder_hidden, residual_channels, dilation): - super().__init__() - self.residual_channels = residual_channels - self.dilated_conv = nn.Conv1d( - residual_channels, - 2 * residual_channels, - 3, - padding=dilation, - dilation=dilation) - self.diffusion_projection = Linear(residual_channels, residual_channels) - self.conditioner_projection = nn.Conv1d(encoder_hidden, 2 * residual_channels, 1) - self.output_projection = 
nn.Conv1d(residual_channels, 2 * residual_channels, 1) - - def forward(self, x, conditioner, diffusion_step): - diffusion_step = self.diffusion_projection(diffusion_step).unsqueeze(-1) - conditioner = self.conditioner_projection(conditioner) - y = x + diffusion_step - - y = self.dilated_conv(y) + conditioner - - # Using torch.split instead of torch.chunk to avoid using onnx::Slice - gate, filter = torch.split(y, [self.residual_channels, self.residual_channels], dim=1) - - y = torch.sigmoid(gate) * torch.tanh(filter) - y = self.output_projection(y) - - # Using torch.split instead of torch.chunk to avoid using onnx::Slice - residual, skip = torch.split(y, [self.residual_channels, self.residual_channels], dim=1) - - return (x + residual) / math.sqrt(2.0), skip - - -class DiffNet(nn.Module): - def __init__(self, in_dims=80): - super().__init__() - self.params = params = AttrDict( - # Model params - encoder_hidden=hparams['hidden_size'], - residual_layers=hparams['residual_layers'], - residual_channels=hparams['residual_channels'], - dilation_cycle_length=hparams['dilation_cycle_length'], - ) - self.input_projection = nn.Conv1d(in_dims, params.residual_channels, 1) - self.diffusion_embedding = SinusoidalPosEmb(params.residual_channels) - dim = params.residual_channels - self.mlp = nn.Sequential( - nn.Linear(dim, dim * 4), - Mish(), - nn.Linear(dim * 4, dim) - ) - self.residual_layers = nn.ModuleList([ - ResidualBlock(params.encoder_hidden, params.residual_channels, 2 ** (i % params.dilation_cycle_length)) - for i in range(params.residual_layers) - ]) - self.skip_projection = nn.Conv1d(params.residual_channels, params.residual_channels, 1) - self.output_projection = nn.Conv1d(params.residual_channels, in_dims, 1) - nn.init.zeros_(self.output_projection.weight) - - def forward(self, spec, diffusion_step, cond): - """ - :param spec: [B, 1, M, T] - :param diffusion_step: [B, 1] - :param cond: [B, M, T] - :return: - """ - x = spec.squeeze(1) - x = self.input_projection(x) # [B, residual_channel, T] - - x = F.relu(x) - diffusion_step = diffusion_step.float() - diffusion_step = self.diffusion_embedding(diffusion_step) - diffusion_step = self.mlp(diffusion_step) - - skip = [] - for layer in self.residual_layers: - x, skip_connection = layer(x, cond, diffusion_step) - skip.append(skip_connection) - - x = torch.sum(torch.stack(skip), dim=0) / math.sqrt(len(self.residual_layers)) - x = self.skip_projection(x) - x = F.relu(x) - x = self.output_projection(x) # [B, mel_bins, T] - return x.unsqueeze(1) - - class NaiveNoisePredictor(nn.Module): def __init__(self): super().__init__() @@ -335,12 +232,13 @@ def forward(self, x): class GaussianDiffusion(CategorizedModule): - def __init__(self, out_dims, timesteps=1000, k_step=1000, spec_min=None, spec_max=None): + def __init__(self, out_dims, timesteps=1000, k_step=1000, + denoiser_type=None,spec_min=None, spec_max=None): super().__init__() self.mel_bins = out_dims self.k_step = k_step - self.denoise_fn = DiffNet(out_dims) + self.denoise_fn = DIFF_DENOISERS[denoiser_type](hparams) self.naive_noise_predictor = NaiveNoisePredictor() self.plms_noise_predictor = PLMSNoisePredictor() self.mel_extractor = MelExtractor() @@ -483,6 +381,7 @@ def build_diff_model(device, ckpt_steps=None): out_dims=hparams['audio_num_mel_bins'], timesteps=hparams['timesteps'], k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], spec_min=hparams['spec_min'], spec_max=hparams['spec_max'], ) diff --git a/modules/commons/common_layers.py b/modules/commons/common_layers.py 
index cd9eb7fad..56a1e0c21 100644 --- a/modules/commons/common_layers.py +++ b/modules/commons/common_layers.py @@ -1,36 +1,26 @@ import math + import torch -from torch import nn -from torch.nn import Parameter -import torch.onnx.operators import torch.nn.functional as F -import utils - +import torch.onnx.operators +from torch import nn +from torch.nn import LayerNorm, MultiheadAttention, ReLU, GELU, SiLU -def Embedding(num_embeddings, embedding_dim, padding_idx=None): - m = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx) - nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5) - if padding_idx is not None: - nn.init.constant_(m.weight[padding_idx], 0) - return m +import utils -def LayerNorm(normalized_shape, eps=1e-5, elementwise_affine=True, export=False): - if not export and torch.cuda.is_available(): - try: - from apex.normalization import FusedLayerNorm - return FusedLayerNorm(normalized_shape, eps, elementwise_affine) - except ImportError: - pass - return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine) +class NormalInitEmbedding(torch.nn.Embedding): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + nn.init.normal_(self.weight, mean=0, std=self.embedding_dim ** -0.5) -def Linear(in_features, out_features, bias=True): - m = nn.Linear(in_features, out_features, bias) - nn.init.xavier_uniform_(m.weight) - if bias: - nn.init.constant_(m.bias, 0.) - return m +class XavierUniformInitLinear(torch.nn.Linear): + def __init__(self, *args, bias=True, **kwargs): + super().__init__(*args, **kwargs) + nn.init.xavier_uniform_(self.weight) + if bias: + nn.init.constant_(self.bias, 0.) class SinusoidalPositionalEmbedding(nn.Module): @@ -69,9 +59,9 @@ def get_embedding(num_embeddings, embedding_dim, padding_idx=None): emb[padding_idx, :] = 0 return emb - def forward(self, input, incremental_state=None, timestep=None, positions=None, **kwargs): + def forward(self, x, incremental_state=None, timestep=None, positions=None): """Input is expected to be of size [bsz x seqlen].""" - bsz, seq_len = input.shape[:2] + bsz, seq_len = x.shape[:2] max_pos = self.padding_idx + 1 + seq_len if self.weights is None or max_pos > self.weights.size(0): # recompute/expand embeddings if needed @@ -87,10 +77,11 @@ def forward(self, input, incremental_state=None, timestep=None, positions=None, pos = timestep.view(-1)[0] + 1 if timestep is not None else seq_len return self.weights[self.padding_idx + pos, :].expand(bsz, 1, -1) - positions = utils.make_positions(input, self.padding_idx) if positions is None else positions + positions = utils.make_positions(x, self.padding_idx) if positions is None else positions return self.weights.index_select(0, positions.view(-1)).view(bsz, seq_len, -1).detach() - def max_positions(self): + @staticmethod + def max_positions(): """Maximum number of supported positions.""" return int(1e5) # an arbitrary large number @@ -107,332 +98,9 @@ def __init__(self, in_channels, out_channels, kernel_size, padding=0): self.kernel_size, in_channels, out_channels)) self.bias = torch.nn.Parameter(torch.Tensor(out_channels)) - def forward(self, input): - return torch.conv_tbc(input.contiguous(), self.weight, self.bias, self.padding) - - -class MultiheadAttention(nn.Module): - def __init__(self, embed_dim, num_heads, kdim=None, vdim=None, dropout=0., bias=True, - add_bias_kv=False, add_zero_attn=False, self_attention=False, - encoder_decoder_attention=False): - super().__init__() - self.embed_dim = embed_dim - self.kdim = kdim if kdim is not 
None else embed_dim - self.vdim = vdim if vdim is not None else embed_dim - self.qkv_same_dim = self.kdim == embed_dim and self.vdim == embed_dim - - self.num_heads = num_heads - self.dropout = dropout - self.head_dim = embed_dim // num_heads - assert self.head_dim * num_heads == self.embed_dim, "embed_dim must be divisible by num_heads" - self.scaling = self.head_dim ** -0.5 - - self.self_attention = self_attention - self.encoder_decoder_attention = encoder_decoder_attention - - assert not self.self_attention or self.qkv_same_dim, 'Self-attention requires query, key and ' \ - 'value to be of the same size' - - if self.qkv_same_dim: - self.in_proj_weight = Parameter(torch.Tensor(3 * embed_dim, embed_dim)) - else: - self.k_proj_weight = Parameter(torch.Tensor(embed_dim, self.kdim)) - self.v_proj_weight = Parameter(torch.Tensor(embed_dim, self.vdim)) - self.q_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim)) - - if bias: - self.in_proj_bias = Parameter(torch.Tensor(3 * embed_dim)) - else: - self.register_parameter('in_proj_bias', None) - - self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) - - if add_bias_kv: - self.bias_k = Parameter(torch.Tensor(1, 1, embed_dim)) - self.bias_v = Parameter(torch.Tensor(1, 1, embed_dim)) - else: - self.bias_k = self.bias_v = None - - self.add_zero_attn = add_zero_attn - - self.reset_parameters() - - self.enable_torch_version = False - if hasattr(F, "multi_head_attention_forward"): - self.enable_torch_version = True - else: - self.enable_torch_version = False - self.last_attn_probs = None - - def reset_parameters(self): - if self.qkv_same_dim: - nn.init.xavier_uniform_(self.in_proj_weight) - else: - nn.init.xavier_uniform_(self.k_proj_weight) - nn.init.xavier_uniform_(self.v_proj_weight) - nn.init.xavier_uniform_(self.q_proj_weight) - - nn.init.xavier_uniform_(self.out_proj.weight) - if self.in_proj_bias is not None: - nn.init.constant_(self.in_proj_bias, 0.) - nn.init.constant_(self.out_proj.bias, 0.) - if self.bias_k is not None: - nn.init.xavier_normal_(self.bias_k) - if self.bias_v is not None: - nn.init.xavier_normal_(self.bias_v) - - def forward( - self, - query, key, value, - key_padding_mask=None, - incremental_state=None, - need_weights=True, - static_kv=False, - attn_mask=None, - before_softmax=False, - need_head_weights=False, - enc_dec_attn_constraint_mask=None, - reset_attn_weight=None - ): - """Input shape: Time x Batch x Channel - - Args: - key_padding_mask (ByteTensor, optional): mask to exclude - keys that are pads, of shape `(batch, src_len)`, where - padding elements are indicated by 1s. - need_weights (bool, optional): return the attention weights, - averaged over heads (default: False). - attn_mask (ByteTensor, optional): typically used to - implement causal attention, where the mask prevents the - attention from looking forward in time (default: None). - before_softmax (bool, optional): return the raw attention - weights and values before the attention softmax. - need_head_weights (bool, optional): return the attention - weights for each head. Implies *need_weights*. Default: - return the average attention weights over all heads. 
- """ - if need_head_weights: - need_weights = True - - tgt_len, bsz, embed_dim = query.size() - assert embed_dim == self.embed_dim - assert list(query.size()) == [tgt_len, bsz, embed_dim] - - if self.enable_torch_version and incremental_state is None and not static_kv and reset_attn_weight is None: - if self.qkv_same_dim: - return F.multi_head_attention_forward(query, key, value, - self.embed_dim, self.num_heads, - self.in_proj_weight, - self.in_proj_bias, self.bias_k, self.bias_v, - self.add_zero_attn, self.dropout, - self.out_proj.weight, self.out_proj.bias, - self.training, key_padding_mask, need_weights, - attn_mask) - else: - return F.multi_head_attention_forward(query, key, value, - self.embed_dim, self.num_heads, - torch.empty([0]), - self.in_proj_bias, self.bias_k, self.bias_v, - self.add_zero_attn, self.dropout, - self.out_proj.weight, self.out_proj.bias, - self.training, key_padding_mask, need_weights, - attn_mask, use_separate_proj_weight=True, - q_proj_weight=self.q_proj_weight, - k_proj_weight=self.k_proj_weight, - v_proj_weight=self.v_proj_weight) - - if incremental_state is not None: - print('Not implemented error.') - exit() - else: - saved_state = None - - if self.self_attention: - # self-attention - q, k, v = self.in_proj_qkv(query) - elif self.encoder_decoder_attention: - # encoder-decoder attention - q = self.in_proj_q(query) - if key is None: - assert value is None - k = v = None - else: - k = self.in_proj_k(key) - v = self.in_proj_v(key) - - else: - q = self.in_proj_q(query) - k = self.in_proj_k(key) - v = self.in_proj_v(value) - q *= self.scaling - - if self.bias_k is not None: - assert self.bias_v is not None - k = torch.cat([k, self.bias_k.repeat(1, bsz, 1)]) - v = torch.cat([v, self.bias_v.repeat(1, bsz, 1)]) - if attn_mask is not None: - attn_mask = torch.cat([attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1) - if key_padding_mask is not None: - key_padding_mask = torch.cat( - [key_padding_mask, key_padding_mask.new_zeros(key_padding_mask.size(0), 1)], dim=1) - - q = q.contiguous().view(tgt_len, bsz * self.num_heads, self.head_dim).transpose(0, 1) - if k is not None: - k = k.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1) - if v is not None: - v = v.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1) - - if saved_state is not None: - print('Not implemented error.') - exit() - - src_len = k.size(1) - - # This is part of a workaround to get around fork/join parallelism - # not supporting Optional types. 
- if key_padding_mask is not None and key_padding_mask.shape == torch.Size([]): - key_padding_mask = None - - if key_padding_mask is not None: - assert key_padding_mask.size(0) == bsz - assert key_padding_mask.size(1) == src_len - - if self.add_zero_attn: - src_len += 1 - k = torch.cat([k, k.new_zeros((k.size(0), 1) + k.size()[2:])], dim=1) - v = torch.cat([v, v.new_zeros((v.size(0), 1) + v.size()[2:])], dim=1) - if attn_mask is not None: - attn_mask = torch.cat([attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1) - if key_padding_mask is not None: - key_padding_mask = torch.cat( - [key_padding_mask, torch.zeros(key_padding_mask.size(0), 1).type_as(key_padding_mask)], dim=1) - - attn_weights = torch.bmm(q, k.transpose(1, 2)) - attn_weights = self.apply_sparse_mask(attn_weights, tgt_len, src_len, bsz) - - assert list(attn_weights.size()) == [bsz * self.num_heads, tgt_len, src_len] - - if attn_mask is not None: - if len(attn_mask.shape) == 2: - attn_mask = attn_mask.unsqueeze(0) - elif len(attn_mask.shape) == 3: - attn_mask = attn_mask[:, None].repeat([1, self.num_heads, 1, 1]).reshape( - bsz * self.num_heads, tgt_len, src_len) - attn_weights = attn_weights + attn_mask - - if enc_dec_attn_constraint_mask is not None: # bs x head x L_kv - attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) - attn_weights = attn_weights.masked_fill( - enc_dec_attn_constraint_mask.unsqueeze(2).bool(), - -1e9, - ) - attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) - - if key_padding_mask is not None: - # don't attend to padding symbols - attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) - attn_weights = attn_weights.masked_fill( - key_padding_mask.unsqueeze(1).unsqueeze(2), - -1e9, - ) - attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) - - attn_logits = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) - - if before_softmax: - return attn_weights, v - - attn_weights_float = utils.softmax(attn_weights, dim=-1) - attn_weights = attn_weights_float.type_as(attn_weights) - attn_probs = F.dropout(attn_weights_float.type_as(attn_weights), p=self.dropout, training=self.training) - - if reset_attn_weight is not None: - if reset_attn_weight: - self.last_attn_probs = attn_probs.detach() - else: - assert self.last_attn_probs is not None - attn_probs = self.last_attn_probs - attn = torch.bmm(attn_probs, v) - assert list(attn.size()) == [bsz * self.num_heads, tgt_len, self.head_dim] - attn = attn.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim) - attn = self.out_proj(attn) - - if need_weights: - attn_weights = attn_weights_float.view(bsz, self.num_heads, tgt_len, src_len).transpose(1, 0) - if not need_head_weights: - # average attention weights over heads - attn_weights = attn_weights.mean(dim=0) - else: - attn_weights = None - - return attn, (attn_weights, attn_logits) - - def in_proj_qkv(self, query): - return self._in_proj(query).chunk(3, dim=-1) - - def in_proj_q(self, query): - if self.qkv_same_dim: - return self._in_proj(query, end=self.embed_dim) - else: - bias = self.in_proj_bias - if bias is not None: - bias = bias[:self.embed_dim] - return F.linear(query, self.q_proj_weight, bias) - - def in_proj_k(self, key): - if self.qkv_same_dim: - return self._in_proj(key, start=self.embed_dim, end=2 * self.embed_dim) - else: - weight = self.k_proj_weight - bias = self.in_proj_bias - if bias is not None: - bias = bias[self.embed_dim:2 * self.embed_dim] - return F.linear(key, weight, bias) - - def in_proj_v(self, 
value): - if self.qkv_same_dim: - return self._in_proj(value, start=2 * self.embed_dim) - else: - weight = self.v_proj_weight - bias = self.in_proj_bias - if bias is not None: - bias = bias[2 * self.embed_dim:] - return F.linear(value, weight, bias) - - def _in_proj(self, input, start=0, end=None): - weight = self.in_proj_weight - bias = self.in_proj_bias - weight = weight[start:end, :] - if bias is not None: - bias = bias[start:end] - return F.linear(input, weight, bias) - - - def apply_sparse_mask(self, attn_weights, tgt_len, src_len, bsz): - return attn_weights - - -class Swish(torch.autograd.Function): - @staticmethod - def forward(ctx, i): - result = i * torch.sigmoid(i) - ctx.save_for_backward(i) - return result - - @staticmethod - def backward(ctx, grad_output): - i = ctx.saved_variables[0] - sigmoid_i = torch.sigmoid(i) - return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i))) - - -class CustomSwish(nn.Module): - def forward(self, input_tensor): - return Swish.apply(input_tensor) - -class Mish(nn.Module): def forward(self, x): - return x * torch.tanh(F.softplus(x)) + return torch.conv_tbc(x.contiguous(), self.weight, self.bias, self.padding) + class TransformerFFNLayer(nn.Module): def __init__(self, hidden_size, filter_size, padding="SAME", kernel_size=1, dropout=0., act='gelu'): @@ -447,27 +115,20 @@ def __init__(self, hidden_size, filter_size, padding="SAME", kernel_size=1, drop nn.ConstantPad1d((kernel_size - 1, 0), 0.0), nn.Conv1d(hidden_size, filter_size, kernel_size) ) - self.ffn_2 = Linear(filter_size, hidden_size) - if self.act == 'swish': - self.swish_fn = CustomSwish() + if self.act == 'relu': + self.act_fn = ReLU() + elif self.act == 'gelu': + self.act_fn = GELU() + elif self.act == 'swish': + self.act_fn = SiLU() + self.ffn_2 = XavierUniformInitLinear(filter_size, hidden_size) - def forward(self, x, incremental_state=None): + def forward(self, x): # x: T x B x C - if incremental_state is not None: - assert incremental_state is None, 'Nar-generation does not allow this.' 
- exit(1) - x = self.ffn_1(x.permute(1, 2, 0)).permute(2, 0, 1) x = x * self.kernel_size ** -0.5 - if incremental_state is not None: - x = x[-1:] - if self.act == 'gelu': - x = F.gelu(x) - if self.act == 'relu': - x = F.relu(x) - if self.act == 'swish': - x = self.swish_fn(x) + x = self.act_fn(x) x = F.dropout(x, self.dropout, training=self.training) x = self.ffn_2(x) return x @@ -503,14 +164,15 @@ def __init__(self, c, num_heads, dropout, attention_dropout=0.1, elif norm == 'bn': self.layer_norm1 = BatchNorm1dTBC(c) self.self_attn = MultiheadAttention( - self.c, num_heads, self_attention=True, dropout=attention_dropout, bias=False, + self.c, num_heads, dropout=attention_dropout, bias=False, ) if norm == 'ln': self.layer_norm2 = LayerNorm(c) elif norm == 'bn': self.layer_norm2 = BatchNorm1dTBC(c) self.ffn = TransformerFFNLayer( - c, 4 * c, kernel_size=kernel_size, dropout=relu_dropout, padding=padding, act=act) + c, 4 * c, kernel_size=kernel_size, dropout=relu_dropout, padding=padding, act=act + ) def forward(self, x, encoder_padding_mask=None, **kwargs): layer_norm_training = kwargs.get('layer_norm_training', None) diff --git a/modules/diff/wavenet.py b/modules/diff/wavenet.py index 93085e40b..ff7a3feb5 100644 --- a/modules/diff/wavenet.py +++ b/modules/diff/wavenet.py @@ -1,32 +1,18 @@ import math +from math import sqrt import torch import torch.nn as nn import torch.nn.functional as F - -from math import sqrt +from torch.nn import Mish from utils.hparams import hparams -from modules.commons.common_layers import Mish -class AttrDict(dict): +class Conv1d(torch.nn.Conv1d): def __init__(self, *args, **kwargs): - super(AttrDict, self).__init__(*args, **kwargs) - self.__dict__ = self - - def __getattr__(self, item): - return self[item] - - def override(self, attrs): - if isinstance(attrs, dict): - self.__dict__.update(**attrs) - elif isinstance(attrs, (list, tuple, set)): - for attr in attrs: - self.override(attr) - elif attrs is not None: - raise NotImplementedError - return self + super().__init__(*args, **kwargs) + nn.init.kaiming_normal_(self.weight) class SinusoidalPosEmb(nn.Module): @@ -44,24 +30,20 @@ def forward(self, x): return emb -def Conv1d(*args, **kwargs): - layer = nn.Conv1d(*args, **kwargs) - nn.init.kaiming_normal_(layer.weight) - return layer - - -@torch.jit.script -def silu(x): - return x * torch.sigmoid(x) - - class ResidualBlock(nn.Module): def __init__(self, encoder_hidden, residual_channels, dilation): super().__init__() - self.dilated_conv = Conv1d(residual_channels, 2 * residual_channels, 3, padding=dilation, dilation=dilation) + self.residual_channels = residual_channels + self.dilated_conv = nn.Conv1d( + residual_channels, + 2 * residual_channels, + kernel_size=3, + padding=dilation, + dilation=dilation + ) self.diffusion_projection = nn.Linear(residual_channels, residual_channels) - self.conditioner_projection = Conv1d(encoder_hidden, 2 * residual_channels, 1) - self.output_projection = Conv1d(residual_channels, 2 * residual_channels, 1) + self.conditioner_projection = nn.Conv1d(encoder_hidden, 2 * residual_channels, 1) + self.output_projection = nn.Conv1d(residual_channels, 2 * residual_channels, 1) def forward(self, x, conditioner, diffusion_step): diffusion_step = self.diffusion_projection(diffusion_step).unsqueeze(-1) @@ -70,67 +52,60 @@ def forward(self, x, conditioner, diffusion_step): y = self.dilated_conv(y) + conditioner - gate, filter = torch.chunk(y, 2, dim=1) # Using torch.split instead of torch.chunk to avoid using onnx::Slice - # gate, filter = 
torch.split(y, torch.div(y.shape[1], 2), dim=1) - + gate, filter = torch.split(y, [self.residual_channels, self.residual_channels], dim=1) y = torch.sigmoid(gate) * torch.tanh(filter) y = self.output_projection(y) - residual, skip = torch.chunk(y, 2, dim=1) + # Using torch.split instead of torch.chunk to avoid using onnx::Slice - # residual, skip = torch.split(y, torch.div(y.shape[1], 2), dim=1) - - return (x + residual) / sqrt(2.0), skip + residual, skip = torch.split(y, [self.residual_channels, self.residual_channels], dim=1) + return (x + residual) / math.sqrt(2.0), skip class WaveNet(nn.Module): - def __init__(self, in_dims=80): + def __init__(self, in_dims): super().__init__() - self.params = params = AttrDict( - # Model params - encoder_hidden=hparams['hidden_size'], - residual_layers=hparams['residual_layers'], - residual_channels=hparams['residual_channels'], - dilation_cycle_length=hparams['dilation_cycle_length'], - ) - self.input_projection = Conv1d(in_dims, params.residual_channels, 1) - self.diffusion_embedding = SinusoidalPosEmb(params.residual_channels) - dim = params.residual_channels + dim = hparams['residual_channels'] + self.input_projection = Conv1d(in_dims, dim, 1) + self.diffusion_embedding = SinusoidalPosEmb(dim) self.mlp = nn.Sequential( nn.Linear(dim, dim * 4), Mish(), nn.Linear(dim * 4, dim) ) self.residual_layers = nn.ModuleList([ - ResidualBlock(params.encoder_hidden, params.residual_channels, 2 ** (i % params.dilation_cycle_length)) - for i in range(params.residual_layers) + ResidualBlock( + encoder_hidden=hparams['hidden_size'], + residual_channels=dim, + dilation=2 ** (i % hparams['dilation_cycle_length']) + ) + for i in range(hparams['residual_layers']) ]) - self.skip_projection = Conv1d(params.residual_channels, params.residual_channels, 1) - self.output_projection = Conv1d(params.residual_channels, in_dims, 1) + self.skip_projection = Conv1d(dim, dim, 1) + self.output_projection = Conv1d(dim, in_dims, 1) nn.init.zeros_(self.output_projection.weight) def forward(self, spec, diffusion_step, cond): """ - :param spec: [B, 1, M, T] :param diffusion_step: [B, 1] :param cond: [B, M, T] :return: """ - x = spec[:, 0] - x = self.input_projection(x) # x [B, residual_channel, T] + x = spec.squeeze(1) + x = self.input_projection(x) # [B, residual_channel, T] x = F.relu(x) diffusion_step = self.diffusion_embedding(diffusion_step) diffusion_step = self.mlp(diffusion_step) skip = [] - for layer_id, layer in enumerate(self.residual_layers): + for layer in self.residual_layers: x, skip_connection = layer(x, cond, diffusion_step) skip.append(skip_connection) x = torch.sum(torch.stack(skip), dim=0) / sqrt(len(self.residual_layers)) x = self.skip_projection(x) x = F.relu(x) - x = self.output_projection(x) # [B, 80, T] + x = self.output_projection(x) # [B, mel_bins, T] return x[:, None, :, :] diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index a8ec0c05d..a60079056 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -2,7 +2,10 @@ import torch.nn as nn from torch.nn import functional as F -from modules.commons.common_layers import Embedding, Linear +from modules.commons.common_layers import ( + NormalInitEmbedding as Embedding, + XavierUniformInitLinear as Linear, +) from modules.fastspeech.tts_modules import FastSpeech2Encoder, mel2ph_to_dur from utils.hparams import hparams from utils.pitch_utils import f0_to_coarse diff --git a/modules/fastspeech/tts_modules.py 
b/modules/fastspeech/tts_modules.py index b842953f4..767afb63f 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -15,16 +15,14 @@ class TransformerEncoderLayer(nn.Module): def __init__(self, hidden_size, dropout, kernel_size=None, num_heads=2, norm='ln'): super().__init__() - self.hidden_size = hidden_size - self.dropout = dropout - self.num_heads = num_heads self.op = EncSALayer( hidden_size, num_heads, dropout=dropout, attention_dropout=0.0, relu_dropout=dropout, kernel_size=kernel_size if kernel_size is not None else hparams['enc_ffn_kernel_size'], padding=hparams['ffn_padding'], - norm=norm, act=hparams['ffn_act']) + norm=norm, act=hparams['ffn_act'] + ) def forward(self, x, **kwargs): return self.op(x, **kwargs) From b76cfcc7d8d746c08e90137e9236879c08d7b82d Mon Sep 17 00:00:00 2001 From: hrukalive Date: Tue, 4 Apr 2023 00:15:57 -0500 Subject: [PATCH 185/475] Fixes for lightning 2.0 --- basics/base_task.py | 23 +++++++++++++----- training/acoustic_task.py | 3 ++- utils/indexed_datasets.py | 2 ++ utils/training_utils.py | 51 +++++++++++++++++++++++++-------------- 4 files changed, 54 insertions(+), 25 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index a6e36386c..f2cde27fd 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -22,7 +22,7 @@ from utils.hparams import hparams from utils.training_utils import ( DsModelCheckpoint, DsTQDMProgressBar, - get_latest_checkpoint_path, get_stategy + get_latest_checkpoint_path, get_strategy ) from utils.phoneme_utils import locate_dictionary @@ -73,6 +73,8 @@ def __init__(self, *args, **kwargs): self.training_sampler = None self.model = None + self.skip_immediate_validation = False + self.skip_immediate_ckpt_save = False self.valid_metrics = { 'total_loss': MeanMetric() @@ -106,7 +108,8 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): self.log('lr', self.lr_schedulers().get_lr()[0], prog_bar=True, logger=False, on_step=True, on_epoch=False) # logs to tensorboard tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} - self.log_dict(tb_log, logger=True, on_step=True, on_epoch=False) + if self.global_step % self.trainer.log_every_n_steps == 0: + self.logger.log_metrics(tb_log, step=self.global_step) return total_loss @@ -137,16 +140,24 @@ def validation_step(self, sample, batch_idx): :param sample: :param batch_idx: """ - with torch.autocast('cuda' if next(self.model.parameters()).is_cuda else 'cpu', dtype=torch.float32): + if self.skip_immediate_validation: + rank_zero_debug(f"Skip validation {batch_idx}") + return {} + with torch.autocast(self.device.type, enabled=False): outputs, weight = self._validation_step(sample, batch_idx) for k, v in outputs.items(): if isinstance(self.valid_metrics[k], MeanMetric): self.valid_metrics[k].update(v, weight=weight) + return outputs def on_validation_epoch_end(self): + if self.skip_immediate_validation: + self.skip_immediate_validation = False + self.skip_immediate_ckpt_save = True + return metric_vals = {k: v.compute() for k, v in self.valid_metrics.items()} self.log('val_loss', metric_vals['total_loss'], on_epoch=True, prog_bar=True, logger=False) - self.log_dict({f'val/{k}': v for k, v in metric_vals.items()}, on_epoch=True, logger=True) + self.logger.log_metrics({f'val/{k}': v for k, v in metric_vals.items()}, step=self.global_step) for metric in self.valid_metrics.values(): metric.reset() @@ -192,7 +203,7 @@ def start(cls): accelerator=hparams['pl_trainer_accelerator'], devices=hparams['pl_trainer_devices'], 
num_nodes=hparams['pl_trainer_num_nodes'], - strategy=get_stategy( + strategy=get_strategy( accelerator=hparams['pl_trainer_accelerator'], devices=hparams['pl_trainer_devices'], num_nodes=hparams['pl_trainer_num_nodes'], @@ -208,7 +219,7 @@ def start(cls): monitor='step', mode='max', save_last=False, - every_n_train_steps=hparams['val_check_interval'], + # every_n_train_steps=hparams['val_check_interval'], save_top_k=hparams['num_ckpt_keep'], permanent_ckpt_start=hparams['permanent_ckpt_start'], permanent_ckpt_interval=hparams['permanent_ckpt_interval'], diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 02c86456d..c73114c7d 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -121,6 +121,7 @@ def train_dataloader(self): num_replicas=(self.trainer.distributed_sampler_kwargs or {}).get('num_replicas', 1), rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), sort_by_similar_size=hparams['sort_by_len'], + required_batch_count_multiple=hparams['accumulate_grad_batches'], shuffle_sample=True, shuffle_batch=False, seed=hparams['seed'] @@ -291,7 +292,7 @@ def after_infer(self, predictions): if self.phone_encoder is not None and 'tokens' in prediction: str_phs = self.phone_encoder.decode(prediction['tokens'], strip_padding=True) gen_dir = os.path.join(hparams['work_dir'], - f'generated_{self.trainer.global_step}_{hparams["gen_dir_name"]}') + f'generated_{self.global_step}_{hparams["gen_dir_name"]}') wav_pred = self.vocoder.spec2wav(mel_pred, f0=f0_pred) os.makedirs(gen_dir, exist_ok=True) os.makedirs(f'{gen_dir}/wavs', exist_ok=True) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 64ba1bdf4..a3965c0b4 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -39,6 +39,8 @@ def __getitem__(self, i): return item def __len__(self): + if self.dset is None: + self.dset = h5py.File(self.path, 'r') return len(self.dset) class IndexedDatasetBuilder: diff --git a/utils/training_utils.py b/utils/training_utils.py index fc787a956..efdfc879a 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -91,8 +91,11 @@ def __init__(self, dataset, max_tokens, max_sentences, sub_indices=None, self.drop_last = drop_last self.epoch = 0 self.batches = None - - def __iter__(self): + self.formed = None + + def __form_batches(self): + if self.formed == self.epoch + self.seed: + return rng = np.random.default_rng(self.seed + self.epoch) if self.shuffle_sample: if self.sub_indices is not None: @@ -125,12 +128,16 @@ def __iter__(self): batch_assignment = rng.permuted(np.arange(floored_total_batch_count).reshape(-1, self.num_replicas).transpose(), axis=0)[self.rank].tolist() floored_batch_count = len(batch_assignment) + ceiled_batch_count = floored_batch_count + (1 if len(leftovers) > 0 else 0) if self.rank < len(leftovers): batch_assignment.append(leftovers[self.rank]) elif len(leftovers) > 0: batch_assignment.append(batch_assignment[self.epoch % floored_batch_count]) - if self.required_batch_count_multiple > 1: - batch_assignment = batch_assignment[:((floored_batch_count // self.required_batch_count_multiple) * self.required_batch_count_multiple)] + if self.required_batch_count_multiple > 1 and ceiled_batch_count % self.required_batch_count_multiple != 0: + # batch_assignment = batch_assignment[:((floored_batch_count // self.required_batch_count_multiple) * self.required_batch_count_multiple)] + ceiled_batch_count = math.ceil(ceiled_batch_count / self.required_batch_count_multiple) * 
self.required_batch_count_multiple + for i in range(ceiled_batch_count - len(batch_assignment)): + batch_assignment.append(batch_assignment[(i + self.epoch * self.required_batch_count_multiple) % floored_batch_count]) self.batches = [deepcopy(batches[i]) for i in batch_assignment] @@ -140,13 +147,15 @@ def __iter__(self): del indices del batches del batch_assignment - - for batch in self.batches: - yield batch + + def __iter__(self): + self.__form_batches() + return iter(self.batches) def __len__(self): + self.__form_batches() if self.batches is None: - raise RuntimeError("Batches are not initialized. Call __iter__ first.") + raise RuntimeError("Batches are not initialized. Call __form_batches first.") return len(self.batches) def set_epoch(self, epoch): @@ -161,8 +170,9 @@ def __init__(self, dataset, max_tokens, max_sentences, rank=None, batch_by_size= self.rank = rank self.batch_by_size = batch_by_size self.batches = None - - def __iter__(self): + self.batch_size = max_sentences + self.drop_last = False + if self.rank == 0: indices = list(range(len(self.dataset))) if self.batch_by_size: @@ -171,13 +181,11 @@ def __iter__(self): self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] else: self.batches = [[0]] - - for batch in self.batches: - yield batch + + def __iter__(self): + return iter(self.batches) def __len__(self): - if self.batches is None: - raise RuntimeError("Batches are not initialized. Call __iter__ first.") return len(self.batches) #==========PL related========== @@ -206,6 +214,13 @@ def state_dict(self): def load_state_dict(self, state_dict) -> None: super().load_state_dict(state_dict) + def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None: + if trainer.lightning_module.skip_immediate_ckpt_save: + trainer.lightning_module.skip_immediate_ckpt_save = False + return + self.last_val_step = trainer.global_step + super().on_validation_end(trainer, pl_module) + def _update_best_and_save( self, current: torch.Tensor, trainer: "pl.Trainer", monitor_candidates: Dict[str, torch.Tensor] ) -> None: @@ -288,7 +303,7 @@ def get_metrics(self, trainer, model): return items -def get_stategy(accelerator, devices, num_nodes, strategy, backend): +def get_strategy(accelerator, devices, num_nodes, strategy, backend): if accelerator != 'auto' and accelerator != 'gpu': return strategy @@ -345,9 +360,9 @@ def _choose_gpu_accelerator_backend(): def get_ddp_strategy(_backend): if _backend == 'gloo': - return DDPStrategy(process_group_backend='gloo') + return DDPStrategy(process_group_backend='gloo', find_unused_parameters=False) elif _backend == 'nccl' or _backend == 'nccl_no_p2p': - return DDPStrategy(process_group_backend='nccl') + return DDPStrategy(process_group_backend='nccl', find_unused_parameters=False) else: raise ValueError(f'backend {_backend} is not valid.') From 7687c56c5568f46c16fde6a5db6fa6c145c85e52 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 4 Apr 2023 13:40:40 +0800 Subject: [PATCH 186/475] Rename package --- deployment/export/export_acoustic.py | 2 +- modules/{diff/diffusion.py => diffusion/ddpm.py} | 2 +- modules/{diff => diffusion}/wavenet.py | 0 modules/toplevel/acoustic_model.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename modules/{diff/diffusion.py => diffusion/ddpm.py} (99%) rename modules/{diff => diffusion}/wavenet.py (100%) diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index f955b09dc..1b9170baa 100644 --- 
a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -23,7 +23,7 @@ from torch.nn import Linear, Embedding from basics.base_model import CategorizedModule -from modules.diff.diffusion import beta_schedule, DIFF_DENOISERS +from modules.diffusion.ddpm import beta_schedule, DIFF_DENOISERS from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder from utils import load_ckpt from utils.hparams import hparams, set_hparams diff --git a/modules/diff/diffusion.py b/modules/diffusion/ddpm.py similarity index 99% rename from modules/diff/diffusion.py rename to modules/diffusion/ddpm.py index 849b5aad8..e9f434964 100644 --- a/modules/diff/diffusion.py +++ b/modules/diffusion/ddpm.py @@ -8,7 +8,7 @@ from torch import nn from tqdm import tqdm -from modules.diff.wavenet import WaveNet +from modules.diffusion.wavenet import WaveNet from utils.hparams import hparams DIFF_DENOISERS = { diff --git a/modules/diff/wavenet.py b/modules/diffusion/wavenet.py similarity index 100% rename from modules/diff/wavenet.py rename to modules/diffusion/wavenet.py diff --git a/modules/toplevel/acoustic_model.py b/modules/toplevel/acoustic_model.py index 4a36743f7..10587e6a4 100644 --- a/modules/toplevel/acoustic_model.py +++ b/modules/toplevel/acoustic_model.py @@ -1,5 +1,5 @@ from basics.base_model import CategorizedModule -from modules.diff.diffusion import GaussianDiffusion +from modules.diffusion.ddpm import GaussianDiffusion from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from utils.hparams import hparams From 273b5ce910f4dd5fc7a54aeba78b88fb689b8c54 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 5 Apr 2023 00:30:11 +0800 Subject: [PATCH 187/475] Refactor modules and onnx --- basics/{base_model.py => base_module.py} | 0 basics/base_onnx_export.py | 34 +++++++ basics/base_task.py | 2 +- deployment/.gitignore | 1 + deployment/export/export_acoustic.py | 22 ++--- deployment/modules/diffusion.py | 7 ++ deployment/modules/fastspeech2.py | 90 +++++++++++++++++++ deployment/modules/toplevel.py | 38 ++++++++ deployment/requirements.txt | 15 ---- inference/ds_acoustic.py | 2 +- .../commons/espnet_positional_embedding.py | 55 +++++------- modules/fastspeech/acoustic_encoder.py | 2 +- modules/fastspeech/tts_modules.py | 5 +- .../acoustic_model.py => toplevel.py} | 8 +- training/acoustic_task.py | 4 +- utils/__init__.py | 4 +- 16 files changed, 215 insertions(+), 74 deletions(-) rename basics/{base_model.py => base_module.py} (100%) create mode 100644 basics/base_onnx_export.py create mode 100644 deployment/modules/diffusion.py create mode 100644 deployment/modules/fastspeech2.py create mode 100644 deployment/modules/toplevel.py delete mode 100644 deployment/requirements.txt rename modules/{toplevel/acoustic_model.py => toplevel.py} (89%) diff --git a/basics/base_model.py b/basics/base_module.py similarity index 100% rename from basics/base_model.py rename to basics/base_module.py diff --git a/basics/base_onnx_export.py b/basics/base_onnx_export.py new file mode 100644 index 000000000..3128a6a0e --- /dev/null +++ b/basics/base_onnx_export.py @@ -0,0 +1,34 @@ +import json +import os +import pathlib + +import torch +import torch.nn as nn + +from utils.hparams import hparams + + +class BaseOnnxExport: + def __init__(self, device=None, cache_dir=None, **kwargs): + self.device = device if device is not None else torch.device('cuda' if torch.cuda.is_available() else 'cpu') + self.cache_dir: pathlib.Path = cache_dir if cache_dir is not None \ + else 
pathlib.Path(__file__).parent.parent / 'deployment' / 'cache' + if not self.cache_dir.exists(): + self.cache_dir.mkdir(parents=True, exist_ok=True) + + # noinspection PyMethodMayBeStatic + def build_spk_map(self) -> dict: + if hparams['use_spk_id']: + with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: + spk_map = json.load(f) + assert isinstance(spk_map, dict) and len(spk_map) > 0, 'Invalid or empty speaker map!' + assert len(spk_map) == len(set(spk_map.values())), 'Duplicate speaker id in speaker map!' + return spk_map + else: + return {} + + def build_model(self) -> nn.Module: + raise NotImplementedError() + + def export_model(self, path: pathlib.Path): + raise NotImplementedError() diff --git a/basics/base_task.py b/basics/base_task.py index a6e36386c..e6e6103d8 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -18,7 +18,7 @@ from lightning.pytorch.utilities import grad_norm from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only -from basics.base_model import CategorizedModule +from basics.base_module import CategorizedModule from utils.hparams import hparams from utils.training_utils import ( DsModelCheckpoint, DsTQDMProgressBar, diff --git a/deployment/.gitignore b/deployment/.gitignore index 2d648685e..54f44899c 100644 --- a/deployment/.gitignore +++ b/deployment/.gitignore @@ -3,3 +3,4 @@ *.npy *.wav temp/ +cache/ diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py index 1b9170baa..dd9b68982 100644 --- a/deployment/export/export_acoustic.py +++ b/deployment/export/export_acoustic.py @@ -22,7 +22,7 @@ import torch.nn.functional as F from torch.nn import Linear, Embedding -from basics.base_model import CategorizedModule +from basics.base_module import CategorizedModule from modules.diffusion.ddpm import beta_schedule, DIFF_DENOISERS from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder from utils import load_ckpt @@ -47,7 +47,7 @@ def f0_to_coarse(f0): a = (f0_bin - 2) / (f0_mel_max - f0_mel_min) b = f0_mel_min * a - 1. 
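    # A rough sketch of what this bucketing computes (illustrative values only,
    # assuming the repo defaults from utils.pitch_utils, e.g. f0_bin = 256):
    #   f0 = 440 Hz  ->  f0_mel = 1127 * ln(1 + 440 / 700) ~= 550
    #   coarse id = round(f0_mel * a - b), clamped into [1, f0_bin - 1]
    # i.e. continuous F0 becomes one of f0_bin discrete pitch-embedding indices.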
f0_mel = torch.where(f0_mel > 0, f0_mel * a - b, f0_mel) - torch.clip_(f0_mel, min=1., max=float(f0_bin - 1)) + torch.clamp_(f0_mel, min=1., max=float(f0_bin - 1)) f0_coarse = torch.round(f0_mel).long() return f0_coarse @@ -97,9 +97,9 @@ def category(self): return 'acoustic' def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None): - durations *= tokens > 0 + durations = durations * (tokens > 0) mel2ph = self.lr.forward(durations) - f0 *= mel2ph > 0 + f0 = f0 * (mel2ph > 0) mel2ph = mel2ph[..., None].repeat((1, 1, hparams['hidden_size'])) dur_embed = self.dur_embed(durations.float()[:, :, None]) encoded = self.encoder(tokens, dur_embed) @@ -233,7 +233,7 @@ def forward(self, x): class GaussianDiffusion(CategorizedModule): def __init__(self, out_dims, timesteps=1000, k_step=1000, - denoiser_type=None,spec_min=None, spec_max=None): + denoiser_type=None, spec_min=None, spec_max=None): super().__init__() self.mel_bins = out_dims self.k_step = k_step @@ -303,7 +303,7 @@ def forward(self, condition, speedup): device = condition.device n_frames = condition.shape[2] - step_range = torch.arange(0, self.k_step, speedup, dtype=torch.long, device=device).flip(0) + step_range = torch.arange(0, self.k_step, speedup, dtype=torch.long, device=device).flip(0)[:, None] x = torch.randn((1, 1, self.mel_bins, n_frames), device=device) if speedup > 1: @@ -970,7 +970,7 @@ def export(fs2_path, diff_path, ckpt_steps=None, 'condition' ], dynamic_axes=dynamix_axes, - opset_version=11 + opset_version=15 ) model = onnx.load(fs2_path) model, check = onnxsim.simplify(model, include_subgraph=True) @@ -981,7 +981,7 @@ def export(fs2_path, diff_path, ckpt_steps=None, noise_t = torch.randn(shape, device=device) noise_list = torch.randn((3, *shape), device=device) condition = torch.rand((1, hparams['hidden_size'], n_frames), device=device) - step = (torch.rand((), device=device) * hparams['K_step']).long() + step = (torch.rand((1,), device=device) * hparams['K_step']).long() speedup = (torch.rand((), device=device) * step / 10.).long() step_prev = torch.maximum(step - speedup, torch.tensor(0, dtype=torch.long, device=device)) @@ -1040,7 +1040,6 @@ def export(fs2_path, diff_path, ckpt_steps=None, diffusion = torch.jit.script(diffusion) condition = torch.rand((1, n_frames, hparams['hidden_size']), device=device) speedup = torch.tensor(10, dtype=torch.long, device=device) - dummy = diffusion.forward(condition, speedup, ) torch.onnx.export( diffusion, @@ -1061,10 +1060,7 @@ def export(fs2_path, diff_path, ckpt_steps=None, 1: 'n_frames' } }, - opset_version=11, - example_outputs=( - dummy - ) + opset_version=15 ) print('PyTorch ONNX export finished.') diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py new file mode 100644 index 000000000..03b5debcc --- /dev/null +++ b/deployment/modules/diffusion.py @@ -0,0 +1,7 @@ +from modules.diffusion.ddpm import GaussianDiffusion + + +class GaussianDiffusionOnnx(GaussianDiffusion): + # noinspection PyMethodOverriding + def forward(self, condition, speedup): + pass diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py new file mode 100644 index 000000000..8c2c793c2 --- /dev/null +++ b/deployment/modules/fastspeech2.py @@ -0,0 +1,90 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic +from utils.hparams import hparams +from utils.pitch_utils import ( + f0_bin, f0_mel_min, f0_mel_max +) + + +def f0_to_coarse(f0): + f0_mel 
= 1127 * (1 + f0 / 700).log()
+    a = (f0_bin - 2) / (f0_mel_max - f0_mel_min)
+    b = f0_mel_min * a - 1.
+    f0_mel = torch.where(f0_mel > 0, f0_mel * a - b, f0_mel)
+    torch.clip_(f0_mel, min=1., max=float(f0_bin - 1))
+    f0_coarse = torch.round(f0_mel).long()
+    return f0_coarse
+
+
+class LengthRegulator(nn.Module):
+    # noinspection PyMethodMayBeStatic
+    def forward(self, dur):
+        token_idx = torch.arange(1, dur.shape[1] + 1, device=dur.device)[None, :, None]
+        dur_cumsum = torch.cumsum(dur, dim=1)
+        dur_cumsum_prev = F.pad(dur_cumsum, (1, -1), mode='constant', value=0)
+        pos_idx = torch.arange(dur.sum(dim=1).max(), device=dur.device)[None, None]
+        token_mask = (pos_idx >= dur_cumsum_prev[:, :, None]) & (pos_idx < dur_cumsum[:, :, None])
+        mel2ph = (token_idx * token_mask).sum(dim=1)
+        return mel2ph
+
+
+class FastSpeech2AcousticOnnx(FastSpeech2Acoustic):
+    def __init__(self, vocab_size, frozen_gender=None, frozen_spk_embed=None):
+        super().__init__(vocab_size=vocab_size)
+        self.lr = LengthRegulator()
+        if hparams.get('use_key_shift_embed', False):
+            self.shift_min, self.shift_max = hparams['augmentation_args']['random_pitch_shifting']['range']
+        if hparams.get('use_speed_embed', False):
+            self.speed_min, self.speed_max = hparams['augmentation_args']['random_time_stretching']['range']
+        self.frozen_gender = frozen_gender
+        self.frozen_spk_embed = frozen_spk_embed
+
+    # noinspection PyMethodOverriding
+    def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None):
+        durations = durations * (tokens > 0)
+        mel2ph = self.lr(durations)
+        f0 = f0 * (mel2ph > 0)
+        mel2ph = mel2ph[..., None].repeat((1, 1, hparams['hidden_size']))
+        dur_embed = self.dur_embed(durations.float()[:, :, None])
+        encoded = self.encoder(tokens, dur_embed)
+        encoded = F.pad(encoded, (0, 0, 1, 0))
+        condition = torch.gather(encoded, 1, mel2ph)
+
+        if self.f0_embed_type == 'discrete':
+            pitch = f0_to_coarse(f0)
+            pitch_embed = self.pitch_embed(pitch)
+        else:
+            f0_mel = (1 + f0 / 700).log()
+            pitch_embed = self.pitch_embed(f0_mel[:, :, None])
+        condition += pitch_embed
+
+        if hparams.get('use_key_shift_embed', False):
+            if self.frozen_gender is not None:
+                # noinspection PyTypeChecker
+                key_shift = self.frozen_gender * self.shift_max \
+                    if self.frozen_gender >= 0. else self.frozen_gender * abs(self.shift_min)
+                key_shift_embed = self.key_shift_embed(key_shift[:, None, None])
+            else:
+                gender = torch.clip(gender, min=-1., max=1.)
+                gender_mask = (gender < 0.).float()
+                key_shift = gender * ((1.
- gender_mask) * self.shift_max + gender_mask * abs(self.shift_min)) + key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) + condition += key_shift_embed + + if hparams.get('use_speed_embed', False): + if velocity is not None: + velocity = torch.clip(velocity, min=self.speed_min, max=self.speed_max) + speed_embed = self.speed_embed(velocity[:, :, None]) + else: + speed_embed = self.speed_embed(torch.FloatTensor([1.]).to(condition.device)[:, None, None]) + condition += speed_embed + + if hparams['use_spk_id']: + if self.frozen_spk_embed is not None: + condition += self.frozen_spk_embed + else: + condition += spk_embed + return condition diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py new file mode 100644 index 000000000..06925e136 --- /dev/null +++ b/deployment/modules/toplevel.py @@ -0,0 +1,38 @@ +from basics.base_module import CategorizedModule +from deployment.modules.diffusion import GaussianDiffusionOnnx +from deployment.modules.fastspeech2 import FastSpeech2AcousticOnnx +from utils.hparams import hparams + + +class DiffSingerAcousticOnnx(CategorizedModule): + @property + def category(self): + return 'acoustic' + + def __init__(self, vocab_size, out_dims, frozen_gender=None, frozen_spk_embed=None): + super().__init__() + self.fs2 = FastSpeech2AcousticOnnx( + vocab_size=vocab_size, + frozen_gender=frozen_gender, + frozen_spk_embed=frozen_spk_embed + ) + self.diffusion = GaussianDiffusionOnnx( + out_dims=out_dims, + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + loss_type=hparams['diff_loss_type'], + spec_min=hparams['spec_min'], + spec_max=hparams['spec_max'] + ) + + def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None, speedup=None): + condition = self.fs2(tokens, durations, f0, gender=gender, velocity=velocity, spk_embed=spk_embed) + mel = self.diffusion(condition, speedup=speedup) + return mel + + +class DiffSingerVarianceOnnx(CategorizedModule): + @property + def category(self): + return 'variance' diff --git a/deployment/requirements.txt b/deployment/requirements.txt deleted file mode 100644 index f452c8a9a..000000000 --- a/deployment/requirements.txt +++ /dev/null @@ -1,15 +0,0 @@ -setuptools~=57.5.0 -# torch==1.8.1 -protobuf==3.13.0 -onnx==1.12.0 -onnxsim==0.4.10 -pyyaml -librosa -tqdm -einops -pycwt -praat-parselmouth -scikit-image -webrtcvad -pyloudnorm -g2pM \ No newline at end of file diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index ac7001210..6f7fefa74 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -6,7 +6,7 @@ from basics.base_svs_infer import BaseSVSInfer from modules.fastspeech.tts_modules import LengthRegulator -from modules.toplevel.acoustic_model import DiffSingerAcoustic +from modules.toplevel import DiffSingerAcoustic from modules.vocoders.registry import VOCODERS from utils import load_ckpt from utils.hparams import hparams diff --git a/modules/commons/espnet_positional_embedding.py b/modules/commons/espnet_positional_embedding.py index 74decb6ab..c2dff7e20 100644 --- a/modules/commons/espnet_positional_embedding.py +++ b/modules/commons/espnet_positional_embedding.py @@ -1,4 +1,5 @@ import math + import torch @@ -7,42 +8,31 @@ class PositionalEncoding(torch.nn.Module): Args: d_model (int): Embedding dimension. dropout_rate (float): Dropout rate. - max_len (int): Maximum input length. reverse (bool): Whether to reverse the input position. 
""" - def __init__(self, d_model, dropout_rate, max_len=5000, reverse=False): + def __init__(self, d_model, dropout_rate, reverse=False): """Construct an PositionalEncoding object.""" super(PositionalEncoding, self).__init__() self.d_model = d_model self.reverse = reverse self.xscale = math.sqrt(self.d_model) self.dropout = torch.nn.Dropout(p=dropout_rate) - self.pe = None - self.extend_pe(torch.tensor(0.0).expand(1, max_len)) - def extend_pe(self, x): - """Reset the positional encodings.""" - if self.pe is not None: - if self.pe.size(1) >= x.size(1): - if self.pe.dtype != x.dtype or self.pe.device != x.device: - self.pe = self.pe.to(dtype=x.dtype, device=x.device) - return - pe = torch.zeros(x.size(1), self.d_model) + def generate_pe(self, length, device): + """Generate the positional encodings.""" + position = torch.arange(0, length, 1., device=device).unsqueeze(1) if self.reverse: - position = torch.arange( - x.size(1) - 1, -1, -1.0, dtype=torch.float32 - ).unsqueeze(1) - else: - position = torch.arange(0, x.size(1), dtype=torch.float32).unsqueeze(1) + position = position.flip(0) div_term = torch.exp( - torch.arange(0, self.d_model, 2, dtype=torch.float32) + torch.arange(0., self.d_model, 2., device=device) * -(math.log(10000.0) / self.d_model) ) - pe[:, 0::2] = torch.sin(position * div_term) - pe[:, 1::2] = torch.cos(position * div_term) - pe = pe.unsqueeze(0) - self.pe = pe.to(device=x.device, dtype=x.dtype) + pe = torch.stack([ + torch.sin(position * div_term), + torch.cos(position * div_term) + ], dim=2).view(-1, self.d_model).unsqueeze(0) + return pe def forward(self, x: torch.Tensor): """Add positional encoding. @@ -51,8 +41,7 @@ def forward(self, x: torch.Tensor): Returns: torch.Tensor: Encoded tensor (batch, time, `*`). """ - self.extend_pe(x) - x = x * self.xscale + self.pe[:, : x.size(1)] + x = x * self.xscale + self.generate_pe(x.size(1), x.device) return self.dropout(x) @@ -62,12 +51,11 @@ class ScaledPositionalEncoding(PositionalEncoding): Args: d_model (int): Embedding dimension. dropout_rate (float): Dropout rate. - max_len (int): Maximum input length. """ - def __init__(self, d_model, dropout_rate, max_len=5000): + def __init__(self, d_model, dropout_rate): """Initialize class.""" - super().__init__(d_model=d_model, dropout_rate=dropout_rate, max_len=max_len) + super().__init__(d_model=d_model, dropout_rate=dropout_rate) self.alpha = torch.nn.Parameter(torch.tensor(1.0)) def reset_parameters(self): @@ -81,8 +69,7 @@ def forward(self, x): Returns: torch.Tensor: Encoded tensor (batch, time, `*`). """ - self.extend_pe(x) - x = x + self.alpha * self.pe[:, : x.size(1)] + x = x + self.alpha * self.generate_pe(x.size(1), x.device) return self.dropout(x) @@ -92,12 +79,11 @@ class RelPositionalEncoding(PositionalEncoding): Args: d_model (int): Embedding dimension. dropout_rate (float): Dropout rate. - max_len (int): Maximum input length. """ - def __init__(self, d_model, dropout_rate, max_len=5000): + def __init__(self, d_model, dropout_rate): """Initialize class.""" - super().__init__(d_model, dropout_rate, max_len, reverse=True) + super().__init__(d_model, dropout_rate, reverse=True) def forward(self, x): """Compute positional encoding. @@ -107,7 +93,6 @@ def forward(self, x): torch.Tensor: Encoded tensor (batch, time, `*`). torch.Tensor: Positional embedding tensor (1, time, `*`). 
""" - self.extend_pe(x) x = x * self.xscale - pos_emb = self.pe[:, : x.size(1)] - return self.dropout(x) + self.dropout(pos_emb) \ No newline at end of file + pos_emb = self.generate_pe(x.size(1), x.device) + return self.dropout(x) + self.dropout(pos_emb) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index a60079056..96d0e6f3f 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -18,7 +18,7 @@ def forward_embedding(self, txt_tokens, dur_embed): x = self.embed_scale * self.embed_tokens(txt_tokens) x = x + dur_embed if hparams['use_pos_embed']: - if hparams.get('rel_pos', False): + if hparams['rel_pos']: x = self.embed_positions(x) else: positions = self.embed_positions(txt_tokens) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 767afb63f..2aef458bd 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -242,8 +242,7 @@ def __init__(self, embed_tokens, hidden_size, num_layers, ffn_kernel_size=9, dro embed_dim, self.padding_idx, init_size=DEFAULT_MAX_TARGET_POSITIONS, ) - self.layers = nn.ModuleList([]) - self.layers.extend([ + self.layers = nn.ModuleList([ TransformerEncoderLayer(self.hidden_size, self.dropout, kernel_size=kernel_size, num_heads=num_heads) for _ in range(self.num_layers) @@ -259,7 +258,7 @@ def __init__(self, embed_tokens, hidden_size, num_layers, ffn_kernel_size=9, dro self.embed_tokens = embed_tokens self.embed_scale = math.sqrt(hidden_size) self.padding_idx = 0 - if hparams.get('rel_pos') is not None and hparams['rel_pos']: + if hparams['rel_pos']: self.embed_positions = RelPositionalEncoding(hidden_size, dropout_rate=0.0) else: self.embed_positions = SinusoidalPositionalEmbedding( diff --git a/modules/toplevel/acoustic_model.py b/modules/toplevel.py similarity index 89% rename from modules/toplevel/acoustic_model.py rename to modules/toplevel.py index 10587e6a4..899b8c0e4 100644 --- a/modules/toplevel/acoustic_model.py +++ b/modules/toplevel.py @@ -1,4 +1,4 @@ -from basics.base_model import CategorizedModule +from basics.base_module import CategorizedModule from modules.diffusion.ddpm import GaussianDiffusion from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from utils.hparams import hparams @@ -35,3 +35,9 @@ def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, else: loss = self.diffusion(condition, gt_spec=gt_mel, infer=False) return loss + + +class DiffSingerVariance(CategorizedModule): + @property + def category(self): + return 'variance' diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 02c86456d..0ed3b53a3 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -16,7 +16,7 @@ from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder from modules.fastspeech.tts_modules import mel2ph_to_dur -from modules.toplevel.acoustic_model import DiffSingerAcoustic +from modules.toplevel import DiffSingerAcoustic from modules.vocoders.registry import get_vocoder_cls from utils import audio from utils.binarizer_utils import get_pitch_parselmouth @@ -25,7 +25,7 @@ from utils.phoneme_utils import build_phoneme_list from utils.plot import spec_to_figure from utils.text_encoder import TokenTextEncoder -from utils.training_utils import DsBatchSampler, DsEvalBatchSampler, WarmupCosineSchedule +from utils.training_utils import DsBatchSampler, DsEvalBatchSampler matplotlib.use('Agg') diff --git a/utils/__init__.py 
b/utils/__init__.py index 33cef296c..50ea11fd4 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -10,7 +10,7 @@ import torch import torch.nn.functional as F -from basics.base_model import CategorizedModule +from basics.base_module import CategorizedModule def tensors_to_scalars(metrics): @@ -147,7 +147,7 @@ def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', required_categor os.path.basename(ckpt) for ckpt in glob.glob(f'{base_dir}/model_ckpt_steps_*.ckpt') ], - key=lambda x: int(re.findall(f'model_ckpt_steps_(\d+).ckpt', x.replace('\\', '/'))[0]) + key=lambda x: int(re.findall(fr'model_ckpt_steps_(\d+).ckpt', x.replace('\\', '/'))[0]) ) ] assert len(checkpoint_path) > 0, f'| ckpt not found in {ckpt_base_dir}.' From 394060038db9134650df9ecef5ef7fb1bcf8f84e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 5 Apr 2023 04:17:49 +0800 Subject: [PATCH 188/475] Successfully trace and script `GaussianDiffusion` --- deployment/modules/diffusion.py | 84 ++++++++++++++++++++++++++++++++- modules/diffusion/ddpm.py | 7 ++- modules/toplevel.py | 4 +- requirements.txt | 3 +- 4 files changed, 89 insertions(+), 9 deletions(-) diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index 03b5debcc..34616fb96 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -1,7 +1,87 @@ +from typing import List + +import torch + from modules.diffusion.ddpm import GaussianDiffusion +def extract(a, t): + return a[t].reshape((1, 1, 1, 1)) + + +# noinspection PyMethodOverriding class GaussianDiffusionOnnx(GaussianDiffusion): - # noinspection PyMethodOverriding + def p_sample(self, x, t, cond): + x_pred = self.denoise_fn(x, t, cond) + x_recon = ( + extract(self.sqrt_recip_alphas_cumprod, t) * x - + extract(self.sqrt_recipm1_alphas_cumprod, t) * x_pred + ) + x_recon = torch.clamp(x_recon, min=-1., max=1.) 
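+        # What follows is the closed-form DDPM posterior q(x_{t-1} | x_t, x_0)
+        # evaluated at the clamped x_0 estimate: posterior_mean_coef1/2 and the
+        # clipped log-variance are precomputed schedule buffers, and nonzero_mask
+        # switches the added noise off on the final step (t == 0).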
+ + model_mean = ( + extract(self.posterior_mean_coef1, t) * x_recon + + extract(self.posterior_mean_coef2, t) * x + ) + model_log_variance = extract(self.posterior_log_variance_clipped, t) + noise = torch.randn_like(x) + # no noise when t == 0 + nonzero_mask = ((t > 0).float()).reshape(1, 1, 1, 1) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + def plms_get_x_pred(self, x, noise_t, t, t_prev): + a_t = extract(self.alphas_cumprod, t) + a_prev = extract(self.alphas_cumprod, t_prev) + a_t_sq, a_prev_sq = a_t.sqrt(), a_prev.sqrt() + + x_delta = (a_prev - a_t) * ((1 / (a_t_sq * (a_t_sq + a_prev_sq))) * x - 1 / ( + a_t_sq * (((1 - a_prev) * a_t).sqrt() + ((1 - a_t) * a_prev).sqrt())) * noise_t) + x_pred = x + x_delta + + return x_pred + + def p_sample_plms(self, x_prev, t, interval, cond, noise_list: List[torch.Tensor], stage): + noise_pred = self.denoise_fn(x_prev, t, cond) + t_prev = t - interval + t_prev = t_prev * (t_prev > 0) + if stage == 0: + x_pred = self.plms_get_x_pred(x_prev, noise_pred, t, t_prev) + noise_pred_prev = self.denoise_fn(x_pred, t_prev, cond) + noise_pred_prime = (noise_pred + noise_pred_prev) / 2 + elif stage == 1: + noise_pred_prime = (3 * noise_pred - noise_list[-1]) / 2 + elif stage == 2: + noise_pred_prime = (23 * noise_pred - 16 * noise_list[-1] + 5 * noise_list[-2]) / 12 + else: + noise_pred_prime = (55 * noise_pred - 59 * noise_list[-1] + 37 * noise_list[-2] - 9 * noise_list[-3]) / 24 + x_prev = self.plms_get_x_pred(x_prev, noise_pred_prime, t, t_prev) + return noise_pred, x_prev + def forward(self, condition, speedup): - pass + condition = condition.transpose(1, 2) # [1, T, H] => [1, H, T] + device = condition.device + n_frames = condition.shape[2] + + step_range = torch.arange(0, self.k_step, speedup, dtype=torch.long, device=device).flip(0)[:, None] + x = torch.randn((1, 1, self.out_dims, n_frames), device=device) + + if speedup > 1: + plms_noise_stage = torch.tensor(0, dtype=torch.long, device=device) + noise_list: List[torch.Tensor] = [] + for t in step_range: + noise_pred, x = self.p_sample_plms( + x, torch.full((1,), int(t), device=device, dtype=torch.long), + speedup, condition, noise_list, plms_noise_stage + ) + if plms_noise_stage < 3: + noise_list.append(noise_pred) + plms_noise_stage = plms_noise_stage + 1 + else: + noise_list = [noise_list[-2], noise_list[-1], noise_pred] + else: + for t in step_range: + x = self.p_sample(x, torch.full((1,), int(t), device=device, dtype=torch.long), condition) + + x = x.squeeze(1).permute(0, 2, 1) # [B, T, M] + x = self.denorm_spec(x) + return x diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index e9f434964..c0c472d0b 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -140,12 +140,11 @@ def q_posterior(self, x_start, x_t, t): posterior_log_variance_clipped = extract(self.posterior_log_variance_clipped, t, x_t.shape) return posterior_mean, posterior_variance, posterior_log_variance_clipped - def p_mean_variance(self, x, t, cond, clip_denoised: bool): + def p_mean_variance(self, x, t, cond): noise_pred = self.denoise_fn(x, t, cond=cond) x_recon = self.predict_start_from_noise(x, t=t, noise=noise_pred) - if clip_denoised: - x_recon.clamp_(-1., 1.) + x_recon.clamp_(-1., 1.) 
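+        # Clamping unconditionally is safe: mel targets are normalized into
+        # [-1, 1] (see spec_min/spec_max), so any valid x_0 estimate lies in
+        # that range and the removed clip_denoised flag was effectively
+        # always true.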
model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) return model_mean, posterior_variance, posterior_log_variance @@ -153,7 +152,7 @@ def p_mean_variance(self, x, t, cond, clip_denoised: bool): @torch.no_grad() def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False): b, *_, device = *x.shape, x.device - model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, cond=cond, clip_denoised=clip_denoised) + model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, cond=cond) noise = noise_like(x.shape, device, repeat_noise) # no noise when t == 0 nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) diff --git a/modules/toplevel.py b/modules/toplevel.py index 899b8c0e4..94b834776 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -16,8 +16,8 @@ def __init__(self, vocab_size, out_dims): k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], loss_type=hparams['diff_loss_type'], - spec_min=hparams['spec_min'], - spec_max=hparams['spec_max'] + norm_min=hparams['spec_min'], + norm_max=hparams['spec_max'] ) @property diff --git a/requirements.txt b/requirements.txt index 54df8a5e9..b09552d8c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,9 +6,10 @@ h5py librosa<0.10.0 lightning>=2.0.0 matplotlib +MonkeyType==23.3.0 numpy==1.23.5 onnx==1.13.1 -onnxsim==0.4.19 +onnxsim==0.4.17 praat-parselmouth==0.4.3 protobuf==3.20.3 PyYAML From 3b6e9ef47296f1e0ff82c07933caf7d67b8430fb Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 5 Apr 2023 17:08:22 +0800 Subject: [PATCH 189/475] Fix for key mismatch --- modules/toplevel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/toplevel.py b/modules/toplevel.py index 94b834776..899b8c0e4 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -16,8 +16,8 @@ def __init__(self, vocab_size, out_dims): k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], loss_type=hparams['diff_loss_type'], - norm_min=hparams['spec_min'], - norm_max=hparams['spec_max'] + spec_min=hparams['spec_min'], + spec_max=hparams['spec_max'] ) @property From 2c37c14c252f574a2ca9ed57f14dc0d54726e1e6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 5 Apr 2023 17:08:41 +0800 Subject: [PATCH 190/475] Fix type inference errors --- deployment/modules/diffusion.py | 18 ++++++++++++------ deployment/modules/toplevel.py | 6 ++++-- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index 34616fb96..3ff7ca977 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -40,23 +40,29 @@ def plms_get_x_pred(self, x, noise_t, t, t_prev): return x_pred - def p_sample_plms(self, x_prev, t, interval, cond, noise_list: List[torch.Tensor], stage): + def p_sample_plms(self, x_prev, t, interval, cond, noise_list: List[torch.Tensor], stage: int): noise_pred = self.denoise_fn(x_prev, t, cond) t_prev = t - interval t_prev = t_prev * (t_prev > 0) if stage == 0: x_pred = self.plms_get_x_pred(x_prev, noise_pred, t, t_prev) noise_pred_prev = self.denoise_fn(x_pred, t_prev, cond) - noise_pred_prime = (noise_pred + noise_pred_prev) / 2 + noise_pred_prime = (noise_pred + noise_pred_prev) / 2. elif stage == 1: - noise_pred_prime = (3 * noise_pred - noise_list[-1]) / 2 + noise_pred_prime = (3. * noise_pred - noise_list[-1]) / 2. 
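+            # The weights here and in the branches below - (3, -1)/2,
+            # (23, -16, 5)/12 and (55, -59, 37, -9)/24 - are the 2nd-, 3rd- and
+            # 4th-order Adams-Bashforth multistep coefficients used by PLMS/PNDM
+            # samplers: cached noise predictions extrapolate the next one, so
+            # each step needs only a single denoiser call once the buffer is warm.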
elif stage == 2: - noise_pred_prime = (23 * noise_pred - 16 * noise_list[-1] + 5 * noise_list[-2]) / 12 + noise_pred_prime = (23. * noise_pred - 16. * noise_list[-1] + 5. * noise_list[-2]) / 12. else: - noise_pred_prime = (55 * noise_pred - 59 * noise_list[-1] + 37 * noise_list[-2] - 9 * noise_list[-3]) / 24 + noise_pred_prime = (55. * noise_pred - 59. * noise_list[-1] + 37. + * noise_list[-2] - 9. * noise_list[-3]) / 24. x_prev = self.plms_get_x_pred(x_prev, noise_pred_prime, t, t_prev) return noise_pred, x_prev + def denorm_spec(self, x): + d = (self.spec_max - self.spec_min) / 2. + m = (self.spec_max + self.spec_min) / 2. + return x * d + m + def forward(self, condition, speedup): condition = condition.transpose(1, 2) # [1, T, H] => [1, H, T] device = condition.device @@ -66,7 +72,7 @@ def forward(self, condition, speedup): x = torch.randn((1, 1, self.out_dims, n_frames), device=device) if speedup > 1: - plms_noise_stage = torch.tensor(0, dtype=torch.long, device=device) + plms_noise_stage: int = 0 noise_list: List[torch.Tensor] = [] for t in step_range: noise_pred, x = self.p_sample_plms( diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 06925e136..ce62528de 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -1,3 +1,5 @@ +from torch import Tensor + from basics.base_module import CategorizedModule from deployment.modules.diffusion import GaussianDiffusionOnnx from deployment.modules.fastspeech2 import FastSpeech2AcousticOnnx @@ -26,8 +28,8 @@ def __init__(self, vocab_size, out_dims, frozen_gender=None, frozen_spk_embed=No spec_max=hparams['spec_max'] ) - def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None, speedup=None): - condition = self.fs2(tokens, durations, f0, gender=gender, velocity=velocity, spk_embed=spk_embed) + def forward(self, tokens: Tensor, durations: Tensor, f0: Tensor, speedup: Tensor) -> Tensor: + condition = self.fs2(tokens, durations, f0) mel = self.diffusion(condition, speedup=speedup) return mel From 90d831038437c6b9554e3e2b2389f07f3a8a3598 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 5 Apr 2023 18:10:28 +0800 Subject: [PATCH 191/475] Fix hparams printing colors on some terminals --- utils/hparams.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/hparams.py b/utils/hparams.py index 796ba8e6b..36cce5168 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -142,7 +142,7 @@ def print_out_hparams(): print('| Hparams chains: ', config_chains) print('| Hparams: ') for i, (k, v) in enumerate(sorted(hparams_.items())): - print(f"\033[;33;m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") + print(f"\033[0;33m{k}\033[0m: {v}, ", end="\n" if i % 5 == 4 else "") print("") global_print_hparams = False print_out_hparams() From 9a195b8e9274ab2bc23e86f915d24437bfd48452 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 5 Apr 2023 22:14:22 +0800 Subject: [PATCH 192/475] Revert and fix `PositionalEncoding` --- .../commons/espnet_positional_embedding.py | 45 ++++++++++++------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/modules/commons/espnet_positional_embedding.py b/modules/commons/espnet_positional_embedding.py index c2dff7e20..26ac372d8 100644 --- a/modules/commons/espnet_positional_embedding.py +++ b/modules/commons/espnet_positional_embedding.py @@ -1,5 +1,4 @@ import math - import torch @@ -8,31 +7,42 @@ class PositionalEncoding(torch.nn.Module): Args: d_model (int): Embedding dimension. dropout_rate (float): Dropout rate. 
+ max_len (int): Maximum input length. reverse (bool): Whether to reverse the input position. """ - def __init__(self, d_model, dropout_rate, reverse=False): + def __init__(self, d_model, dropout_rate, max_len=5000, reverse=False): """Construct an PositionalEncoding object.""" super(PositionalEncoding, self).__init__() self.d_model = d_model self.reverse = reverse self.xscale = math.sqrt(self.d_model) self.dropout = torch.nn.Dropout(p=dropout_rate) + self.pe = None + self.extend_pe(torch.tensor(0.0).expand(1, max_len)) - def generate_pe(self, length, device): - """Generate the positional encodings.""" - position = torch.arange(0, length, 1., device=device).unsqueeze(1) + def extend_pe(self, x): + """Reset the positional encodings.""" + if self.pe is not None: + if self.pe.size(1) >= x.size(1): + if self.pe.dtype != x.dtype or self.pe.device != x.device: + self.pe = self.pe.to(dtype=x.dtype, device=x.device) + return if self.reverse: - position = position.flip(0) + position = torch.arange( + x.size(1) - 1, -1, -1.0, dtype=torch.float32 + ).unsqueeze(1) + else: + position = torch.arange(0, x.size(1), dtype=torch.float32).unsqueeze(1) div_term = torch.exp( - torch.arange(0., self.d_model, 2., device=device) + torch.arange(0, self.d_model, 2, dtype=torch.float32) * -(math.log(10000.0) / self.d_model) ) pe = torch.stack([ torch.sin(position * div_term), torch.cos(position * div_term) ], dim=2).view(-1, self.d_model).unsqueeze(0) - return pe + self.pe = pe.to(device=x.device, dtype=x.dtype) def forward(self, x: torch.Tensor): """Add positional encoding. @@ -41,7 +51,8 @@ def forward(self, x: torch.Tensor): Returns: torch.Tensor: Encoded tensor (batch, time, `*`). """ - x = x * self.xscale + self.generate_pe(x.size(1), x.device) + self.extend_pe(x) + x = x * self.xscale + self.pe[:, : x.size(1)] return self.dropout(x) @@ -51,11 +62,12 @@ class ScaledPositionalEncoding(PositionalEncoding): Args: d_model (int): Embedding dimension. dropout_rate (float): Dropout rate. + max_len (int): Maximum input length. """ - def __init__(self, d_model, dropout_rate): + def __init__(self, d_model, dropout_rate, max_len=5000): """Initialize class.""" - super().__init__(d_model=d_model, dropout_rate=dropout_rate) + super().__init__(d_model=d_model, dropout_rate=dropout_rate, max_len=max_len) self.alpha = torch.nn.Parameter(torch.tensor(1.0)) def reset_parameters(self): @@ -69,7 +81,8 @@ def forward(self, x): Returns: torch.Tensor: Encoded tensor (batch, time, `*`). """ - x = x + self.alpha * self.generate_pe(x.size(1), x.device) + self.extend_pe(x) + x = x + self.alpha * self.pe[:, : x.size(1)] return self.dropout(x) @@ -79,11 +92,12 @@ class RelPositionalEncoding(PositionalEncoding): Args: d_model (int): Embedding dimension. dropout_rate (float): Dropout rate. + max_len (int): Maximum input length. """ - def __init__(self, d_model, dropout_rate): + def __init__(self, d_model, dropout_rate, max_len=5000): """Initialize class.""" - super().__init__(d_model, dropout_rate, reverse=True) + super().__init__(d_model, dropout_rate, max_len, reverse=True) def forward(self, x): """Compute positional encoding. @@ -93,6 +107,7 @@ def forward(self, x): torch.Tensor: Encoded tensor (batch, time, `*`). torch.Tensor: Positional embedding tensor (1, time, `*`). 
""" + self.extend_pe(x) x = x * self.xscale - pos_emb = self.generate_pe(x.size(1), x.device) + pos_emb = self.pe[:, : x.size(1)] return self.dropout(x) + self.dropout(pos_emb) From 3e584d6b8eef7a09cbe49571b7e7be8ee80e0f45 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 6 Apr 2023 01:47:16 +0800 Subject: [PATCH 193/475] Avoid using `Sequence*` nodes --- deployment/modules/diffusion.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index 3ff7ca977..1c1d2fc01 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -1,5 +1,3 @@ -from typing import List - import torch from modules.diffusion.ddpm import GaussianDiffusion @@ -40,7 +38,7 @@ def plms_get_x_pred(self, x, noise_t, t, t_prev): return x_pred - def p_sample_plms(self, x_prev, t, interval, cond, noise_list: List[torch.Tensor], stage: int): + def p_sample_plms(self, x_prev, t, interval, cond, noise_list, stage: int): noise_pred = self.denoise_fn(x_prev, t, cond) t_prev = t - interval t_prev = t_prev * (t_prev > 0) @@ -73,17 +71,21 @@ def forward(self, condition, speedup): if speedup > 1: plms_noise_stage: int = 0 - noise_list: List[torch.Tensor] = [] + noise_list = torch.zeros((0, 1, 1, self.out_dims, n_frames), device=device) + # noise_list : List[torch.Tensor] = [] for t in step_range: noise_pred, x = self.p_sample_plms( x, torch.full((1,), int(t), device=device, dtype=torch.long), speedup, condition, noise_list, plms_noise_stage ) + noise_pred = noise_pred.unsqueeze(0) if plms_noise_stage < 3: - noise_list.append(noise_pred) + noise_list = torch.cat((noise_list, noise_pred), dim=0) + # noise_list.append(noise_pred) plms_noise_stage = plms_noise_stage + 1 else: - noise_list = [noise_list[-2], noise_list[-1], noise_pred] + noise_list = torch.cat((noise_list[-2:], noise_pred), dim=0) + # [noise_list[-2], noise_list[-1], noise_pred] else: for t in step_range: x = self.p_sample(x, torch.full((1,), int(t), device=device, dtype=torch.long), condition) From 5ea5c51a7c2ec5eee95ad52158dc5304e23cf115 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 6 Apr 2023 01:47:47 +0800 Subject: [PATCH 194/475] Remove useless checks and assignments --- deployment/modules/toplevel.py | 1 - modules/diffusion/ddpm.py | 5 +---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index ce62528de..6ff2ba300 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -23,7 +23,6 @@ def __init__(self, vocab_size, out_dims, frozen_gender=None, frozen_spk_embed=No timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], - loss_type=hparams['diff_loss_type'], spec_min=hparams['spec_min'], spec_max=hparams['spec_max'] ) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index c0c472d0b..78402345c 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -76,10 +76,7 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, if exists(betas): betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas else: - if 'schedule_type' in hparams.keys(): - betas = beta_schedule[hparams['schedule_type']](timesteps) - else: - betas = cosine_beta_schedule(timesteps) + betas = beta_schedule[hparams['schedule_type']](timesteps) alphas = 1. 
- betas
         alphas_cumprod = np.cumprod(alphas, axis=0)

From 1a3d3168031ea8d7dfa08352a53358f8d2c49ad4 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 6 Apr 2023 12:38:19 +0800
Subject: [PATCH 195/475] Successfully export the whole model with `Sequence*` ops

---
 deployment/modules/diffusion.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index 1c1d2fc01..d6cea312d 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -1,4 +1,7 @@
+from typing import List
+
 import torch
+from torch import Tensor
 
 from modules.diffusion.ddpm import GaussianDiffusion
 
@@ -38,7 +41,7 @@ def plms_get_x_pred(self, x, noise_t, t, t_prev):
 
         return x_pred
 
-    def p_sample_plms(self, x_prev, t, interval, cond, noise_list, stage: int):
+    def p_sample_plms(self, x_prev, t, interval, cond, noise_list: List[Tensor], stage: int):
         noise_pred = self.denoise_fn(x_prev, t, cond)
         t_prev = t - interval
         t_prev = t_prev * (t_prev > 0)
@@ -71,21 +74,19 @@ def forward(self, condition, speedup):
 
         if speedup > 1:
             plms_noise_stage: int = 0
-            noise_list = torch.zeros((0, 1, 1, self.out_dims, n_frames), device=device)
-            # noise_list : List[torch.Tensor] = []
+            noise_list: List[Tensor] = []
             for t in step_range:
                 noise_pred, x = self.p_sample_plms(
                     x, torch.full((1,), int(t), device=device, dtype=torch.long),
                     speedup, condition, noise_list, plms_noise_stage
                 )
-                noise_pred = noise_pred.unsqueeze(0)
-                if plms_noise_stage < 3:
-                    noise_list = torch.cat((noise_list, noise_pred), dim=0)
-                    # noise_list.append(noise_pred)
-                    plms_noise_stage = plms_noise_stage + 1
+                if plms_noise_stage == 0:
+                    noise_list = [noise_pred]
                 else:
-                    noise_list = torch.cat((noise_list[-2:], noise_pred), dim=0)
-                    # [noise_list[-2], noise_list[-1], noise_pred]
+                    if plms_noise_stage >= 3:
+                        noise_list.pop(0)
+                    noise_list.append(noise_pred)
+                plms_noise_stage = plms_noise_stage + 1
         else:
             for t in step_range:
                 x = self.p_sample(x, torch.full((1,), int(t), device=device, dtype=torch.long), condition)

From c6ed927699d80c2de8e5dc4f1ab783d0636dabd7 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 6 Apr 2023 23:25:59 +0800
Subject: [PATCH 196/475] Successfully export and merge `model.fs2` and `model.diffusion`

---
 deployment/modules/diffusion.py | 10 ++++++----
 deployment/modules/toplevel.py  | 28 ++++++++++++++++++++++++----
 2 files changed, 30 insertions(+), 8 deletions(-)

diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index d6cea312d..f72b3a09e 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -77,19 +77,21 @@ def forward(self, condition, speedup):
             noise_list: List[Tensor] = []
             for t in step_range:
                 noise_pred, x = self.p_sample_plms(
-                    x, torch.full((1,), int(t), device=device, dtype=torch.long),
-                    speedup, condition, noise_list, plms_noise_stage
+                    x, t, interval=speedup, cond=condition,
+                    noise_list=noise_list, stage=plms_noise_stage
                 )
                 if plms_noise_stage == 0:
                     noise_list = [noise_pred]
+                    plms_noise_stage = plms_noise_stage + 1
                 else:
                     if plms_noise_stage >= 3:
                         noise_list.pop(0)
+                    else:
+                        plms_noise_stage = plms_noise_stage + 1
                     noise_list.append(noise_pred)
-                plms_noise_stage = plms_noise_stage + 1
         else:
             for t in step_range:
-                x = self.p_sample(x, torch.full((1,), int(t), device=device, dtype=torch.long), condition)
+                x = self.p_sample(x, t, cond=condition)
 
         x = x.squeeze(1).permute(0, 2, 1)  # [B, T, M]
         x = self.denorm_spec(x)
diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py
index 6ff2ba300..023b62bf2 100644
--- a/deployment/modules/toplevel.py
+++ b/deployment/modules/toplevel.py
@@ -1,4 +1,6 @@
-from torch import Tensor
+import copy
+
+from torch import Tensor, nn
 
 from basics.base_module import CategorizedModule
 from deployment.modules.diffusion import GaussianDiffusionOnnx
@@ -27,11 +29,29 @@ def __init__(self, vocab_size, out_dims, frozen_gender=None, frozen_spk_embed=No
             spec_max=hparams['spec_max']
         )
 
-    def forward(self, tokens: Tensor, durations: Tensor, f0: Tensor, speedup: Tensor) -> Tensor:
-        condition = self.fs2(tokens, durations, f0)
-        mel = self.diffusion(condition, speedup=speedup)
+    def forward(self, tokens: Tensor, durations: Tensor, f0: Tensor, speedup: int) -> Tensor:
+        condition = self.forward_fs2(tokens, durations, f0)
+        mel = self.forward_diffusion(condition, speedup=speedup)
         return mel
 
+    def forward_fs2(self, tokens: Tensor, durations: Tensor, f0: Tensor) -> Tensor:
+        return self.fs2(tokens, durations, f0)
+
+    def forward_diffusion(self, condition: Tensor, speedup: int) -> Tensor:
+        return self.diffusion(condition, speedup)
+
+    def view_as_fs2(self) -> nn.Module:
+        model = copy.deepcopy(self)
+        model.diffusion = None
+        model.forward = model.forward_fs2
+        return model
+
+    def view_as_diffusion(self) -> nn.Module:
+        model = copy.deepcopy(self)
+        model.fs2 = None
+        model.forward = model.forward_diffusion
+        return model
+
 
 class DiffSingerVarianceOnnx(CategorizedModule):
 
     @property

From 664a98e038e5b271084c26808aeca028439f4fd8 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 7 Apr 2023 00:55:32 +0800
Subject: [PATCH 197/475] Fix gender NoneType bug

---
 scripts/infer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/infer.py b/scripts/infer.py
index 168a728e3..09b12c817 100644
--- a/scripts/infer.py
+++ b/scripts/infer.py
@@ -25,7 +25,7 @@
 parser.add_argument('--title', type=str, required=False, help='Title of output file')
 parser.add_argument('--num', type=int, required=False, default=1, help='Number of runs')
 parser.add_argument('--key', type=int, required=False, default=0, help='Key transition of pitch')
-parser.add_argument('--gender', type=float, required=False, help='Formant shifting (gender control)')
+parser.add_argument('--gender', type=float, required=False, default=0, help='Formant shifting (gender control)')
 parser.add_argument('--seed', type=int, required=False, help='Random seed of the inference')
 parser.add_argument('--speedup', type=int, required=False, default=0, help='PNDM speed-up ratio')
 parser.add_argument('--mel', action='store_true', required=False, default=False,

From b84ddc5b6f6fbe2297f0c758820756edd42cbbc6 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 7 Apr 2023 02:02:32 +0800
Subject: [PATCH 198/475] Add helper to simplify and optimize ONNX graphs

---
 utils/onnx_helper.py | 284 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 284 insertions(+)
 create mode 100644 utils/onnx_helper.py

diff --git a/utils/onnx_helper.py b/utils/onnx_helper.py
new file mode 100644
index 000000000..952b929d5
--- /dev/null
+++ b/utils/onnx_helper.py
@@ -0,0 +1,284 @@
+import re
+from typing import Dict, Tuple, Union
+
+import onnx
+from google.protobuf.internal.containers import RepeatedCompositeFieldContainer
+from onnx import ModelProto, ValueInfoProto, GraphProto
+
+
+__verbose__: bool = True
+"""
+Whether log information of successful operations
+"""
+
+
+def _verbose(self, *args, sep=' ', end='\n', file=None):
+    if __verbose__:
+        print(self, *args, sep=sep,
end=end, file=file) + + +def model_override_io_shapes( + model: ModelProto, + input_shapes: Dict[str, Tuple[Union[str, int]]] = None, + output_shapes: Dict[str, Tuple[Union[str, int]]] = None, +): + """ + Override the shapes of inputs/outputs of the model graph (in-place operation). + @param model: model to perform the operation on + @param input_shapes: a dict with keys as input/output names and values as shape tuples + @param output_shapes: the same as input_shapes + """ + def _override_shapes( + shape_list_old: RepeatedCompositeFieldContainer[ValueInfoProto], + shape_dict_new: Dict[str, Tuple[Union[str, int]]]): + for value_info in shape_list_old: + if value_info.name in shape_dict_new: + name = value_info.name + dims = value_info.type.tensor_type.shape.dim + assert len(shape_dict_new[name]) == len(dims), \ + f'Number of given and existing dimensions mismatch: {name}' + for i, dim in enumerate(shape_dict_new[name]): + if isinstance(dim, int): + dims[i].dim_param = '' + dims[i].dim_value = dim + else: + dims[i].dim_value = 0 + dims[i].dim_param = dim + _verbose(f'| override shape of \'{name}\' with {list(shape_dict_new[name])}') + + if input_shapes is not None: + _override_shapes(model.graph.input, input_shapes) + if output_shapes is not None: + _override_shapes(model.graph.output, output_shapes) + + +def model_add_prefixes( + model: ModelProto, + initializer_prefix=None, + value_info_prefix=None, + node_prefix=None, + dim_prefix=None, + ignored_pattern=None, +): + """ + Adds prefixes to names inside the given ONNX model graph, including sub-graphs (in-place operation). + This method is a complete version of the official onnx.compose.add_prefix API, which does not consider sub-graphs. + """ + initializers = set() + value_infos = set() + + def _record_initializers_and_value_infos_recursive(subgraph): + # Record names in current graph + for initializer in subgraph.initializer: + if re.match(ignored_pattern, initializer.name): + continue + initializers.add(initializer.name) + for value_info in subgraph.value_info: + if re.match(ignored_pattern, value_info.name): + continue + value_infos.add(value_info.name) + for node in subgraph.node: + # For 'If' and 'Loop' nodes, do recording recursively + if node.op_type == 'If': + for attr in node.attribute: + branch = onnx.helper.get_attribute_value(attr) + _record_initializers_and_value_infos_recursive(branch) + elif node.op_type == 'Loop': + for attr in node.attribute: + if attr.name == 'body': + body = onnx.helper.get_attribute_value(attr) + _record_initializers_and_value_infos_recursive(body) + + def _add_prefixes_recursive(subgraph): + # Add prefixes in current graph + if initializer_prefix is not None: + for initializer in subgraph.initializer: + if re.match(ignored_pattern, initializer.name): + continue + new_name = initializer_prefix + initializer.name + _verbose('| add prefix:', initializer.name, '->', new_name) + initializer.name = new_name + + for value_info in subgraph.value_info: + if dim_prefix is not None: + for dim in value_info.type.tensor_type.shape.dim: + if dim.dim_param is None or dim.dim_param == '' or re.match(ignored_pattern, dim.dim_param): + continue + new_dim_param = dim_prefix + dim.dim_param + _verbose('| add prefix:', dim.dim_param, '->', new_dim_param) + dim.dim_param = new_dim_param + + if value_info_prefix is None or re.match(ignored_pattern, value_info.name): + continue + new_name = value_info_prefix + value_info.name + _verbose('| add prefix:', value_info.name, '->', new_name) + value_info.name = new_name + + if 
node_prefix is not None: + for node in subgraph.node: + if re.match(ignored_pattern, node.name): + continue + new_name = node_prefix + node.name + _verbose('| add prefix:', node.name, '->', new_name) + node.name = new_name + + for node in subgraph.node: + # For 'If' and 'Loop' nodes, add prefixes recursively + if node.op_type == 'If': + for attr in node.attribute: + branch = onnx.helper.get_attribute_value(attr) + _add_prefixes_recursive(branch) + elif node.op_type == 'Loop': + for attr in node.attribute: + if attr.name == 'body': + body = onnx.helper.get_attribute_value(attr) + _add_prefixes_recursive(body) + + # For each node, rename its inputs and outputs + for io_list in [node.input, node.output]: + for i, io_value in enumerate(io_list): + if io_value in initializers and initializer_prefix is not None: + new_value = initializer_prefix + io_value + _verbose('| add prefix:', io_value, '->', new_value) + io_list.pop(i) + io_list.insert(i, new_value) + if io_value in value_infos and value_info_prefix is not None: + new_value = value_info_prefix + io_value + _verbose('| add prefix:', io_value, '->', new_value) + io_list.pop(i) + io_list.insert(i, new_value) + + _record_initializers_and_value_infos_recursive(model.graph) + _add_prefixes_recursive(model.graph) + + +def graph_fold_back_to_squeeze(graph: GraphProto): + """ + Fold the substructures of 'Shape', 'Gather', 'Equal', 'If' to one single 'Squeeze' node. + This can unify the different behaviors between aten::squeeze and onnx:Squeeze. + """ + def _graph_fold_back_to_squeeze_recursive(subgraph: GraphProto): + # Do folding in sub-graphs recursively. + for node in subgraph.node: + if node.op_type == 'If': + for attr in node.attribute: + branch = onnx.helper.get_attribute_value(attr) + _graph_fold_back_to_squeeze_recursive(branch) + elif node.op_type == 'Loop': + for attr in node.attribute: + if attr.name == 'body': + body = onnx.helper.get_attribute_value(attr) + _graph_fold_back_to_squeeze_recursive(body) + + # Do folding in current graph. + i_shape = 0 + while i_shape < len(subgraph.node): + if subgraph.node[i_shape].op_type == 'Shape': + shape_node = subgraph.node[i_shape] + shape_out = shape_node.output[0] + i_gather = i_shape + 1 + while i_gather < len(subgraph.node): + if subgraph.node[i_gather].op_type == 'Gather' and subgraph.node[i_gather].input[0] == shape_out: + gather_node = subgraph.node[i_gather] + gather_out = gather_node.output[0] + i_equal = i_gather + 1 + while i_equal < len(subgraph.node): + if subgraph.node[i_equal].op_type == 'Equal' and ( + subgraph.node[i_equal].input[0] == gather_out + or subgraph.node[i_equal].input[1] == gather_out): + equal_node = subgraph.node[i_equal] + equal_out = equal_node.output[0] + i_if = i_equal + 1 + while i_if < len(subgraph.node): + if subgraph.node[i_if].op_type == 'If' \ + and subgraph.node[i_if].input[0] == equal_out: + # Found the substructure to be folded. + if_node = subgraph.node[i_if] + # Create 'Squeeze' node. + squeeze_node = onnx.helper.make_node( + op_type='Squeeze', + inputs=[ + *list(shape_node.input), + # For ONNX opset >= 13, axes should be an input instead of an attribute. + gather_node.input[1] # Use 'indices' input of 'Gather' + ], + outputs=if_node.output, + name=shape_node.name.replace('Shape', 'Squeeze') + ) + # Replace 'Shape', 'Gather', 'Equal', 'If' with 'Squeeze'. 
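The substructure matched here comes from how TorchScript exports a dynamic `aten::squeeze(dim)`: the squeeze only removes the axis when its runtime size is 1, so the exporter emits a `Shape` -> `Gather` -> `Equal` -> `If` decision instead of a plain `Squeeze`. A minimal sketch that tends to reproduce the pattern (file name, shapes and opset below are illustrative):

    import torch

    class SqueezeDim1(torch.nn.Module):
        def forward(self, x):
            return x.squeeze(1)  # removes dim 1 only when its size is 1

    # With a dynamic axis, the exported graph may contain the
    # Shape/Gather/Equal/If pattern that this helper folds back to Squeeze.
    torch.onnx.export(
        SqueezeDim1(), torch.randn(2, 1, 8), 'squeeze_repro.onnx',
        input_names=['x'], dynamic_axes={'x': {0: 'batch'}}, opset_version=15
    )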
+ subgraph.node.insert(i_shape, squeeze_node) + subgraph.node.remove(shape_node) + subgraph.node.remove(gather_node) + subgraph.node.remove(equal_node) + subgraph.node.remove(if_node) + _verbose( + f'| fold nodes: [\'{shape_node.name}\', \'{gather_node.name}\', ' + f'\'{equal_node.name}\', \'{if_node.name}\'] -> \'{squeeze_node.name}\'') + break + i_if += 1 + else: + break + i_equal += 1 + else: + break + i_gather += 1 + else: + break + i_shape += 1 + + _graph_fold_back_to_squeeze_recursive(graph) + + +def graph_remove_unused_values(graph: GraphProto): + used_values = set() + + def _record_usage_recursive(subgraph: GraphProto): + for node in subgraph.node: + # For 'If' and 'Loop' nodes, do recording recursively + if node.op_type == 'If': + for attr in node.attribute: + branch = onnx.helper.get_attribute_value(attr) + _record_usage_recursive(branch) + elif node.op_type == 'Loop': + for attr in node.attribute: + if attr.name == 'body': + body = onnx.helper.get_attribute_value(attr) + _record_usage_recursive(body) + # For each node, record its inputs and outputs + for io_list in [node.input, node.output]: + for io_value in io_list: + used_values.add(io_value) + + def _clean_unused_recursively(subgraph): + # Do cleaning in sub-graphs recursively. + for node in subgraph.node: + if node.op_type == 'If': + for attr in node.attribute: + branch = onnx.helper.get_attribute_value(attr) + _clean_unused_recursively(branch) + elif node.op_type == 'Loop': + for attr in node.attribute: + if attr.name == 'body': + body = onnx.helper.get_attribute_value(attr) + _clean_unused_recursively(body) + + # Do cleaning in current graph. + i = 0 + while i < len(subgraph.initializer): + name = subgraph.initializer[i].name + if name not in used_values: + subgraph.initializer.pop(i) + _verbose(f'| remove unused initializer: {name}') + else: + i += 1 + i = 0 + while i < len(subgraph.value_info): + name = subgraph.value_info[i].name + if name not in used_values: + subgraph.value_info.pop(i) + _verbose(f'| remove unused value info: {name}') + else: + i += 1 + + _record_usage_recursive(graph) + _clean_unused_recursively(graph) From e8ad9e44c2d21c5548611c0303a81a9d88a1e0fa Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 7 Apr 2023 12:58:51 +0800 Subject: [PATCH 199/475] Extract conditioner projections to avoid repeated calculation --- utils/onnx_helper.py | 83 ++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 77 insertions(+), 6 deletions(-) diff --git a/utils/onnx_helper.py b/utils/onnx_helper.py index 952b929d5..cd7b6090a 100644 --- a/utils/onnx_helper.py +++ b/utils/onnx_helper.py @@ -3,8 +3,7 @@ import onnx from google.protobuf.internal.containers import RepeatedCompositeFieldContainer -from onnx import ModelProto, ValueInfoProto, GraphProto - +from onnx import GraphProto, ModelProto, NodeProto, ValueInfoProto __verbose__: bool = True """ @@ -24,9 +23,9 @@ def model_override_io_shapes( ): """ Override the shapes of inputs/outputs of the model graph (in-place operation). 
- @param model: model to perform the operation on - @param input_shapes: a dict with keys as input/output names and values as shape tuples - @param output_shapes: the same as input_shapes + :param model: model to perform the operation on + :param input_shapes: a dict with keys as input/output names and values as shape tuples + :param output_shapes: the same as input_shapes """ def _override_shapes( shape_list_old: RepeatedCompositeFieldContainer[ValueInfoProto], @@ -44,7 +43,7 @@ def _override_shapes( else: dims[i].dim_value = 0 dims[i].dim_param = dim - _verbose(f'| override shape of \'{name}\' with {list(shape_dict_new[name])}') + _verbose(f'| override shape of \'{name}\' with {shape_dict_new[name]}') if input_shapes is not None: _override_shapes(model.graph.input, input_shapes) @@ -229,6 +228,78 @@ def _graph_fold_back_to_squeeze_recursive(subgraph: GraphProto): _graph_fold_back_to_squeeze_recursive(graph) +def graph_extract_conditioner_projections( + graph: GraphProto, + op_type: str, + weight_pattern: str, + alias_prefix: str +): + """ + Extract conditioner projection nodes out of the denoiser wrapped by diffusion. + These nodes only need to be calculated once before entering the main denoising loop, + and can be reused inside the loop. This optimizes the performance of ONNX inference. + + :param graph: graph to perform the operation on + :param op_type: the ONNX operator type of the conditioner projections (usually 'Conv' or 'Gemm') + :param weight_pattern: a regular expression as pattern of the conditioner projection weight keys + :param alias_prefix: add prefixes to the outputs of extracted projection nodes + """ + node_dict: Dict[str, Tuple[str, NodeProto]] = {} # key: pattern match, value: (alias, node) + + def _extract_conv_nodes_recursive(subgraph: GraphProto): + to_be_removed = [] + for sub_node in subgraph.node: + if sub_node.op_type == 'If': + for attr in sub_node.attribute: + branch = onnx.helper.get_attribute_value(attr) + _extract_conv_nodes_recursive(branch) + elif sub_node.op_type == 'Loop': + for attr in sub_node.attribute: + if attr.name == 'body': + body = onnx.helper.get_attribute_value(attr) + _extract_conv_nodes_recursive(body) + elif sub_node.op_type == op_type and re.match(weight_pattern, sub_node.input[1]): + # Found node to extract + cached = node_dict.get(sub_node.input[1]) + if cached is None: + out_alias = f'{alias_prefix}.{len(node_dict)}' + node_dict[sub_node.input[1]] = (out_alias, sub_node) + else: + out_alias = cached[0] + out = sub_node.output[0] + # Search for nodes downstream the extracted node and match them to the renamed output. + for dep_node in subgraph.node: + for dep_idx, dep_input in enumerate(dep_node.input): + if dep_input == out: + dep_node.input.remove(out) + dep_node.input.insert(dep_idx, out_alias) + # Add the node to the remove list. + to_be_removed.append(sub_node) + [subgraph.node.remove(_n) for _n in to_be_removed] + + for i, n in enumerate(graph.node): + if n.op_type == 'If': + for a in n.attribute: + b = onnx.helper.get_attribute_value(a) + _extract_conv_nodes_recursive(b) + # Insert the extracted nodes before the first 'If' node which carries the main denoising loop. + for key in reversed(node_dict): + alias, node = node_dict[key] + # Rename output of the node. + out_name = node.output[0] + node.output.remove(node.output[0]) + node.output.insert(0, alias) + # Insert node into the main graph. + graph.node.insert(i, node) + # Rename value info of the output. 
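For reference, the intended call shape of this helper once it is fully defined; the model path below is illustrative, while the weight pattern and the `cache` alias are the ones used elsewhere in this series for the acoustic denoiser:

    import onnx

    from utils import onnx_helper

    model = onnx.load('diffusion.onnx')  # illustrative path
    onnx_helper.graph_extract_conditioner_projections(
        graph=model.graph, op_type='Conv',
        weight_pattern=r'model\.denoise_fn\.residual_layers\.\d+\.conditioner_projection\.weight',
        alias_prefix='cache'
    )
    onnx_helper.graph_remove_unused_values(model.graph)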
+ for v in graph.value_info: + if v.name == out_name: + v.name = alias + break + _verbose(f'| extract conditioner projection: \'{node.name}\'') + break + + def graph_remove_unused_values(graph: GraphProto): used_values = set() From c3c2e05eeffbb4f3cf2c891287864ba442c09225 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 7 Apr 2023 22:47:09 +0800 Subject: [PATCH 200/475] Finished `DiffSingerAcousticExporter` --- basics/base_onnx_export.py | 39 +- deployment/export/export_acoustic.py | 1211 ----------------- deployment/exporters/acoustic_exporter.py | 298 ++++ .../export_nsf_hifigan.py | 0 deployment/modules/fastspeech2.py | 13 +- deployment/modules/toplevel.py | 26 +- modules/diffusion/ddpm.py | 2 +- 7 files changed, 351 insertions(+), 1238 deletions(-) delete mode 100644 deployment/export/export_acoustic.py create mode 100644 deployment/exporters/acoustic_exporter.py rename deployment/{export => exporters}/export_nsf_hifigan.py (100%) diff --git a/basics/base_onnx_export.py b/basics/base_onnx_export.py index 3128a6a0e..56299ee4f 100644 --- a/basics/base_onnx_export.py +++ b/basics/base_onnx_export.py @@ -1,6 +1,7 @@ import json import os -import pathlib +from typing import Union +from pathlib import Path import torch import torch.nn as nn @@ -8,11 +9,16 @@ from utils.hparams import hparams -class BaseOnnxExport: - def __init__(self, device=None, cache_dir=None, **kwargs): +class BaseExporter: + def __init__( + self, + device: Union[str, torch.device] = None, + cache_dir: Path = None, + **kwargs + ): self.device = device if device is not None else torch.device('cuda' if torch.cuda.is_available() else 'cpu') - self.cache_dir: pathlib.Path = cache_dir if cache_dir is not None \ - else pathlib.Path(__file__).parent.parent / 'deployment' / 'cache' + self.cache_dir: Path = cache_dir.resolve() if cache_dir is not None \ + else Path(__file__).parent.parent / 'deployment' / 'cache' if not self.cache_dir.exists(): self.cache_dir.mkdir(parents=True, exist_ok=True) @@ -28,7 +34,28 @@ def build_spk_map(self) -> dict: return {} def build_model(self) -> nn.Module: + """ + Creates an instance of nn.Module and load its state dict on the target device. + """ raise NotImplementedError() - def export_model(self, path: pathlib.Path): + def export_model(self, path: Path): + """ + Exports the model to ONNX format. + :param path: the target model path + """ + raise NotImplementedError() + + def export_attachments(self, path: Path): + """ + Exports related files and configs (e.g. the dictionary) to the target directory. + :param path: the target directory + """ + raise NotImplementedError() + + def export(self, path: Path): + """ + Export all the artifacts to the target directory. 
+ :param path: the target directory + """ raise NotImplementedError() diff --git a/deployment/export/export_acoustic.py b/deployment/export/export_acoustic.py deleted file mode 100644 index 89a08458e..000000000 --- a/deployment/export/export_acoustic.py +++ /dev/null @@ -1,1211 +0,0 @@ -import json -import os -import shutil -import sys -import warnings - -root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -os.environ['PYTHONPATH'] = f'"{root_dir}"' -sys.path.insert(0, root_dir) - -import argparse -import math -import re -import struct -from functools import partial - -import numpy as np -import onnx -import onnxsim -import torch -import torch.nn as nn -import torch.nn.functional as F -from torch.nn import Linear, Embedding - -from basics.base_module import CategorizedModule -from modules.diffusion.ddpm import beta_schedule, DIFF_DENOISERS -from modules.fastspeech.acoustic_encoder import FastSpeech2AcousticEncoder -from utils import load_ckpt -from utils.hparams import hparams, set_hparams -from utils.phoneme_utils import build_phoneme_list -from utils.infer_utils import parse_commandline_spk_mix -from utils.text_encoder import TokenTextEncoder, PAD_INDEX - - -f0_bin = 256 -f0_max = 1100.0 -f0_min = 50.0 -f0_mel_min = 1127 * math.log(1 + f0_min / 700) -f0_mel_max = 1127 * math.log(1 + f0_max / 700) - -frozen_gender = None -frozen_spk_embed = None - - -def f0_to_coarse(f0): - f0_mel = 1127 * (1 + f0 / 700).log() - a = (f0_bin - 2) / (f0_mel_max - f0_mel_min) - b = f0_mel_min * a - 1. - f0_mel = torch.where(f0_mel > 0, f0_mel * a - b, f0_mel) - torch.clamp_(f0_mel, min=1., max=float(f0_bin - 1)) - f0_coarse = torch.round(f0_mel).long() - return f0_coarse - - -class LengthRegulator(nn.Module): - # noinspection PyMethodMayBeStatic - def forward(self, dur): - token_idx = torch.arange(1, dur.shape[1] + 1, device=dur.device)[None, :, None] - dur_cumsum = torch.cumsum(dur, dim=1) - dur_cumsum_prev = F.pad(dur_cumsum, (1, -1), mode='constant', value=0) - pos_idx = torch.arange(dur.sum(dim=1).max(), device=dur.device)[None, None] - token_mask = (pos_idx >= dur_cumsum_prev[:, :, None]) & (pos_idx < dur_cumsum[:, :, None]) - mel2ph = (token_idx * token_mask).sum(dim=1) - return mel2ph - - -class FastSpeech2Acoustic(CategorizedModule): - def __init__(self, vocab_size): - super().__init__() - self.lr = LengthRegulator() - self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) - self.dur_embed = Linear(1, hparams['hidden_size']) - self.encoder = FastSpeech2AcousticEncoder(self.txt_embed, hparams['hidden_size'], hparams['enc_layers'], - hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']) - - self.f0_embed_type = hparams.get('f0_embed_type', 'discrete') - if self.f0_embed_type == 'discrete': - self.pitch_embed = Embedding(300, hparams['hidden_size'], PAD_INDEX) - elif self.f0_embed_type == 'continuous': - self.pitch_embed = Linear(1, hparams['hidden_size']) - else: - raise ValueError('f0_embed_type must be \'discrete\' or \'continuous\'.') - - if hparams.get('use_key_shift_embed', False): - self.shift_min, self.shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] - self.key_shift_embed = Linear(1, hparams['hidden_size']) - - if hparams.get('use_speed_embed', False): - self.speed_min, self.speed_max = hparams['augmentation_args']['random_time_stretching']['range'] - self.speed_embed = Linear(1, hparams['hidden_size']) - - if hparams['use_spk_id']: - self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) 
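To make the vectorized `LengthRegulator` above concrete, here is a toy trace of how integer durations become the frame-to-token alignment `mel2ph` (values chosen for illustration):

    import torch
    import torch.nn.functional as F

    dur = torch.tensor([[2, 3, 1]])                  # 3 tokens lasting 2, 3 and 1 frames
    token_idx = torch.arange(1, 4)[None, :, None]    # token indices start at 1; 0 marks padding
    dur_cumsum = torch.cumsum(dur, dim=1)            # [[2, 5, 6]]
    dur_cumsum_prev = F.pad(dur_cumsum, (1, -1))     # [[0, 2, 5]]
    pos_idx = torch.arange(6)[None, None]            # frame positions 0..5
    token_mask = (pos_idx >= dur_cumsum_prev[:, :, None]) & (pos_idx < dur_cumsum[:, :, None])
    mel2ph = (token_idx * token_mask).sum(dim=1)     # tensor([[1, 1, 2, 2, 2, 3]])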
- - @property - def category(self): - return 'acoustic' - - def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None): - durations = durations * (tokens > 0) - mel2ph = self.lr.forward(durations) - f0 = f0 * (mel2ph > 0) - mel2ph = mel2ph[..., None].repeat((1, 1, hparams['hidden_size'])) - dur_embed = self.dur_embed(durations.float()[:, :, None]) - encoded = self.encoder(tokens, dur_embed) - encoded = F.pad(encoded, (0, 0, 1, 0)) - condition = torch.gather(encoded, 1, mel2ph) - if self.f0_embed_type == 'discrete': - pitch = f0_to_coarse(f0) - pitch_embed = self.pitch_embed(pitch) - else: - f0_mel = (1 + f0 / 700).log() - pitch_embed = self.pitch_embed(f0_mel[:, :, None]) - condition += pitch_embed - if hparams.get('use_key_shift_embed', False): - if frozen_gender is not None: - # noinspection PyUnresolvedReferences, PyTypeChecker - key_shift = frozen_gender * self.shift_max \ - if frozen_gender >= 0. else frozen_gender * abs(self.shift_min) - key_shift_embed = self.key_shift_embed(key_shift[:, None, None]) - else: - gender = torch.clip(gender, min=-1., max=1.) - gender_mask = (gender < 0.).float() - key_shift = gender * ((1. - gender_mask) * self.shift_max + gender_mask * abs(self.shift_min)) - key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) - condition += key_shift_embed - if hparams.get('use_speed_embed', False): - if velocity is not None: - velocity = torch.clip(velocity, min=self.speed_min, max=self.speed_max) - speed_embed = self.speed_embed(velocity[:, :, None]) - else: - speed_embed = self.speed_embed(torch.FloatTensor([1.]).to(condition.device)[:, None, None]) - condition += speed_embed - - if hparams['use_spk_id']: - if frozen_spk_embed is not None: - condition += frozen_spk_embed - else: - condition += spk_embed - return condition - - -def extract(a, t): - return a[t].reshape((1, 1, 1, 1)) - - -class NaiveNoisePredictor(nn.Module): - def __init__(self): - super().__init__() - to_torch = partial(torch.tensor, dtype=torch.float32) - - self.register_buffer('clip_min', to_torch(-1.)) - self.register_buffer('clip_max', to_torch(1.)) - - def forward(self, x, noise_pred, t): - x_recon = ( - extract(self.sqrt_recip_alphas_cumprod, t) * x - - extract(self.sqrt_recipm1_alphas_cumprod, t) * noise_pred - ) - x_recon = torch.clamp(x_recon, min=self.clip_min, max=self.clip_max) - - model_mean = ( - extract(self.posterior_mean_coef1, t) * x_recon + - extract(self.posterior_mean_coef2, t) * x - ) - model_log_variance = extract(self.posterior_log_variance_clipped, t) - noise = torch.randn_like(x) - # no noise when t == 0 - nonzero_mask = ((t > 0).float()).reshape(1, 1, 1, 1) - return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise - - -class PLMSNoisePredictor(nn.Module): - def __init__(self): - super().__init__() - to_torch = partial(torch.tensor, dtype=torch.float32) - - # Below are buffers for TorchScript to pass jit compilation. 
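The scalar buffers registered below are the classic linear multistep (Adams-Bashforth) coefficients that `predict_stage1/2/3` combine recent noise predictions with; keeping them as buffers rather than Python ints makes them device-aware tensors under TorchScript. For instance, the fourth-order update of `predict_stage3` amounts to (shapes illustrative):

    import torch

    e = [torch.randn(1, 1, 128, 100) for _ in range(4)]  # e[0]=e_t, e[1]=e_{t-1}, ...
    noise_prime = (55 * e[0] - 59 * e[1] + 37 * e[2] - 9 * e[3]) / 24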
- self.register_buffer('_1', to_torch(1)) - self.register_buffer('_2', to_torch(2)) - self.register_buffer('_3', to_torch(3)) - self.register_buffer('_5', to_torch(5)) - self.register_buffer('_9', to_torch(9)) - self.register_buffer('_12', to_torch(12)) - self.register_buffer('_16', to_torch(16)) - self.register_buffer('_23', to_torch(23)) - self.register_buffer('_24', to_torch(24)) - self.register_buffer('_37', to_torch(37)) - self.register_buffer('_55', to_torch(55)) - self.register_buffer('_59', to_torch(59)) - - def forward(self, x, noise_t, t, t_prev): - a_t = extract(self.alphas_cumprod, t) - a_prev = extract(self.alphas_cumprod, t_prev) - a_t_sq, a_prev_sq = a_t.sqrt(), a_prev.sqrt() - - x_delta = (a_prev - a_t) * ((self._1 / (a_t_sq * (a_t_sq + a_prev_sq))) * x - self._1 / ( - a_t_sq * (((self._1 - a_prev) * a_t).sqrt() + ((self._1 - a_t) * a_prev).sqrt())) * noise_t) - x_pred = x + x_delta - - return x_pred - - def predict_stage0(self, noise_pred, noise_pred_prev): - return (noise_pred - + noise_pred_prev) / self._2 - - def predict_stage1(self, noise_pred, noise_list): - return (noise_pred * self._3 - - noise_list[-1]) / self._2 - - def predict_stage2(self, noise_pred, noise_list): - return (noise_pred * self._23 - - noise_list[-1] * self._16 - + noise_list[-2] * self._5) / self._12 - - def predict_stage3(self, noise_pred, noise_list): - return (noise_pred * self._55 - - noise_list[-1] * self._59 - + noise_list[-2] * self._37 - - noise_list[-3] * self._9) / self._24 - - -class MelExtractor(nn.Module): - def __init__(self): - super().__init__() - - def forward(self, x): - x = x.squeeze(1).permute(0, 2, 1) - d = (self.spec_max - self.spec_min) / 2 - m = (self.spec_max + self.spec_min) / 2 - return x * d + m - - -class GaussianDiffusion(CategorizedModule): - def __init__(self, out_dims, timesteps=1000, k_step=1000, - denoiser_type=None, spec_min=None, spec_max=None): - super().__init__() - self.mel_bins = out_dims - self.k_step = k_step - - self.denoise_fn = DIFF_DENOISERS[denoiser_type](hparams) - self.naive_noise_predictor = NaiveNoisePredictor() - self.plms_noise_predictor = PLMSNoisePredictor() - self.mel_extractor = MelExtractor() - - betas = beta_schedule[hparams.get('schedule_type', 'cosine')](timesteps) - - # Below are buffers for state_dict to load into. - alphas = 1. - betas - alphas_cumprod = np.cumprod(alphas, axis=0) - alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) - - timesteps, = betas.shape - self.num_timesteps = int(timesteps) - - to_torch = partial(torch.tensor, dtype=torch.float32) - - self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) - - # calculations for diffusion q(x_t | x_{t-1}) and others - self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) - self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) - - # calculations for posterior q(x_{t-1} | x_t, x_0) - posterior_variance = betas * (1. - alphas_cumprod_prev) / (1. - alphas_cumprod) - # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t) - # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain - self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) - self.register_buffer('posterior_mean_coef1', to_torch( - betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) - self.register_buffer('posterior_mean_coef2', to_torch( - (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. 
- alphas_cumprod))) - - self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']]) - self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']]) - - @property - def category(self): - return 'acoustic' - - def build_submodules(self): - # Move registered buffers into submodules after loading state dict. - self.naive_noise_predictor.register_buffer('sqrt_recip_alphas_cumprod', self.sqrt_recip_alphas_cumprod) - self.naive_noise_predictor.register_buffer('sqrt_recipm1_alphas_cumprod', self.sqrt_recipm1_alphas_cumprod) - self.naive_noise_predictor.register_buffer( - 'posterior_log_variance_clipped', self.posterior_log_variance_clipped) - self.naive_noise_predictor.register_buffer('posterior_mean_coef1', self.posterior_mean_coef1) - self.naive_noise_predictor.register_buffer('posterior_mean_coef2', self.posterior_mean_coef2) - self.plms_noise_predictor.register_buffer('alphas_cumprod', self.alphas_cumprod) - self.mel_extractor.register_buffer('spec_min', self.spec_min) - self.mel_extractor.register_buffer('spec_max', self.spec_max) - del self.sqrt_recip_alphas_cumprod - del self.sqrt_recipm1_alphas_cumprod - del self.posterior_log_variance_clipped - del self.posterior_mean_coef1 - del self.posterior_mean_coef2 - del self.alphas_cumprod - del self.spec_min - del self.spec_max - - def forward(self, condition, speedup): - condition = condition.transpose(1, 2) # (1, n_frames, 256) => (1, 256, n_frames) - - device = condition.device - n_frames = condition.shape[2] - step_range = torch.arange(0, self.k_step, speedup, dtype=torch.long, device=device).flip(0)[:, None] - x = torch.randn((1, 1, self.mel_bins, n_frames), device=device) - - if speedup > 1: - plms_noise_stage = torch.tensor(0, dtype=torch.long, device=device) - noise_list = torch.zeros((0, 1, 1, self.mel_bins, n_frames), device=device) - for t in step_range: - noise_pred = self.denoise_fn(x, t, condition) - t_prev = t - speedup - t_prev = t_prev * (t_prev > 0) - - if plms_noise_stage == 0: - x_pred = self.plms_noise_predictor(x, noise_pred, t, t_prev) - noise_pred_prev = self.denoise_fn(x_pred, t_prev, condition) - noise_pred_prime = self.plms_noise_predictor.predict_stage0(noise_pred, noise_pred_prev) - elif plms_noise_stage == 1: - noise_pred_prime = self.plms_noise_predictor.predict_stage1(noise_pred, noise_list) - elif plms_noise_stage == 2: - noise_pred_prime = self.plms_noise_predictor.predict_stage2(noise_pred, noise_list) - else: - noise_pred_prime = self.plms_noise_predictor.predict_stage3(noise_pred, noise_list) - - noise_pred = noise_pred.unsqueeze(0) - if plms_noise_stage < 3: - noise_list = torch.cat((noise_list, noise_pred), dim=0) - plms_noise_stage = plms_noise_stage + 1 - else: - noise_list = torch.cat((noise_list[-2:], noise_pred), dim=0) - - x = self.plms_noise_predictor(x, noise_pred_prime, t, t_prev) - - # from dpm_solver import NoiseScheduleVP, model_wrapper, DpmSolver - # ## 1. Define the noise schedule. - # noise_schedule = NoiseScheduleVP(betas=self.betas) - # - # ## 2. Convert your discrete-time `model` to the continuous-time - # # noise prediction model. Here is an example for a diffusion model - # ## `model` with the noise prediction type ("noise") . - # - # model_fn = model_wrapper( - # self.denoise_fn, - # noise_schedule, - # model_kwargs={"cond": condition} - # ) - # - # ## 3. Define dpm-solver and sample by singlestep DPM-Solver. 
- # ## (We recommend singlestep DPM-Solver for unconditional sampling) - # ## You can adjust the `steps` to balance the computation - # ## costs and the sample quality. - # dpm_solver = DpmSolver(model_fn, noise_schedule) - # - # steps = t // hparams["pndm_speedup"] - # x = dpm_solver.sample(x, steps=steps) - else: - for t in step_range: - pred = self.denoise_fn(x, t, condition) - x = self.naive_noise_predictor(x, pred, t) - - mel = self.mel_extractor(x) - return mel - - -def build_fs2_model(device, ckpt_steps=None): - model = FastSpeech2Acoustic( - vocab_size=len(TokenTextEncoder(vocab_list=build_phoneme_list())) - ) - model.eval() - load_ckpt(model, hparams['work_dir'], 'model.fs2', ckpt_steps=ckpt_steps, - required_category='acoustic', strict=True) - model.to(device) - return model - - -def build_diff_model(device, ckpt_steps=None): - model = GaussianDiffusion( - out_dims=hparams['audio_num_mel_bins'], - timesteps=hparams['timesteps'], - k_step=hparams['K_step'], - denoiser_type=hparams['diff_decoder_type'], - spec_min=hparams['spec_min'], - spec_max=hparams['spec_max'], - ) - model.eval() - load_ckpt(model, hparams['work_dir'], 'model.diffusion', ckpt_steps=ckpt_steps, - required_category='acoustic', strict=False) - model.build_submodules() - model.to(device) - return model - - -class ModuleWrapper(nn.Module): - def __init__(self, model, name='model'): - super().__init__() - self.wrapped_name = name - setattr(self, name, model) - - def forward(self, *args, **kwargs): - return getattr(self, self.wrapped_name)(*args, **kwargs) - - -class FastSpeech2Wrapper(nn.Module): - def __init__(self, model): - super().__init__() - self.model = ModuleWrapper(model, name='fs2') - - def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None): - return self.model(tokens, durations, f0, gender=gender, velocity=velocity, spk_embed=spk_embed) - - -class DiffusionWrapper(nn.Module): - def __init__(self, model): - super().__init__() - self.model = model - - def forward(self, condition, speedup): - return self.model(condition, speedup) - - -def _fix_cast_nodes(graph, logs=None): - if logs is None: - logs = [] - for sub_node in graph.node: - if sub_node.op_type == 'If': - for attr in sub_node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _fix_cast_nodes(branch, logs) - elif sub_node.op_type == 'Loop': - for attr in sub_node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _fix_cast_nodes(body, logs) - elif sub_node.op_type == 'Cast': - for i, sub_attr in enumerate(sub_node.attribute): - if sub_attr.name == 'to': - to = onnx.helper.get_attribute_value(sub_attr) - if to == onnx.TensorProto.DOUBLE: - float32 = onnx.helper.make_attribute('to', onnx.TensorProto.FLOAT) - sub_node.attribute.remove(sub_attr) - sub_node.attribute.insert(i, float32) - logs.append(sub_node.name) - break - return logs - - -def _fold_shape_gather_equal_if_to_squeeze(graph, subgraph, logs=None): - if logs is None: - logs = [] - - # Do folding in sub-graphs recursively. - for node in subgraph.node: - if node.op_type == 'If': - for attr in node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _fold_shape_gather_equal_if_to_squeeze(graph, branch, logs) - elif node.op_type == 'Loop': - for attr in node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _fold_shape_gather_equal_if_to_squeeze(graph, body, logs) - - # Do folding in current graph. 
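Worth noting before the scan below: this older fold emits `Squeeze` with an `axes` attribute, while the newer helper in `utils/onnx_helper.py` passes the axes as a second input, matching the opset 13 change to the operator. A sketch of the two node forms (tensor names are hypothetical):

    import onnx

    # opset <= 12: axes is an attribute
    squeeze_v12 = onnx.helper.make_node('Squeeze', inputs=['x'], outputs=['y'], axes=[1])

    # opset >= 13: axes is a second input
    squeeze_v13 = onnx.helper.make_node('Squeeze', inputs=['x', 'axes'], outputs=['y'])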
- i_shape = 0 - while i_shape < len(subgraph.node): - if subgraph.node[i_shape].op_type == 'Shape': - shape_node = subgraph.node[i_shape] - shape_out = shape_node.output[0] - i_gather = i_shape + 1 - while i_gather < len(subgraph.node): - if subgraph.node[i_gather].op_type == 'Gather' and subgraph.node[i_gather].input[0] == shape_out: - gather_node = subgraph.node[i_gather] - gather_out = gather_node.output[0] - i_equal = i_gather + 1 - while i_equal < len(subgraph.node): - if subgraph.node[i_equal].op_type == 'Equal' and ( - subgraph.node[i_equal].input[0] == gather_out - or subgraph.node[i_equal].input[1] == gather_out): - equal_node = subgraph.node[i_equal] - equal_out = equal_node.output[0] - i_if = i_equal + 1 - while i_if < len(subgraph.node): - if subgraph.node[i_if].op_type == 'If' and subgraph.node[i_if].input[0] == equal_out: - # Found the substructure to be folded. - if_node = subgraph.node[i_if] - # Search and clean initializer values. - squeeze_axes_tensor = None - for tensor in subgraph.initializer: - if tensor.name == gather_node.input[1]: - squeeze_axes_tensor = tensor - subgraph.initializer.remove(tensor) - elif tensor.name == equal_node.input[1]: - subgraph.initializer.remove(tensor) - # Create 'Squeeze' node. - squeeze_node = onnx.helper.make_node( - op_type='Squeeze', - inputs=shape_node.input, - outputs=if_node.output - ) - squeeze_axes = onnx.helper.make_attribute( - key='axes', - value=[struct.unpack('q', squeeze_axes_tensor.raw_data)[0]] # unpack int64 - ) - squeeze_node.attribute.extend([squeeze_axes]) - # Replace 'Shape', 'Gather', 'Equal', 'If' with 'Squeeze'. - subgraph.node.insert(i_shape, squeeze_node) - subgraph.node.remove(shape_node) - subgraph.node.remove(gather_node) - subgraph.node.remove(equal_node) - subgraph.node.remove(if_node) - logs.append((shape_node.name, gather_node.name, equal_node.name, if_node.name)) - break - i_if += 1 - else: - break - i_equal += 1 - else: - break - i_gather += 1 - else: - break - i_shape += 1 - return logs - - -def _extract_conv_nodes(graph, weight_pattern, alias_prefix): - node_dict = {} # key: pattern match, value: (alias, node) - logs = [] - - def _extract_conv_nodes_recursive(subgraph): - to_be_removed = [] - for sub_node in subgraph.node: - if sub_node.op_type == 'If': - for attr in sub_node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _extract_conv_nodes_recursive(branch) - elif sub_node.op_type == 'Loop': - for attr in sub_node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _extract_conv_nodes_recursive(body) - elif sub_node.op_type == 'Conv' and re.match(weight_pattern, sub_node.input[1]): - # Found node to extract - cached = node_dict.get(sub_node.input[1]) - if cached is None: - out_alias = f'{alias_prefix}.{len(node_dict)}' - node_dict[sub_node.input[1]] = (out_alias, sub_node) - else: - out_alias = cached[0] - out = sub_node.output[0] - # Search for nodes downstream the extracted node and match them to the renamed output - for dep_node in subgraph.node: - for dep_idx, dep_input in enumerate(dep_node.input): - if dep_input == out: - dep_node.input.remove(out) - dep_node.input.insert(dep_idx, out_alias) - # Add the node to the remove list - to_be_removed.append(sub_node) - logs.append(sub_node.name) - [subgraph.node.remove(_n) for _n in to_be_removed] - - for i, n in enumerate(graph.node): - if n.op_type == 'If': - for a in n.attribute: - b = onnx.helper.get_attribute_value(a) - _extract_conv_nodes_recursive(b) - for key in reversed(node_dict): - alias, node 
= node_dict[key] - # Rename output of the node - out_name = node.output[0] - node.output.remove(node.output[0]) - node.output.insert(0, alias) - # Insert node into the main graph - graph.node.insert(i, node) - # Rename value info of the output - for v in graph.value_info: - if v.name == out_name: - v.name = alias - break - break - return logs - - -def _remove_unused_values(graph): - used_values = set() - cleaned_values = [] - - def _record_usage_recursive(subgraph): - for node in subgraph.node: - # For 'If' and 'Loop' nodes, do recording recursively - if node.op_type == 'If': - for attr in node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _record_usage_recursive(branch) - elif node.op_type == 'Loop': - for attr in node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _record_usage_recursive(body) - # For each node, record its inputs and outputs - for input_value in node.input: - used_values.add(input_value) - for output_value in node.output: - used_values.add(output_value) - - def _clean_unused_recursively(subgraph): - # Do cleaning in sub-graphs recursively. - for node in subgraph.node: - if node.op_type == 'If': - for attr in node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _clean_unused_recursively(branch) - elif node.op_type == 'Loop': - for attr in node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _clean_unused_recursively(body) - - # Do cleaning in current graph. - i = 0 - while i < len(subgraph.initializer): - if subgraph.initializer[i].name not in used_values: - cleaned_values.append(subgraph.initializer[i].name) - subgraph.initializer.remove(subgraph.initializer[i]) - else: - i += 1 - i = 0 - while i < len(subgraph.value_info): - if subgraph.value_info[i].name not in used_values: - cleaned_values.append(subgraph.value_info[i].name) - subgraph.value_info.remove(subgraph.value_info[i]) - else: - i += 1 - - _record_usage_recursive(graph) - _clean_unused_recursively(graph) - return cleaned_values - - -def _add_prefixes(model, - initializer_prefix=None, - value_info_prefix=None, - node_prefix=None, - dim_prefix=None, - ignored_pattern=None): - initializers = set() - value_infos = set() - - def _record_initializers_and_value_infos_recursive(subgraph): - # Record names in current graph - for initializer in subgraph.initializer: - if re.match(ignored_pattern, initializer.name): - continue - initializers.add(initializer.name) - for value_info in subgraph.value_info: - if re.match(ignored_pattern, value_info.name): - continue - value_infos.add(value_info.name) - for node in subgraph.node: - # For 'If' and 'Loop' nodes, do recording recursively - if node.op_type == 'If': - for attr in node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _record_initializers_and_value_infos_recursive(branch) - elif node.op_type == 'Loop': - for attr in node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _record_initializers_and_value_infos_recursive(body) - - def _add_prefixes_recursive(subgraph): - # Add prefixes in current graph - if initializer_prefix is not None: - for initializer in subgraph.initializer: - if re.match(ignored_pattern, initializer.name): - continue - initializer.name = initializer_prefix + initializer.name - for value_info in subgraph.value_info: - if dim_prefix is not None: - for dim in value_info.type.tensor_type.shape.dim: - if dim.dim_param is None or dim.dim_param == '' or re.match(ignored_pattern, dim.dim_param): - 
continue - dim.dim_param = dim_prefix + dim.dim_param - if value_info_prefix is None or re.match(ignored_pattern, value_info.name): - continue - value_info.name = value_info_prefix + value_info.name - if node_prefix is not None: - for node in subgraph.node: - if re.match(ignored_pattern, node.name): - continue - node.name = node_prefix + node.name - for node in subgraph.node: - # For 'If' and 'Loop' nodes, rename recursively - if node.op_type == 'If': - for attr in node.attribute: - branch = onnx.helper.get_attribute_value(attr) - _add_prefixes_recursive(branch) - elif node.op_type == 'Loop': - for attr in node.attribute: - if attr.name == 'body': - body = onnx.helper.get_attribute_value(attr) - _add_prefixes_recursive(body) - # For each node, rename its inputs and outputs - for i, input_value in enumerate(node.input): - if input_value in initializers and initializer_prefix is not None: - node.input.remove(input_value) - node.input.insert(i, initializer_prefix + input_value) - if input_value in value_infos and value_info_prefix is not None: - node.input.remove(input_value) - node.input.insert(i, value_info_prefix + input_value) - for i, output_value in enumerate(node.output): - if output_value in initializers and initializer_prefix is not None: - node.output.remove(output_value) - node.output.insert(i, initializer_prefix + output_value) - if output_value in value_infos and value_info_prefix is not None: - node.output.remove(output_value) - node.output.insert(i, value_info_prefix + output_value) - - _record_initializers_and_value_infos_recursive(model.graph) - _add_prefixes_recursive(model.graph) - - -def fix(src, target): - model = onnx.load(src) - - # The output dimension are wrongly hinted by TorchScript - in_dims = model.graph.input[0].type.tensor_type.shape.dim - out_dims = model.graph.output[0].type.tensor_type.shape.dim - out_dims.remove(out_dims[1]) - out_dims.insert(1, in_dims[1]) - print(f'| annotate output: \'{model.graph.output[0].name}\'') - - # Fix 'Cast' nodes in sub-graphs that wrongly cast tensors to float64 - fixed_casts = _fix_cast_nodes(model.graph) - print('| fix node(s): ') - for i, log in enumerate(fixed_casts): - if i == len(fixed_casts) - 1: - end = '\n' - elif i % 10 == 9: - end = ',\n' - else: - end = ', ' - print(f'\'{log}\'', end=end) - - # Run #1 of the simplifier to fix missing value info and type hints and remove unnecessary 'Cast'. 
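The simplifier round trips in this script follow the standard onnxsim pattern; as a standalone sketch (the path is illustrative):

    import onnx
    import onnxsim

    model = onnx.load('model.onnx')
    model, check = onnxsim.simplify(model, include_subgraph=True)
    assert check, 'Simplified ONNX model could not be validated'
    onnx.save(model, 'model.onnx')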
- print('Running ONNX simplifier...') - model, check = onnxsim.simplify(model, include_subgraph=True) - assert check, 'Simplified ONNX model could not be validated' - - in_dims = model.graph.input[0].type.tensor_type.shape.dim - out_dims = model.graph.output[0].type.tensor_type.shape.dim - - then_branch = None - for node in model.graph.node: - if node.op_type == 'If': - # Add type hint to let the simplifier fold 'Shape', 'Gather', 'Equal', 'If' to 'Squeeze' - if_out = node.output[0] - for info in model.graph.value_info: - if info.name == if_out: - if_out_dim = info.type.tensor_type.shape.dim - while len(if_out_dim) > 0: - if_out_dim.remove(if_out_dim[0]) - if_out_dim.insert(0, in_dims[0]) # batch_size - if_out_dim.insert(1, in_dims[0]) # 1 - if_out_dim.insert(2, out_dims[2]) # mel_bins - if_out_dim.insert(3, in_dims[1]) # n_frames - print(f'| annotate node: \'{node.name}\'') - - # Manually fold 'Shape', 'Gather', 'Equal', 'If' to 'Squeeze' in sub-graphs - folded_groups = [] - for attr in node.attribute: - branch = onnx.helper.get_attribute_value(attr) - folded_groups += _fold_shape_gather_equal_if_to_squeeze(model.graph, branch) - if attr.name == 'then_branch': - # Save branch for future use - then_branch = branch - print('| fold node group(s): ') - print(', '.join(['[' + ', '.join([f'\'{n}\'' for n in log]) + ']' for log in folded_groups])) - break - - # Optimize 'Concat' nodes for shapes - concat_node = None - shape_prefix_name = 'noise.shape.prefix' - list_length_name = 'list.length' - for node in model.graph.node: - if node.op_type == 'Concat': - concat_node = node - for i, ini in enumerate(model.graph.initializer): - if ini.name == node.input[0]: - shape_prefix = onnx.helper.make_tensor( - name=shape_prefix_name, - data_type=onnx.TensorProto.INT64, - dims=(3,), - vals=[out_dims[0].dim_value, 1, out_dims[2].dim_value] - ) - list_length = onnx.helper.make_tensor( - name=list_length_name, - data_type=onnx.TensorProto.INT64, - dims=(1,), - vals=[0] - ) - model.graph.initializer.extend([shape_prefix, list_length]) - break - for i in range(3): - node.input.remove(node.input[0]) - node.input.insert(0, shape_prefix_name) - print(f'| optimize node: \'{node.name}\'') - break - for node in then_branch.node: - if node.op_type == 'Concat': - concat_inputs = list(node.input) - dep_nodes = [] - for dep_node in then_branch.node: - if dep_node.op_type == 'Unsqueeze' and dep_node.output[0] in concat_inputs: - dep_nodes.append(dep_node) - [then_branch.node.remove(d_n) for d_n in dep_nodes] - while len(node.input) > 0: - node.input.remove(node.input[0]) - node.input.extend([list_length_name, concat_node.output[0]]) - print(f'| optimize node: \'{node.name}\'') - break - - # Extract 'Conv' nodes and cache results of conditioner projection - # of each residual layer from loop bodies to improve performance. 
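What the extraction below buys, sketched in PyTorch terms: each residual layer's conditioner projection depends only on the condition, not on the diffusion step, so its output can be computed once instead of at every denoising iteration (names here are illustrative):

    import torch
    from torch import nn

    cond = torch.randn(1, 256, 100)            # (B, hidden_size, n_frames)
    proj = nn.Conv1d(256, 512, kernel_size=1)  # stands in for a conditioner_projection

    # Before: h = proj(cond) is recomputed inside every loop step.
    # After: it is computed once and reused by all steps of the denoising loop.
    h = proj(cond)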
- extracted_convs = _extract_conv_nodes( - model.graph, - r'model\.denoise_fn\.residual_layers\.\d+\.conditioner_projection\.weight', - 'cache' - ) - - print(f'| extract node(s):') - for i, log in enumerate(extracted_convs): - if i == len(extracted_convs) - 1: - end = '\n' - elif i % 10 == 9: - end = ',\n' - else: - end = ', ' - print(f'\'{log}\'', end=end) - - # Remove unused initializers and value infos - cleaned_values = _remove_unused_values(model.graph) - print(f'| clean value(s):') - for i, log in enumerate(cleaned_values): - if i == len(cleaned_values) - 1: - end = '\n' - elif i % 15 == 14: - end = ',\n' - else: - end = ', ' - print(f'\'{log}\'', end=end) - - # Run #2 of the simplifier to further optimize the graph and reduce dangling sub-graphs. - print('Running ONNX simplifier...') - model, check = onnxsim.simplify(model, include_subgraph=True) - assert check, 'Simplified ONNX model could not be validated' - - onnx.save(model, target) - print('Graph fixed and optimized.') - - -def _perform_speaker_mix(spk_embedding: nn.Embedding, spk_map: dict, spk_mix_map: dict, device): - for spk_name in spk_mix_map: - assert spk_name in spk_map, f'Speaker \'{spk_name}\' not found.' - spk_mix_embed = [ - spk_embedding(torch.LongTensor([spk_map[spk_name]]).to(device)) * spk_mix_map[spk_name] - for spk_name in spk_mix_map - ] - spk_mix_embed = torch.stack(spk_mix_embed, dim=1).sum(dim=1) - return spk_mix_embed - - -def _save_speaker_embed(path: str, spk_embed: np.ndarray): - with open(path, 'wb') as f: - f.write(spk_embed.tobytes()) - print(f'| export spk embed of \'{path.rsplit(".", maxsplit=2)[1]}\'') - - -@torch.no_grad() -def export(fs2_path, diff_path, ckpt_steps=None, - expose_gender=False, expose_velocity=False, - spk_export_list=None, frozen_spk=None): - # Build models to export - device = 'cuda' if torch.cuda.is_available() else 'cpu' - fs2 = FastSpeech2Wrapper( - model=build_fs2_model(device, ckpt_steps=ckpt_steps) - ) - diffusion = DiffusionWrapper( - model=build_diff_model(device, ckpt_steps=ckpt_steps) - ) - - # Export speakers and speaker mixes - global frozen_gender, frozen_spk_embed - if hparams['use_spk_id']: - with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: - spk_map = json.load(f) - - if spk_export_list is None and frozen_spk is None: - if len(spk_map) == 1: - # Freeze the only speaker by default - frozen_spk = list(spk_map.keys())[0] - else: - warnings.warn('Combined models cannot run without speaker keys. ' - 'Did you forget to export at least one speaker via the \'--spk\' argument, ' - 'or freeze one speaker via the \'--freeze_spk\' argument?', category=UserWarning) - warnings.filterwarnings(action='default') - if frozen_spk is not None: - frozen_spk_embed = _perform_speaker_mix(fs2.model.fs2.spk_embed, spk_map, - parse_commandline_spk_mix(frozen_spk), device) - elif spk_export_list is not None: - for spk in spk_export_list: - _save_speaker_embed(spk['path'], _perform_speaker_mix( - fs2.model.fs2.spk_embed, spk_map, parse_commandline_spk_mix(spk['mix']), device - ).cpu().numpy()) - - # Export PyTorch modules - n_frames = 10 - tokens = torch.tensor([[3]], dtype=torch.long, device=device) - durations = torch.tensor([[n_frames]], dtype=torch.long, device=device) - f0 = torch.tensor([[440.] 
* n_frames], dtype=torch.float32, device=device) - kwargs = {} - arguments = (tokens, durations, f0, kwargs) - input_names = ['tokens', 'durations', 'f0'] - dynamix_axes = { - 'tokens': { - 1: 'n_tokens' - }, - 'durations': { - 1: 'n_tokens' - }, - 'f0': { - 1: 'n_frames' - } - } - if hparams.get('use_key_shift_embed', False): - if expose_gender: - # noinspection PyTypedDict - kwargs['gender'] = torch.rand((1, n_frames), dtype=torch.float32, device=device) - input_names.append('gender') - dynamix_axes['gender'] = { - 1: 'n_frames' - } - elif frozen_gender is not None: - frozen_gender = torch.FloatTensor([frozen_gender]).to(device) - if hparams.get('use_speed_embed', False): - if expose_velocity: - # noinspection PyTypedDict - kwargs['velocity'] = torch.rand((1, n_frames), dtype=torch.float32, device=device) - input_names.append('velocity') - dynamix_axes['velocity'] = { - 1: 'n_frames' - } - if hparams['use_spk_id'] and frozen_spk is None: - # noinspection PyTypedDict - kwargs['spk_embed'] = torch.rand((1, n_frames, hparams['hidden_size']), dtype=torch.float32, device=device) - input_names.append('spk_embed') - dynamix_axes['spk_embed'] = { - 1: 'n_frames' - } - print('Exporting FastSpeech2...') - torch.onnx.export( - fs2, - arguments, - fs2_path, - input_names=input_names, - output_names=[ - 'condition' - ], - dynamic_axes=dynamix_axes, - opset_version=15 - ) - model = onnx.load(fs2_path) - model, check = onnxsim.simplify(model, include_subgraph=True) - assert check, 'Simplified ONNX model could not be validated' - onnx.save(model, fs2_path) - - shape = (1, 1, hparams['audio_num_mel_bins'], n_frames) - noise_t = torch.randn(shape, device=device) - noise_list = torch.randn((3, *shape), device=device) - condition = torch.rand((1, hparams['hidden_size'], n_frames), device=device) - step = (torch.rand((1,), device=device) * hparams['K_step']).long() - speedup = (torch.rand((), device=device) * step / 10.).long() - step_prev = torch.maximum(step - speedup, torch.tensor(0, dtype=torch.long, device=device)) - - print('Tracing GaussianDiffusion submodules...') - diffusion.model.denoise_fn = torch.jit.trace( - diffusion.model.denoise_fn, - ( - noise_t, - step, - condition - ) - ) - diffusion.model.naive_noise_predictor = torch.jit.trace( - diffusion.model.naive_noise_predictor, - ( - noise_t, - noise_t, - step - ), - check_trace=False - ) - diffusion.model.plms_noise_predictor = torch.jit.trace_module( - diffusion.model.plms_noise_predictor, - { - 'forward': ( - noise_t, - noise_t, - step, - step_prev - ), - 'predict_stage0': ( - noise_t, - noise_t - ), - 'predict_stage1': ( - noise_t, - noise_list - ), - 'predict_stage2': ( - noise_t, - noise_list - ), - 'predict_stage3': ( - noise_t, - noise_list - ), - } - ) - diffusion.model.mel_extractor = torch.jit.trace( - diffusion.model.mel_extractor, - ( - noise_t - ) - ) - - diffusion = torch.jit.script(diffusion) - condition = torch.rand((1, n_frames, hparams['hidden_size']), device=device) - speedup = torch.tensor(10, dtype=torch.long, device=device) - - torch.onnx.export( - diffusion, - ( - condition, - speedup - ), - diff_path, - input_names=[ - 'condition', - 'speedup' - ], - output_names=[ - 'mel' - ], - dynamic_axes={ - 'condition': { - 1: 'n_frames' - } - }, - opset_version=15 - ) - print('PyTorch ONNX export finished.') - - -def merge(fs2_path, diff_path, target_path): - fs2_model = onnx.load(fs2_path) - diff_model = onnx.load(diff_path) - - # Add prefixes to names inside the model graph. 
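The prefixing exists so that names from the two graphs cannot collide when they are merged further down; the merge itself just wires one graph's output into the other's input. A sketch of the call (paths are illustrative; `onnx.compose` may need an explicit import depending on the onnx version):

    import onnx
    import onnx.compose

    fs2_model = onnx.load('fs2.onnx')
    diff_model = onnx.load('diffusion.onnx')
    merged = onnx.compose.merge_models(
        fs2_model, diff_model,
        io_map=[('condition', 'condition')],  # fs2 output feeds the diffusion input
        prefix1='', prefix2='', doc_string=''
    )
    onnx.save(merged, 'merged.onnx')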
- print('Adding prefixes to models...') - _add_prefixes( - fs2_model, initializer_prefix='fs2.', value_info_prefix='fs2.', - node_prefix='Enc_', ignored_pattern=r'model\.fs2\.' - ) - _add_prefixes( - fs2_model, dim_prefix='enc__', ignored_pattern=r'(n_tokens)|(n_frames)' - ) - _add_prefixes( - diff_model, initializer_prefix='diffusion.', value_info_prefix='diffusion.', - node_prefix='Dec_', ignored_pattern=r'model.' - ) - _add_prefixes( - diff_model, dim_prefix='dec__', ignored_pattern='n_frames' - ) - # Official onnx API does not consider sub-graphs. - # onnx.compose.add_prefix(fs2_model, prefix='fs2.', inplace=True) - # onnx.compose.add_prefix(diff_model, prefix='diffusion.', inplace=True) - - merged_model = onnx.compose.merge_models( - fs2_model, diff_model, io_map=[('condition', 'condition')], - prefix1='', prefix2='', doc_string='' - ) - merged_model.graph.name = fs2_model.graph.name - print('FastSpeech2 and GaussianDiffusion models merged.') - onnx.save(merged_model, target_path) - - -def export_phonemes_txt(path: str): - TokenTextEncoder(vocab_list=build_phoneme_list()).store_to_file(path) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Export DiffSinger acoustic model to ONNX format.') - parser.add_argument('--exp', type=str, required=True, help='experiment to export') - parser.add_argument('--ckpt', type=int, required=False, help='checkpoint training steps') - parser.add_argument('--out', required=False, type=str, help='output directory for ONNX models and speaker keys') - group_gender = parser.add_mutually_exclusive_group() - group_gender.add_argument('--expose_gender', required=False, default=False, action='store_true', - help='(for models with random pitch shifting) expose gender control functionality') - group_gender.add_argument('--freeze_gender', type=float, required=False, - help='(for models with random pitch shifting) freeze gender value into the model') - parser.add_argument('--expose_velocity', required=False, default=False, action='store_true', - help='(for models with random time stretching) expose velocity control functionality') - group_spk = parser.add_mutually_exclusive_group() - group_spk.add_argument('--export_spk', required=False, type=str, action='append', - help='(for combined models) speakers or speaker mixes to export') - group_spk.add_argument('--freeze_spk', required=False, type=str, - help='(for combined models) freeze speaker or speaker mix into the model') - args = parser.parse_args() - - # Check for frozen gender - if args.freeze_gender is not None: - assert -1. <= args.freeze_gender <= 1., 'Frozen gender must be in [-1, 1].' - frozen_gender = args.freeze_gender - elif not args.expose_gender: - frozen_gender = 0. - - exp = args.exp - if not os.path.exists(f'{root_dir}/checkpoints/{exp}'): - for ckpt in os.listdir(os.path.join(root_dir, 'checkpoints')): - if ckpt.startswith(exp): - print(f'| match ckpt by prefix: {ckpt}') - exp = ckpt - break - assert os.path.exists(f'{root_dir}/checkpoints/{exp}'), 'There are no matching exp in \'checkpoints\' folder. ' \ - 'Please specify \'--exp\' as the folder name or prefix.' 
- else: - print(f'| found ckpt by name: {exp}') - - cwd = os.getcwd() - if args.out: - out = os.path.join(cwd, args.out) if not os.path.isabs(args.out) else args.out - else: - out = f'deployment/assets/{exp}' - os.chdir(root_dir) - sys.argv = [ - 'inference/ds_acoustic.py', - '--exp_name', - exp, - '--infer' - ] - - os.makedirs(f'deployment/temp', exist_ok=True) - diff_model_path = f'deployment/temp/diffusion.onnx' - fs2_model_path = f'deployment/temp/fs2.onnx' - spk_name_pattern = r'[0-9A-Za-z_-]+' - spk_export_paths = None - frozen_spk_name = None - frozen_spk_mix = None - if args.export_spk is not None: - spk_export_paths = [] - for spk_export in args.export_spk: - assert '=' in spk_export or '|' not in spk_export, \ - 'You must specify an alias with \'NAME=\' for each speaker mix.' - if '=' in spk_export: - alias, mix = spk_export.split('=', maxsplit=1) - assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' - spk_export_paths.append({'mix': mix, 'path': f'deployment/temp/{exp}.{alias}.emb'}) - else: - assert re.fullmatch(spk_name_pattern, spk_export) is not None, \ - f'Invalid alias \'{spk_export}\' for speaker mix.' - spk_export_paths.append({'mix': spk_export, 'path': f'deployment/temp/{exp}.{spk_export}.emb'}) - elif args.freeze_spk is not None: - assert '=' in args.freeze_spk or '|' not in args.freeze_spk, \ - 'You must specify an alias with \'NAME=\' for each speaker mix.' - if '=' in args.freeze_spk: - alias, mix = args.freeze_spk.split('=', maxsplit=1) - assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' - frozen_spk_name = alias - frozen_spk_mix = mix - else: - assert re.fullmatch(spk_name_pattern, args.freeze_spk) is not None, \ - f'Invalid alias \'{args.freeze_spk}\' for speaker mix.' 
- frozen_spk_name = args.freeze_spk - frozen_spk_mix = args.freeze_spk - - if frozen_spk_name is None: - target_model_path = f'{out}/{exp}.onnx' - else: - target_model_path = f'{out}/{exp}.{frozen_spk_name}.onnx' - phonemes_txt_path =f'{out}/{exp}.phonemes.txt' - os.makedirs(out, exist_ok=True) - set_hparams(print_hparams=False) - export(fs2_path=fs2_model_path, diff_path=diff_model_path, ckpt_steps=args.ckpt, - expose_gender=args.expose_gender, expose_velocity=args.expose_velocity, - spk_export_list=spk_export_paths, frozen_spk=frozen_spk_mix) - fix(diff_model_path, diff_model_path) - merge(fs2_path=fs2_model_path, diff_path=diff_model_path, target_path=target_model_path) - export_phonemes_txt(phonemes_txt_path) - if spk_export_paths is not None: - [shutil.copy(p['path'], out) for p in spk_export_paths] - [os.remove(p['path']) for p in spk_export_paths if os.path.exists(p['path'])] - os.remove(fs2_model_path) - os.remove(diff_model_path) - - os.chdir(cwd) - log_path = out - print(f'| export \'model\' to \'{log_path}\'.') diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py new file mode 100644 index 000000000..6baff344e --- /dev/null +++ b/deployment/exporters/acoustic_exporter.py @@ -0,0 +1,298 @@ +import os +import shutil +import sys +from pathlib import Path +from typing import List, Union, Tuple, Dict + +import onnx +import onnxsim +import torch + +root_dir = str(Path(__file__).parent.parent.parent.resolve()) +os.environ['PYTHONPATH'] = root_dir +sys.path.insert(0, root_dir) + +from basics.base_onnx_export import BaseExporter +from deployment.modules.toplevel import DiffSingerAcousticOnnx +from utils import load_ckpt, onnx_helper +from utils.hparams import hparams +from utils.phoneme_utils import locate_dictionary, build_phoneme_list +from utils.text_encoder import TokenTextEncoder + + +class DiffSingerAcousticExporter(BaseExporter): + def __init__( + self, + device: Union[str, torch.device] = None, + cache_dir: Path = None, + ckpt_steps: int = None, + expose_gender: bool = False, + freeze_gender: float = None, + expose_velocity: bool = False, + export_spk: List[Tuple[str, Dict[str, float]]] = None, + freeze_spk: Tuple[str, Dict[str, float]] = None + ): + super().__init__(device=device, cache_dir=cache_dir) + # Basic attributes + self.model_name: str = hparams['exp_name'] + self.ckpt_steps: int = ckpt_steps + self.spk_map: dict = self.build_spk_map() + self.vocab = TokenTextEncoder(vocab_list=build_phoneme_list()) + self.model = self.build_model() + self.fs2_cache_path = self.cache_dir / 'fs2.onnx' + self.diffusion_cache_path = self.cache_dir / 'diffusion.onnx' + + # Attributes for logging + self.fs2_class_name = self.model.fs2.__class__.__name__.removesuffix('Onnx') + self.denoiser_class_name = self.model.diffusion.denoise_fn.__class__.__name__.removesuffix('Onnx') + self.diffusion_class_name = self.model.diffusion.__class__.__name__.removesuffix('Onnx') + + # Attributes for exporting + self.expose_gender = expose_gender + self.expose_velocity = expose_velocity + self.freeze_spk: Tuple[str, Dict[str, float]] = freeze_spk + self.export_spk: List[Tuple[str, Dict[str, float]]] = export_spk if export_spk is not None else [] + if hparams.get('use_key_shift_embed', False) and not self.expose_gender: + shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] + key_shift = freeze_gender * shift_max if freeze_gender >= 0. 
else freeze_gender * abs(shift_min) + key_shift = max(min(key_shift, shift_max), shift_min) # clip key shift + self.model.fs2.register_buffer('frozen_key_shift', torch.FloatTensor([key_shift]).to(self.device)) + if hparams['use_spk_id'] and not self.export_spk and freeze_spk is not None: + self.model.fs2.register_buffer('spk_mix_embed', self._perform_spk_mix(freeze_spk[1])) + + def build_model(self) -> DiffSingerAcousticOnnx: + model = DiffSingerAcousticOnnx( + vocab_size=len(self.vocab), + out_dims=hparams['audio_num_mel_bins'] + ).eval().to(self.device) + load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=self.ckpt_steps, + required_category='acoustic', strict=True, device=self.device) + return model + + def export(self, path: Path): + path.mkdir(parents=True, exist_ok=True) + model_name = self.model_name + if self.freeze_spk is not None: + model_name += '.' + self.freeze_spk[0] + self.export_model((path / model_name).with_suffix('.onnx')) + self.export_attachments(path) + + def export_model(self, path: Path): + self._torch_export_model() + fs2_onnx = self._optimize_fs2_graph(onnx.load(self.fs2_cache_path)) + diffusion_onnx = self._optimize_diffusion_graph(onnx.load(self.diffusion_cache_path)) + model_onnx = self._merge_fs2_diffusion_graphs(fs2_onnx, diffusion_onnx) + onnx.save(model_onnx, path) + self.fs2_cache_path.unlink() + self.diffusion_cache_path.unlink() + print(f'| export model => {path}') + + def export_attachments(self, path: Path): + path_model_name = path / self.model_name + for spk in self.export_spk: + self._export_spk_embed(path.with_suffix(f'.{spk[0]}.emb'), self._perform_spk_mix(spk[1])) + self._export_dictionary(path / 'dictionary.txt') + self._export_phonemes(path_model_name.with_suffix('.phonemes.txt')) + + @torch.no_grad() + def _torch_export_model(self): + # Prepare inputs for FastSpeech2 tracing + n_frames = 10 + tokens = torch.LongTensor([[1]]).to(self.device) + durations = torch.LongTensor([[n_frames]]).to(self.device) + f0 = torch.FloatTensor([[440.] 
* n_frames]).to(self.device) + kwargs: dict[str, torch.Tensor] = {} + arguments = (tokens, durations, f0, kwargs) + input_names = ['tokens', 'durations', 'f0'] + dynamix_axes = { + 'tokens': { + 1: 'n_tokens' + }, + 'durations': { + 1: 'n_tokens' + }, + 'f0': { + 1: 'n_frames' + } + } + if hparams.get('use_key_shift_embed', False): + if self.expose_gender: + kwargs['gender'] = torch.rand((1, n_frames), dtype=torch.float32, device=self.device) + input_names.append('gender') + dynamix_axes['gender'] = { + 1: 'n_frames' + } + if hparams.get('use_speed_embed', False): + if self.expose_velocity: + kwargs['velocity'] = torch.rand((1, n_frames), dtype=torch.float32, device=self.device) + input_names.append('velocity') + dynamix_axes['velocity'] = { + 1: 'n_frames' + } + if hparams['use_spk_id']: + kwargs['spk_embed'] = torch.rand( + (1, n_frames, hparams['hidden_size']), + dtype=torch.float32, device=self.device + ) + input_names.append('spk_embed') + dynamix_axes['spk_embed'] = { + 1: 'n_frames' + } + dynamix_axes['condition'] = { + 1: 'n_frames' + } + + # PyTorch ONNX export for FastSpeech2 + print(f'Exporting {self.fs2_class_name}...') + torch.onnx.export( + self.model.view_as_fs2(), + arguments, + self.fs2_cache_path, + input_names=input_names, + output_names=['condition'], + dynamic_axes=dynamix_axes, + opset_version=15 + ) + + # Prepare inputs for denoiser tracing and GaussianDiffusion scripting + shape = (1, 1, hparams['audio_num_mel_bins'], n_frames) + noise = torch.randn(shape, device=self.device) + condition = torch.rand((1, hparams['hidden_size'], n_frames), device=self.device) + step = (torch.rand((1,), device=self.device) * hparams['K_step']).long() + + print(f'Tracing {self.denoiser_class_name} denoiser...') + diffusion = self.model.view_as_diffusion() + diffusion.diffusion.denoise_fn = torch.jit.trace( + diffusion.diffusion.denoise_fn, + ( + noise, + step, + condition + ) + ) + + print(f'Scripting {self.diffusion_class_name}...') + diffusion = torch.jit.script( + diffusion, + example_inputs=[ + ( + condition.transpose(1, 2), + 1 # p_sample branch + ), + ( + condition.transpose(1, 2), + 200 # p_sample_plms branch + ) + ] + ) + + # PyTorch ONNX export for GaussianDiffusion + print(f'Exporting {self.diffusion_class_name}...') + torch.onnx.export( + diffusion, + ( + condition.transpose(1, 2), + 200 + ), + self.diffusion_cache_path, + input_names=[ + 'condition', 'speedup' + ], + output_names=[ + 'mel' + ], + dynamic_axes={ + 'condition': { + 1: 'n_frames' + }, + 'mel': { + 1: 'n_frames' + } + }, + opset_version=15 + ) + + def _perform_spk_mix(self, spk_mix: Dict[str, float]): + spk_mix_ids = [] + spk_mix_values = [] + for name, value in spk_mix.items(): + spk_mix_ids.append(self.spk_map[name]) + assert value >= 0., f'Speaker mix checks failed.\n' \ + f'Proportion of speaker \'{name}\' is negative.' + spk_mix_values.append(value) + spk_mix_id_N = torch.LongTensor(spk_mix_ids).to(self.device)[None] # => [1, N] + spk_mix_value_N = torch.FloatTensor(spk_mix_values).to(self.device)[None] # => [1, N] + spk_mix_value_sum = spk_mix_value_N.sum() + assert spk_mix_value_sum > 0., f'Speaker mix checks failed.\n' \ + f'Proportions of speaker mix sum to zero.' 
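        # The normalization below turns the raw proportions into a convex
        # combination of speaker embeddings:
        #     spk_mix_embed = sum_i(w_i * spk_embed(id_i)),  sum_i(w_i) = 1
        # e.g. a mix {'alice': 1.0, 'bob': 3.0} (hypothetical names) yields
        # 0.25 * E[alice] + 0.75 * E[bob], with final shape [1, hidden_size].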
+ spk_mix_value_N /= spk_mix_value_sum # normalize + spk_mix_embed = torch.sum( + self.model.fs2.spk_embed(spk_mix_id_N) * spk_mix_value_N.unsqueeze(2), # => [1, N, H] + dim=1, keepdim=False + ) # => [1, H] + return spk_mix_embed + + def _optimize_fs2_graph(self, fs2: onnx.ModelProto) -> onnx.ModelProto: + print(f'Running ONNX simplifier for {self.fs2_class_name}...') + fs2, check = onnxsim.simplify( + fs2, + include_subgraph=True + ) + assert check, 'Simplified ONNX model could not be validated' + print(f'| optimize graph: {self.fs2_class_name}') + return fs2 + + def _optimize_diffusion_graph(self, diffusion: onnx.ModelProto) -> onnx.ModelProto: + onnx_helper.model_override_io_shapes(diffusion, output_shapes={ + 'mel': (1, 'n_frames', hparams['audio_num_mel_bins']) + }) + print(f'Running ONNX simplifier #1 for {self.diffusion_class_name}...') + diffusion, check = onnxsim.simplify( + diffusion, + include_subgraph=True + ) + assert check, 'Simplified ONNX model could not be validated' + onnx_helper.graph_fold_back_to_squeeze(diffusion.graph) + onnx_helper.graph_extract_conditioner_projections( + graph=diffusion.graph, op_type='Conv', + weight_pattern=r'diffusion\.denoise_fn\.residual_layers\.\d+\.conditioner_projection\.weight', + alias_prefix='/diffusion/denoise_fn/cache' + ) + onnx_helper.graph_remove_unused_values(diffusion.graph) + print(f'Running ONNX simplifier #2 for {self.diffusion_class_name}...') + diffusion, check = onnxsim.simplify( + diffusion, + include_subgraph=True + ) + print(f'| optimize graph: {self.diffusion_class_name}') + assert check, 'Simplified ONNX model could not be validated' + return diffusion + + def _merge_fs2_diffusion_graphs(self, fs2: onnx.ModelProto, diffusion: onnx.ModelProto) -> onnx.ModelProto: + onnx_helper.model_add_prefixes(fs2, dim_prefix='fs2.', ignored_pattern=r'(n_tokens)|(n_frames)') + onnx_helper.model_add_prefixes(diffusion, dim_prefix='diffusion.', ignored_pattern='n_frames') + print(f'Merging {self.fs2_class_name} and {self.diffusion_class_name} ' + f'back into {self.model.__class__.__name__}...') + merged = onnx.compose.merge_models( + fs2, diffusion, io_map=[('condition', 'condition')], + prefix1='', prefix2='', doc_string='', + producer_name=fs2.producer_name, producer_version=fs2.producer_version, + domain=fs2.domain, model_version=fs2.model_version + ) + merged.graph.name = fs2.graph.name + return merged + + # noinspection PyMethodMayBeStatic + def _export_spk_embed(self, path: Path, spk_embed: torch.Tensor): + with open(path, 'wb') as f: + f.write(spk_embed.cpu().numpy().tobytes()) + print(f'| export spk embed => {path}') + + # noinspection PyMethodMayBeStatic + def _export_dictionary(self, path: Path): + print(f'| export dictionary => {path}') + shutil.copy(locate_dictionary(), path) + + def _export_phonemes(self, path: Path): + self.vocab.store_to_file(path) + print(f'| export phonemes => {path}') diff --git a/deployment/export/export_nsf_hifigan.py b/deployment/exporters/export_nsf_hifigan.py similarity index 100% rename from deployment/export/export_nsf_hifigan.py rename to deployment/exporters/export_nsf_hifigan.py diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py index 8c2c793c2..e959294fa 100644 --- a/deployment/modules/fastspeech2.py +++ b/deployment/modules/fastspeech2.py @@ -32,15 +32,13 @@ def forward(self, dur): class FastSpeech2AcousticOnnx(FastSpeech2Acoustic): - def __init__(self, vocab_size, frozen_gender=None, frozen_spk_embed=None): + def __init__(self, vocab_size): 
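        # The frozen_gender/frozen_spk_embed constructor arguments are gone:
        # the exporter now calls register_buffer('frozen_key_shift', ...) and
        # register_buffer('spk_mix_embed', ...) on this module instead, so
        # frozen values live in the state dict and get baked into the exported
        # ONNX graph as constants, while forward() only probes for them via
        # hasattr().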
super().__init__(vocab_size=vocab_size)
        self.lr = LengthRegulator()
        if hparams.get('use_key_shift_embed', False):
            self.shift_min, self.shift_max = hparams['augmentation_args']['random_pitch_shifting']['range']
        if hparams.get('use_speed_embed', False):
            self.speed_min, self.speed_max = hparams['augmentation_args']['random_time_stretching']['range']
-        self.frozen_gender = frozen_gender
-        self.frozen_spk_embed = frozen_spk_embed

     # noinspection PyMethodOverriding
     def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None):
@@ -62,11 +60,8 @@ def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=N
             condition += pitch_embed

         if hparams.get('use_key_shift_embed', False):
-            if self.frozen_gender is not None:
-                # noinspection PyUnresolvedReferences, PyTypeChecker
-                key_shift = frozen_gender * self.shift_max \
-                    if frozen_gender >= 0. else frozen_gender * abs(self.shift_min)
-                key_shift_embed = self.key_shift_embed(key_shift[:, None, None])
+            if hasattr(self, 'frozen_key_shift'):
+                key_shift_embed = self.key_shift_embed(self.frozen_key_shift[:, None, None])
             else:
                 gender = torch.clip(gender, min=-1., max=1.)
                 gender_mask = (gender < 0.).float()
@@ -83,7 +78,7 @@ def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=N
             condition += speed_embed

         if hparams['use_spk_id']:
-            if self.frozen_spk_embed is not None:
-                condition += self.frozen_spk_embed
+            if hasattr(self, 'spk_mix_embed'):
+                condition += self.spk_mix_embed
             else:
                 condition += spk_embed
diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py
index 023b62bf2..876e09c09 100644
--- a/deployment/modules/toplevel.py
+++ b/deployment/modules/toplevel.py
@@ -13,12 +13,10 @@ class DiffSingerAcousticOnnx(CategorizedModule):
     def category(self):
         return 'acoustic'

-    def __init__(self, vocab_size, out_dims, frozen_gender=None, frozen_spk_embed=None):
+    def __init__(self, vocab_size, out_dims):
         super().__init__()
         self.fs2 = FastSpeech2AcousticOnnx(
-            vocab_size=vocab_size,
-            frozen_gender=frozen_gender,
-            frozen_spk_embed=frozen_spk_embed
+            vocab_size=vocab_size
         )
         self.diffusion = GaussianDiffusionOnnx(
             out_dims=out_dims,
@@ -29,13 +27,19 @@ def __init__(self, vocab_size, out_dims, frozen_gender=None, frozen_spk_embed=No
             spec_max=hparams['spec_max']
         )

-    def forward(self, tokens: Tensor, durations: Tensor, f0: Tensor, speedup: int) -> Tensor:
-        condition = self.forward_fs2(tokens, durations, f0)
-        mel = self.forward_diffusion(condition, speedup=speedup)
-        return mel
-
-    def forward_fs2(self, tokens: Tensor, durations: Tensor, f0: Tensor) -> Tensor:
-        return self.fs2(tokens, durations, f0)
+    def forward_fs2(
+            self,
+            tokens: Tensor,
+            durations: Tensor,
+            f0: Tensor,
+            gender: Tensor = None,
+            velocity: Tensor = None,
+            spk_embed: Tensor = None
+    ) -> Tensor:
+        return self.fs2(
+            tokens, durations, f0,
+            gender=gender, velocity=velocity, spk_embed=spk_embed
+        )

     def forward_diffusion(self, condition: Tensor, speedup: int) -> Tensor:
         return self.diffusion(condition, speedup)
diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index 78402345c..b2f1cd15e 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -70,7 +70,7 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000,
                  denoiser_type=None, loss_type=None,
                  betas=None, spec_min=None, spec_max=None):
         super().__init__()
-        self.denoise_fn = DIFF_DENOISERS[denoiser_type](hparams)
+        self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type](hparams)
         self.out_dims = out_dims
         if exists(betas):

From
6a4e4507875fab58b55441c508d4a320ec2066cb Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 7 Apr 2023 22:59:15 +0800 Subject: [PATCH 201/475] Onnx -> ONNX --- deployment/exporters/acoustic_exporter.py | 12 ++++++------ deployment/modules/diffusion.py | 2 +- deployment/modules/fastspeech2.py | 2 +- deployment/modules/toplevel.py | 10 +++++----- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 6baff344e..24b4ca841 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -13,7 +13,7 @@ sys.path.insert(0, root_dir) from basics.base_onnx_export import BaseExporter -from deployment.modules.toplevel import DiffSingerAcousticOnnx +from deployment.modules.toplevel import DiffSingerAcousticONNX from utils import load_ckpt, onnx_helper from utils.hparams import hparams from utils.phoneme_utils import locate_dictionary, build_phoneme_list @@ -43,9 +43,9 @@ def __init__( self.diffusion_cache_path = self.cache_dir / 'diffusion.onnx' # Attributes for logging - self.fs2_class_name = self.model.fs2.__class__.__name__.removesuffix('Onnx') - self.denoiser_class_name = self.model.diffusion.denoise_fn.__class__.__name__.removesuffix('Onnx') - self.diffusion_class_name = self.model.diffusion.__class__.__name__.removesuffix('Onnx') + self.fs2_class_name = self.model.fs2.__class__.__name__.removesuffix('ONNX') + self.denoiser_class_name = self.model.diffusion.denoise_fn.__class__.__name__.removesuffix('ONNX') + self.diffusion_class_name = self.model.diffusion.__class__.__name__.removesuffix('ONNX') # Attributes for exporting self.expose_gender = expose_gender @@ -60,8 +60,8 @@ def __init__( if hparams['use_spk_id'] and not self.export_spk and freeze_spk is not None: self.model.fs2.register_buffer('spk_mix_embed', self._perform_spk_mix(freeze_spk[1])) - def build_model(self) -> DiffSingerAcousticOnnx: - model = DiffSingerAcousticOnnx( + def build_model(self) -> DiffSingerAcousticONNX: + model = DiffSingerAcousticONNX( vocab_size=len(self.vocab), out_dims=hparams['audio_num_mel_bins'] ).eval().to(self.device) diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index f72b3a09e..e6499f0c3 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -11,7 +11,7 @@ def extract(a, t): # noinspection PyMethodOverriding -class GaussianDiffusionOnnx(GaussianDiffusion): +class GaussianDiffusionONNX(GaussianDiffusion): def p_sample(self, x, t, cond): x_pred = self.denoise_fn(x, t, cond) x_recon = ( diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py index e959294fa..8ab0a8bfc 100644 --- a/deployment/modules/fastspeech2.py +++ b/deployment/modules/fastspeech2.py @@ -31,7 +31,7 @@ def forward(self, dur): return mel2ph -class FastSpeech2AcousticOnnx(FastSpeech2Acoustic): +class FastSpeech2AcousticONNX(FastSpeech2Acoustic): def __init__(self, vocab_size): super().__init__(vocab_size=vocab_size) self.lr = LengthRegulator() diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 876e09c09..4411ad3d2 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -3,22 +3,22 @@ from torch import Tensor, nn from basics.base_module import CategorizedModule -from deployment.modules.diffusion import GaussianDiffusionOnnx -from deployment.modules.fastspeech2 import FastSpeech2AcousticOnnx +from deployment.modules.diffusion import GaussianDiffusionONNX 
+from deployment.modules.fastspeech2 import FastSpeech2AcousticONNX from utils.hparams import hparams -class DiffSingerAcousticOnnx(CategorizedModule): +class DiffSingerAcousticONNX(CategorizedModule): @property def category(self): return 'acoustic' def __init__(self, vocab_size, out_dims): super().__init__() - self.fs2 = FastSpeech2AcousticOnnx( + self.fs2 = FastSpeech2AcousticONNX( vocab_size=vocab_size ) - self.diffusion = GaussianDiffusionOnnx( + self.diffusion = GaussianDiffusionONNX( out_dims=out_dims, timesteps=hparams['timesteps'], k_step=hparams['K_step'], From b70626e9830a72eba53f63ddea1a808e3878dc01 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 8 Apr 2023 01:13:23 +0800 Subject: [PATCH 202/475] Finish export.py for acoustic exporter --- .gitignore | 12 +- .../{base_onnx_export.py => base_exporter.py} | 3 +- deployment/exporters/__init__.py | 1 + deployment/exporters/acoustic_exporter.py | 8 +- requirements.txt | 1 + scripts/export.py | 125 ++++++++++++++++++ 6 files changed, 135 insertions(+), 15 deletions(-) rename basics/{base_onnx_export.py => base_exporter.py} (94%) create mode 100644 deployment/exporters/__init__.py create mode 100644 scripts/export.py diff --git a/.gitignore b/.gitignore index ac2836be5..ad1c7ec9a 100644 --- a/.gitignore +++ b/.gitignore @@ -8,12 +8,12 @@ local_tools/ (!/preparation/assets)/*.wav infer_out/ *.onnx -data/* -!data/.gitkeep -checkpoints/ -venv/ +/data/* +!/data/.gitkeep +/checkpoints/* +!/checkpoints/.gitkeep +/venv/ +/artifacts/ .vscode -WPy64-38100 -Winpython64-3.8.10.0dot.exe .ipynb_checkpoints/ diff --git a/basics/base_onnx_export.py b/basics/base_exporter.py similarity index 94% rename from basics/base_onnx_export.py rename to basics/base_exporter.py index 56299ee4f..5300ad2b1 100644 --- a/basics/base_onnx_export.py +++ b/basics/base_exporter.py @@ -19,8 +19,7 @@ def __init__( self.device = device if device is not None else torch.device('cuda' if torch.cuda.is_available() else 'cpu') self.cache_dir: Path = cache_dir.resolve() if cache_dir is not None \ else Path(__file__).parent.parent / 'deployment' / 'cache' - if not self.cache_dir.exists(): - self.cache_dir.mkdir(parents=True, exist_ok=True) + self.cache_dir.mkdir(parents=True, exist_ok=True) # noinspection PyMethodMayBeStatic def build_spk_map(self) -> dict: diff --git a/deployment/exporters/__init__.py b/deployment/exporters/__init__.py new file mode 100644 index 000000000..01f2f5bfd --- /dev/null +++ b/deployment/exporters/__init__.py @@ -0,0 +1 @@ +from .acoustic_exporter import DiffSingerAcousticExporter diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 24b4ca841..ba6085e53 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -1,6 +1,4 @@ -import os import shutil -import sys from pathlib import Path from typing import List, Union, Tuple, Dict @@ -8,11 +6,7 @@ import onnxsim import torch -root_dir = str(Path(__file__).parent.parent.parent.resolve()) -os.environ['PYTHONPATH'] = root_dir -sys.path.insert(0, root_dir) - -from basics.base_onnx_export import BaseExporter +from basics.base_exporter import BaseExporter from deployment.modules.toplevel import DiffSingerAcousticONNX from utils import load_ckpt, onnx_helper from utils.hparams import hparams diff --git a/requirements.txt b/requirements.txt index b09552d8c..2a301b25e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,6 +2,7 @@ # PyTorch >= 2.0 is recommended, but 1.12 and 1.13 is compatible. 
# See instructions at https://pytorch.org/get-started/locally/ +click h5py librosa<0.10.0 lightning>=2.0.0 diff --git a/scripts/export.py b/scripts/export.py new file mode 100644 index 000000000..9867ec28a --- /dev/null +++ b/scripts/export.py @@ -0,0 +1,125 @@ +import os +import re +import sys +from pathlib import Path +from typing import List + +import click +import torch + +root_dir = Path(__file__).parent.parent.resolve() +os.environ['PYTHONPATH'] = str(root_dir) +sys.path.insert(0, str(root_dir)) + +from utils.hparams import set_hparams + + +@click.group() +def main(): + pass + + +@main.command(help='Export DiffSinger acoustic model to ONNX format.') +@click.option('--exp', type=str, required=True, metavar='', help='Choose an experiment to export.') +@click.option('--ckpt', type=int, required=False, metavar='', help='Checkpoint training steps.') +@click.option('--out', type=str, required=False, metavar='', help='Output directory for the artifacts.') +@click.option('--expose_gender', is_flag=True, show_default=True, + help='(for random pitch shifting) Expose gender control functionality.') +@click.option('--freeze_gender', type=float, default=0., show_default=True, metavar='', + help='(for random pitch shifting) Freeze gender value into the model.') +@click.option('--expose_velocity', is_flag=True, show_default=True, + help='(for random time stretching) Expose velocity control functionality.') +@click.option('--export_spk', type=str, required=False, multiple=True, metavar='', + help='(for multi-speaker models) Export one or more speaker or speaker mix keys.') +@click.option('--freeze_spk', type=str, required=False) +def acoustic( + exp: str, + ckpt: int, + out: str = None, + expose_gender: bool = False, + freeze_gender: float = 0., + expose_velocity: bool = False, + export_spk: List[str] = None, + freeze_spk: str = None +): + # Validate arguments + if expose_gender and freeze_gender: + print('--expose_gender is exclusive to --freeze_gender.') + exit(-1) + if export_spk and freeze_spk: + print('--export_spk is exclusive to --freeze_spk') + if freeze_gender is not None: + assert -1. <= freeze_gender <= 1., 'Frozen gender must be in [-1, 1].' + if not (root_dir / 'checkpoints' / exp).exists(): + for subdir in (root_dir / 'checkpoints').iterdir(): + if not subdir.is_dir(): + continue + if subdir.name.startswith(exp): + print(f'| match ckpt by prefix: {subdir.name}') + exp = subdir.name + break + else: + assert False, \ + f'There are no matching exp starting with \'{exp}\' in \'checkpoints\' folder. ' \ + 'Please specify \'--exp\' as the folder name or prefix.' + else: + print(f'| found ckpt by name: {exp}') + if out is None: + out = root_dir / 'artifacts' / exp + else: + out = Path(out) + out = out.resolve() + if export_spk is None: + export_spk = [] + else: + export_spk = list(export_spk) + from utils.infer_utils import parse_commandline_spk_mix + spk_name_pattern = r'[0-9A-Za-z_-]+' + export_spk_mix = [] + for spk in export_spk: + assert '=' in spk or '|' not in spk, \ + 'You must specify an alias with \'NAME=\' for each speaker mix.' + if '=' in spk: + alias, mix = spk.split('=', maxsplit=1) + assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' + export_spk_mix.append((alias, parse_commandline_spk_mix(mix))) + else: + export_spk_mix.append((spk, {spk: 1.0})) + freeze_spk_mix = None + if freeze_spk is not None: + assert '=' in freeze_spk or '|' not in freeze_spk, \ + 'You must specify an alias with \'NAME=\' for each speaker mix.' 
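    # Accepted forms for --export_spk/--freeze_spk, as enforced by the asserts
    # here (speaker names are hypothetical):
    #     'alice'          -> a plain speaker, expanded to {'alice': 1.0}
    #     'duet=alice|bob' -> alias 'duet'; the part after '=' is handed to
    #                         parse_commandline_spk_mix() to build the mix
    # A '|' separator is only allowed when an alias is given, hence the
    # "'=' in ... or '|' not in ..." checks.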
+    if '=' in freeze_spk:
+        alias, mix = freeze_spk.split('=', maxsplit=1)
+        assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.'
+        freeze_spk_mix = (alias, parse_commandline_spk_mix(mix))
+    else:
+        freeze_spk_mix = (freeze_spk, {freeze_spk: 1.0})
+
+    # Load configurations
+    sys.argv = [
+        sys.argv[0],
+        '--exp_name',
+        exp,
+        '--infer'
+    ]
+    set_hparams()
+
+    # Export artifacts
+    from deployment.exporters import DiffSingerAcousticExporter
+    exporter = DiffSingerAcousticExporter(
+        device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'),
+        cache_dir=root_dir / 'deployment' / 'cache',
+        ckpt_steps=ckpt,
+        expose_gender=expose_gender,
+        freeze_gender=freeze_gender,
+        expose_velocity=expose_velocity,
+        export_spk=export_spk_mix,
+        freeze_spk=freeze_spk_mix
+    )
+    print(f'| Exporter: {exporter.__class__}')
+    exporter.export(out)
+
+
+if __name__ == '__main__':
+    main()

From f7d159368fedcf252ad8c0448edb7503c9ac6a99 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 8 Apr 2023 01:27:04 +0800
Subject: [PATCH 203/475] Adjust documentation for model export

---
 README.md                     |  6 +++---
 docs/README-SVS-deployment.md | 23 +++++++++++++----------
 2 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/README.md b/README.md
index c68603483..05ed6228e 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
 # Usage of Refactor Branch
-This is a cleaner version of Diffsinger, which provides:
+This is a cleaner version of DiffSinger, which provides:
 - fewer code: scripts unused or obsolete in the DiffSinger are removed;
 - better readability: many important functions are annotated (however, **we assume the reader already knows how the neural networks work**);
 - abstract classes: the bass classes are filtered out into the "basics/" folder and are annotated. Other classes directly inherent from the base classes.
@@ -68,10 +68,10 @@ See more supported arguments with `python scripts/infer.py -h`. See examples of
 Please see this [documentation](docs/README-SVS-deployment.md) before you run the following command:

 ```sh
-python deployment/export/export_acoustic.py --exp $MY_DS_EXP_NAME
+python scripts/export.py acoustic --exp $MY_DS_EXP_NAME
 ```

-See more supported arguments with `python deployment/export/export_acoustic.py -h`.
+See more supported arguments with `python scripts/export.py acoustic --help`.

 #### Use DiffSinger via OpenUTAU editor

diff --git a/docs/README-SVS-deployment.md b/docs/README-SVS-deployment.md
index 3fa0f3836..3eed6657d 100644
--- a/docs/README-SVS-deployment.md
+++ b/docs/README-SVS-deployment.md
@@ -11,13 +11,7 @@ Currently, we only support exporting MIDI-less acoustic model to ONNX format.

 ### 0. Environment Preparation

-** Important:** Due to the complexity of the model and inference procedure, these scripts are only compatible with **PyTorch 1.8**. We apologize for any inconvenience caused; we will try to address this issue in the future.
-
-Run with the command to install extra requirements for exporting the model to ONNX format.
-
-```bash
-pip install onnx==1.12.0 onnxsim==0.4.10 protobuf==3.13.0 # Used for graph repairing and optimization
-```
+**Important:** The exporting script has only been tested under **PyTorch 1.13**. For maximum stability, we recommend exporting your model with the same version of PyTorch that we used to test this functionality.

 The `onnxruntime` package is required to run inference with ONNX model and ONNXRuntime.
See the [official guidance](https://onnxruntime.ai/) for instructions to install packages matching your hardware. CUDA, DirectML and default CPU are recommended since the model has been tested on these execution providers.

@@ -25,16 +19,25 @@ Run with the command
-```bash
-python deployment/export/export_acoustic.py --exp EXP [--out OUT]
+```commandline
+python scripts/export.py acoustic --exp EXP [--out OUT]
 ```

-where `EXP` is the name of experiment, `OUT` is the output directory.
+where `EXP` is the name of the experiment and `OUT` is the output directory for all exported artifacts. To see the full list of options of this script, run
+
+```commandline
+python scripts/export.py acoustic --help
+```

 This script will export the acoustic model to the ONNX format and do a lot of optimization (25% ~ 50% faster with ONNXRuntime than PyTorch).

 Note: DPM-Solver acceleration is not currently included, but PNDM is wrapped into the model. Use any `speedup` larger than 1 to enable it.

+The following attachments will be exported along with the model:
+- the dictionary used by the model
+- a text file listing all phonemes that represent the tokens in the model
+- all speaker mix embeddings, if a multi-speaker model is exported with the `--export_spk` option specified
+
 ### 2. Inference with ONNXRuntime

 See `deployment/infer/infer_acoustic.py` for details.

From c24bdf1ae711c5885d5041d15c2cdda3e65f13f9 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 8 Apr 2023 02:34:36 +0800
Subject: [PATCH 204/475] Add benchmarks

---
 deployment/benchmarks/infer_acoustic.py | 33 ++++++++++++++++++
 .../infer_nsf_hifigan.py                |  0
 deployment/infer/infer_acoustic.py      | 34 -------------------
 3 files changed, 33 insertions(+), 34 deletions(-)
 create mode 100644 deployment/benchmarks/infer_acoustic.py
 rename deployment/{infer => benchmarks}/infer_nsf_hifigan.py (100%)
 delete mode 100644 deployment/infer/infer_acoustic.py

diff --git a/deployment/benchmarks/infer_acoustic.py b/deployment/benchmarks/infer_acoustic.py
new file mode 100644
index 000000000..9af2d083e
--- /dev/null
+++ b/deployment/benchmarks/infer_acoustic.py
@@ -0,0 +1,33 @@
+import numpy as np
+import onnxruntime as ort
+import tqdm
+
+n_tokens = 10
+n_frames = 100
+n_runs = 20
+speedup = 20
+provider = 'DmlExecutionProvider'
+
+tokens = np.array([[1] * n_tokens], dtype=np.int64)
+durations = np.array([[n_frames // n_tokens] * n_tokens], dtype=np.int64)
+f0 = np.array([[440.]
* n_frames], dtype=np.float32) +speedup = np.array(speedup, dtype=np.int64) + + +session = ort.InferenceSession('model1.onnx', providers=[provider]) +for _ in tqdm.tqdm(range(n_runs)): + session.run(['mel'], { + 'tokens': tokens, + 'durations': durations, + 'f0': f0, + 'speedup': speedup + }) + +session = ort.InferenceSession('model2.onnx', providers=[provider]) +for _ in tqdm.tqdm(range(n_runs)): + session.run(['mel'], { + 'tokens': tokens, + 'durations': durations, + 'f0': f0, + 'speedup': speedup + }) diff --git a/deployment/infer/infer_nsf_hifigan.py b/deployment/benchmarks/infer_nsf_hifigan.py similarity index 100% rename from deployment/infer/infer_nsf_hifigan.py rename to deployment/benchmarks/infer_nsf_hifigan.py diff --git a/deployment/infer/infer_acoustic.py b/deployment/infer/infer_acoustic.py deleted file mode 100644 index b8e02b897..000000000 --- a/deployment/infer/infer_acoustic.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -import time - -import numpy as np -import onnxruntime as ort - - -# os.add_dll_directory(r'D:\NVIDIA GPU Computing Toolkit\CUDA\v11.6\bin') -# os.add_dll_directory(r'D:\NVIDIA GPU Computing Toolkit\cuDNN\bin') - -tokens = np.load('deployment/assets/tokens.npy') -durations = np.load('deployment/assets/durations.npy') -f0 = np.load('deployment/assets/f0_denorm.npy') -speedup = np.array(50, dtype=np.int64) - -print('tokens', tokens.shape) -print('durations', durations.shape) -print('f0', f0.shape) - -options = ort.SessionOptions() -session = ort.InferenceSession( - 'deployment/assets/1220_zhibin_ds1000.onnx', - providers=['CUDAExecutionProvider', 'CPUExecutionProvider'], - sess_options=options -) - -start = time.time() -mel = session.run(['mel'], {'tokens': tokens, 'durations': durations, 'f0': f0, 'speedup': speedup})[0] -end = time.time() - -print('mel', mel.shape) -print('cost', end - start) - -np.save('deployment/assets/mel_test.npy', mel) From 339cbfca8e3fa76336f15935cce85863ce9ed0df Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 8 Apr 2023 13:21:39 +0800 Subject: [PATCH 205/475] Fix relative path not found when resuming trainer from another place --- utils/training_utils.py | 68 +++++++++++++++++++++-------------------- 1 file changed, 35 insertions(+), 33 deletions(-) diff --git a/utils/training_utils.py b/utils/training_utils.py index efdfc879a..de007e6b0 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -92,7 +92,7 @@ def __init__(self, dataset, max_tokens, max_sentences, sub_indices=None, self.epoch = 0 self.batches = None self.formed = None - + def __form_batches(self): if self.formed == self.epoch + self.seed: return @@ -103,29 +103,29 @@ def __form_batches(self): indices = np.array(self.sub_indices) else: indices = rng.permutation(len(self.dataset)) - + if self.sort_by_similar_size: grid = int(hparams.get('sampler_frame_count_grid', 200)) assert grid > 0 sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] - + indices = indices.tolist() else: indices = self.sub_indices if self.sub_indices is not None else list(range(len(self.dataset))) - + if self.batch_by_size: batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) else: batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] - + floored_total_batch_count = (len(batches) // self.num_replicas) * self.num_replicas if self.drop_last and len(batches) > 
floored_total_batch_count: batches = batches[:floored_total_batch_count] leftovers = [] else: leftovers = (rng.permutation(len(batches) - floored_total_batch_count) + floored_total_batch_count).tolist() - + batch_assignment = rng.permuted(np.arange(floored_total_batch_count).reshape(-1, self.num_replicas).transpose(), axis=0)[self.rank].tolist() floored_batch_count = len(batch_assignment) ceiled_batch_count = floored_batch_count + (1 if len(leftovers) > 0 else 0) @@ -138,12 +138,12 @@ def __form_batches(self): ceiled_batch_count = math.ceil(ceiled_batch_count / self.required_batch_count_multiple) * self.required_batch_count_multiple for i in range(ceiled_batch_count - len(batch_assignment)): batch_assignment.append(batch_assignment[(i + self.epoch * self.required_batch_count_multiple) % floored_batch_count]) - + self.batches = [deepcopy(batches[i]) for i in batch_assignment] - + if self.shuffle_batch: rng.shuffle(self.batches) - + del indices del batches del batch_assignment @@ -172,7 +172,7 @@ def __init__(self, dataset, max_tokens, max_sentences, rank=None, batch_by_size= self.batches = None self.batch_size = max_sentences self.drop_last = False - + if self.rank == 0: indices = list(range(len(self.dataset))) if self.batch_by_size: @@ -192,7 +192,7 @@ def __len__(self): class DsModelCheckpoint(ModelCheckpoint): def __init__( - self, + self, *args, permanent_ckpt_start, permanent_ckpt_interval, @@ -202,10 +202,10 @@ def __init__( self.permanent_ckpt_start = permanent_ckpt_start or 0 self.permanent_ckpt_interval = permanent_ckpt_interval or 0 self.enable_permanent_ckpt = self.permanent_ckpt_start > 0 and self.permanent_ckpt_interval > 9 - + self._verbose = self.verbose self.verbose = False - + def state_dict(self): ret = super().state_dict() ret.pop('dirpath') @@ -220,38 +220,40 @@ def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModul return self.last_val_step = trainer.global_step super().on_validation_end(trainer, pl_module) - + def _update_best_and_save( self, current: torch.Tensor, trainer: "pl.Trainer", monitor_candidates: Dict[str, torch.Tensor] ) -> None: k = len(self.best_k_models) + 1 if self.save_top_k == -1 else self.save_top_k - + del_filepath = None _op = max if self.mode == "min" else min while len(self.best_k_models) > k and k > 0: self.kth_best_model_path = _op(self.best_k_models, key=self.best_k_models.get) # type: ignore[arg-type] self.kth_value = self.best_k_models[self.kth_best_model_path] - + del_filepath = self.kth_best_model_path self.best_k_models.pop(del_filepath) filepath = self._get_metric_interpolated_filepath_name(monitor_candidates, trainer, del_filepath) if del_filepath is not None and filepath != del_filepath: self._remove_checkpoint(trainer, del_filepath) - + if len(self.best_k_models) == k and k > 0: self.kth_best_model_path = _op(self.best_k_models, key=self.best_k_models.get) # type: ignore[arg-type] self.kth_value = self.best_k_models[self.kth_best_model_path] super()._update_best_and_save(current, trainer, monitor_candidates) - + def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None: - super()._save_checkpoint(trainer, filepath) + filepath = (Path(self.dirpath) / Path(filepath).name).resolve() + super()._save_checkpoint(trainer, str(filepath)) if self._verbose: - relative_path = Path(filepath).relative_to(Path('.').resolve()) + relative_path = filepath.relative_to(Path('.').resolve()) rank_zero_info(f'Checkpoint {relative_path} saved.') - + def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): 
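        # Same normalization as in _save_checkpoint() above: the trainer may
        # pass a path relative to the directory the run was originally
        # launched from, so only the file name is kept and re-anchored onto
        # self.dirpath before touching the file system; this is what makes
        # resuming from another working directory safe.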
- relative_path = Path(filepath).relative_to(Path('.').resolve()) + filepath = (Path(self.dirpath) / Path(filepath).name).resolve() + relative_path = filepath.relative_to(Path('.').resolve()) search = re.search(r'steps_\d+', relative_path.stem) if search: step = int(search.group(0)[6:]) @@ -268,7 +270,7 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): def get_latest_checkpoint_path(work_dir): if not os.path.exists(work_dir): return None - + last_step = -1 last_ckpt_name = None @@ -280,7 +282,7 @@ def get_latest_checkpoint_path(work_dir): if step > last_step: last_step = step last_ckpt_name = name - + return last_ckpt_name if last_ckpt_name is not None else None @@ -306,7 +308,7 @@ def get_metrics(self, trainer, model): def get_strategy(accelerator, devices, num_nodes, strategy, backend): if accelerator != 'auto' and accelerator != 'gpu': return strategy - + from lightning_fabric.utilities.imports import _IS_INTERACTIVE from lightning.pytorch.accelerators import AcceleratorRegistry from lightning.pytorch.accelerators.cuda import CUDAAccelerator @@ -328,36 +330,36 @@ def _choose_auto_accelerator(): if CUDAAccelerator.is_available(): return "cuda" return "cpu" - + def _choose_gpu_accelerator_backend(): if MPSAccelerator.is_available(): return "mps" if CUDAAccelerator.is_available(): return "cuda" raise MisconfigurationException("No supported gpu backend found!") - + if accelerator == "auto": _accelerator_flag = _choose_auto_accelerator() elif accelerator == "gpu": _accelerator_flag = _choose_gpu_accelerator_backend() else: return strategy - + if _accelerator_flag != "mps" and _accelerator_flag != "cuda": return strategy - + _num_nodes_flag = int(num_nodes) if num_nodes is not None else 1 _devices_flag = devices - + accelerator = AcceleratorRegistry.get(_accelerator_flag) accelerator_cls = accelerator.__class__ if _devices_flag == "auto": _devices_flag = accelerator.auto_device_count() - + _devices_flag = accelerator_cls.parse_devices(_devices_flag) _parallel_devices = accelerator_cls.get_parallel_devices(_devices_flag) - + def get_ddp_strategy(_backend): if _backend == 'gloo': return DDPStrategy(process_group_backend='gloo', find_unused_parameters=False) @@ -365,7 +367,7 @@ def get_ddp_strategy(_backend): return DDPStrategy(process_group_backend='nccl', find_unused_parameters=False) else: raise ValueError(f'backend {_backend} is not valid.') - + if _num_nodes_flag > 1: return get_ddp_strategy(backend) if len(_parallel_devices) <= 1: From 493a80dd96a5c9772758eb6dae601f60b7a7e525 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 8 Apr 2023 17:23:29 +0800 Subject: [PATCH 206/475] Finish `NSFHiFiGANExporter` --- deployment/exporters/__init__.py | 1 + deployment/exporters/acoustic_exporter.py | 12 +- deployment/exporters/export_nsf_hifigan.py | 319 ------------------- deployment/exporters/nsf_hifigan_exporter.py | 94 ++++++ deployment/modules/nsf_hifigan.py | 16 + modules/nsf_hifigan/env.py | 4 - modules/nsf_hifigan/models.py | 70 ++-- scripts/export.py | 38 ++- 8 files changed, 190 insertions(+), 364 deletions(-) delete mode 100644 deployment/exporters/export_nsf_hifigan.py create mode 100644 deployment/exporters/nsf_hifigan_exporter.py create mode 100644 deployment/modules/nsf_hifigan.py diff --git a/deployment/exporters/__init__.py b/deployment/exporters/__init__.py index 01f2f5bfd..0a3f94678 100644 --- a/deployment/exporters/__init__.py +++ b/deployment/exporters/__init__.py @@ -1 +1,2 @@ from .acoustic_exporter import DiffSingerAcousticExporter +from 
.nsf_hifigan_exporter import NSFHiFiGANExporter diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index ba6085e53..05e4201bc 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -17,7 +17,7 @@ class DiffSingerAcousticExporter(BaseExporter): def __init__( self, - device: Union[str, torch.device] = None, + device: Union[str, torch.device] = 'cpu', cache_dir: Path = None, ckpt_steps: int = None, expose_gender: bool = False, @@ -228,10 +228,7 @@ def _perform_spk_mix(self, spk_mix: Dict[str, float]): def _optimize_fs2_graph(self, fs2: onnx.ModelProto) -> onnx.ModelProto: print(f'Running ONNX simplifier for {self.fs2_class_name}...') - fs2, check = onnxsim.simplify( - fs2, - include_subgraph=True - ) + fs2, check = onnxsim.simplify(fs2, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' print(f'| optimize graph: {self.fs2_class_name}') return fs2 @@ -241,10 +238,7 @@ def _optimize_diffusion_graph(self, diffusion: onnx.ModelProto) -> onnx.ModelPro 'mel': (1, 'n_frames', hparams['audio_num_mel_bins']) }) print(f'Running ONNX simplifier #1 for {self.diffusion_class_name}...') - diffusion, check = onnxsim.simplify( - diffusion, - include_subgraph=True - ) + diffusion, check = onnxsim.simplify(diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' onnx_helper.graph_fold_back_to_squeeze(diffusion.graph) onnx_helper.graph_extract_conditioner_projections( diff --git a/deployment/exporters/export_nsf_hifigan.py b/deployment/exporters/export_nsf_hifigan.py deleted file mode 100644 index 52c7070cd..000000000 --- a/deployment/exporters/export_nsf_hifigan.py +++ /dev/null @@ -1,319 +0,0 @@ -import json -import os -import sys - -import numpy as np -import onnx -import onnxsim -import torch -import torch.nn as nn -import torch.nn.functional as F -from torch.nn import Conv1d, ConvTranspose1d -from torch.nn.utils import weight_norm, remove_weight_norm - -from modules.nsf_hifigan.env import AttrDict -from modules.nsf_hifigan.models import ResBlock1, ResBlock2 -from modules.nsf_hifigan.utils import init_weights -from utils.hparams import set_hparams, hparams - -LRELU_SLOPE = 0.1 - - -class SineGen(torch.nn.Module): - """ Definition of sine generator - SineGen(samp_rate, harmonic_num = 0, - sine_amp = 0.1, noise_std = 0.003, - voiced_threshold = 0, - flag_for_pulse=False) - samp_rate: sampling rate in Hz - harmonic_num: number of harmonic overtones (default 0) - sine_amp: amplitude of sine-wavefrom (default 0.1) - noise_std: std of Gaussian noise (default 0.003) - voiced_thoreshold: F0 threshold for U/V classification (default 0) - flag_for_pulse: this SinGen is used inside PulseGen (default False) - Note: when flag_for_pulse is True, the first time step of a voiced - segment is always sin(np.pi) or cos(0) - """ - - def __init__(self, samp_rate, harmonic_num=0, - sine_amp=0.1, noise_std=0.003, - voiced_threshold=0): - super(SineGen, self).__init__() - self.sine_amp = sine_amp - self.noise_std = noise_std - self.harmonic_num = harmonic_num - self.dim = self.harmonic_num + 1 - self.sampling_rate = samp_rate - self.voiced_threshold = voiced_threshold - - def _f02sine(self, f0_values, upp): - """ f0_values: (batchsize, length, dim) - where dim indicates fundamental tone and overtones - """ - rad_values = (f0_values / self.sampling_rate).fmod(1.) 
###%1意味着n_har的乘积无法后处理优化 - rand_ini = torch.rand(1, self.dim, device=f0_values.device) - rand_ini[:, 0] = 0 - rad_values[:, 0, :] += rand_ini - is_half = rad_values.dtype is not torch.float32 - tmp_over_one = torch.cumsum(rad_values.double(), 1) # % 1 #####%1意味着后面的cumsum无法再优化 - if is_half: - tmp_over_one = tmp_over_one.half() - else: - tmp_over_one = tmp_over_one.float() - tmp_over_one *= upp - tmp_over_one = F.interpolate( - tmp_over_one.transpose(2, 1), scale_factor=upp, - mode='linear', align_corners=True - ).transpose(2, 1) - rad_values = F.interpolate(rad_values.transpose(2, 1), scale_factor=upp, mode='nearest').transpose(2, 1) - tmp_over_one = tmp_over_one.fmod(1.) - diff = F.conv2d( - tmp_over_one.unsqueeze(1), torch.FloatTensor([[[[-1.], [1.]]]]).to(tmp_over_one.device), - stride=(1, 1), padding=0, dilation=(1, 1) - ).squeeze(1) # Equivalent to torch.diff, but able to export ONNX - cumsum_shift = (diff < 0).double() - cumsum_shift = torch.cat(( - torch.zeros((1, 1, self.dim), dtype=torch.double).to(f0_values.device), - cumsum_shift - ), dim=1) - sines = torch.sin(torch.cumsum(rad_values.double() + cumsum_shift, dim=1) * 2 * np.pi) - if is_half: - sines = sines.half() - else: - sines = sines.float() - return sines - - - @torch.no_grad() - def forward(self, f0, upp): - """ sine_tensor, uv = forward(f0) - input F0: tensor(batchsize=1, length, dim=1) - f0 for unvoiced steps should be 0 - output sine_tensor: tensor(batchsize=1, length, dim) - output uv: tensor(batchsize=1, length, 1) - """ - f0 = f0.unsqueeze(-1) - fn = torch.multiply(f0, torch.arange(1, self.dim + 1, device=f0.device).reshape((1, 1, -1))) - sine_waves = self._f02sine(fn, upp) - sine_waves = sine_waves * self.sine_amp - uv = (f0 > self.voiced_threshold).float() - uv = F.interpolate(uv.transpose(2, 1), scale_factor=upp, mode='nearest').transpose(2, 1) - noise_amp = uv * self.noise_std + (1 - uv) * self.sine_amp / 3 - noise = noise_amp * torch.randn_like(sine_waves) - sine_waves = sine_waves * uv + noise - return sine_waves - - -class SourceModuleHnNSF(torch.nn.Module): - """ SourceModule for hn-nsf - SourceModule(sampling_rate, harmonic_num=0, sine_amp=0.1, - add_noise_std=0.003, voiced_threshod=0) - sampling_rate: sampling_rate in Hz - harmonic_num: number of harmonic above F0 (default: 0) - sine_amp: amplitude of sine source signal (default: 0.1) - add_noise_std: std of additive Gaussian noise (default: 0.003) - note that amplitude of noise in unvoiced is decided - by sine_amp - voiced_threshold: threhold to set U/V given F0 (default: 0) - Sine_source, noise_source = SourceModuleHnNSF(F0_sampled) - F0_sampled (batchsize, length, 1) - Sine_source (batchsize, length, 1) - noise_source (batchsize, length 1) - uv (batchsize, length, 1) - """ - - def __init__(self, sampling_rate, harmonic_num=0, sine_amp=0.1, - add_noise_std=0.003, voiced_threshod=0): - super(SourceModuleHnNSF, self).__init__() - - self.sine_amp = sine_amp - self.noise_std = add_noise_std - - # to produce sine waveforms - self.l_sin_gen = SineGen(sampling_rate, harmonic_num, sine_amp, add_noise_std, voiced_threshod) - - # to merge source harmonics into a single excitation - self.l_linear = torch.nn.Linear(harmonic_num + 1, 1) - self.l_tanh = torch.nn.Tanh() - - def forward(self, x, upp): - sine_wavs = self.l_sin_gen(x, upp) - sine_merge = self.l_tanh(self.l_linear(sine_wavs)) - return sine_merge - - -class Generator(torch.nn.Module): - def __init__(self, h): - super(Generator, self).__init__() - self.h = h - self.num_kernels = len(h.resblock_kernel_sizes) - 
self.num_upsamples = len(h.upsample_rates) - self.m_source = SourceModuleHnNSF( - sampling_rate=h.sampling_rate, - harmonic_num=8 - ) - self.noise_convs = nn.ModuleList() - self.conv_pre = weight_norm(Conv1d(h.num_mels, h.upsample_initial_channel, 7, 1, padding=3)) - resblock = ResBlock1 if h.resblock == '1' else ResBlock2 - - self.ups = nn.ModuleList() - for i, (u, k) in enumerate(zip(h.upsample_rates, h.upsample_kernel_sizes)): - c_cur = h.upsample_initial_channel // (2 ** (i + 1)) - self.ups.append(weight_norm( - ConvTranspose1d(h.upsample_initial_channel // (2 ** i), h.upsample_initial_channel // (2 ** (i + 1)), - k, u, padding=(k - u) // 2))) - if i + 1 < len(h.upsample_rates): # - stride_f0 = np.prod(h.upsample_rates[i + 1:]) - self.noise_convs.append(Conv1d( - 1, c_cur, kernel_size=stride_f0 * 2, stride=int(stride_f0), padding=stride_f0 // 2)) - else: - self.noise_convs.append(Conv1d(1, c_cur, kernel_size=1)) - self.resblocks = nn.ModuleList() - ch = h.upsample_initial_channel - for i in range(len(self.ups)): - ch //= 2 - for j, (k, d) in enumerate(zip(h.resblock_kernel_sizes, h.resblock_dilation_sizes)): - self.resblocks.append(resblock(h, ch, k, d)) - - self.conv_post = weight_norm(Conv1d(ch, 1, 7, 1, padding=3)) - self.ups.apply(init_weights) - self.conv_post.apply(init_weights) - self.upp = int(np.prod(h.upsample_rates)) - - def forward(self, x, f0): - har_source = self.m_source(f0, self.upp).transpose(1, 2) - x = self.conv_pre(x) - for i in range(self.num_upsamples): - x = F.leaky_relu(x, LRELU_SLOPE) - - x = self.ups[i](x) - x_source = self.noise_convs[i](har_source) - - x = x + x_source - xs = None - for j in range(self.num_kernels): - if xs is None: - xs = self.resblocks[i * self.num_kernels + j](x) - else: - xs += self.resblocks[i * self.num_kernels + j](x) - x = xs / self.num_kernels - x = F.leaky_relu(x) - x = self.conv_post(x) - x = torch.tanh(x) - x = x.squeeze(1) - return x - - def remove_weight_norm(self): - print('Removing weight norm...') - for up in self.ups: - remove_weight_norm(up) - for block in self.resblocks: - block.remove_weight_norm() - remove_weight_norm(self.conv_pre) - remove_weight_norm(self.conv_post) - - -class NsfHiFiGAN(torch.nn.Module): - def __init__(self, device=None): - super().__init__() - if device is None: - device = 'cuda' if torch.cuda.is_available() else 'cpu' - self.device = device - model_path = hparams['vocoder_ckpt'] - self.generator, self.hparams = load_model(model_path, device) - print(f'| load \'NSF-HiFiGAN\' from \'{model_path}\'.') - - def forward(self, mel: torch.Tensor, f0: torch.Tensor): - mel = mel.transpose(2, 1) * 2.30259 - wav = self.generator.forward(mel, f0) - return wav - - -def load_model(model_path, device): - config_file = os.path.join(os.path.split(model_path)[0], 'config.json') - with open(config_file) as f: - data = f.read() - - json_config = json.loads(data) - h = AttrDict(json_config) - - generator = Generator(h).to(device) - - cp_dict = torch.load(model_path) - generator.load_state_dict(cp_dict['generator']) - generator.eval() - generator.remove_weight_norm() - del cp_dict - return generator, h - - -def simplify(src, target): - model = onnx.load(src) - - in_dims = model.graph.input[0].type.tensor_type.shape.dim - outputs = model.graph.output - new_output = onnx.helper.make_value_info( - name=outputs[0].name, - type_proto=onnx.helper.make_tensor_type_proto( - elem_type=onnx.TensorProto.FLOAT, - shape=(in_dims[0].dim_value, 'n_samples') - ) - ) - outputs.remove(outputs[0]) - outputs.insert(0, new_output) - 
print(f'| annotate output: \'{model.graph.output[0].name}\'') - - print('Running ONNX simplifier...') - model, check = onnxsim.simplify(model, include_subgraph=True) - assert check, 'Simplified ONNX model could not be validated' - - onnx.save(model, target) - print('Graph simplified.') - - -def export(model_path): - set_hparams(print_hparams=False) - device = 'cuda' if torch.cuda.is_available() else 'cpu' - vocoder = NsfHiFiGAN(device) - n_frames = 10 - - with torch.no_grad(): - mel = torch.rand((1, n_frames, 128), device=device) - f0 = torch.rand((1, n_frames), device=device) - torch.onnx.export( - vocoder, - ( - mel, - f0 - ), - model_path, - input_names=[ - 'mel', - 'f0' - ], - output_names=[ - 'waveform' - ], - dynamic_axes={ - 'mel': { - 1: 'n_frames', - }, - 'f0': { - 1: 'n_frames' - } - }, - opset_version=13 - ) - print('PyTorch ONNX export finished.') - - -if __name__ == '__main__': - sys.argv = [ - 'inference/ds_acoustic.py', - '--config', - 'configs/acoustic.yaml', - ] - path = 'deployment/assets/nsf_hifigan2.onnx' - export(path) - simplify(path, path) - print(f'| export \'NSF-HiFiGAN\' to \'{path}\'.') diff --git a/deployment/exporters/nsf_hifigan_exporter.py b/deployment/exporters/nsf_hifigan_exporter.py new file mode 100644 index 000000000..9b5cd7166 --- /dev/null +++ b/deployment/exporters/nsf_hifigan_exporter.py @@ -0,0 +1,94 @@ +import json +from pathlib import Path +from typing import Union + +import onnx +import onnxsim +import torch +from torch import nn + +from basics.base_exporter import BaseExporter +from deployment.modules.nsf_hifigan import NSFHiFiGANONNX +from utils.hparams import hparams + + +class NSFHiFiGANExporter(BaseExporter): + def __init__( + self, + device: Union[str, torch.device] = 'cpu', + cache_dir: Path = None, + model_path: Path = None, + model_name: str = 'nsf_hifigan' + ): + super().__init__(device=device, cache_dir=cache_dir) + self.model_path = model_path + self.model_name = model_name + self.model = self.build_model() + self.model_class_name = self.model.__class__.__name__.removesuffix('ONNX') + self.model_cache_path = (self.cache_dir / self.model_name).with_suffix('.onnx') + + def build_model(self) -> nn.Module: + config_path = self.model_path.with_name('config.json') + with open(config_path, 'r', encoding='utf8') as f: + config = json.load(f) + model = NSFHiFiGANONNX(config) + cp_dict = torch.load(self.model_path, map_location=self.device) + model.generator.load_state_dict(cp_dict['generator']) + del cp_dict + model.generator.remove_weight_norm() + model.eval() + return model + + def export(self, path: Path): + path.mkdir(parents=True, exist_ok=True) + self.export_model(path / self.model_cache_path.name) + + def export_model(self, path: Path): + self._torch_export_model() + model_onnx = self._optimize_model_graph(onnx.load(self.model_cache_path)) + onnx.save(model_onnx, path) + self.model_cache_path.unlink() + print(f'| export model => {path}') + + @torch.no_grad() + def _torch_export_model(self): + # Prepare inputs for NSFHiFiGAN + n_frames = 10 + mel = torch.randn((1, n_frames, hparams['audio_num_mel_bins']), dtype=torch.float32, device=self.device) + f0 = torch.randn((1, n_frames), dtype=torch.float32, device=self.device) + 440. 
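        # The dummy values themselves do not matter for the exported graph,
        # only shapes and dtypes do; f0 is offset by 440 Hz so the traced
        # SineGen path sees plausible voiced frames. The resulting 'waveform'
        # should be n_frames * hop_size samples long (hop_size being the
        # product of the upsample rates), which is why it gets its own
        # 'n_samples' dynamic axis below instead of reusing 'n_frames'.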
+ + # PyTorch ONNX export for NSFHiFiGAN + print(f'Exporting {self.model_class_name}...') + torch.onnx.export( + self.model, + ( + mel, + f0 + ), + self.model_cache_path, + input_names=[ + 'mel', + 'f0' + ], + output_names=[ + 'waveform' + ], + dynamic_axes={ + 'mel': { + 1: 'n_frames' + }, + 'f0': { + 1: 'n_frames' + }, + 'waveform': { + 1: 'n_samples' + } + }, + opset_version=15 + ) + + def _optimize_model_graph(self, model: onnx.ModelProto) -> onnx.ModelProto: + print(f'Running ONNX simplifier for {self.model_class_name}...') + model, check = onnxsim.simplify(model, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + return model diff --git a/deployment/modules/nsf_hifigan.py b/deployment/modules/nsf_hifigan.py new file mode 100644 index 000000000..b93c4e0be --- /dev/null +++ b/deployment/modules/nsf_hifigan.py @@ -0,0 +1,16 @@ +import torch + +from modules.nsf_hifigan.env import AttrDict +from modules.nsf_hifigan.models import Generator + + +# noinspection SpellCheckingInspection +class NSFHiFiGANONNX(torch.nn.Module): + def __init__(self, attrs: dict): + super().__init__() + self.generator = Generator(AttrDict(attrs)) + + def forward(self, mel: torch.Tensor, f0: torch.Tensor): + mel = mel.transpose(1, 2) * 2.30259 + wav = self.generator(mel, f0) + return wav.squeeze(1) diff --git a/modules/nsf_hifigan/env.py b/modules/nsf_hifigan/env.py index b4b8de71f..b576e130e 100644 --- a/modules/nsf_hifigan/env.py +++ b/modules/nsf_hifigan/env.py @@ -1,7 +1,3 @@ -import os -import shutil - - class AttrDict(dict): def __init__(self, *args, **kwargs): super(AttrDict, self).__init__(*args, **kwargs) diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 6ab05789c..223cbb68f 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -15,7 +15,7 @@ LRELU_SLOPE = 0.1 -def load_model(model_path, device='cuda'): +def load_model(model_path): config_file = os.path.join(os.path.split(model_path)[0], 'config.json') with open(config_file) as f: data = f.read() @@ -105,9 +105,9 @@ class SineGen(torch.nn.Module): flag_for_pulse=False) samp_rate: sampling rate in Hz harmonic_num: number of harmonic overtones (default 0) - sine_amp: amplitude of sine-wavefrom (default 0.1) + sine_amp: amplitude of sine-waveform (default 0.1) noise_std: std of Gaussian noise (default 0.003) - voiced_thoreshold: F0 threshold for U/V classification (default 0) + voiced_threshold: F0 threshold for U/V classification (default 0) flag_for_pulse: this SinGen is used inside PulseGen (default False) Note: when flag_for_pulse is True, the first time step of a voiced segment is always sin(np.pi) or cos(0) @@ -130,20 +130,14 @@ def _f02uv(self, f0): uv = uv * (f0 > self.voiced_threshold) return uv - @torch.no_grad() - def forward(self, f0, upp): - """ sine_tensor, uv = forward(f0) - input F0: tensor(batchsize=1, length, dim=1) - f0 for unvoiced steps should be 0 - output sine_tensor: tensor(batchsize=1, length, dim) - output uv: tensor(batchsize=1, length, 1) + def _f02sine(self, f0_values, upp): + """ f0_values: (batchsize, length, dim) + where dim indicates fundamental tone and overtones """ - f0 = f0.unsqueeze(-1) - fn = torch.multiply(f0, torch.arange(1, self.dim + 1, device=f0.device).reshape((1, 1, -1))) - rad_values = (fn / self.sampling_rate) % 1 ###%1意味着n_har的乘积无法后处理优化 - rand_ini = torch.rand(fn.shape[0], fn.shape[2], device=fn.device) + rad_values = (f0_values / self.sampling_rate).fmod(1.) 
# the %1 (fmod) means the n_har multiplication cannot be optimized away in post-processing
+        rand_ini = torch.rand(1, self.dim, device=f0_values.device)
         rand_ini[:, 0] = 0
-        rad_values[:, 0, :] = rad_values[:, 0, :] + rand_ini
+        rad_values[:, 0, :] += rand_ini
         is_half = rad_values.dtype is not torch.float32
         tmp_over_one = torch.cumsum(rad_values.double(), 1)  # % 1  # the %1 means the following cumsum can no longer be optimized
         if is_half:
@@ -156,24 +150,40 @@ def forward(self, f0, upp):
             mode='linear', align_corners=True
         ).transpose(2, 1)
         rad_values = F.interpolate(rad_values.transpose(2, 1), scale_factor=upp, mode='nearest').transpose(2, 1)
-        tmp_over_one %= 1
-        tmp_over_one_idx = (tmp_over_one[:, 1:, :] - tmp_over_one[:, :-1, :]) < 0
-        cumsum_shift = torch.zeros_like(rad_values)
-        cumsum_shift[:, 1:, :] = tmp_over_one_idx * -1.0
-        rad_values = rad_values.double()
-        cumsum_shift = cumsum_shift.double()
-        sine_waves = torch.sin(torch.cumsum(rad_values + cumsum_shift, dim=1) * 2 * np.pi)
+        tmp_over_one = tmp_over_one.fmod(1.)
+        diff = F.conv2d(
+            tmp_over_one.unsqueeze(1), torch.FloatTensor([[[[-1.], [1.]]]]).to(tmp_over_one.device),
+            stride=(1, 1), padding=0, dilation=(1, 1)
+        ).squeeze(1)  # Equivalent to torch.diff, but able to export ONNX
+        cumsum_shift = (diff < 0).double()
+        cumsum_shift = torch.cat((
+            torch.zeros((1, 1, self.dim), dtype=torch.double).to(f0_values.device),
+            cumsum_shift
+        ), dim=1)
+        sines = torch.sin(torch.cumsum(rad_values.double() + cumsum_shift, dim=1) * 2 * np.pi)
         if is_half:
-            sine_waves = sine_waves.half()
+            sines = sines.half()
         else:
-            sine_waves = sine_waves.float()
-        sine_waves = sine_waves * self.sine_amp
-        uv = self._f02uv(f0)
+            sines = sines.float()
+        return sines
+
+    @torch.no_grad()
+    def forward(self, f0, upp):
+        """ sine_tensor, uv = forward(f0)
+        input F0: tensor(batchsize=1, length, dim=1)
+                  f0 for unvoiced steps should be 0
+        output sine_tensor: tensor(batchsize=1, length, dim)
+        output uv: tensor(batchsize=1, length, 1)
+        """
+        f0 = f0.unsqueeze(-1)
+        fn = torch.multiply(f0, torch.arange(1, self.dim + 1, device=f0.device).reshape((1, 1, -1)))
+        sine_waves = self._f02sine(fn, upp) * self.sine_amp
+        uv = (f0 > self.voiced_threshold).float()
         uv = F.interpolate(uv.transpose(2, 1), scale_factor=upp, mode='nearest').transpose(2, 1)
         noise_amp = uv * self.noise_std + (1 - uv) * self.sine_amp / 3
         noise = noise_amp * torch.randn_like(sine_waves)
         sine_waves = sine_waves * uv + noise
-        return sine_waves, uv, noise
+        return sine_waves
 
 
 class SourceModuleHnNSF(torch.nn.Module):
@@ -195,7 +205,7 @@ class SourceModuleHnNSF(torch.nn.Module):
     """
 
     def __init__(self, sampling_rate, harmonic_num=0, sine_amp=0.1,
-                 add_noise_std=0.003, voiced_threshod=0):
+                 add_noise_std=0.003, voiced_threshold=0):
         super(SourceModuleHnNSF, self).__init__()
 
         self.sine_amp = sine_amp
@@ -203,14 +213,14 @@ def __init__(self, sampling_rate, harmonic_num=0, sine_amp=0.1,
 
         # to produce sine waveforms
         self.l_sin_gen = SineGen(sampling_rate, harmonic_num,
-                                 sine_amp, add_noise_std, voiced_threshod)
+                                 sine_amp, add_noise_std, voiced_threshold)
 
         # to merge source harmonics into a single excitation
         self.l_linear = torch.nn.Linear(harmonic_num + 1, 1)
         self.l_tanh = torch.nn.Tanh()
 
     def forward(self, x, upp):
-        sine_wavs, uv, _ = self.l_sin_gen(x, upp)
+        sine_wavs = self.l_sin_gen(x, upp)
         sine_merge = self.l_tanh(self.l_linear(sine_wavs))
         return sine_merge
 
diff --git a/scripts/export.py b/scripts/export.py
index 9867ec28a..de6718517 100644
--- a/scripts/export.py
+++ b/scripts/export.py
@@ -11,7 +11,7 @@
 os.environ['PYTHONPATH'] = str(root_dir)
 sys.path.insert(0, str(root_dir))
 
-from utils.hparams import 
set_hparams +from utils.hparams import set_hparams, hparams @click.group() @@ -34,7 +34,7 @@ def main(): @click.option('--freeze_spk', type=str, required=False) def acoustic( exp: str, - ckpt: int, + ckpt: int = None, out: str = None, expose_gender: bool = False, freeze_gender: float = 0., @@ -121,5 +121,39 @@ def acoustic( exporter.export(out) +@main.command(help='Export NSF-HiFiGAN vocoder model to ONNX format.') +@click.option('--config', type=str, required=True, metavar='', help='Specify a config path of the vocoder') +@click.option('--out', type=str, required=False, metavar='', help='Output directory for the artifacts.') +@click.option('--name', type=str, required=False, metavar='', default='nsf_hifigan', show_default=False, + help='Specify filename (without suffix) of the target model file.') +def nsf_hifigan( + config: str, + out: str = None, + name: str = None +): + # Check arguments + if not Path(config).resolve().exists(): + raise FileNotFoundError(f'{config} is not a valid config path.') + if out is None: + out = root_dir / 'artifacts' / 'nsf_hifigan' + else: + out = Path(out) + out = out.resolve() + + # Load configurations + set_hparams(config) + + # Export artifacts + from deployment.exporters import NSFHiFiGANExporter + exporter = NSFHiFiGANExporter( + device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'), + cache_dir=root_dir / 'deployment' / 'cache', + model_path=Path(hparams['vocoder_ckpt']).resolve(), + model_name=name + ) + print(f'| Exporter: {exporter.__class__}') + exporter.export(out) + + if __name__ == '__main__': main() From c9a8b105e5d77a6422a3d5501d3ec7638f48031a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 8 Apr 2023 19:20:57 +0800 Subject: [PATCH 207/475] Update checkpoints loading for NSF-HiFiGAN --- deployment/exporters/acoustic_exporter.py | 4 +-- deployment/exporters/nsf_hifigan_exporter.py | 10 +++--- inference/ds_acoustic.py | 4 +-- modules/vocoders/nsf_hifigan.py | 2 +- scripts/export.py | 2 +- utils/__init__.py | 36 ++++++++++++-------- 6 files changed, 33 insertions(+), 25 deletions(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 05e4201bc..d31684ced 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -59,8 +59,8 @@ def build_model(self) -> DiffSingerAcousticONNX: vocab_size=len(self.vocab), out_dims=hparams['audio_num_mel_bins'] ).eval().to(self.device) - load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=self.ckpt_steps, - required_category='acoustic', strict=True, device=self.device) + load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps, required_category='acoustic', + prefix_in_ckpt='model', strict=True, device=self.device) return model def export(self, path: Path): diff --git a/deployment/exporters/nsf_hifigan_exporter.py b/deployment/exporters/nsf_hifigan_exporter.py index 9b5cd7166..fb893edfd 100644 --- a/deployment/exporters/nsf_hifigan_exporter.py +++ b/deployment/exporters/nsf_hifigan_exporter.py @@ -9,6 +9,7 @@ from basics.base_exporter import BaseExporter from deployment.modules.nsf_hifigan import NSFHiFiGANONNX +from utils import load_ckpt from utils.hparams import hparams @@ -31,12 +32,11 @@ def build_model(self) -> nn.Module: config_path = self.model_path.with_name('config.json') with open(config_path, 'r', encoding='utf8') as f: config = json.load(f) - model = NSFHiFiGANONNX(config) - cp_dict = torch.load(self.model_path, map_location=self.device) - 
model.generator.load_state_dict(cp_dict['generator']) - del cp_dict + model = NSFHiFiGANONNX(config).eval().to(self.device) + load_ckpt(model.generator, str(self.model_path), + prefix_in_ckpt=None, key_in_ckpt='generator', + strict=True, device=self.device) model.generator.remove_weight_norm() - model.eval() return model def export(self, path: Path): diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 6f7fefa74..320ebebea 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -37,8 +37,8 @@ def build_model(self, ckpt_steps=None): vocab_size=len(self.ph_encoder), out_dims=hparams['audio_num_mel_bins'] ).eval().to(self.device) - load_ckpt(model, hparams['work_dir'], 'model', ckpt_steps=ckpt_steps, - required_category='acoustic', strict=True, device=self.device) + load_ckpt(model, hparams['work_dir'], ckpt_steps=ckpt_steps, required_category='acoustic', + prefix_in_ckpt='model', strict=True, device=self.device) return model def build_vocoder(self): diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index b540ef3ec..19a1ba703 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -15,7 +15,7 @@ @register_vocoder class NsfHifiGAN(BaseVocoder): - def __init__(self, device=None): + def __init__(self): model_path = hparams['vocoder_ckpt'] assert os.path.exists(model_path), 'HifiGAN model file is not found!' rank_zero_info('| Load HifiGAN: ' + model_path) diff --git a/scripts/export.py b/scripts/export.py index de6718517..632d7764f 100644 --- a/scripts/export.py +++ b/scripts/export.py @@ -122,7 +122,7 @@ def acoustic( @main.command(help='Export NSF-HiFiGAN vocoder model to ONNX format.') -@click.option('--config', type=str, required=True, metavar='', help='Specify a config path of the vocoder') +@click.option('--config', type=str, required=True, metavar='', help='Specify a config path of the vocoder.') @click.option('--out', type=str, required=False, metavar='', help='Output directory for the artifacts.') @click.option('--name', type=str, required=False, metavar='', default='nsf_hifigan', show_default=False, help='Specify filename (without suffix) of the target model file.') diff --git a/utils/__init__.py b/utils/__init__.py index 50ea11fd4..51a76a6c4 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,8 +1,7 @@ import glob +import os import re import time -import os -import sys import types from collections import OrderedDict @@ -108,9 +107,7 @@ def make_positions(tensor, padding_idx): # prefers ints, cumsum defaults to output longs, and ONNX doesn't know # how to handle the dtype kwarg in cumsum. 
mask = tensor.ne(padding_idx).int() - return ( - torch.cumsum(mask, dim=1).type_as(mask) * mask - ).long() + padding_idx + return (torch.cumsum(mask, dim=1).type_as(mask) * mask).long() + padding_idx def softmax(x, dim): @@ -131,10 +128,12 @@ def unpack_dict_to_list(samples): return samples_ -def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', required_category=None, - ckpt_steps=None, strict=True, device='cpu'): +def load_ckpt( + cur_model, ckpt_base_dir, ckpt_steps=None, + required_category=None, prefix_in_ckpt='model', key_in_ckpt='state_dict', + strict=True, device='cpu' +): if os.path.isfile(ckpt_base_dir): - ckpt_base_dir = os.path.dirname(ckpt_base_dir) checkpoint_path = [ckpt_base_dir] elif ckpt_steps is not None: checkpoint_path = [os.path.join(ckpt_base_dir, f'model_ckpt_steps_{int(ckpt_steps)}.ckpt')] @@ -158,11 +157,15 @@ def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', required_categor raise TypeError(f'The \'{required_category}\' argument can only be used ' f'on a \'basics.base_model.CategorizedModule\'.') cur_model.check_category(ckpt_loaded.get('category')) - state_dict = ckpt_loaded['state_dict'] - state_dict = OrderedDict({ - k[len(prefix_in_ckpt) + 1:]: v - for k, v in state_dict.items() if k.startswith(f'{prefix_in_ckpt}.') - }) + if key_in_ckpt is None: + state_dict = ckpt_loaded + else: + state_dict = ckpt_loaded[key_in_ckpt] + if prefix_in_ckpt is not None: + state_dict = OrderedDict({ + k[len(prefix_in_ckpt) + 1:]: v + for k, v in state_dict.items() if k.startswith(f'{prefix_in_ckpt}.') + }) if not strict: cur_model_state_dict = cur_model.state_dict() unmatched_keys = [] @@ -175,7 +178,12 @@ def load_ckpt(cur_model, ckpt_base_dir, prefix_in_ckpt='model', required_categor for key in unmatched_keys: del state_dict[key] cur_model.load_state_dict(state_dict, strict=strict) - print(f'| load \'{prefix_in_ckpt}\' from \'{checkpoint_path}\'.') + shown_model_name = 'state dict' + if prefix_in_ckpt is not None: + shown_model_name = f'\'{prefix_in_ckpt}\'' + elif key_in_ckpt is not None: + shown_model_name = f'\'{key_in_ckpt}\'' + print(f'| load {shown_model_name} from \'{checkpoint_path}\'.') def remove_padding(x, padding_idx=0): From 2d914210acab7202d99f0c49365836ce56def1ff Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 9 Apr 2023 13:16:23 +0800 Subject: [PATCH 208/475] Fix bugs for spk_embed --- deployment/exporters/acoustic_exporter.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index d31684ced..d3c40a5f9 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -52,7 +52,7 @@ def __init__( key_shift = max(min(key_shift, shift_max), shift_min) # clip key shift self.model.fs2.register_buffer('frozen_key_shift', torch.FloatTensor([key_shift]).to(self.device)) if hparams['use_spk_id'] and not self.export_spk and freeze_spk is not None: - self.model.fs2.register_buffer('spk_mix_embed', self._perform_spk_mix(freeze_spk[1])) + self.model.fs2.register_buffer('frozen_spk_embed', self._perform_spk_mix(freeze_spk[1])) def build_model(self) -> DiffSingerAcousticONNX: model = DiffSingerAcousticONNX( @@ -123,7 +123,7 @@ def _torch_export_model(self): dynamix_axes['velocity'] = { 1: 'n_frames' } - if hparams['use_spk_id']: + if hparams['use_spk_id'] and not self.freeze_spk: kwargs['spk_embed'] = torch.rand( (1, n_frames, hparams['hidden_size']), dtype=torch.float32, device=self.device @@ -206,6 
+206,7 @@ def _torch_export_model(self): opset_version=15 ) + @torch.no_grad() def _perform_spk_mix(self, spk_mix: Dict[str, float]): spk_mix_ids = [] spk_mix_values = [] From 81826ae1bbbdd9f9bb2892f7ffc04ab7c31e2527 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 9 Apr 2023 13:16:50 +0800 Subject: [PATCH 209/475] Adjust stdout --- scripts/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/export.py b/scripts/export.py index 632d7764f..5c6143024 100644 --- a/scripts/export.py +++ b/scripts/export.py @@ -107,6 +107,7 @@ def acoustic( # Export artifacts from deployment.exporters import DiffSingerAcousticExporter + print(f'| Exporter: {DiffSingerAcousticExporter}') exporter = DiffSingerAcousticExporter( device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'), cache_dir=root_dir / 'deployment' / 'cache', @@ -117,7 +118,6 @@ def acoustic( export_spk=export_spk_mix, freeze_spk=freeze_spk_mix ) - print(f'| Exporter: {exporter.__class__}') exporter.export(out) @@ -145,13 +145,13 @@ def nsf_hifigan( # Export artifacts from deployment.exporters import NSFHiFiGANExporter + print(f'| Exporter: {NSFHiFiGANExporter}') exporter = NSFHiFiGANExporter( device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'), cache_dir=root_dir / 'deployment' / 'cache', model_path=Path(hparams['vocoder_ckpt']).resolve(), model_name=name ) - print(f'| Exporter: {exporter.__class__}') exporter.export(out) From 0a44ba37a6ef1d9a393b9210164baf197de06163 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 9 Apr 2023 22:12:37 +0800 Subject: [PATCH 210/475] Fix AttributeError and wrong paths of .emb artifacts --- deployment/exporters/acoustic_exporter.py | 2 +- deployment/modules/fastspeech2.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index d3c40a5f9..c3eeec241 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -84,7 +84,7 @@ def export_model(self, path: Path): def export_attachments(self, path: Path): path_model_name = path / self.model_name for spk in self.export_spk: - self._export_spk_embed(path.with_suffix(f'.{spk[0]}.emb'), self._perform_spk_mix(spk[1])) + self._export_spk_embed(path_model_name.with_suffix(f'.{spk[0]}.emb'), self._perform_spk_mix(spk[1])) self._export_dictionary(path / 'dictionary.txt') self._export_phonemes(path_model_name.with_suffix('.phonemes.txt')) diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py index 8ab0a8bfc..bf7987602 100644 --- a/deployment/modules/fastspeech2.py +++ b/deployment/modules/fastspeech2.py @@ -61,7 +61,7 @@ def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=N if hparams.get('use_key_shift_embed', False): if hasattr(self, 'frozen_key_shift'): - key_shift_embed = self.key_shift_embed(self.key_shift[:, None, None]) + key_shift_embed = self.key_shift_embed(self.frozen_key_shift[:, None, None]) else: gender = torch.clip(gender, min=-1., max=1.) 
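# The gender curve in [-1, 1] is mapped piecewise-linearly onto the
                 # configured key-shift range, following the same convention as the
                 # frozen-gender logic in acoustic_exporter.py:
                 #     key_shift = gender * shift_max        if gender >= 0
                 #     key_shift = gender * abs(shift_min)   if gender < 0
                 # The masked blend computed below evaluates this frame by frame.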
                 gender_mask = (gender < 0.).float()

From 6a6de253ef0317816682c1275b3104b6cf79b0b6 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 9 Apr 2023 23:44:26 +0800
Subject: [PATCH 211/475] Optimize spk export and freeze logic

- if there is only one speaker, freeze him/her by default
- if there are multiple speakers and neither --freeze_spk nor --export_spk is
  set, export them all
---
 deployment/exporters/acoustic_exporter.py | 14 ++++++++++++--
 scripts/export.py                         |  3 ++-
 2 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py
index c3eeec241..25fa0c35c 100644
--- a/deployment/exporters/acoustic_exporter.py
+++ b/deployment/exporters/acoustic_exporter.py
@@ -51,8 +51,18 @@ def __init__(
             key_shift = freeze_gender * shift_max if freeze_gender >= 0. else freeze_gender * abs(shift_min)
             key_shift = max(min(key_shift, shift_max), shift_min)  # clip key shift
             self.model.fs2.register_buffer('frozen_key_shift', torch.FloatTensor([key_shift]).to(self.device))
-        if hparams['use_spk_id'] and not self.export_spk and freeze_spk is not None:
-            self.model.fs2.register_buffer('frozen_spk_embed', self._perform_spk_mix(freeze_spk[1]))
+        if hparams['use_spk_id']:
+            if not self.export_spk and freeze_spk is None:
+                # In case the user did not specify any speaker settings:
+                if len(self.spk_map) == 1:
+                    # If there is only one speaker, freeze him/her.
+                    first_spk = next(iter(self.spk_map.keys()))
+                    freeze_spk = (first_spk, {first_spk: 1.0})
+                else:
+                    # If there are multiple speakers, export them all.
+                    self.export_spk = [(name, {name: 1.0}) for name in self.spk_map.keys()]
+            if freeze_spk is not None:
+                self.model.fs2.register_buffer('frozen_spk_embed', self._perform_spk_mix(freeze_spk[1]))
 
     def build_model(self) -> DiffSingerAcousticONNX:
         model = DiffSingerAcousticONNX(
diff --git a/scripts/export.py b/scripts/export.py
index 5c6143024..72375b911 100644
--- a/scripts/export.py
+++ b/scripts/export.py
@@ -47,7 +47,8 @@ def acoustic(
         print('--expose_gender is exclusive to --freeze_gender.')
         exit(-1)
     if export_spk and freeze_spk:
-        print('--export_spk is exclusive to --freeze_spk')
+        print('--export_spk is exclusive to --freeze_spk.')
+        exit(-1)
     if freeze_gender is not None:
         assert -1. <= freeze_gender <= 1., 'Frozen gender must be in [-1, 1].'
     if not (root_dir / 'checkpoints' / exp).exists():

From 94325dd79380feba80d4abc4f4118e2789ac2277 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 10 Apr 2023 12:15:06 +0800
Subject: [PATCH 212/475] Finish benchmarks

---
 deployment/benchmarks/infer_acoustic.py    |  1 -
 deployment/benchmarks/infer_nsf_hifigan.py | 38 ++++++++--------------
 2 files changed, 14 insertions(+), 25 deletions(-)

diff --git a/deployment/benchmarks/infer_acoustic.py b/deployment/benchmarks/infer_acoustic.py
index 9af2d083e..588462a84 100644
--- a/deployment/benchmarks/infer_acoustic.py
+++ b/deployment/benchmarks/infer_acoustic.py
@@ -13,7 +13,6 @@
 f0 = np.array([[440.]
* n_frames], dtype=np.float32) speedup = np.array(speedup, dtype=np.int64) - session = ort.InferenceSession('model1.onnx', providers=[provider]) for _ in tqdm.tqdm(range(n_runs)): session.run(['mel'], { diff --git a/deployment/benchmarks/infer_nsf_hifigan.py b/deployment/benchmarks/infer_nsf_hifigan.py index 49ecc05ca..da348c8a4 100644 --- a/deployment/benchmarks/infer_nsf_hifigan.py +++ b/deployment/benchmarks/infer_nsf_hifigan.py @@ -1,26 +1,16 @@ -import time - import numpy as np import onnxruntime as ort -from scipy.io import wavfile - -mel = np.load('deployment/assets/mel.npy') -f0 = np.load('deployment/assets/f0.npy') - -print('mel', mel.shape) -print('f0', f0.shape) - -session = ort.InferenceSession( - 'deployment/assets/nsf_hifigan.onnx', - providers=['CPUExecutionProvider'] -) - -start = time.time() -wav = session.run(['waveform'], {'mel': mel, 'f0': f0})[0] -end = time.time() - -print('waveform', wav.shape) -print('cost', end - start) - - -wavfile.write('deployment/assets/waveform.wav', 44100, wav[0]) +import tqdm + +n_frames = 1000 +n_runs = 20 +mel = np.random.randn(1, n_frames, 128).astype(np.float32) +f0 = np.random.randn(1, n_frames).astype(np.float32) + 440. +provider = 'DmlExecutionProvider' + +session = ort.InferenceSession('nsf_hifigan.onnx', providers=[provider]) +for _ in tqdm.tqdm(range(n_runs)): + session.run(['waveform'], { + 'mel': mel, + 'f0': f0 + }) From 86b1076877d3c4c3ec9269bec44a4900c2e9667d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 10 Apr 2023 21:08:10 +0800 Subject: [PATCH 213/475] Rename hparam keys max_tokens -> max_batch_frames max_sentences -> max_batch_size max_eval_tokens -> max_val_batch_frames max_eval_sentences -> max_val_batch_size decay_steps -> lr_decay_steps gamma -> lr_decay_gamma --- basics/base_dataset.py | 4 +-- basics/base_task.py | 16 +++++------ configs/acoustic.yaml | 8 +++--- configs/base.yaml | 8 +++--- preparation/acoustic_preparation.ipynb | 38 +++++++++++++------------- training/acoustic_task.py | 30 ++++++++++---------- utils/__init__.py | 33 +++++++++++----------- utils/training_utils.py | 28 +++++++++++-------- 8 files changed, 86 insertions(+), 79 deletions(-) diff --git a/basics/base_dataset.py b/basics/base_dataset.py index 7c1d0c59f..031586a19 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -11,7 +11,7 @@ class BaseDataset(Dataset): Base class for datasets. 1. *sizes*: clipped length if "max_frames" is set; - 2. *num_tokens*: + 2. *num_frames*: unclipped length. 
Subclasses should define:
@@ -38,7 +38,7 @@ def collater(self, samples):
     def __len__(self):
         return len(self._sizes)
 
-    def num_tokens(self, index):
+    def num_frames(self, index):
         return self.size(index)
 
     def size(self, index):
diff --git a/basics/base_task.py b/basics/base_task.py
index f2cde27fd..2524c3efd 100644
--- a/basics/base_task.py
+++ b/basics/base_task.py
@@ -62,14 +62,14 @@ def __init__(self, *args, **kwargs):
         self.loaded_optimizer_states_dict = {}
         self.example_input_array = None
 
-        self.max_tokens = hparams['max_tokens']
-        self.max_sentences = hparams['max_sentences']
-        self.max_eval_tokens = hparams['max_eval_tokens']
-        if self.max_eval_tokens == -1:
-            hparams['max_eval_tokens'] = self.max_eval_tokens = self.max_tokens
-        self.max_eval_sentences = hparams['max_eval_sentences']
-        if self.max_eval_sentences == -1:
-            hparams['max_eval_sentences'] = self.max_eval_sentences = self.max_sentences
+        self.max_batch_frames = hparams['max_batch_frames']
+        self.max_batch_size = hparams['max_batch_size']
+        self.max_val_batch_frames = hparams['max_val_batch_frames']
+        if self.max_val_batch_frames == -1:
+            hparams['max_val_batch_frames'] = self.max_val_batch_frames = self.max_batch_frames
+        self.max_val_batch_size = hparams['max_val_batch_size']
+        if self.max_val_batch_size == -1:
+            hparams['max_val_batch_size'] = self.max_val_batch_size = self.max_batch_size
 
         self.training_sampler = None
         self.model = None
diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml
index 325f7db76..edf612ede 100644
--- a/configs/acoustic.yaml
+++ b/configs/acoustic.yaml
@@ -71,10 +71,10 @@ schedule_type: 'linear'
 # train and eval
 num_sanity_val_steps: 1
 lr: 0.0004
-decay_steps: 50000
-gamma: 0.5
-max_tokens: 80000
-max_sentences: 48
+lr_decay_steps: 50000
+lr_decay_gamma: 0.5
+max_batch_frames: 80000
+max_batch_size: 48
 val_with_vocoder: true
 val_check_interval: 2000
 num_valid_plots: 10
diff --git a/configs/base.yaml b/configs/base.yaml
index 94d3e1669..58db5ce89 100644
--- a/configs/base.yaml
+++ b/configs/base.yaml
@@ -68,10 +68,10 @@ log_interval: 100
 num_sanity_val_steps: 5  # steps of validation at the beginning
 val_check_interval: 2000
 max_updates: 120000
-max_tokens: 32000
-max_sentences: 100000
-max_eval_sentences: 1
-max_eval_tokens: 60000
+max_batch_frames: 32000
+max_batch_size: 100000
+max_val_batch_frames: 60000
+max_val_batch_size: 1
 train_set_name: 'train'
 valid_set_name: 'valid'
 vocoder: ''
diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb
index d76bc080e..903c62b3d 100644
--- a/preparation/acoustic_preparation.ipynb
+++ b/preparation/acoustic_preparation.ipynb
@@ -1103,13 +1103,13 @@
    "\n",
    "All files with name prefixes specified in this list will be put into the test set. Each time a checkpoint is saved, the program will first run inference on the test set and put the result on TensorBoard. Thus, you can listen to these demos and judge the quality of your model. If you leave it empty, test cases will be randomly selected.\n",
    "\n",
-    "##### `max_tokens` and `max_sentences`\n",
+    "##### `max_batch_frames` and `max_batch_size`\n",
    "\n",
    "These two parameters jointly determine the batch size at training time, the former representing the maximum number of frames in one batch and the latter limiting the maximum batch size. Larger batches consume more GPU memory at training time. This value can be adjusted according to your GPU memory. 
Remember not to set this value too low because the model may not converge with small batches.\n",
    "\n",
-    "##### `lr`, `decay_steps`, `gamma`\n",
+    "##### `lr`, `lr_decay_steps`, `lr_decay_gamma`\n",
    "\n",
-    "The learning rate starts at `lr`, decays with the rate `gamma` at every `decay_steps` during training. If you decreased your batch size, you may consider using a smaller learning rate and more decay steps, or larger gamma.\n",
+    "The learning rate starts at `lr` and decays by the factor `lr_decay_gamma` every `lr_decay_steps` steps during training. If you decrease your batch size, you may consider using a smaller learning rate, more decay steps, or a larger `lr_decay_gamma`.\n",
    "\n",
    "##### `val_check_interval`, `num_ckpt_keep` and `max_updates`\n",
    "\n",
@@ -1166,12 +1166,12 @@
    "\n",
    "]\n",
    "\n",
-    "max_tokens = 80000\n",
-    "max_sentences = 48\n",
+    "max_batch_frames = 80000\n",
+    "max_batch_size = 48\n",
    "\n",
    "lr = 0.0004\n",
-    "decay_steps = 50000\n",
-    "gamma = 0.5\n",
+    "lr_decay_steps = 50000\n",
+    "lr_decay_gamma = 0.5\n",
    "\n",
    "val_check_interval = 2000\n",
    "num_ckpt_keep = 5\n",
@@ -1219,11 +1219,11 @@
    "    'residual_layers': residual_layers,\n",
    "    'f0_embed_type': f0_embed_type,\n",
    "    'test_prefixes': test_prefixes,\n",
-    "    'max_tokens': max_tokens,\n",
-    "    'max_sentences': max_sentences,\n",
+    "    'max_batch_frames': max_batch_frames,\n",
+    "    'max_batch_size': max_batch_size,\n",
    "    'lr': lr,\n",
-    "    'decay_steps': decay_steps,\n",
-    "    'gamma': gamma,\n",
+    "    'lr_decay_steps': lr_decay_steps,\n",
+    "    'lr_decay_gamma': lr_decay_gamma,\n",
    "    'val_check_interval': val_check_interval,\n",
    "    'num_valid_plots': min(10, len(test_prefixes)),\n",
    "    'num_ckpt_keep': num_ckpt_keep,\n",
@@ -1458,12 +1458,12 @@
    "\n",
    "]\n",
    "\n",
-    "max_tokens = 80000\n",
-    "max_sentences = 48\n",
+    "max_batch_frames = 80000\n",
+    "max_batch_size = 48\n",
    "\n",
    "lr = 0.0004\n",
-    "decay_steps = 50000\n",
-    "gamma = 0.5\n",
+    "lr_decay_steps = 50000\n",
+    "lr_decay_gamma = 0.5\n",
    "\n",
    "val_check_interval = 2000\n",
    "num_ckpt_keep = 5\n",
@@ -1542,11 +1542,11 @@
    "    'residual_layers': residual_layers,\n",
    "    'f0_embed_type': f0_embed_type,\n",
    "    'test_prefixes': test_prefixes,\n",
-    "    'max_tokens': max_tokens,\n",
-    "    'max_sentences': max_sentences,\n",
+    "    'max_batch_frames': max_batch_frames,\n",
+    "    'max_batch_size': max_batch_size,\n",
    "    'lr': lr,\n",
-    "    'decay_steps': decay_steps,\n",
-    "    'gamma': gamma,\n",
+    "    'lr_decay_steps': lr_decay_steps,\n",
+    "    'lr_decay_gamma': lr_decay_gamma,\n",
    "    'val_check_interval': val_check_interval,\n",
    "    'num_valid_plots': min(20, len(test_prefixes)),\n",
    "    'num_ckpt_keep': num_ckpt_keep,\n",
diff --git a/training/acoustic_task.py b/training/acoustic_task.py
index c240600ef..9ea2669a8 100644
--- a/training/acoustic_task.py
+++ b/training/acoustic_task.py
@@ -68,14 +68,14 @@ class AcousticTask(BaseTask):
     def __init__(self):
         super().__init__()
         self.dataset_cls = AcousticDataset
-        self.use_vocoder = hparams['infer'] or hparams.get('val_with_vocoder', True)
+        self.use_vocoder = hparams['infer'] or hparams['val_with_vocoder']
         if self.use_vocoder:
             self.vocoder: BaseVocoder = get_vocoder_cls(hparams)()
         self.saving_result_pool = None
         self.saving_results_futures = None
         self.stats = {}
         self.logged_gt_wav = set()
-    
+
     def setup(self, stage):
         self.phone_encoder = self.build_phone_encoder()
         self.model = self.build_model()
@@ -111,13 +111,15 @@ def build_scheduler(self, optimizer):
         #     warmup_steps=hparams['warmup_updates'],
         #     t_total=hparams['max_updates'],
         #     eta_min=0)
-        return 
torch.optim.lr_scheduler.StepLR(optimizer, hparams['decay_steps'], gamma=hparams.get('gamma', 0.5)) - + return torch.optim.lr_scheduler.StepLR( + optimizer, step_size=hparams['lr_decay_steps'], gamma=hparams['lr_decay_gamma'] + ) + def train_dataloader(self): self.training_sampler = DsBatchSampler( self.train_dataset, - max_tokens=self.max_tokens, - max_sentences=self.max_sentences, + max_batch_frames=self.max_batch_frames, + max_batch_size=self.max_batch_size, num_replicas=(self.trainer.distributed_sampler_kwargs or {}).get('num_replicas', 1), rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), sort_by_similar_size=hparams['sort_by_len'], @@ -129,29 +131,29 @@ def train_dataloader(self): return torch.utils.data.DataLoader(self.train_dataset, collate_fn=self.train_dataset.collater, batch_sampler=self.training_sampler, - num_workers=int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 1))), - prefetch_factor=hparams.get('dataloader_prefetch_factor', 2), + num_workers=hparams['ds_workers'], + prefetch_factor=hparams['dataloader_prefetch_factor'], pin_memory=True, persistent_workers=True) def val_dataloader(self): sampler = DsEvalBatchSampler( self.valid_dataset, - max_tokens=self.max_tokens, - max_sentences=self.max_eval_sentences, + max_batch_frames=self.max_val_batch_frames, + max_batch_size=self.max_val_batch_size, rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), batch_by_size=False ) return torch.utils.data.DataLoader(self.valid_dataset, collate_fn=self.valid_dataset.collater, batch_sampler=sampler, - num_workers=int(hparams.get('ds_workers', os.getenv('NUM_WORKERS', 1))), - prefetch_factor=hparams.get('dataloader_prefetch_factor', 2), + num_workers=hparams['ds_workers'], + prefetch_factor=hparams['dataloader_prefetch_factor'], shuffle=False) def test_dataloader(self): return self.val_dataloader() - + def run_model(self, sample, return_output=False, infer=False): """ steps: @@ -190,7 +192,7 @@ def _training_step(self, sample, batch_idx, _): def _on_validation_start(self): if self.use_vocoder: self.vocoder.to_device(self.device) - + def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, return_output=False, infer=False) total_loss = sum(losses.values()) diff --git a/utils/__init__.py b/utils/__init__.py index 33cef296c..6d2fd90cc 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,8 +1,7 @@ import glob +import os import re import time -import os -import sys import types from collections import OrderedDict @@ -36,18 +35,18 @@ def collate_nd(values, pad_value=0, max_len=None): return res -def _is_batch_full(batch, num_tokens, max_tokens, max_sentences): +def _is_batch_full(batch, num_frames, max_batch_frames, max_batch_size): if len(batch) == 0: return 0 - if len(batch) == max_sentences: + if len(batch) == max_batch_size: return 1 - if num_tokens > max_tokens: + if num_frames > max_batch_frames: return 1 return 0 def batch_by_size( - indices, num_tokens_fn, max_tokens=80000, max_sentences=48, + indices, num_frames_fn, max_batch_samples=80000, max_batch_size=48, required_batch_size_multiple=1 ): """ @@ -56,11 +55,11 @@ def batch_by_size( Args: indices (List[int]): ordered list of dataset indices - num_tokens_fn (callable): function that returns the number of tokens at + num_frames_fn (callable): function that returns the number of frames at a given index - max_tokens (int, optional): max number of tokens in each batch + max_batch_samples (int, optional): max number of frames in each batch (default: 80000). 
- max_sentences (int, optional): max number of sentences in each + max_batch_size (int, optional): max number of sentences in each batch (default: 48). """ bsz_mult = required_batch_size_multiple @@ -74,16 +73,16 @@ def batch_by_size( batches = [] for i in range(len(indices)): idx = indices[i] - num_tokens = num_tokens_fn(idx) - sample_lens.append(num_tokens) - sample_len = max(sample_len, num_tokens) - assert sample_len <= max_tokens, ( - "sentence at index {} of size {} exceeds max_tokens " - "limit of {}!".format(idx, sample_len, max_tokens) + num_frames = num_frames_fn(idx) + sample_lens.append(num_frames) + sample_len = max(sample_len, num_frames) + assert sample_len <= max_batch_samples, ( + "sentence at index {} of size {} exceeds max_batch_samples " + "limit of {}!".format(idx, sample_len, max_batch_samples) ) - num_tokens = (len(batch) + 1) * sample_len + num_frames = (len(batch) + 1) * sample_len - if _is_batch_full(batch, num_tokens, max_tokens, max_sentences): + if _is_batch_full(batch, num_frames, max_batch_samples, max_batch_size): mod_len = max( bsz_mult * (len(batch) // bsz_mult), len(batch) % bsz_mult, diff --git a/utils/training_utils.py b/utils/training_utils.py index de007e6b0..6fca50b8a 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -72,13 +72,13 @@ def lr_lambda(self, step): #==========Torch samplers========== class DsBatchSampler(Sampler): - def __init__(self, dataset, max_tokens, max_sentences, sub_indices=None, + def __init__(self, dataset, max_batch_frames, max_batch_size, sub_indices=None, num_replicas=None, rank=None, required_batch_count_multiple=1, batch_by_size=True, sort_by_similar_size=True, shuffle_sample=False, shuffle_batch=False, seed=0, drop_last=False) -> None: self.dataset = dataset - self.max_tokens = max_tokens - self.max_sentences = max_sentences + self.max_batch_frames = max_batch_frames + self.max_batch_size = max_batch_size self.sub_indices = sub_indices self.num_replicas = num_replicas self.rank = rank @@ -115,9 +115,9 @@ def __form_batches(self): indices = self.sub_indices if self.sub_indices is not None else list(range(len(self.dataset))) if self.batch_by_size: - batches = utils.batch_by_size(indices, self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) + batches = utils.batch_by_size(indices, self.dataset.num_frames, max_batch_samples=self.max_batch_frames, max_batch_size=self.max_batch_size) else: - batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] + batches = [indices[i:i + self.max_batch_size] for i in range(0, len(indices), self.max_batch_size)] floored_total_batch_count = (len(batches) // self.num_replicas) * self.num_replicas if self.drop_last and len(batches) > floored_total_batch_count: @@ -163,22 +163,28 @@ def set_epoch(self, epoch): class DsEvalBatchSampler(Sampler): - def __init__(self, dataset, max_tokens, max_sentences, rank=None, batch_by_size=True) -> None: + def __init__(self, dataset, max_batch_frames, max_batch_size, rank=None, batch_by_size=True) -> None: self.dataset = dataset - self.max_tokens = max_tokens - self.max_sentences = max_sentences + self.max_batch_samples = max_batch_frames + self.max_batch_size = max_batch_size self.rank = rank self.batch_by_size = batch_by_size self.batches = None - self.batch_size = max_sentences + self.batch_size = max_batch_size self.drop_last = False if self.rank == 0: indices = list(range(len(self.dataset))) if self.batch_by_size: - self.batches = utils.batch_by_size(indices, 
self.dataset.num_tokens, max_tokens=self.max_tokens, max_sentences=self.max_sentences) + self.batches = utils.batch_by_size( + indices, self.dataset.num_frames, + max_batch_samples=self.max_batch_samples, max_batch_size=self.max_batch_size + ) else: - self.batches = [indices[i:i + self.max_sentences] for i in range(0, len(indices), self.max_sentences)] + self.batches = [ + indices[i:i + self.max_batch_size] + for i in range(0, len(indices), self.max_batch_size) + ] else: self.batches = [[0]] From cf48b5f6d3c3191c06426dc2eb00f63c4aaadc5a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 10 Apr 2023 21:26:15 +0800 Subject: [PATCH 214/475] Rename argument --- utils/__init__.py | 10 +++++----- utils/training_utils.py | 12 +++++++++--- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/utils/__init__.py b/utils/__init__.py index 6d2fd90cc..145219c63 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -46,7 +46,7 @@ def _is_batch_full(batch, num_frames, max_batch_frames, max_batch_size): def batch_by_size( - indices, num_frames_fn, max_batch_samples=80000, max_batch_size=48, + indices, num_frames_fn, max_batch_frames=80000, max_batch_size=48, required_batch_size_multiple=1 ): """ @@ -57,7 +57,7 @@ def batch_by_size( indices (List[int]): ordered list of dataset indices num_frames_fn (callable): function that returns the number of frames at a given index - max_batch_samples (int, optional): max number of frames in each batch + max_batch_frames (int, optional): max number of frames in each batch (default: 80000). max_batch_size (int, optional): max number of sentences in each batch (default: 48). @@ -76,13 +76,13 @@ def batch_by_size( num_frames = num_frames_fn(idx) sample_lens.append(num_frames) sample_len = max(sample_len, num_frames) - assert sample_len <= max_batch_samples, ( + assert sample_len <= max_batch_frames, ( "sentence at index {} of size {} exceeds max_batch_samples " - "limit of {}!".format(idx, sample_len, max_batch_samples) + "limit of {}!".format(idx, sample_len, max_batch_frames) ) num_frames = (len(batch) + 1) * sample_len - if _is_batch_full(batch, num_frames, max_batch_samples, max_batch_size): + if _is_batch_full(batch, num_frames, max_batch_frames, max_batch_size): mod_len = max( bsz_mult * (len(batch) // bsz_mult), len(batch) % bsz_mult, diff --git a/utils/training_utils.py b/utils/training_utils.py index 6fca50b8a..827dfa1a0 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -115,7 +115,11 @@ def __form_batches(self): indices = self.sub_indices if self.sub_indices is not None else list(range(len(self.dataset))) if self.batch_by_size: - batches = utils.batch_by_size(indices, self.dataset.num_frames, max_batch_samples=self.max_batch_frames, max_batch_size=self.max_batch_size) + batches = utils.batch_by_size( + indices, self.dataset.num_frames, + max_batch_frames=self.max_batch_frames, + max_batch_size=self.max_batch_size + ) else: batches = [indices[i:i + self.max_batch_size] for i in range(0, len(indices), self.max_batch_size)] @@ -126,7 +130,9 @@ def __form_batches(self): else: leftovers = (rng.permutation(len(batches) - floored_total_batch_count) + floored_total_batch_count).tolist() - batch_assignment = rng.permuted(np.arange(floored_total_batch_count).reshape(-1, self.num_replicas).transpose(), axis=0)[self.rank].tolist() + batch_assignment = rng.permuted( + np.arange(floored_total_batch_count).reshape(-1, self.num_replicas).transpose(), axis=0 + )[self.rank].tolist() floored_batch_count = len(batch_assignment) ceiled_batch_count = 
floored_batch_count + (1 if len(leftovers) > 0 else 0) if self.rank < len(leftovers): @@ -178,7 +184,7 @@ def __init__(self, dataset, max_batch_frames, max_batch_size, rank=None, batch_b if self.batch_by_size: self.batches = utils.batch_by_size( indices, self.dataset.num_frames, - max_batch_samples=self.max_batch_samples, max_batch_size=self.max_batch_size + max_batch_frames=self.max_batch_samples, max_batch_size=self.max_batch_size ) else: self.batches = [ From c670d7c0a50740d5e8850530804ddd2e04c76157 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 10 Apr 2023 21:53:39 +0800 Subject: [PATCH 215/475] Fixes for freeze spk and docstring --- basics/base_exporter.py | 2 +- deployment/exporters/acoustic_exporter.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/basics/base_exporter.py b/basics/base_exporter.py index 5300ad2b1..fc7c73287 100644 --- a/basics/base_exporter.py +++ b/basics/base_exporter.py @@ -54,7 +54,7 @@ def export_attachments(self, path: Path): def export(self, path: Path): """ - Export all the artifacts to the target directory. + Exports all the artifacts to the target directory. :param path: the target directory """ raise NotImplementedError() diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 25fa0c35c..b2f4bf8e1 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -52,17 +52,17 @@ def __init__( key_shift = max(min(key_shift, shift_max), shift_min) # clip key shift self.model.fs2.register_buffer('frozen_key_shift', torch.FloatTensor([key_shift]).to(self.device)) if hparams['use_spk_id']: - if not self.export_spk and freeze_spk is None: + if not self.export_spk and self.freeze_spk is None: # In case the user did not specify any speaker settings: if len(self.spk_map) == 1: # If there is only one speaker, freeze him/her. first_spk = next(self.spk_map.keys()) - freeze_spk = (first_spk, {first_spk: 1.0}) + self.freeze_spk = (first_spk, {first_spk: 1.0}) else: # If there are multiple speakers, export them all. self.export_spk = [(name, {name: 1.0}) for name in self.spk_map.keys()] - if freeze_spk is not None: - self.model.fs2.register_buffer('frozen_spk_embed', self._perform_spk_mix(freeze_spk[1])) + if self.freeze_spk is not None: + self.model.fs2.register_buffer('frozen_spk_embed', self._perform_spk_mix(self.freeze_spk[1])) def build_model(self) -> DiffSingerAcousticONNX: model = DiffSingerAcousticONNX( @@ -78,7 +78,7 @@ def export(self, path: Path): model_name = self.model_name if self.freeze_spk is not None: model_name += '.' 
+ self.freeze_spk[0] - self.export_model((path / model_name).with_suffix('.onnx')) + self.export_model((path / 'dummy').with_suffix('.onnx').with_stem(model_name)) self.export_attachments(path) def export_model(self, path: Path): From 8d3de77c5476b45e8b187dda431ea25f463b50af Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 11 Apr 2023 12:15:21 +0800 Subject: [PATCH 216/475] Reformat code --- basics/base_task.py | 6 +- inference/dpm_solver_pytorch.py | 270 ++++++++++++++------------ modules/commons/ssim.py | 322 +------------------------------- utils/__init__.py | 2 + utils/training_utils.py | 60 +++--- 5 files changed, 192 insertions(+), 468 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index f4e45e048..ddea1b9c0 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -1,21 +1,19 @@ -from datetime import datetime -from functools import partial import logging import os import pathlib import shutil import sys +from datetime import datetime import matplotlib + matplotlib.use('Agg') -from torch import nn import torch.utils.data from torchmetrics import MeanMetric import lightning.pytorch as pl from lightning.pytorch.callbacks import LearningRateMonitor from lightning.pytorch.loggers import TensorBoardLogger -from lightning.pytorch.utilities import grad_norm from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only from basics.base_module import CategorizedModule diff --git a/inference/dpm_solver_pytorch.py b/inference/dpm_solver_pytorch.py index 8e1d80d25..9d4cb8c2b 100644 --- a/inference/dpm_solver_pytorch.py +++ b/inference/dpm_solver_pytorch.py @@ -1,7 +1,7 @@ -import torch -import torch.nn.functional as F import math +import torch + class NoiseScheduleVP: def __init__( @@ -11,7 +11,7 @@ def __init__( alphas_cumprod=None, continuous_beta_0=0.1, continuous_beta_1=20., - ): + ): """Create a wrapper class for the forward SDE (VP type). *** @@ -93,7 +93,9 @@ def __init__( """ if schedule not in ['discrete', 'linear', 'cosine']: - raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(schedule)) + raise ValueError( + "Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format( + schedule)) self.schedule = schedule if schedule == 'discrete': @@ -112,7 +114,8 @@ def __init__( self.beta_1 = continuous_beta_1 self.cosine_s = 0.008 self.cosine_beta_max = 999. - self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * ( + 1. + self.cosine_s) / math.pi - self.cosine_s self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.)) self.schedule = schedule if schedule == 'cosine': @@ -127,12 +130,13 @@ def marginal_log_mean_coeff(self, t): Compute log(alpha_t) of a given continuous-time label t in [0, T]. """ if self.schedule == 'discrete': - return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) + return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), + self.log_alpha_array.to(t.device)).reshape((-1)) elif self.schedule == 'linear': return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 elif self.schedule == 'cosine': log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. 
+ self.cosine_s) * math.pi / 2.)) - log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 + log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 return log_alpha_t def marginal_alpha(self, t): @@ -161,30 +165,32 @@ def inverse_lambda(self, lamb): """ if self.schedule == 'linear': tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) - Delta = self.beta_0**2 + tmp + Delta = self.beta_0 ** 2 + tmp return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) elif self.schedule == 'discrete': log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb) - t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) + t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), + torch.flip(self.t_array.to(lamb.device), [1])) return t.reshape((-1,)) else: log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) - t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * ( + 1. + self.cosine_s) / math.pi - self.cosine_s t = t_fn(log_alpha) return t def model_wrapper( - model, - noise_schedule, - model_type="noise", - model_kwargs={}, - guidance_type="uncond", - condition=None, - unconditional_condition=None, - guidance_scale=1., - classifier_fn=None, - classifier_kwargs={}, + model, + noise_schedule, + model_type="noise", + model_kwargs={}, + guidance_type="uncond", + condition=None, + unconditional_condition=None, + guidance_scale=1., + classifier_fn=None, + classifier_kwargs={}, ): """Create a wrapper function for the noise prediction model. @@ -392,7 +398,7 @@ def data_prediction_fn(self, x, t): alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims) if self.thresholding: - p = 0.995 # A hyperparameter in the paper of "Imagen" [1]. + p = 0.995 # A hyperparameter in the paper of "Imagen" [1]. s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims) x0 = torch.clamp(x0, -s, s) / s @@ -431,10 +437,11 @@ def get_time_steps(self, skip_type, t_T, t_0, N, device): return torch.linspace(t_T, t_0, N + 1).to(device) elif skip_type == 'time_quadratic': t_order = 2 - t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) + t = torch.linspace(t_T ** (1. / t_order), t_0 ** (1. 
/ t_order), N + 1).pow(t_order).to(device) return t else: - raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) + raise ValueError( + "Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): """ @@ -471,28 +478,29 @@ def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type if order == 3: K = steps // 3 + 1 if steps % 3 == 0: - orders = [3,] * (K - 2) + [2, 1] + orders = [3, ] * (K - 2) + [2, 1] elif steps % 3 == 1: - orders = [3,] * (K - 1) + [1] + orders = [3, ] * (K - 1) + [1] else: - orders = [3,] * (K - 1) + [2] + orders = [3, ] * (K - 1) + [2] elif order == 2: if steps % 2 == 0: K = steps // 2 - orders = [2,] * K + orders = [2, ] * K else: K = steps // 2 + 1 - orders = [2,] * (K - 1) + [1] + orders = [2, ] * (K - 1) + [1] elif order == 1: K = 1 - orders = [1,] * steps + orders = [1, ] * steps else: raise ValueError("'order' must be '1' or '2' or '3'.") if skip_type == 'logSNR': # To reproduce the results in DPM-Solver paper timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) else: - timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders),dim=0).to(device)] + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[ + torch.cumsum(torch.tensor([0, ] + orders), dim=0).to(device)] return timesteps_outer, orders def denoise_fn(self, x, s): @@ -528,8 +536,8 @@ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=Fal if model_s is None: model_s = self.model_fn(x, s) x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s ) if return_intermediate: return x_t, {'model_s': model_s} @@ -540,15 +548,16 @@ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=Fal if model_s is None: model_s = self.model_fn(x, s) x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s ) if return_intermediate: return x_t, {'model_s': model_s} else: return x_t - def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, return_intermediate=False, solver_type='dpm_solver'): + def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, return_intermediate=False, + solver_type='dpm_solver'): """ Singlestep solver DPM-Solver-2 from time `s` to time `t`. 
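# A compact restatement of the updates whose layout is normalized in these
# hunks, as a reading aid (names follow the surrounding code, with
# h = lambda_t - lambda_s):
#
#     data prediction (predict_x0):
#         x_t = (sigma_t / sigma_s) * x - alpha_t * expm1(-h) * model_s
#     noise prediction:
#         x_t = exp(log_alpha_t - log_alpha_s) * x - sigma_t * expm1(h) * model_s
#
# DPM-Solver-2 below adds one intermediate model evaluation at s1, where
# lambda_s1 = lambda_s + r1 * h with r1 = 0.5 by default.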
@@ -575,7 +584,8 @@ def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, ret h = lambda_t - lambda_s lambda_s1 = lambda_s + r1 * h s1 = ns.inverse_lambda(lambda_s1) - log_alpha_s, log_alpha_s1, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(t) + log_alpha_s, log_alpha_s1, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff( + s1), ns.marginal_log_mean_coeff(t) sigma_s, sigma_s1, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std(t) alpha_s1, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_t) @@ -586,21 +596,22 @@ def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, ret if model_s is None: model_s = self.model_fn(x, s) x_s1 = ( - expand_dims(sigma_s1 / sigma_s, dims) * x - - expand_dims(alpha_s1 * phi_11, dims) * model_s + expand_dims(sigma_s1 / sigma_s, dims) * x + - expand_dims(alpha_s1 * phi_11, dims) * model_s ) model_s1 = self.model_fn(x_s1, s1) if solver_type == 'dpm_solver': x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - - (0.5 / r1) * expand_dims(alpha_t * phi_1, dims) * (model_s1 - model_s) + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + - (0.5 / r1) * expand_dims(alpha_t * phi_1, dims) * (model_s1 - model_s) ) elif solver_type == 'taylor': x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - + (1. / r1) * expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * (model_s1 - model_s) + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + + (1. / r1) * expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * ( + model_s1 - model_s) ) else: phi_11 = torch.expm1(r1 * h) @@ -609,28 +620,29 @@ def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, ret if model_s is None: model_s = self.model_fn(x, s) x_s1 = ( - expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x - - expand_dims(sigma_s1 * phi_11, dims) * model_s + expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x + - expand_dims(sigma_s1 * phi_11, dims) * model_s ) model_s1 = self.model_fn(x_s1, s1) if solver_type == 'dpm_solver': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - (0.5 / r1) * expand_dims(sigma_t * phi_1, dims) * (model_s1 - model_s) + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - (0.5 / r1) * expand_dims(sigma_t * phi_1, dims) * (model_s1 - model_s) ) elif solver_type == 'taylor': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - (1. / r1) * expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * (model_s1 - model_s) + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - (1. / r1) * expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * (model_s1 - model_s) ) if return_intermediate: return x_t, {'model_s': model_s, 'model_s1': model_s1} else: return x_t - def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_s=None, model_s1=None, return_intermediate=False, solver_type='dpm_solver'): + def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. 
/ 3., model_s=None, model_s1=None, + return_intermediate=False, solver_type='dpm_solver'): """ Singlestep solver DPM-Solver-3 from time `s` to time `t`. @@ -664,8 +676,10 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_ lambda_s2 = lambda_s + r2 * h s1 = ns.inverse_lambda(lambda_s1) s2 = ns.inverse_lambda(lambda_s2) - log_alpha_s, log_alpha_s1, log_alpha_s2, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(s2), ns.marginal_log_mean_coeff(t) - sigma_s, sigma_s1, sigma_s2, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std(s2), ns.marginal_std(t) + log_alpha_s, log_alpha_s1, log_alpha_s2, log_alpha_t = ns.marginal_log_mean_coeff( + s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(s2), ns.marginal_log_mean_coeff(t) + sigma_s, sigma_s1, sigma_s2, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std( + s2), ns.marginal_std(t) alpha_s1, alpha_s2, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_s2), torch.exp(log_alpha_t) if self.predict_x0: @@ -680,21 +694,21 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_ model_s = self.model_fn(x, s) if model_s1 is None: x_s1 = ( - expand_dims(sigma_s1 / sigma_s, dims) * x - - expand_dims(alpha_s1 * phi_11, dims) * model_s + expand_dims(sigma_s1 / sigma_s, dims) * x + - expand_dims(alpha_s1 * phi_11, dims) * model_s ) model_s1 = self.model_fn(x_s1, s1) x_s2 = ( - expand_dims(sigma_s2 / sigma_s, dims) * x - - expand_dims(alpha_s2 * phi_12, dims) * model_s - + r2 / r1 * expand_dims(alpha_s2 * phi_22, dims) * (model_s1 - model_s) + expand_dims(sigma_s2 / sigma_s, dims) * x + - expand_dims(alpha_s2 * phi_12, dims) * model_s + + r2 / r1 * expand_dims(alpha_s2 * phi_22, dims) * (model_s1 - model_s) ) model_s2 = self.model_fn(x_s2, s2) if solver_type == 'dpm_solver': x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - + (1. / r2) * expand_dims(alpha_t * phi_2, dims) * (model_s2 - model_s) + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + + (1. / r2) * expand_dims(alpha_t * phi_2, dims) * (model_s2 - model_s) ) elif solver_type == 'taylor': D1_0 = (1. / r1) * (model_s1 - model_s) @@ -702,10 +716,10 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_ D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1) D2 = 2. 
* (D1_1 - D1_0) / (r2 - r1) x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - + expand_dims(alpha_t * phi_2, dims) * D1 - - expand_dims(alpha_t * phi_3, dims) * D2 + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + + expand_dims(alpha_t * phi_2, dims) * D1 + - expand_dims(alpha_t * phi_3, dims) * D2 ) else: phi_11 = torch.expm1(r1 * h) @@ -719,21 +733,21 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_ model_s = self.model_fn(x, s) if model_s1 is None: x_s1 = ( - expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x - - expand_dims(sigma_s1 * phi_11, dims) * model_s + expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x + - expand_dims(sigma_s1 * phi_11, dims) * model_s ) model_s1 = self.model_fn(x_s1, s1) x_s2 = ( - expand_dims(torch.exp(log_alpha_s2 - log_alpha_s), dims) * x - - expand_dims(sigma_s2 * phi_12, dims) * model_s - - r2 / r1 * expand_dims(sigma_s2 * phi_22, dims) * (model_s1 - model_s) + expand_dims(torch.exp(log_alpha_s2 - log_alpha_s), dims) * x + - expand_dims(sigma_s2 * phi_12, dims) * model_s + - r2 / r1 * expand_dims(sigma_s2 * phi_22, dims) * (model_s1 - model_s) ) model_s2 = self.model_fn(x_s2, s2) if solver_type == 'dpm_solver': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - (1. / r2) * expand_dims(sigma_t * phi_2, dims) * (model_s2 - model_s) + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - (1. / r2) * expand_dims(sigma_t * phi_2, dims) * (model_s2 - model_s) ) elif solver_type == 'taylor': D1_0 = (1. / r1) * (model_s1 - model_s) @@ -741,10 +755,10 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_ D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1) D2 = 2. 
* (D1_1 - D1_0) / (r2 - r1) x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - expand_dims(sigma_t * phi_2, dims) * D1 - - expand_dims(sigma_t * phi_3, dims) * D2 + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - expand_dims(sigma_t * phi_2, dims) * D1 + - expand_dims(sigma_t * phi_3, dims) * D2 ) if return_intermediate: @@ -772,7 +786,8 @@ def multistep_dpm_solver_second_update(self, x, model_prev_list, t_prev_list, t, dims = x.dim() model_prev_1, model_prev_0 = model_prev_list t_prev_1, t_prev_0 = t_prev_list - lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t) + lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_1), ns.marginal_lambda( + t_prev_0), ns.marginal_lambda(t) log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) alpha_t = torch.exp(log_alpha_t) @@ -784,28 +799,28 @@ def multistep_dpm_solver_second_update(self, x, model_prev_list, t_prev_list, t, if self.predict_x0: if solver_type == 'dpm_solver': x_t = ( - expand_dims(sigma_t / sigma_prev_0, dims) * x - - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 - - 0.5 * expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * D1_0 + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 + - 0.5 * expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * D1_0 ) elif solver_type == 'taylor': x_t = ( - expand_dims(sigma_t / sigma_prev_0, dims) * x - - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 - + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1_0 + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 + + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1_0 ) else: if solver_type == 'dpm_solver': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 - - 0.5 * expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * D1_0 + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 + - 0.5 * expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * D1_0 ) elif solver_type == 'taylor': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 - - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1_0 + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 + - expand_dims(sigma_t * ((torch.exp(h) - 1.) 
/ h - 1.), dims) * D1_0 ) return x_t @@ -827,7 +842,8 @@ def multistep_dpm_solver_third_update(self, x, model_prev_list, t_prev_list, t, dims = x.dim() model_prev_2, model_prev_1, model_prev_0 = model_prev_list t_prev_2, t_prev_1, t_prev_0 = t_prev_list - lambda_prev_2, lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_2), ns.marginal_lambda(t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t) + lambda_prev_2, lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_2), ns.marginal_lambda( + t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t) log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) alpha_t = torch.exp(log_alpha_t) @@ -842,21 +858,22 @@ def multistep_dpm_solver_third_update(self, x, model_prev_list, t_prev_list, t, D2 = expand_dims(1. / (r0 + r1), dims) * (D1_0 - D1_1) if self.predict_x0: x_t = ( - expand_dims(sigma_t / sigma_prev_0, dims) * x - - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 - + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1 - - expand_dims(alpha_t * ((torch.exp(-h) - 1. + h) / h**2 - 0.5), dims) * D2 + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 + + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1 + - expand_dims(alpha_t * ((torch.exp(-h) - 1. + h) / h ** 2 - 0.5), dims) * D2 ) else: x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 - - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1 - - expand_dims(sigma_t * ((torch.exp(h) - 1. - h) / h**2 - 0.5), dims) * D2 + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 + - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1 + - expand_dims(sigma_t * ((torch.exp(h) - 1. - h) / h ** 2 - 0.5), dims) * D2 ) return x_t - def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False, solver_type='dpm_solver', r1=None, r2=None): + def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False, solver_type='dpm_solver', r1=None, + r2=None): """ Singlestep DPM-Solver with the order `order` from time `s` to time `t`. 
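The `D1_0`, `D1_1`, `D1` and `D2` terms in the multistep updates above are finite-difference estimates of the first and second derivatives of the model output with respect to the half-log-SNR `lambda`. A hedged scalar sketch follows; the real code wraps each ratio in `expand_dims` so it broadcasts over batched tensors, and the `D1` combination is recalled from the upstream DPM-Solver reference implementation rather than quoted from this hunk:

def derivative_estimates(model_prev_0, model_prev_1, model_prev_2, r0, r1):
    # r0 = h_0 / h and r1 = h_1 / h are the two previous step sizes
    # relative to the current one, measured in lambda.
    D1_0 = (model_prev_0 - model_prev_1) / r0     # slope over the latest interval
    D1_1 = (model_prev_1 - model_prev_2) / r1     # slope over the interval before
    D1 = D1_0 + (r0 / (r0 + r1)) * (D1_0 - D1_1)  # extrapolated first derivative at t_prev_0
    D2 = (D1_0 - D1_1) / (r0 + r1)                # second-derivative estimate
    return D1, D2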
@@ -876,9 +893,11 @@ def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False if order == 1: return self.dpm_solver_first_update(x, s, t, return_intermediate=return_intermediate) elif order == 2: - return self.singlestep_dpm_solver_second_update(x, s, t, return_intermediate=return_intermediate, solver_type=solver_type, r1=r1) + return self.singlestep_dpm_solver_second_update(x, s, t, return_intermediate=return_intermediate, + solver_type=solver_type, r1=r1) elif order == 3: - return self.singlestep_dpm_solver_third_update(x, s, t, return_intermediate=return_intermediate, solver_type=solver_type, r1=r1, r2=r2) + return self.singlestep_dpm_solver_third_update(x, s, t, return_intermediate=return_intermediate, + solver_type=solver_type, r1=r1, r2=r2) else: raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order)) @@ -906,7 +925,8 @@ def multistep_dpm_solver_update(self, x, model_prev_list, t_prev_list, t, order, else: raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order)) - def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol=0.05, theta=0.9, t_err=1e-5, solver_type='dpm_solver'): + def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol=0.05, theta=0.9, t_err=1e-5, + solver_type='dpm_solver'): """ The adaptive step size solver based on singlestep DPM-Solver. @@ -938,11 +958,17 @@ def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol if order == 2: r1 = 0.5 lower_update = lambda x, s, t: self.dpm_solver_first_update(x, s, t, return_intermediate=True) - higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, solver_type=solver_type, **kwargs) + higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, + solver_type=solver_type, + **kwargs) elif order == 3: r1, r2 = 1. / 3., 2. / 3. - lower_update = lambda x, s, t: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, return_intermediate=True, solver_type=solver_type) - higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_third_update(x, s, t, r1=r1, r2=r2, solver_type=solver_type, **kwargs) + lower_update = lambda x, s, t: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, + return_intermediate=True, + solver_type=solver_type) + higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_third_update(x, s, t, r1=r1, r2=r2, + solver_type=solver_type, + **kwargs) else: raise ValueError("For adaptive step size solver, order must be 2 or 3, got {}".format(order)) while torch.abs((s - t_0)).mean() > t_err: @@ -963,9 +989,9 @@ def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol return x def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time_uniform', - method='singlestep', denoise=False, solver_type='dpm_solver', atol=0.0078, - rtol=0.05, - ): + method='singlestep', denoise=False, solver_type='dpm_solver', atol=0.0078, + rtol=0.05, + ): """ Compute the sample at time `t_end` by DPM-Solver, given the initial `x` at time `t_start`. 
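Every update rule in this file multiplies per-sample scalar coefficients such as `sigma_t / sigma_s` into tensors shaped like `x`; the `expand_dims` utility reformatted in the final hunk of this file's diff is what makes that broadcast legal. A minimal runnable demonstration:

import torch

def expand_dims(v, dims):
    # Same one-liner as the utility defined at the end of this file.
    return v[(...,) + (None,) * (dims - 1)]

v = torch.tensor([0.5, 0.7, 0.9])   # one coefficient per sample, shape [N]
print(expand_dims(v, 4).shape)      # torch.Size([3, 1, 1, 1])
x = torch.randn(3, 8, 16, 16)
y = expand_dims(v, x.dim()) * x     # broadcasts cleanly over [N, C, H, W]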
@@ -1062,7 +1088,8 @@ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time device = x.device if method == 'adaptive': with torch.no_grad(): - x = self.dpm_solver_adaptive(x, order=order, t_T=t_T, t_0=t_0, atol=atol, rtol=rtol, solver_type=solver_type) + x = self.dpm_solver_adaptive(x, order=order, t_T=t_T, t_0=t_0, atol=atol, rtol=rtol, + solver_type=solver_type) elif method == 'multistep': assert steps >= order timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device) @@ -1074,13 +1101,15 @@ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time # Init the first `order` values by lower order multistep DPM-Solver. for init_order in range(1, order): vec_t = timesteps[init_order].expand(x.shape[0]) - x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, init_order, solver_type=solver_type) + x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, init_order, + solver_type=solver_type) model_prev_list.append(self.model_fn(x, vec_t)) t_prev_list.append(vec_t) # Compute the remaining values by `order`-th order multistep DPM-Solver. for step in range(order, steps + 1): vec_t = timesteps[step].expand(x.shape[0]) - x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, order, solver_type=solver_type) + x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, order, + solver_type=solver_type) for i in range(order - 1): t_prev_list[i] = t_prev_list[i + 1] model_prev_list[i] = model_prev_list[i + 1] @@ -1090,14 +1119,18 @@ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time model_prev_list[-1] = self.model_fn(x, vec_t) elif method in ['singlestep', 'singlestep_fixed']: if method == 'singlestep': - timesteps_outer, orders = self.get_orders_and_timesteps_for_singlestep_solver(steps=steps, order=order, skip_type=skip_type, t_T=t_T, t_0=t_0, device=device) + timesteps_outer, orders = self.get_orders_and_timesteps_for_singlestep_solver(steps=steps, order=order, + skip_type=skip_type, + t_T=t_T, t_0=t_0, + device=device) elif method == 'singlestep_fixed': K = steps // order - orders = [order,] * K + orders = [order, ] * K timesteps_outer = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=K, device=device) for i, order in enumerate(orders): t_T_inner, t_0_inner = timesteps_outer[i], timesteps_outer[i + 1] - timesteps_inner = self.get_time_steps(skip_type=skip_type, t_T=t_T_inner.item(), t_0=t_0_inner.item(), N=order, device=device) + timesteps_inner = self.get_time_steps(skip_type=skip_type, t_T=t_T_inner.item(), t_0=t_0_inner.item(), + N=order, device=device) lambda_inner = self.noise_schedule.marginal_lambda(timesteps_inner) vec_s, vec_t = t_T_inner.repeat(x.shape[0]), t_0_inner.repeat(x.shape[0]) h = lambda_inner[-1] - lambda_inner[0] @@ -1109,7 +1142,6 @@ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time return x - ############################################################# # other utility functions ############################################################# @@ -1166,4 +1198,4 @@ def expand_dims(v, dims): Returns: a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. 
""" - return v[(...,) + (None,)*(dims - 1)] + return v[(...,) + (None,) * (dims - 1)] diff --git a/modules/commons/ssim.py b/modules/commons/ssim.py index 0d0241f26..7f72f68f0 100644 --- a/modules/commons/ssim.py +++ b/modules/commons/ssim.py @@ -1,319 +1,12 @@ -# ''' -# https://github.com/One-sixth/ms_ssim_pytorch/blob/master/ssim.py -# ''' -# -# import torch -# import torch.jit -# import torch.nn.functional as F -# -# -# @torch.jit.script -# def create_window(window_size: int, sigma: float, channel: int): -# ''' -# Create 1-D gauss kernel -# :param window_size: the size of gauss kernel -# :param sigma: sigma of normal distribution -# :param channel: input channel -# :return: 1D kernel -# ''' -# coords = torch.arange(window_size, dtype=torch.float) -# coords -= window_size // 2 -# -# g = torch.exp(-(coords ** 2) / (2 * sigma ** 2)) -# g /= g.sum() -# -# g = g.reshape(1, 1, 1, -1).repeat(channel, 1, 1, 1) -# return g -# -# -# @torch.jit.script -# def _gaussian_filter(x, window_1d, use_padding: bool): -# ''' -# Blur input with 1-D kernel -# :param x: batch of tensors to be blured -# :param window_1d: 1-D gauss kernel -# :param use_padding: padding image before conv -# :return: blured tensors -# ''' -# C = x.shape[1] -# padding = 0 -# if use_padding: -# window_size = window_1d.shape[3] -# padding = window_size // 2 -# out = F.conv2d(x, window_1d, stride=1, padding=(0, padding), groups=C) -# out = F.conv2d(out, window_1d.transpose(2, 3), stride=1, padding=(padding, 0), groups=C) -# return out -# -# -# @torch.jit.script -# def ssim(X, Y, window, data_range: float, use_padding: bool = False): -# ''' -# Calculate ssim index for X and Y -# :param X: images [B, C, H, N_bins] -# :param Y: images [B, C, H, N_bins] -# :param window: 1-D gauss kernel -# :param data_range: value range of input images. (usually 1.0 or 255) -# :param use_padding: padding image before conv -# :return: -# ''' -# -# K1 = 0.01 -# K2 = 0.03 -# compensation = 1.0 -# -# C1 = (K1 * data_range) ** 2 -# C2 = (K2 * data_range) ** 2 -# -# mu1 = _gaussian_filter(X, window, use_padding) -# mu2 = _gaussian_filter(Y, window, use_padding) -# sigma1_sq = _gaussian_filter(X * X, window, use_padding) -# sigma2_sq = _gaussian_filter(Y * Y, window, use_padding) -# sigma12 = _gaussian_filter(X * Y, window, use_padding) -# -# mu1_sq = mu1.pow(2) -# mu2_sq = mu2.pow(2) -# mu1_mu2 = mu1 * mu2 -# -# sigma1_sq = compensation * (sigma1_sq - mu1_sq) -# sigma2_sq = compensation * (sigma2_sq - mu2_sq) -# sigma12 = compensation * (sigma12 - mu1_mu2) -# -# cs_map = (2 * sigma12 + C2) / (sigma1_sq + sigma2_sq + C2) -# # Fixed the issue that the negative value of cs_map caused ms_ssim to output Nan. -# cs_map = cs_map.clamp_min(0.) -# ssim_map = ((2 * mu1_mu2 + C1) / (mu1_sq + mu2_sq + C1)) * cs_map -# -# ssim_val = ssim_map.mean(dim=(1, 2, 3)) # reduce along CHW -# cs = cs_map.mean(dim=(1, 2, 3)) -# -# return ssim_val, cs -# -# -# @torch.jit.script -# def ms_ssim(X, Y, window, data_range: float, weights, use_padding: bool = False, eps: float = 1e-8): -# ''' -# interface of ms-ssim -# :param X: a batch of images, (N,C,H,W) -# :param Y: a batch of images, (N,C,H,W) -# :param window: 1-D gauss kernel -# :param data_range: value range of input images. (usually 1.0 or 255) -# :param weights: weights for different levels -# :param use_padding: padding image before conv -# :param eps: use for avoid grad nan. 
-# :return: -# ''' -# levels = weights.shape[0] -# cs_vals = [] -# ssim_vals = [] -# for _ in range(levels): -# ssim_val, cs = ssim(X, Y, window=window, data_range=data_range, use_padding=use_padding) -# # Use for fix a issue. When c = a ** b and a is 0, c.backward() will cause the a.grad become inf. -# ssim_val = ssim_val.clamp_min(eps) -# cs = cs.clamp_min(eps) -# cs_vals.append(cs) -# -# ssim_vals.append(ssim_val) -# padding = (X.shape[2] % 2, X.shape[3] % 2) -# X = F.avg_pool2d(X, kernel_size=2, stride=2, padding=padding) -# Y = F.avg_pool2d(Y, kernel_size=2, stride=2, padding=padding) -# -# cs_vals = torch.stack(cs_vals, dim=0) -# ms_ssim_val = torch.prod((cs_vals[:-1] ** weights[:-1].unsqueeze(1)) * (ssim_vals[-1] ** weights[-1]), dim=0) -# return ms_ssim_val -# -# -# class SSIM(torch.jit.ScriptModule): -# __constants__ = ['data_range', 'use_padding'] -# -# def __init__(self, window_size=11, window_sigma=1.5, data_range=255., channel=3, use_padding=False): -# ''' -# :param window_size: the size of gauss kernel -# :param window_sigma: sigma of normal distribution -# :param data_range: value range of input images. (usually 1.0 or 255) -# :param channel: input channels (default: 3) -# :param use_padding: padding image before conv -# ''' -# super().__init__() -# assert window_size % 2 == 1, 'Window size must be odd.' -# window = create_window(window_size, window_sigma, channel) -# self.register_buffer('window', window) -# self.data_range = data_range -# self.use_padding = use_padding -# -# @torch.jit.script_method -# def forward(self, X, Y): -# r = ssim(X, Y, window=self.window, data_range=self.data_range, use_padding=self.use_padding) -# return r[0] -# -# -# class MS_SSIM(torch.jit.ScriptModule): -# __constants__ = ['data_range', 'use_padding', 'eps'] -# -# def __init__(self, window_size=11, window_sigma=1.5, data_range=255., channel=3, use_padding=False, weights=None, -# levels=None, eps=1e-8): -# ''' -# class for ms-ssim -# :param window_size: the size of gauss kernel -# :param window_sigma: sigma of normal distribution -# :param data_range: value range of input images. (usually 1.0 or 255) -# :param channel: input channels -# :param use_padding: padding image before conv -# :param weights: weights for different levels. (default [0.0448, 0.2856, 0.3001, 0.2363, 0.1333]) -# :param levels: number of downsampling -# :param eps: Use for fix a issue. When c = a ** b and a is 0, c.backward() will cause the a.grad become inf. -# ''' -# super().__init__() -# assert window_size % 2 == 1, 'Window size must be odd.' 
-# self.data_range = data_range -# self.use_padding = use_padding -# self.eps = eps -# -# window = create_window(window_size, window_sigma, channel) -# self.register_buffer('window', window) -# -# if weights is None: -# weights = [0.0448, 0.2856, 0.3001, 0.2363, 0.1333] -# weights = torch.tensor(weights, dtype=torch.float) -# -# if levels is not None: -# weights = weights[:levels] -# weights = weights / weights.sum() -# -# self.register_buffer('weights', weights) -# -# @torch.jit.script_method -# def forward(self, X, Y): -# return ms_ssim(X, Y, window=self.window, data_range=self.data_range, weights=self.weights, -# use_padding=self.use_padding, eps=self.eps) -# -# -# if __name__ == '__main__': -# print('Simple Test') -# im = torch.randint(0, 255, (5, 3, 256, 256), dtype=torch.float, device='cuda') -# img1 = im / 255 -# img2 = img1 * 0.5 -# -# losser = SSIM(data_range=1.).cuda() -# loss = losser(img1, img2).mean() -# -# losser2 = MS_SSIM(data_range=1.).cuda() -# loss2 = losser2(img1, img2).mean() -# -# print(loss.item()) -# print(loss2.item()) -# -# if __name__ == '__main__': -# print('Training Test') -# import cv2 -# import torch.optim -# import numpy as np -# import imageio -# import time -# -# out_test_video = False -# # 最好不要直接输出gif图,会非常大,最好先输出mkv文件后用ffmpeg转换到GIF -# video_use_gif = False -# -# im = cv2.imread('test_img1.jpg', 1) -# t_im = torch.from_numpy(im).cuda().permute(2, 0, 1).float()[None] / 255. -# -# if out_test_video: -# if video_use_gif: -# fps = 0.5 -# out_wh = (im.shape[1] // 2, im.shape[0] // 2) -# suffix = '.gif' -# else: -# fps = 5 -# out_wh = (im.shape[1], im.shape[0]) -# suffix = '.mkv' -# video_last_time = time.perf_counter() -# video = imageio.get_writer('ssim_test' + suffix, fps=fps) -# -# # 测试ssim -# print('Training SSIM') -# rand_im = torch.randint_like(t_im, 0, 255, dtype=torch.float32) / 255. -# rand_im.requires_grad = True -# optim = torch.optim.Adam([rand_im], 0.003, eps=1e-8) -# losser = SSIM(data_range=1., channel=t_im.shape[1]).cuda() -# ssim_score = 0 -# while ssim_score < 0.999: -# optim.zero_grad() -# loss = losser(rand_im, t_im) -# (-loss).sum().backward() -# ssim_score = loss.item() -# optim.step() -# r_im = np.transpose(rand_im.detach().cpu().numpy().clip(0, 1) * 255, [0, 2, 3, 1]).astype(np.uint8)[0] -# r_im = cv2.putText(r_im, 'ssim %f' % ssim_score, (10, 30), cv2.FONT_HERSHEY_PLAIN, 2, (255, 0, 0), 2) -# -# if out_test_video: -# if time.perf_counter() - video_last_time > 1. / fps: -# video_last_time = time.perf_counter() -# out_frame = cv2.cvtColor(r_im, cv2.COLOR_BGR2RGB) -# out_frame = cv2.resize(out_frame, out_wh, interpolation=cv2.INTER_AREA) -# if isinstance(out_frame, cv2.UMat): -# out_frame = out_frame.get() -# video.append_data(out_frame) -# -# cv2.imshow('ssim', r_im) -# cv2.setWindowTitle('ssim', 'ssim %f' % ssim_score) -# cv2.waitKey(1) -# -# if out_test_video: -# video.close() -# -# # 测试ms_ssim -# if out_test_video: -# if video_use_gif: -# fps = 0.5 -# out_wh = (im.shape[1] // 2, im.shape[0] // 2) -# suffix = '.gif' -# else: -# fps = 5 -# out_wh = (im.shape[1], im.shape[0]) -# suffix = '.mkv' -# video_last_time = time.perf_counter() -# video = imageio.get_writer('ms_ssim_test' + suffix, fps=fps) -# -# print('Training MS_SSIM') -# rand_im = torch.randint_like(t_im, 0, 255, dtype=torch.float32) / 255. 
-# rand_im.requires_grad = True -# optim = torch.optim.Adam([rand_im], 0.003, eps=1e-8) -# losser = MS_SSIM(data_range=1., channel=t_im.shape[1]).cuda() -# ssim_score = 0 -# while ssim_score < 0.999: -# optim.zero_grad() -# loss = losser(rand_im, t_im) -# (-loss).sum().backward() -# ssim_score = loss.item() -# optim.step() -# r_im = np.transpose(rand_im.detach().cpu().numpy().clip(0, 1) * 255, [0, 2, 3, 1]).astype(np.uint8)[0] -# r_im = cv2.putText(r_im, 'ms_ssim %f' % ssim_score, (10, 30), cv2.FONT_HERSHEY_PLAIN, 2, (255, 0, 0), 2) -# -# if out_test_video: -# if time.perf_counter() - video_last_time > 1. / fps: -# video_last_time = time.perf_counter() -# out_frame = cv2.cvtColor(r_im, cv2.COLOR_BGR2RGB) -# out_frame = cv2.resize(out_frame, out_wh, interpolation=cv2.INTER_AREA) -# if isinstance(out_frame, cv2.UMat): -# out_frame = out_frame.get() -# video.append_data(out_frame) -# -# cv2.imshow('ms_ssim', r_im) -# cv2.setWindowTitle('ms_ssim', 'ms_ssim %f' % ssim_score) -# cv2.waitKey(1) -# -# if out_test_video: -# video.close() - """ Adapted from https://github.com/Po-Hsun-Su/pytorch-ssim """ +from math import exp + import torch import torch.nn.functional as F from torch.autograd import Variable -import numpy as np -from math import exp def gaussian(window_size, sigma): @@ -365,11 +58,7 @@ def forward(self, img1, img2): if channel == self.channel and self.window.data.type() == img1.data.type(): window = self.window else: - window = create_window(self.window_size, channel) - - if img1.is_cuda: - window = window.cuda(img1.get_device()) - window = window.type_as(img1) + window = create_window(self.window_size, channel).to(img1) self.window = window self.channel = channel @@ -384,8 +73,5 @@ def ssim(img1, img2, window_size=11, size_average=True): (_, channel, _, _) = img1.size() global window if window is None: - window = create_window(window_size, channel) - if img1.is_cuda: - window = window.cuda(img1.get_device()) - window = window.type_as(img1) + window = create_window(window_size, channel).to(img1) return _ssim(img1, img2, window, window_size, channel, size_average) diff --git a/utils/__init__.py b/utils/__init__.py index dad9fe4cc..f8ca8c164 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -61,6 +61,8 @@ def batch_by_size( (default: 80000). max_batch_size (int, optional): max number of sentences in each batch (default: 48). 
+ required_batch_size_multiple: require the batch size to be multiple + of a given number """ bsz_mult = required_batch_size_multiple diff --git a/utils/training_utils.py b/utils/training_utils.py index 827dfa1a0..5f60f8232 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,27 +1,25 @@ -from copy import deepcopy -from glob import glob import math import os -from pathlib import Path import re -from typing import Optional, Dict -import warnings +from copy import deepcopy +from glob import glob +from pathlib import Path +from typing import Dict +import lightning.pytorch as pl import numpy as np import torch -from torch.optim.lr_scheduler import LambdaLR -from torch.utils.data.distributed import Sampler, DistributedSampler - -import lightning.pytorch as pl -from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar, RichProgressBar +from lightning.pytorch.callbacks import ModelCheckpoint, TQDMProgressBar from lightning.pytorch.strategies import DDPStrategy -from lightning.pytorch.trainer.states import RunningStage from lightning.pytorch.utilities.rank_zero import rank_zero_info +from torch.optim.lr_scheduler import LambdaLR +from torch.utils.data.distributed import Sampler import utils from utils.hparams import hparams -#==========LR schedulers========== + +# ==========LR schedulers========== class RSQRTSchedule(object): def __init__(self, optimizer): @@ -48,6 +46,7 @@ def step(self, num_updates): def get_lr(self): return self.optimizer.param_groups[0]['lr'] + class WarmupCosineSchedule(LambdaLR): """ Linear warmup and then cosine decay. Linearly increases learning rate from 0 to 1 over `warmup_steps` training steps. @@ -55,6 +54,7 @@ class WarmupCosineSchedule(LambdaLR): If `cycles` (default=0.5) is different from default, learning rate follows cosine function after warmup. `eta_min` (default=0.0) corresponds to the minimum learning rate reached by the scheduler. """ + def __init__(self, optimizer, warmup_steps, t_total, eta_min=0.0, cycles=.5, last_epoch=-1): self.warmup_steps = warmup_steps self.t_total = t_total @@ -69,7 +69,8 @@ def lr_lambda(self, step): progress = (step - self.warmup_steps) / max(1, self.t_total - self.warmup_steps) return max(self.eta_min, 0.5 * (1. 
+ math.cos(math.pi * self.cycles * 2.0 * progress))) -#==========Torch samplers========== + +# ==========Torch samplers========== class DsBatchSampler(Sampler): def __init__(self, dataset, max_batch_frames, max_batch_size, sub_indices=None, @@ -107,7 +108,8 @@ def __form_batches(self): if self.sort_by_similar_size: grid = int(hparams.get('sampler_frame_count_grid', 200)) assert grid > 0 - sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype(np.int64) + sizes = (np.round(np.array(self.dataset._sizes)[indices] / grid) * grid).clip(grid, None).astype( + np.int64) indices = indices[np.argsort(sizes, kind='mergesort')] indices = indices.tolist() @@ -140,10 +142,13 @@ def __form_batches(self): elif len(leftovers) > 0: batch_assignment.append(batch_assignment[self.epoch % floored_batch_count]) if self.required_batch_count_multiple > 1 and ceiled_batch_count % self.required_batch_count_multiple != 0: - # batch_assignment = batch_assignment[:((floored_batch_count // self.required_batch_count_multiple) * self.required_batch_count_multiple)] - ceiled_batch_count = math.ceil(ceiled_batch_count / self.required_batch_count_multiple) * self.required_batch_count_multiple + # batch_assignment = batch_assignment[:((floored_batch_count \ + # // self.required_batch_count_multiple) * self.required_batch_count_multiple)] + ceiled_batch_count = math.ceil( + ceiled_batch_count / self.required_batch_count_multiple) * self.required_batch_count_multiple for i in range(ceiled_batch_count - len(batch_assignment)): - batch_assignment.append(batch_assignment[(i + self.epoch * self.required_batch_count_multiple) % floored_batch_count]) + batch_assignment.append( + batch_assignment[(i + self.epoch * self.required_batch_count_multiple) % floored_batch_count]) self.batches = [deepcopy(batches[i]) for i in batch_assignment] @@ -200,15 +205,16 @@ def __iter__(self): def __len__(self): return len(self.batches) -#==========PL related========== + +# ==========PL related========== class DsModelCheckpoint(ModelCheckpoint): def __init__( - self, - *args, - permanent_ckpt_start, - permanent_ckpt_interval, - **kwargs + self, + *args, + permanent_ckpt_start, + permanent_ckpt_interval, + **kwargs ): super().__init__(*args, **kwargs) self.permanent_ckpt_start = permanent_ckpt_start or 0 @@ -234,7 +240,7 @@ def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModul super().on_validation_end(trainer, pl_module) def _update_best_and_save( - self, current: torch.Tensor, trainer: "pl.Trainer", monitor_candidates: Dict[str, torch.Tensor] + self, current: torch.Tensor, trainer: "pl.Trainer", monitor_candidates: Dict[str, torch.Tensor] ) -> None: k = len(self.best_k_models) + 1 if self.save_top_k == -1 else self.save_top_k @@ -270,8 +276,8 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): if search: step = int(search.group(0)[6:]) if self.enable_permanent_ckpt and \ - step >= self.permanent_ckpt_start and \ - (step - self.permanent_ckpt_start) % self.permanent_ckpt_interval == 0: + step >= self.permanent_ckpt_start and \ + (step - self.permanent_ckpt_start) % self.permanent_ckpt_interval == 0: rank_zero_info(f'Checkpoint {relative_path} is now permanent.') return super()._remove_checkpoint(trainer, filepath) @@ -321,7 +327,7 @@ def get_strategy(accelerator, devices, num_nodes, strategy, backend): if accelerator != 'auto' and accelerator != 'gpu': return strategy - from lightning_fabric.utilities.imports import _IS_INTERACTIVE + from 
lightning.fabric.utilities.imports import _IS_INTERACTIVE from lightning.pytorch.accelerators import AcceleratorRegistry from lightning.pytorch.accelerators.cuda import CUDAAccelerator from lightning.pytorch.accelerators.hpu import HPUAccelerator From 46aacbf2e11f8d8be3f5f1bd858812bc133dda1f Mon Sep 17 00:00:00 2001 From: hrukalive Date: Mon, 10 Apr 2023 23:45:01 -0500 Subject: [PATCH 217/475] Move vocoder to device when training starts --- training/acoustic_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 3ce580f8e..34c6841f0 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -189,7 +189,7 @@ def _training_step(self, sample, batch_idx, _): total_loss = sum([v for v in losses.values() if isinstance(v, torch.Tensor) and v.requires_grad]) return total_loss, {**losses, 'batch_size': sample['tokens'].size()[0]} - def _on_validation_start(self): + def on_train_start(self): if self.use_vocoder: self.vocoder.to_device(self.device) From c88733b9ea54d09d71477555cebfa0f3127fae9f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 11 Apr 2023 13:16:48 +0800 Subject: [PATCH 218/475] Migrate some path operations to pathlib --- basics/base_binarizer.py | 10 ++- basics/base_exporter.py | 5 +- inference/ds_acoustic.py | 4 +- modules/nsf_hifigan/models.py | 5 +- modules/vocoders/ddsp.py | 102 +++++++++++++++------------- modules/vocoders/nsf_hifigan.py | 7 +- preprocessing/acoustic_binarizer.py | 20 +++--- scripts/infer.py | 45 ++++++------ scripts/train.py | 5 +- scripts/vocode.py | 39 ++++++----- utils/__init__.py | 15 ++-- utils/infer_utils.py | 20 +++--- utils/phoneme_utils.py | 4 +- utils/training_utils.py | 13 ++-- 14 files changed, 159 insertions(+), 135 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 776934459..63b35c425 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -1,6 +1,7 @@ import json import logging import os +import pathlib import random from copy import deepcopy @@ -38,12 +39,15 @@ class BaseBinarizer: def __init__(self, data_dir=None): if data_dir is None: data_dir = hparams['raw_data_dir'] + if not isinstance(data_dir, list): + data_dir = [data_dir] speakers = hparams['speakers'] assert isinstance(speakers, list), 'Speakers must be a list' assert len(speakers) == len(set(speakers)), 'Speakers cannot contain duplicate names' - self.raw_data_dirs = data_dir if isinstance(data_dir, list) else [data_dir] + self.raw_data_dirs = [pathlib.Path(d) for d in data_dir] + self.binary_data_dir = pathlib.Path(hparams['binary_data_dir']) if hparams['use_spk_id']: assert len(speakers) == len(self.raw_data_dirs), \ 'Number of raw data dirs must equal number of speaker names!' 
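For readers following this patch hunk by hunk: nearly every change is the same mechanical substitution from `os.path` string handling to `pathlib`. A condensed sketch of the recurring idioms, with illustrative paths:

import pathlib

raw_data_dir = pathlib.Path('data/opencpop')          # was: a plain str
meta_file = raw_data_dir / 'meta.json'                # was: os.path.join(raw_data_dir, 'meta.json')
wav_fn = str(raw_data_dir / 'wavs' / 'item001.wav')   # str() kept where downstream code stores a string
config = pathlib.Path('ckpt/model.pt').with_name('config.yaml')  # sibling file, as in the vocoder load_model()
if meta_file.exists():                                # was: os.path.exists(meta_file)
    pass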
@@ -57,7 +61,7 @@ def __init__(self, data_dir=None): # load each dataset for ds_id, data_dir in enumerate(self.raw_data_dirs): - self.load_meta_data(data_dir, ds_id) + self.load_meta_data(pathlib.Path(data_dir), ds_id) self.item_names = sorted(list(self.items.keys())) self._train_item_names, self._test_item_names = self.split_train_test_set() @@ -65,7 +69,7 @@ def __init__(self, data_dir=None): random.seed(hparams['seed']) random.shuffle(self.item_names) - def load_meta_data(self, raw_data_dir, ds_id): + def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): raise NotImplementedError() def split_train_test_set(self): diff --git a/basics/base_exporter.py b/basics/base_exporter.py index fc7c73287..cc016004a 100644 --- a/basics/base_exporter.py +++ b/basics/base_exporter.py @@ -1,7 +1,6 @@ import json -import os -from typing import Union from pathlib import Path +from typing import Union import torch import torch.nn as nn @@ -24,7 +23,7 @@ def __init__( # noinspection PyMethodMayBeStatic def build_spk_map(self) -> dict: if hparams['use_spk_id']: - with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: + with open(Path(hparams['work_dir']) / 'spk_map.json', 'r', encoding='utf8') as f: spk_map = json.load(f) assert isinstance(spk_map, dict) and len(spk_map) > 0, 'Invalid or empty speaker map!' assert len(spk_map) == len(set(spk_map.values())), 'Duplicate speaker id in speaker map!' diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 320ebebea..3ea543ebe 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -1,5 +1,5 @@ import json -import os +import pathlib import numpy as np import torch @@ -21,7 +21,7 @@ def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=N if load_model: self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) if hparams['use_spk_id']: - with open(os.path.join(hparams['work_dir'], 'spk_map.json'), 'r', encoding='utf8') as f: + with open(pathlib.Path(hparams['work_dir']) / 'spk_map.json', 'r', encoding='utf8') as f: self.spk_map = json.load(f) assert isinstance(self.spk_map, dict) and len(self.spk_map) > 0, 'Invalid or empty speaker map!' assert len(self.spk_map) == len(set(self.spk_map.values())), 'Duplicate speaker id in speaker map!' 
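The two assertions on the loaded speaker map above encode a simple invariant; spelled out with illustrative contents:

spk_map = {'speaker_a': 0, 'speaker_b': 1}   # accepted: non-empty, ids unique
bad_map = {'speaker_a': 0, 'alias_of_a': 0}  # rejected: two names share id 0
assert isinstance(spk_map, dict) and len(spk_map) > 0
assert len(spk_map) == len(set(spk_map.values()))
assert len(bad_map) != len(set(bad_map.values()))  # the duplicate case the check guards against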
diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index 223cbb68f..a2dcac440 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -1,5 +1,6 @@ import json import os +import pathlib import numpy as np import torch @@ -15,8 +16,8 @@ LRELU_SLOPE = 0.1 -def load_model(model_path): - config_file = os.path.join(os.path.split(model_path)[0], 'config.json') +def load_model(model_path: pathlib.Path): + config_file = model_path.with_name('config.json') with open(config_file) as f: data = f.read() diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index 5e8c52ed7..3693daf67 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -1,4 +1,6 @@ import os +import pathlib + import librosa import torch import torch.nn.functional as F @@ -9,48 +11,49 @@ from modules.vocoders.registry import register_vocoder from utils.hparams import hparams + class DotDict(dict): - def __getattr__(*args): - val = dict.get(*args) - return DotDict(val) if type(val) is dict else val + def __getattr__(*args): + val = dict.get(*args) + return DotDict(val) if type(val) is dict else val - __setattr__ = dict.__setitem__ + __setattr__ = dict.__setitem__ __delattr__ = dict.__delitem__ - -def load_model( - model_path, - device='cpu'): - config_file = os.path.join(os.path.split(model_path)[0], 'config.yaml') + + +def load_model(model_path: pathlib.Path, device='cpu'): + config_file = model_path.with_name('config.yaml') with open(config_file, "r") as config: args = yaml.safe_load(config) args = DotDict(args) - + # load model print(' [Loading] ' + model_path) model = torch.jit.load(model_path, map_location=torch.device(device)) - + return model, args - + + class Audio2Mel(torch.nn.Module): def __init__( - self, - hop_length, - sampling_rate, - n_mel_channels, - win_length, - n_fft=None, - mel_fmin=0, - mel_fmax=None, - clamp = 1e-5 + self, + hop_length, + sampling_rate, + n_mel_channels, + win_length, + n_fft=None, + mel_fmin=0, + mel_fmax=None, + clamp=1e-5 ): super().__init__() n_fft = win_length if n_fft is None else n_fft self.hann_window = {} mel_basis = librosa_mel_fn( sr=sampling_rate, - n_fft=n_fft, - n_mels=n_mel_channels, - fmin=mel_fmin, + n_fft=n_fft, + n_mels=n_mel_channels, + fmin=mel_fmin, fmax=mel_fmax) mel_basis = torch.from_numpy(mel_basis).float() self.register_buffer("mel_basis", mel_basis) @@ -66,15 +69,15 @@ def forward(self, audio, keyshift=0, speed=1): audio: B x C x T log_mel_spec: B x T_ x C x n_mel ''' - factor = 2 ** (keyshift / 12) + factor = 2 ** (keyshift / 12) n_fft_new = int(np.round(self.n_fft * factor)) win_length_new = int(np.round(self.win_length * factor)) hop_length_new = int(np.round(self.hop_length * speed)) - - keyshift_key = str(keyshift)+'_'+str(audio.device) + + keyshift_key = str(keyshift) + '_' + str(audio.device) if keyshift_key not in self.hann_window: self.hann_window[keyshift_key] = torch.hann_window(win_length_new).to(audio.device) - + B, C, T = audio.shape audio = audio.reshape(B * C, T) fft = torch.stft( @@ -87,34 +90,35 @@ def forward(self, audio, keyshift=0, speed=1): return_complex=False) real_part, imag_part = fft.unbind(-1) magnitude = torch.sqrt(real_part ** 2 + imag_part ** 2) - + if keyshift != 0: size = self.n_fft // 2 + 1 resize = magnitude.size(1) if resize < size: - magnitude = F.pad(magnitude, (0, 0, 0, size-resize)) + magnitude = F.pad(magnitude, (0, 0, 0, size - resize)) magnitude = magnitude[:, :size, :] * self.win_length / win_length_new - + mel_output = torch.matmul(self.mel_basis, 
magnitude) log_mel_spec = torch.log10(torch.clamp(mel_output, min=self.clamp)) # log_mel_spec: B x C, M, T T_ = log_mel_spec.shape[-1] - log_mel_spec = log_mel_spec.reshape(B, C, self.n_mel_channels ,T_) + log_mel_spec = log_mel_spec.reshape(B, C, self.n_mel_channels, T_) log_mel_spec = log_mel_spec.permute(0, 3, 1, 2) # print('og_mel_spec:', log_mel_spec.shape) - log_mel_spec = log_mel_spec.squeeze(2) # mono + log_mel_spec = log_mel_spec.squeeze(2) # mono return log_mel_spec + @register_vocoder class DDSP(BaseVocoder): def __init__(self, device='cpu'): self.device = device - model_path = hparams['vocoder_ckpt'] - assert os.path.exists(model_path), 'DDSP model file is not found!' + model_path = pathlib.Path(hparams['vocoder_ckpt']) + assert model_path.exists(), 'DDSP model file is not found!' self.model, self.args = load_model(model_path, device=self.device) - + def to_device(self, device): pass @@ -126,7 +130,8 @@ def spec2wav_torch(self, mel, f0): # mel: [B, T, bins] f0: [B, T] print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=', hparams['audio_num_mel_bins'], '!=', self.args.data.n_mels, '(vocoder)') if self.args.data.n_fft != hparams['fft_size']: - print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.args.data.n_fft, '(vocoder)') + print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.args.data.n_fft, + '(vocoder)') if self.args.data.win_length != hparams['win_size']: print('Mismatch parameters: hparams[\'win_size\']=', hparams['win_size'], '!=', self.args.data.win_length, '(vocoder)') @@ -134,9 +139,11 @@ def spec2wav_torch(self, mel, f0): # mel: [B, T, bins] f0: [B, T] print('Mismatch parameters: hparams[\'hop_size\']=', hparams['hop_size'], '!=', self.args.data.block_size, '(vocoder)') if self.args.data.mel_fmin != hparams['fmin']: - print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.args.data.mel_fmin, '(vocoder)') + print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.args.data.mel_fmin, + '(vocoder)') if self.args.data.mel_fmax != hparams['fmax']: - print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.args.data.mel_fmax, '(vocoder)') + print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.args.data.mel_fmax, + '(vocoder)') with torch.no_grad(): f0 = f0.unsqueeze(-1) signal, _, (s_h, s_n) = self.model(mel.to(self.device), f0.to(self.device)) @@ -151,7 +158,8 @@ def spec2wav(self, mel, f0): print('Mismatch parameters: hparams[\'audio_num_mel_bins\']=', hparams['audio_num_mel_bins'], '!=', self.args.data.n_mels, '(vocoder)') if self.args.data.n_fft != hparams['fft_size']: - print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.args.data.n_fft, '(vocoder)') + print('Mismatch parameters: hparams[\'fft_size\']=', hparams['fft_size'], '!=', self.args.data.n_fft, + '(vocoder)') if self.args.data.win_length != hparams['win_size']: print('Mismatch parameters: hparams[\'win_size\']=', hparams['win_size'], '!=', self.args.data.win_length, '(vocoder)') @@ -159,9 +167,11 @@ def spec2wav(self, mel, f0): print('Mismatch parameters: hparams[\'hop_size\']=', hparams['hop_size'], '!=', self.args.data.block_size, '(vocoder)') if self.args.data.mel_fmin != hparams['fmin']: - print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.args.data.mel_fmin, '(vocoder)') + print('Mismatch parameters: hparams[\'fmin\']=', hparams['fmin'], '!=', self.args.data.mel_fmin, + '(vocoder)') if 
self.args.data.mel_fmax != hparams['fmax']: - print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.args.data.mel_fmax, '(vocoder)') + print('Mismatch parameters: hparams[\'fmax\']=', hparams['fmax'], '!=', self.args.data.mel_fmax, + '(vocoder)') with torch.no_grad(): mel = torch.FloatTensor(mel).unsqueeze(0).to(self.device) f0 = torch.FloatTensor(f0).unsqueeze(0).unsqueeze(-1).to(self.device) @@ -181,12 +191,12 @@ def wav2spec(inp_path, keyshift=0, speed=1, device=None): hop_length = hparams['hop_size'] mel_fmin = hparams['fmin'] mel_fmax = hparams['fmax'] - + # load input x, _ = librosa.load(inp_path, sr=sampling_rate) x_t = torch.from_numpy(x).float().to(device) - x_t = x_t.unsqueeze(0).unsqueeze(0) # (T,) --> (1, 1, T) - + x_t = x_t.unsqueeze(0).unsqueeze(0) # (T,) --> (1, 1, T) + # mel analysis mel_extractor = Audio2Mel( hop_length=hop_length, @@ -196,6 +206,6 @@ def wav2spec(inp_path, keyshift=0, speed=1, device=None): n_fft=n_fft, mel_fmin=mel_fmin, mel_fmax=mel_fmax).to(device) - + mel = mel_extractor(x_t, keyshift=keyshift, speed=speed) return x, mel.squeeze(0).cpu().numpy() diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index 19a1ba703..d5cfeabcf 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,4 +1,5 @@ import os +import pathlib import torch try: @@ -16,9 +17,9 @@ @register_vocoder class NsfHifiGAN(BaseVocoder): def __init__(self): - model_path = hparams['vocoder_ckpt'] - assert os.path.exists(model_path), 'HifiGAN model file is not found!' - rank_zero_info('| Load HifiGAN: ' + model_path) + model_path = pathlib.Path(hparams['vocoder_ckpt']) + assert model_path.exists(), 'HifiGAN model file is not found!' + rank_zero_info(f'| Load HifiGAN: {model_path}') self.model, self.h = load_model(model_path) @property diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 2280f8ae6..43b47420a 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -9,7 +9,7 @@ import csv import json import os -import os.path +import pathlib import random import shutil from copy import deepcopy @@ -37,13 +37,13 @@ def __init__(self): super().__init__() self.lr = LengthRegulator() - def load_meta_data(self, raw_data_dir, ds_id): + def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): meta_info = { 'category': 'acoustic', 'format': 'grid' } - meta_file = os.path.join(raw_data_dir, 'meta.json') - if os.path.exists(meta_file): + meta_file = raw_data_dir / 'meta.json' + if meta_file.exists(): meta_info.update(json.load(open(meta_file, 'r', encoding='utf8'))) category = meta_info['category'] assert category == 'acoustic', \ @@ -53,11 +53,11 @@ def load_meta_data(self, raw_data_dir, ds_id): meta_data_dict = {} if meta_info['format'] == 'csv': for utterance_label in csv.DictReader( - open(os.path.join(raw_data_dir, 'transcriptions.csv'), 'r', encoding='utf-8') + open(raw_data_dir / 'transcriptions.csv', 'r', encoding='utf-8') ): item_name = utterance_label['name'] temp_dict = { - 'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav', + 'wav_fn': str(raw_data_dir / 'wavs' / f'{item_name}.wav'), 'ph_seq': utterance_label['ph_seq'].split(), 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], 'spk_id': ds_id @@ -66,7 +66,7 @@ def load_meta_data(self, raw_data_dir, ds_id): f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' 
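# Editor's sketch of the expected input, since the format is only implicit
# here (item name and values are hypothetical). A transcriptions.csv row
#     name,ph_seq,ph_dur
#     item001,sh ang,0.24 0.37
# becomes, via the branch above:
#     {'wav_fn': '<raw_data_dir>/wavs/item001.wav',
#      'ph_seq': ['sh', 'ang'], 'ph_dur': [0.24, 0.37], 'spk_id': ds_id}
# and the assert just above enforces len(ph_seq) == len(ph_dur).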
meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict else: - utterance_labels = open(os.path.join(raw_data_dir, 'transcriptions.txt'), 'r', encoding='utf-8').readlines() + utterance_labels = open(raw_data_dir / 'transcriptions.txt', 'r', encoding='utf-8').readlines() for utterance_label in utterance_labels: song_info = utterance_label.split('|') item_name = song_info[0] @@ -131,7 +131,7 @@ def check_coverage(self): plt.title('Phoneme Distribution Summary', fontsize=30) plt.xlabel('Phoneme', fontsize=20) plt.ylabel('Number of occurrences', fontsize=20) - filename = os.path.join(hparams['binary_data_dir'], 'phoneme_distribution.jpg') + filename = self.binary_data_dir / 'phoneme_distribution.jpg' plt.savefig(fname=filename, bbox_inches='tight', pad_inches=0.25) @@ -145,7 +145,7 @@ def check_coverage(self): f' (-) {sorted(missing_phones)}') # Copy dictionary to binary data dir - shutil.copy(locate_dictionary(), os.path.join(hparams['binary_data_dir'], 'dictionary.txt')) + shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): data_dir = hparams['binary_data_dir'] @@ -189,7 +189,7 @@ def postprocess(_item): postprocess(item) builder.finalize() - with open(os.path.join(data_dir, f'{prefix}.lengths'), 'wb') as f: + with open(data_dir / f'{prefix}.lengths', 'wb') as f: # noinspection PyTypeChecker np.save(f, lengths) diff --git a/scripts/infer.py b/scripts/infer.py index 09b12c817..b3196ca9c 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -5,9 +5,9 @@ import pathlib import sys -root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) -os.environ['PYTHONPATH'] = root_dir -sys.path.insert(0, root_dir) +root_dir = pathlib.Path(__file__).parent.parent.resolve() +os.environ['PYTHONPATH'] = str(root_dir) +sys.path.insert(0, str(root_dir)) import numpy as np import torch @@ -32,31 +32,36 @@ help='Save intermediate mel format instead of waveform') args = parser.parse_args() -name = os.path.basename(args.proj).split('.')[0] if not args.title else args.title +proj = pathlib.Path(args.proj) +name = proj.stem if not args.title else args.title exp = args.exp -if not os.path.exists(f'{root_dir}/checkpoints/{exp}'): - for ckpt in os.listdir(os.path.join(root_dir, 'checkpoints')): - if ckpt.startswith(exp): - print(f'| match ckpt by prefix: {ckpt}') - exp = ckpt +if not (root_dir / 'checkpoints' / exp).exists(): + for ckpt in (root_dir / 'checkpoints').iterdir(): + if not ckpt.is_dir(): + continue + if ckpt.name.startswith(exp): + print(f'| match ckpt by prefix: {ckpt.name}') + exp = ckpt.name break - assert os.path.exists(f'{root_dir}/checkpoints/{exp}'), 'There are no matching exp in \'checkpoints\' folder. ' \ - 'Please specify \'--exp\' as the folder name or prefix.' + else: + raise FileNotFoundError('There are no matching exp in \'checkpoints\' folder. 
' + 'Please specify \'--exp\' as the folder name or prefix.') else: print(f'| found ckpt by name: {exp}') -out = args.out -if not out: - out = os.path.dirname(os.path.abspath(args.proj)) +if args.out: + out = pathlib.Path(args.out) +else: + out = proj.parent sys.argv = [ - f'{root_dir}/inference/ds_cascade.py', + sys.argv[0], '--exp_name', exp, '--infer' ] -with open(args.proj, 'r', encoding='utf-8') as f: +with open(proj, 'r', encoding='utf-8') as f: params = json.load(f) if not isinstance(params, list): params = [params] @@ -78,7 +83,7 @@ sample_rate = hparams['audio_sample_rate'] # Check for vocoder path -assert os.path.exists(os.path.join(root_dir, hparams['vocoder_ckpt'])), \ +assert (root_dir / hparams['vocoder_ckpt']).exists(), \ f'Vocoder ckpt \'{hparams["vocoder_ckpt"]}\' not found. ' \ f'Please put it to the checkpoints directory to run inference.' @@ -98,7 +103,7 @@ merge_slurs(param) -def infer_once(path: str, save_mel=False): +def infer_once(path: pathlib.Path, save_mel=False): if save_mel: result = [] else: @@ -148,7 +153,7 @@ def infer_once(path: str, save_mel=False): os.makedirs(out, exist_ok=True) suffix = '.wav' if not args.mel else '.mel.pt' if args.num == 1: - infer_once(os.path.join(out, f'{name}{suffix}'), save_mel=args.mel) + infer_once(out / (name + suffix), save_mel=args.mel) else: for i in range(1, args.num + 1): - infer_once(os.path.join(out, f'{name}-{str(i).zfill(3)}{suffix}'), save_mel=args.mel) + infer_once(out / f'{name}-{str(i).zfill(3)}{suffix}', save_mel=args.mel) diff --git a/scripts/train.py b/scripts/train.py index f07a8ad8d..48ffb921c 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -1,7 +1,7 @@ import importlib import os -import sys -os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1' # Prevent unacceptable slowdowns when using 16 precision + +os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1' # Prevent unacceptable slowdowns when using 16 precision from utils.hparams import set_hparams, hparams @@ -10,6 +10,7 @@ print("Disabling NCCL P2P") os.environ['NCCL_P2P_DISABLE'] = '1' + def run_task(): assert hparams['task_cls'] != '' pkg = ".".join(hparams["task_cls"].split(".")[:-1]) diff --git a/scripts/vocode.py b/scripts/vocode.py index d171c68d5..517cdd75f 100644 --- a/scripts/vocode.py +++ b/scripts/vocode.py @@ -4,15 +4,15 @@ import pathlib import sys -root_dir = str(pathlib.Path(__file__).parent.parent.resolve()) -os.environ['PYTHONPATH'] = root_dir -sys.path.insert(0, root_dir) +root_dir = pathlib.Path(__file__).parent.parent.resolve() +os.environ['PYTHONPATH'] = str(root_dir) +sys.path.insert(0, str(root_dir)) import numpy as np import torch import tqdm -from basics.base_svs_infer import BaseSVSInfer +from inference.ds_acoustic import DiffSingerAcousticInfer from utils.infer_utils import cross_fade, save_wav from utils.hparams import set_hparams, hparams @@ -26,19 +26,20 @@ parser.add_argument('--title', type=str, required=False, help='Title of output file') args = parser.parse_args() -name = os.path.basename(args.mel).split('.')[0] if not args.title else args.title +mel = pathlib.Path(args.mel) +name = mel.stem if not args.title else args.title config = None if args.exp: - config = f'{root_dir}/checkpoints/{args.exp}/config.yaml' + config = root_dir / 'checkpoints' / args.exp / 'config.yaml' elif args.config: - config = args.config + config = pathlib.Path(args.config) else: assert False, 'Either argument \'--exp\' or \'--config\' should be specified.' 
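# Editor's note (invocations are illustrative, not taken from the repo docs):
# together with scripts/infer.py above, this script enables a two-stage
# workflow in which the acoustic model and the vocoder run separately:
#     python scripts/infer.py song.ds --exp my_exp --mel   # writes song.mel.pt
#     python scripts/vocode.py song.mel.pt --exp my_exp    # renders song.wav
# The .mel.pt file holds the `mel_seq` list that is validated below.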
sys.argv = [ - f'{root_dir}/inference/ds_e2e.py', + sys.argv[0], '--config', - config + str(config) ] set_hparams(print_hparams=False) @@ -50,18 +51,20 @@ out = args.out -if not out: - out = os.path.dirname(os.path.abspath(args.mel)) +if args.out: + out = pathlib.Path(args.out) +else: + out = mel.parent -mel_seq = torch.load(args.mel) -sample_rate = hparams['audio_sample_rate'] +mel_seq = torch.load(mel) +assert isinstance(mel_seq, list), 'Not a valid mel sequence.' +assert len(mel_seq) > 0, 'Mel sequence is empty.' -infer_ins = None -if len(mel_seq) > 0: - infer_ins = BaseSVSInfer(hparams, load_model=False) +sample_rate = hparams['audio_sample_rate'] +infer_ins = DiffSingerAcousticInfer(load_model=False) -def run_vocoder(path: str): +def run_vocoder(path: pathlib.Path): result = np.zeros(0) current_length = 0 @@ -81,4 +84,4 @@ def run_vocoder(path: str): os.makedirs(out, exist_ok=True) -run_vocoder(os.path.join(out, f'{name}.wav')) +run_vocoder(out / (name + '.wav')) diff --git a/utils/__init__.py b/utils/__init__.py index f8ca8c164..921e261d8 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,5 +1,4 @@ -import glob -import os +import pathlib import re import time import types @@ -135,18 +134,20 @@ def load_ckpt( required_category=None, prefix_in_ckpt='model', key_in_ckpt='state_dict', strict=True, device='cpu' ): - if os.path.isfile(ckpt_base_dir): + if not isinstance(ckpt_base_dir, pathlib.Path): + ckpt_base_dir = pathlib.Path(ckpt_base_dir) + if ckpt_base_dir.is_file(): checkpoint_path = [ckpt_base_dir] elif ckpt_steps is not None: - checkpoint_path = [os.path.join(ckpt_base_dir, f'model_ckpt_steps_{int(ckpt_steps)}.ckpt')] + checkpoint_path = [ckpt_base_dir / f'model_ckpt_steps_{int(ckpt_steps)}.ckpt'] else: base_dir = ckpt_base_dir checkpoint_path = [ - os.path.join(base_dir, ckpt_file) + base_dir / ckpt_file for ckpt_file in sorted( [ - os.path.basename(ckpt) - for ckpt in glob.glob(f'{base_dir}/model_ckpt_steps_*.ckpt') + ckpt.name + for ckpt in base_dir.glob('model_ckpt_steps_*.ckpt') ], key=lambda x: int(re.findall(fr'model_ckpt_steps_(\d+).ckpt', x.replace('\\', '/'))[0]) ) diff --git a/utils/infer_utils.py b/utils/infer_utils.py index a8ec041ab..43cb3eae3 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -31,16 +31,6 @@ def merge_slurs(param): param['ph_dur'] = ' '.join([str(d) for d in ph_dur]) -def cross_fade(a: np.ndarray, b: np.ndarray, idx: int): - result = np.zeros(idx + b.shape[0]) - fade_len = a.shape[0] - idx - np.copyto(dst=result[:idx], src=a[:idx]) - k = np.linspace(0, 1.0, num=fade_len, endpoint=True) - result[idx: a.shape[0]] = (1 - k) * a[idx:] + k * b[: fade_len] - np.copyto(dst=result[a.shape[0]:], src=b[fade_len:]) - return result - - def trans_f0_seq(feature_pit, transform): feature_pit = feature_pit * 2 ** (transform / 12) return round(feature_pit, 1) @@ -136,6 +126,16 @@ def parse_commandline_spk_mix(mix: str) -> dict: return proportion_map +def cross_fade(a: np.ndarray, b: np.ndarray, idx: int): + result = np.zeros(idx + b.shape[0]) + fade_len = a.shape[0] - idx + np.copyto(dst=result[:idx], src=a[:idx]) + k = np.linspace(0, 1.0, num=fade_len, endpoint=True) + result[idx: a.shape[0]] = (1 - k) * a[idx:] + k * b[: fade_len] + np.copyto(dst=result[a.shape[0]:], src=b[fade_len:]) + return result + + def save_wav(wav, path, sr, norm=False): if norm: wav = wav / np.abs(wav).max() diff --git a/utils/phoneme_utils.py b/utils/phoneme_utils.py index 8ae198296..f77a5e63f 100644 --- a/utils/phoneme_utils.py +++ b/utils/phoneme_utils.py @@ 
-33,10 +33,10 @@ def locate_dictionary(): if config_dict_path.exists(): return config_dict_path work_dir = pathlib.Path(hparams['work_dir']) - ckpt_dict_path = work_dir.joinpath(config_dict_path.name) + ckpt_dict_path = work_dir / config_dict_path.name if ckpt_dict_path.exists(): return ckpt_dict_path - ckpt_dict_path = work_dir.joinpath('dictionary.txt') + ckpt_dict_path = work_dir / 'dictionary.txt' if ckpt_dict_path.exists(): return ckpt_dict_path raise FileNotFoundError('Unable to locate the dictionary file. ' diff --git a/utils/training_utils.py b/utils/training_utils.py index 5f60f8232..87cf785d2 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -1,8 +1,6 @@ import math -import os import re from copy import deepcopy -from glob import glob from pathlib import Path from typing import Dict @@ -286,20 +284,21 @@ def _remove_checkpoint(self, trainer: "pl.Trainer", filepath: str): def get_latest_checkpoint_path(work_dir): - if not os.path.exists(work_dir): + if not isinstance(work_dir, Path): + work_dir = Path(work_dir) + if not work_dir.exists(): return None last_step = -1 last_ckpt_name = None - checkpoints = glob(str(Path(work_dir) / '*.ckpt')) - for name in checkpoints: - search = re.search(r'steps_\d+', name) + for ckpt in work_dir.glob('model_ckpt_steps_*.ckpt'): + search = re.search(r'steps_\d+', ckpt.name) if search: step = int(search.group(0)[6:]) if step > last_step: last_step = step - last_ckpt_name = name + last_ckpt_name = str(ckpt) return last_ckpt_name if last_ckpt_name is not None else None From f45dfb8bc70b1c892bc0e2b17cb4e7701a180270 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 11 Apr 2023 13:28:12 +0800 Subject: [PATCH 219/475] Optimize imports --- modules/nsf_hifigan/models.py | 3 +-- modules/vocoders/nsf_hifigan.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/modules/nsf_hifigan/models.py b/modules/nsf_hifigan/models.py index a2dcac440..a77eb0a38 100644 --- a/modules/nsf_hifigan/models.py +++ b/modules/nsf_hifigan/models.py @@ -1,14 +1,13 @@ import json -import os import pathlib import numpy as np import torch import torch.nn as nn import torch.nn.functional as F +from lightning.pytorch.utilities.rank_zero import rank_zero_info from torch.nn import Conv1d, ConvTranspose1d from torch.nn.utils import weight_norm, remove_weight_norm -from lightning.pytorch.utilities.rank_zero import rank_zero_info from .env import AttrDict from .utils import init_weights, get_padding diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index d5cfeabcf..c4f48b3e9 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -1,7 +1,7 @@ -import os import pathlib import torch + try: from lightning.pytorch.utilities.rank_zero import rank_zero_info except ModuleNotFoundError: From 8701b8955dfecaa9c6cf9eefcae38120c619e262 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 11 Apr 2023 22:44:09 +0800 Subject: [PATCH 220/475] Add script to drop speaker embedding from checkpoints --- scripts/drop_spk.py | 63 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 scripts/drop_spk.py diff --git a/scripts/drop_spk.py b/scripts/drop_spk.py new file mode 100644 index 000000000..cfd8937e0 --- /dev/null +++ b/scripts/drop_spk.py @@ -0,0 +1,63 @@ +import argparse +import pathlib +import re + +parser = argparse.ArgumentParser(description='Drop or edit spk_embed in a checkpoint.') +parser.add_argument('input', type=str, help='Path to the input file') 
+parser.add_argument('output', type=str, help='Path to the output file') +drop_retain_group = parser.add_mutually_exclusive_group() +drop_retain_group.add_argument('--drop', type=str, required=False, metavar='ID,ID,...', + help='Drop specific speaker IDs.') +drop_retain_group.add_argument('--retain', type=str, required=False, metavar='ID,ID,...', + help='Retain specific speaker IDs and drop all the others.') +parser.add_argument('--fill', type=str, required=False, default='zeros', metavar='METHOD', + choices=['zeros', 'random', 'mean', 'cyclic'], + help='Specify a filling method for the dropped embedding. ' + 'Available methods: zeros, random, mean, cyclic') +parser.add_argument('--overwrite', required=False, default=False, + action='store_true', help='Overwrite if the output file exists.') +args = parser.parse_args() +assert args.drop is not None or args.retain is not None, 'Either --drop or --retain should be specified.' +if args.drop and not re.fullmatch(r'\d+(,\d+)*', args.drop): + print(f'Invalid format for --drop: \'{args.drop}\'') + exit(-1) +if args.retain and not re.fullmatch(r'\d+(,\d+)*', args.retain): + print(f'Invalid format for --retain: \'{args.retain}\'') + exit(-1) + +import torch +input_ckpt = pathlib.Path(args.input).resolve() +output_ckpt = pathlib.Path(args.output).resolve() +assert input_ckpt.exists(), 'The input file does not exist.' +assert args.overwrite or not output_ckpt.exists(), \ + 'The output file already exists or is the same as the input file.\n' \ + 'This is not recommended because spk_embed dropping scripts may not be stable, ' \ + 'and you may be at risk of losing your model.\n' \ + 'If you are sure to OVERWRITE the existing file, please re-run this script with the \'--overwrite\' argument.' + +ckpt_loaded = torch.load(input_ckpt, map_location='cpu') +spk_embed = ckpt_loaded['state_dict']['model.fs2.spk_embed.weight'] +num_spk, hidden_size = spk_embed.shape +all_ids = set(range(num_spk)) +if args.drop is not None: + drop_ids = set([int(i) for i in args.drop.split(',')]).intersection(all_ids) +else: + drop_ids = all_ids - set([int(i) for i in args.retain.split(',')]) + +fill_list = None +if args.fill == 'zeros': + fill_list = [0. for _ in drop_ids] +elif args.fill == 'random': + fill_list = [torch.randn(1, hidden_size, dtype=torch.float32, device='cpu') for _ in drop_ids] +elif args.fill == 'mean': + mean = torch.mean(spk_embed, dim=0, keepdim=True) + fill_list = [mean for _ in drop_ids] +elif args.fill == 'cyclic': + retain_ids = sorted(all_ids - drop_ids) + num_retain = len(retain_ids) + fill_list = [spk_embed[retain_ids[i % num_retain], :] for i, _ in enumerate(drop_ids)] + +for spk_id, fill in zip(sorted(drop_ids), fill_list): + spk_embed[spk_id, :] = fill + +torch.save(ckpt_loaded, output_ckpt) From 3b0748029dd9687bb72a1bf38b301a55ca731fe7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 11 Apr 2023 23:09:03 +0800 Subject: [PATCH 221/475] Support more argument formats --- scripts/drop_spk.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/drop_spk.py b/scripts/drop_spk.py index cfd8937e0..332f8516c 100644 --- a/scripts/drop_spk.py +++ b/scripts/drop_spk.py @@ -18,10 +18,10 @@ action='store_true', help='Overwrite if the output file exists.') args = parser.parse_args() assert args.drop is not None or args.retain is not None, 'Either --drop or --retain should be specified.' 
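# Context for the change below: the relaxed pattern r'(\d+)?(,\d+)*,?' also
# accepts a leading or trailing comma, and the adjusted split() calls filter
# out the resulting empty fields. A quick illustrative check (plain `re`
# semantics, nothing repo-specific):
#
#     import re
#     pat = r'(\d+)?(,\d+)*,?'
#     assert re.fullmatch(pat, '1,2,3')      # accepted before and after
#     assert re.fullmatch(pat, '1,2,3,')     # trailing comma now accepted
#     assert re.fullmatch(pat, ',1,2')       # leading comma now accepted
#     assert not re.fullmatch(pat, '1,,2')   # empty middle field still rejected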
-if args.drop and not re.fullmatch(r'\d+(,\d+)*', args.drop): +if args.drop and not re.fullmatch(r'(\d+)?(,\d+)*,?', args.drop): print(f'Invalid format for --drop: \'{args.drop}\'') exit(-1) -if args.retain and not re.fullmatch(r'\d+(,\d+)*', args.retain): +if args.retain and not re.fullmatch(r'(\d+)?(,\d+)*,?', args.retain): print(f'Invalid format for --retain: \'{args.retain}\'') exit(-1) @@ -40,9 +40,9 @@ num_spk, hidden_size = spk_embed.shape all_ids = set(range(num_spk)) if args.drop is not None: - drop_ids = set([int(i) for i in args.drop.split(',')]).intersection(all_ids) + drop_ids = set([int(i) for i in args.drop.split(',') if i != '']).intersection(all_ids) else: - drop_ids = all_ids - set([int(i) for i in args.retain.split(',')]) + drop_ids = all_ids - set([int(i) for i in args.retain.split(',') if i != '']) fill_list = None if args.fill == 'zeros': From 30651fe3e50761b2e6fd16dacb11426d0b1c65cb Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 11 Apr 2023 23:09:25 +0800 Subject: [PATCH 222/475] Ignore everything in assets/ --- deployment/.gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/.gitignore b/deployment/.gitignore index 54f44899c..bab8dba67 100644 --- a/deployment/.gitignore +++ b/deployment/.gitignore @@ -4,3 +4,4 @@ *.wav temp/ cache/ +assets/ From ed83c6ec8f4569a47486d55179dab3de712d2a15 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 12 Apr 2023 01:09:26 +0800 Subject: [PATCH 223/475] Add doc example --- docs/ConfigurationSchemas.md | 159 +++++++++++++++++++++++++++++++++++ 1 file changed, 159 insertions(+) create mode 100644 docs/ConfigurationSchemas.md diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md new file mode 100644 index 000000000..d2087c1d6 --- /dev/null +++ b/docs/ConfigurationSchemas.md @@ -0,0 +1,159 @@ +# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism + +[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446) +[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger) +[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases) + | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger) + +## Configuration Schemas + +This document explains the meaning and usages of all editable keys in a configuration file. + +### Common configurations + +#### base_config + +Path(s) of other config files that the current config is based on and will override. + +##### used by + +all + +##### type + +str, List[str] + +##### default + +_none_ + +##### Constraints + +_none_ + +### Neural networks + +#### hidden_size + +Dimension of hidden layers of FastSpeech2, token and variance embeddings, and diffusion condition. + +##### used by + +acoustic model + +##### type + +int + +##### default + +_256_ + +##### Constraints + +__none__ + +#### residual_channels + +TBD + +#### residual_layers + +TBD + +#### diff_decoder_type + +Denoiser type of the DDPM. + +##### used by + +acoustic model + +##### type + +str + +##### default + +_wavenet_ + +##### Constraints + +choose from [ _wavenet_ ] + +#### diff_loss_type + +Loss type of the DDPM. 
+ +##### used by + +acoustic model + +##### type + +str + +##### default + +_l2_ + +##### Constraints + +choose from [ _l1_, _l2_ ] + +### Dataset information and preprocessing + +#### raw_data_dir + +Path(s) to the raw data including wave files, transcriptions, etc. + +##### used by + +all + +##### type + +str, List[str] + +##### default + +_none_ + +##### Constraints + +_none_ + +### Training, validation and inference + +#### task_cls + +TBD + +#### lr + +Initial learning rate of the scheduler. + +##### used by + +all + +##### type + +float + +##### default + +_0.0004_ + +##### Constraints + +_none_ + +#### max_batch_frames + +TBD + +#### max_batch_size + +TBD + From 5596de904e1c483021e69d2f62e021177f8a364c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 12 Apr 2023 01:21:32 +0800 Subject: [PATCH 224/475] Remove _none_ --- docs/ConfigurationSchemas.md | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index d2087c1d6..270987377 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -23,14 +23,6 @@ all str, List[str] -##### default - -_none_ - -##### Constraints - -_none_ - ### Neural networks #### hidden_size @@ -49,10 +41,6 @@ int _256_ -##### Constraints - -__none__ - #### residual_channels TBD @@ -115,14 +103,6 @@ all str, List[str] -##### default - -_none_ - -##### Constraints - -_none_ - ### Training, validation and inference #### task_cls @@ -145,10 +125,6 @@ float _0.0004_ -##### Constraints - -_none_ - #### max_batch_frames TBD From eb09ae01c0daad65114a8f6df3f83868f5d855c3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 12 Apr 2023 20:27:52 +0800 Subject: [PATCH 225/475] Optimize timestep calculation --- augmentation/spec_stretch.py | 4 ++-- basics/base_augmentation.py | 4 ++++ basics/base_binarizer.py | 1 + preprocessing/acoustic_binarizer.py | 6 ++++-- utils/binarizer_utils.py | 8 ++------ 5 files changed, 13 insertions(+), 10 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 936eee58c..e9c0f8f9a 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -35,11 +35,11 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) if speed != 1. or hparams.get('use_speed_embed', False): aug_item['length'] = mel.shape[0] - aug_item['speed'] = int(np.round(hparams['hop_size'] * speed)) / hparams['hop_size'] # real speed + aug_item['speed'] = int(np.round(hparams['hop_size'] * speed)) / hparams['hop_size'] # real speed aug_item['seconds'] /= aug_item['speed'] aug_item['ph_dur'] /= aug_item['speed'] aug_item['mel2ph'] = get_mel2ph_torch( - self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], hparams, device=self.device + self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], self.timestep, device=self.device ).cpu().numpy() f0, _, _ = get_pitch_parselmouth( wav, aug_item['length'], hparams, speed=speed, interp_uv=hparams['interp_uv'] diff --git a/basics/base_augmentation.py b/basics/base_augmentation.py index 193546d95..ac71f48f3 100644 --- a/basics/base_augmentation.py +++ b/basics/base_augmentation.py @@ -1,3 +1,6 @@ +from utils.hparams import hparams + + class BaseAugmentation: """ Base class for data augmentation. 
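# Aside on the "real speed" expression in spec_stretch.py above: the requested
# speed is snapped to the nearest value expressible in whole samples per hop,
# so the stored value can differ slightly from the request. A worked check
# (hop_size=512, as in the configs in this repo):
speed = 1.2
hop_size = 512
real_speed = int(round(hop_size * speed)) / hop_size
assert real_speed == 1.19921875  # 512 * 1.2 = 614.4 -> 614 -> 614 / 512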
@@ -8,6 +11,7 @@ class BaseAugmentation: def __init__(self, data_dirs: list, augmentation_args: dict): self.raw_data_dirs = data_dirs self.augmentation_args = augmentation_args + self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] def process_item(self, item: dict, **kwargs) -> dict: raise NotImplementedError() diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 63b35c425..14bdd36bb 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -58,6 +58,7 @@ def __init__(self, data_dir=None): self.spk_map = None self.items = {} self.phone_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) + self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] # load each dataset for ds_id, data_dir in enumerate(self.raw_data_dirs): diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 43b47420a..ea9920fe5 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -215,7 +215,7 @@ def process_item(self, item_name, meta_data, binarization_args): 'length': length, 'mel': mel, 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), - 'ph_dur': np.array(meta_data['ph_dur']), + 'ph_dur': np.array(meta_data['ph_dur']).astype(np.float32), } # get ground truth f0 @@ -228,7 +228,9 @@ def process_item(self, item_name, meta_data, binarization_args): processed_input['f0'] = gt_f0.astype(np.float32) # get ground truth dur - processed_input['mel2ph'] = get_mel2ph_torch(self.lr, torch.from_numpy(processed_input['ph_dur']), length, hparams).cpu().numpy() + processed_input['mel2ph'] = get_mel2ph_torch( + self.lr, torch.from_numpy(processed_input['ph_dur']), length, self.timestep + ).cpu().numpy() if hparams.get('use_key_shift_embed', False): processed_input['key_shift'] = 0. 
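# Orientation for the binarizer_utils change below: timestep is the duration
# of one mel frame (hop_size / audio_sample_rate), and cumulative phoneme
# durations are rounded to frame boundaries. A small worked sketch (values
# illustrative; hop_size=512 and sample_rate=44100 as in the configs here):
import torch
timestep = 512 / 44100                    # ≈ 0.011610 s per frame
durs = torch.tensor([0.30, 0.45])         # phoneme durations in seconds
ph_acc = torch.round(torch.cumsum(durs, dim=0) / timestep + 0.5).long()
# cumulative seconds / timestep ≈ [25.84, 64.60] -> boundaries [26, 65]
ph_dur_frames = torch.diff(ph_acc, dim=0, prepend=torch.tensor([0]))
# -> tensor([26, 39]): 26 frames for the first phone, 39 for the second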
diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index fa1c9843d..ec1e4be24 100644 --- a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -39,12 +39,8 @@ def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False): @torch.no_grad() -def get_mel2ph_torch(lr, durs, length, hparams, device='cpu'): - ph_acc = torch.round( - torch.cumsum( - durs.to(device), dim=0 - ) * hparams['audio_sample_rate'] / hparams['hop_size'] + 0.5 - ).long() +def get_mel2ph_torch(lr, durs, length, timestep, device='cpu'): + ph_acc = torch.round(torch.cumsum(durs.to(device), dim=0) / timestep + 0.5).long() ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(device)) mel2ph = lr(ph_dur[None])[0] num_frames = mel2ph.shape[0] From 76d1480d8e7512b39be332364b03544d5461a90f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 12 Apr 2023 20:37:52 +0800 Subject: [PATCH 226/475] Remove test set and replace with valid set --- basics/base_binarizer.py | 47 +++++++++++++++++----------------------- 1 file changed, 20 insertions(+), 27 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 14bdd36bb..2ae1509b6 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -5,7 +5,7 @@ import random from copy import deepcopy -from utils.hparams import set_hparams, hparams +from utils.hparams import hparams from utils.phoneme_utils import build_phoneme_list from utils.text_encoder import TokenTextEncoder @@ -64,7 +64,7 @@ def __init__(self, data_dir=None): for ds_id, data_dir in enumerate(self.raw_data_dirs): self.load_meta_data(pathlib.Path(data_dir), ds_id) self.item_names = sorted(list(self.items.keys())) - self._train_item_names, self._test_item_names = self.split_train_test_set() + self._train_item_names, self._valid_item_names = self.split_train_valid_set() if self.binarization_args['shuffle']: random.seed(hparams['seed']) @@ -73,36 +73,40 @@ def __init__(self, data_dir=None): def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): raise NotImplementedError() - def split_train_test_set(self): + def split_train_valid_set(self): + """ + Split the dataset into training set and validation set. 
+ :return: train_item_names, valid_item_names + """ item_names = set(deepcopy(self.item_names)) prefixes = set([str(pr) for pr in hparams['test_prefixes']]) - test_item_names = set() + valid_item_names = set() # Add prefixes that specified speaker index and matches exactly item name to test set for prefix in deepcopy(prefixes): if prefix in item_names: - test_item_names.add(prefix) + valid_item_names.add(prefix) prefixes.remove(prefix) # Add prefixes that exactly matches item name without speaker id to test set for prefix in deepcopy(prefixes): for name in item_names: if name.split(':')[-1] == prefix: - test_item_names.add(name) + valid_item_names.add(name) prefixes.remove(prefix) # Add names with one of the remaining prefixes to test set for prefix in deepcopy(prefixes): for name in item_names: if name.startswith(prefix): - test_item_names.add(name) + valid_item_names.add(name) prefixes.remove(prefix) for prefix in prefixes: for name in item_names: if name.split(':')[-1].startswith(prefix): - test_item_names.add(name) - test_item_names = sorted(list(test_item_names)) - train_item_names = [x for x in item_names if x not in set(test_item_names)] + valid_item_names.add(name) + valid_item_names = sorted(list(valid_item_names)) + train_item_names = [x for x in item_names if x not in set(valid_item_names)] logging.info("train {}".format(len(train_item_names))) - logging.info("test {}".format(len(test_item_names))) - return train_item_names, test_item_names + logging.info("test {}".format(len(valid_item_names))) + return train_item_names, valid_item_names @property def train_item_names(self): @@ -110,11 +114,7 @@ def train_item_names(self): @property def valid_item_names(self): - return self._test_item_names - - @property - def test_item_names(self): - return self._test_item_names + return self._valid_item_names def build_spk_map(self): spk_map = {x: i for i, x in enumerate(hparams['speakers'])} @@ -122,12 +122,10 @@ def build_spk_map(self): self.spk_map = spk_map def meta_data_iterator(self, prefix): - if prefix == 'valid': - item_names = self.valid_item_names - elif prefix == 'test': - item_names = self.test_item_names - else: + if prefix == 'train': item_names = self.train_item_names + else: + item_names = self.valid_item_names for item_name in item_names: meta_data = self.items[item_name] yield item_name, meta_data @@ -154,8 +152,3 @@ def arrange_data_augmentation(self, prefix): def process_item(self, item_name, meta_data, binarization_args): raise NotImplementedError() - - -if __name__ == "__main__": - set_hparams() - BaseBinarizer().process() From a21d1ff2bbc59b7380488d8c7921376696e48a4e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 12 Apr 2023 21:08:50 +0800 Subject: [PATCH 227/475] Add device to binarizer --- basics/base_binarizer.py | 3 +++ preprocessing/acoustic_binarizer.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 2ae1509b6..7e8516a03 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -5,6 +5,8 @@ import random from copy import deepcopy +import torch + from utils.hparams import hparams from utils.phoneme_utils import build_phoneme_list from utils.text_encoder import TokenTextEncoder @@ -54,6 +56,7 @@ def __init__(self, data_dir=None): self.binarization_args = hparams['binarization_args'] self.augmentation_args = hparams.get('augmentation_args', {}) + self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') self.spk_map = None self.items = {} diff --git 
a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index ea9920fe5..ee0e28c13 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -229,7 +229,7 @@ def process_item(self, item_name, meta_data, binarization_args): # get ground truth dur processed_input['mel2ph'] = get_mel2ph_torch( - self.lr, torch.from_numpy(processed_input['ph_dur']), length, self.timestep + self.lr, torch.from_numpy(processed_input['ph_dur']), length, self.timestep, device=self.device ).cpu().numpy() if hparams.get('use_key_shift_embed', False): From 0dd4d527e57a85f55794299a6c39155eabb803f6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 12 Apr 2023 22:49:15 +0800 Subject: [PATCH 228/475] Use `self.binary_data_dir` --- preprocessing/acoustic_binarizer.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index ee0e28c13..18de54659 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -148,9 +148,8 @@ def check_coverage(self): shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): - data_dir = hparams['binary_data_dir'] args = [] - builder = IndexedDatasetBuilder(data_dir, prefix=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) + builder = IndexedDatasetBuilder(self.binary_data_dir, prefix=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) lengths = [] total_sec = 0 total_raw_sec = 0 @@ -189,7 +188,7 @@ def postprocess(_item): postprocess(item) builder.finalize() - with open(data_dir / f'{prefix}.lengths', 'wb') as f: + with open(self.binary_data_dir / f'{prefix}.lengths', 'wb') as f: # noinspection PyTypeChecker np.save(f, lengths) From 4c380d8d937694857e6230f916c68884dd937e12 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 15:56:48 +0800 Subject: [PATCH 229/475] Reuse code of dataset and task --- basics/base_dataset.py | 22 +++++---- basics/base_task.py | 98 +++++++++++++++++++++++++++++++++------ training/acoustic_task.py | 87 ++-------------------------------- 3 files changed, 101 insertions(+), 106 deletions(-) diff --git a/basics/base_dataset.py b/basics/base_dataset.py index 031586a19..4cc5d2e8b 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -4,10 +4,11 @@ from torch.utils.data import Dataset from utils.hparams import hparams +from utils.indexed_datasets import IndexedDataset class BaseDataset(Dataset): - ''' + """ Base class for datasets. 1. *sizes*: clipped length if "max_frames" is set; @@ -19,21 +20,21 @@ class BaseDataset(Dataset): take the longest data, pad other data to the same length; 2. *__getitem__*: the index function. - ''' - def __init__(self): + """ + + def __init__(self, prefix): super().__init__() - self.hparams = hparams - self.sizes = None + self.prefix = prefix + self.data_dir = hparams['binary_data_dir'] + self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) + self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) @property def _sizes(self): return self.sizes def __getitem__(self, index): - raise NotImplementedError - - def collater(self, samples): - raise NotImplementedError + return self.indexed_ds[index] def __len__(self): return len(self._sizes) @@ -45,3 +46,6 @@ def size(self, index): """Return an example's size as a float or tuple. 
This value is used when filtering a dataset with ``--max-positions``.""" return self._sizes[index] + + def collater(self, samples): + raise NotImplementedError() diff --git a/basics/base_task.py b/basics/base_task.py index ddea1b9c0..6273474bd 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -7,6 +7,8 @@ import matplotlib +from utils.text_encoder import TokenTextEncoder + matplotlib.use('Agg') import torch.utils.data @@ -20,9 +22,10 @@ from utils.hparams import hparams from utils.training_utils import ( DsModelCheckpoint, DsTQDMProgressBar, + DsBatchSampler, DsEvalBatchSampler, get_latest_checkpoint_path, get_strategy ) -from utils.phoneme_utils import locate_dictionary +from utils.phoneme_utils import locate_dictionary, build_phoneme_list torch.multiprocessing.set_sharing_strategy(os.getenv('TORCH_SHARE_STRATEGY', 'file_system')) @@ -32,7 +35,7 @@ class BaseTask(pl.LightningModule): - ''' + """ Base class for training tasks. 1. *load_ckpt*: load checkpoint; @@ -52,14 +55,15 @@ class BaseTask(pl.LightningModule): one training step of the model; 3. *on_validation_end* and *_on_validation_end*: postprocess the validation output. - ''' + """ def __init__(self, *args, **kwargs): # dataset configs - super(BaseTask, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.loaded_optimizer_states_dict = {} self.example_input_array = None + self.dataset_cls = None self.max_batch_frames = hparams['max_batch_frames'] self.max_batch_size = hparams['max_batch_size'] self.max_val_batch_frames = hparams['max_val_batch_frames'] @@ -73,7 +77,7 @@ def __init__(self, *args, **kwargs): self.model = None self.skip_immediate_validation = False self.skip_immediate_ckpt_save = False - + self.valid_metrics = { 'total_loss': MeanMetric() } @@ -81,7 +85,17 @@ def __init__(self, *args, **kwargs): ########### # Training, validation and testing ########### - + def setup(self, stage): + self.phone_encoder = self.build_phone_encoder() + self.model = self.build_model() + self.train_dataset = self.dataset_cls(hparams['train_set_name']) + self.valid_dataset = self.dataset_cls(hparams['valid_set_name']) + + @staticmethod + def build_phone_encoder(): + phone_list = build_phoneme_list() + return TokenTextEncoder(vocab_list=phone_list) + def build_model(self): raise NotImplementedError @@ -100,7 +114,7 @@ def _training_step(self, sample, batch_idx, optimizer_idx): def training_step(self, sample, batch_idx, optimizer_idx=-1): total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) - + # logs to progress bar self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) self.log('lr', self.lr_schedulers().get_lr()[0], prog_bar=True, logger=False, on_step=True, on_epoch=False) @@ -108,15 +122,15 @@ def training_step(self, sample, batch_idx, optimizer_idx=-1): tb_log = {f'tr/{k}': v for k, v in log_outputs.items()} if self.global_step % self.trainer.log_every_n_steps == 0: self.logger.log_metrics(tb_log, step=self.global_step) - + return total_loss - + # def on_before_optimizer_step(self, *args, **kwargs): # self.log_dict(grad_norm(self, norm_type=2)) - + def _on_validation_start(self): pass - + def on_validation_start(self): self._on_validation_start() for metric in self.valid_metrics.values(): @@ -159,11 +173,24 @@ def on_validation_epoch_end(self): for metric in self.valid_metrics.values(): metric.reset() + # noinspection PyMethodMayBeStatic def build_scheduler(self, optimizer): - raise NotImplementedError + # return WarmupCosineSchedule(optimizer, + # 
warmup_steps=hparams['warmup_updates'], + # t_total=hparams['max_updates'], + # eta_min=0) + return torch.optim.lr_scheduler.StepLR( + optimizer, step_size=hparams['lr_decay_steps'], gamma=hparams['lr_decay_gamma'] + ) + # noinspection PyMethodMayBeStatic def build_optimizer(self, model): - raise NotImplementedError + optimizer = torch.optim.AdamW( + filter(lambda p: p.requires_grad, model.parameters()), + lr=hparams['lr'], + betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), + weight_decay=hparams['weight_decay']) + return optimizer def configure_optimizers(self): optm = self.build_optimizer(self.model) @@ -179,6 +206,45 @@ def configure_optimizers(self): } } + def train_dataloader(self): + self.training_sampler = DsBatchSampler( + self.train_dataset, + max_batch_frames=self.max_batch_frames, + max_batch_size=self.max_batch_size, + num_replicas=(self.trainer.distributed_sampler_kwargs or {}).get('num_replicas', 1), + rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), + sort_by_similar_size=hparams['sort_by_len'], + required_batch_count_multiple=hparams['accumulate_grad_batches'], + shuffle_sample=True, + shuffle_batch=False, + seed=hparams['seed'] + ) + return torch.utils.data.DataLoader(self.train_dataset, + collate_fn=self.train_dataset.collater, + batch_sampler=self.training_sampler, + num_workers=hparams['ds_workers'], + prefetch_factor=hparams['dataloader_prefetch_factor'], + pin_memory=True, + persistent_workers=True) + + def val_dataloader(self): + sampler = DsEvalBatchSampler( + self.valid_dataset, + max_batch_frames=self.max_val_batch_frames, + max_batch_size=self.max_val_batch_size, + rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), + batch_by_size=False + ) + return torch.utils.data.DataLoader(self.valid_dataset, + collate_fn=self.valid_dataset.collater, + batch_sampler=sampler, + num_workers=hparams['ds_workers'], + prefetch_factor=hparams['dataloader_prefetch_factor'], + shuffle=False) + + def test_dataloader(self): + return self.val_dataloader() + def on_test_start(self): self.on_validation_start() @@ -232,7 +298,8 @@ def start(cls): version='lastest' ), gradient_clip_val=hparams['clip_grad_norm'], - val_check_interval=hparams['val_check_interval'] * hparams['accumulate_grad_batches'], # so this is global_steps + val_check_interval=hparams['val_check_interval'] * hparams['accumulate_grad_batches'], + # so this is global_steps check_val_every_n_epoch=None, log_every_n_steps=hparams['log_interval'], max_steps=hparams['max_updates'], @@ -266,6 +333,7 @@ def train_payload_copy(): else: shutil.copy(locate_dictionary(), dictionary) print(f'| Copied dictionary to {dictionary}.') + train_payload_copy() trainer.fit(task, ckpt_path=get_latest_checkpoint_path(work_dir)) else: @@ -275,7 +343,7 @@ def on_save_checkpoint(self, checkpoint): if isinstance(self.model, CategorizedModule): checkpoint['category'] = self.model.category checkpoint['trainer_stage'] = self.trainer.state.stage.value - + def on_load_checkpoint(self, checkpoint): from lightning.pytorch.trainer.states import RunningStage if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 34c6841f0..c7f9784d3 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -21,26 +21,12 @@ from modules.vocoders.registry import get_vocoder_cls from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams -from utils.indexed_datasets import 
IndexedDataset -from utils.phoneme_utils import build_phoneme_list from utils.plot import spec_to_figure -from utils.text_encoder import TokenTextEncoder -from utils.training_utils import DsBatchSampler, DsEvalBatchSampler matplotlib.use('Agg') class AcousticDataset(BaseDataset): - def __init__(self, prefix): - super().__init__() - self.data_dir = hparams['binary_data_dir'] - self.prefix = prefix - self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) - self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) - - def __getitem__(self, index): - return self.indexed_ds[index] - def collater(self, samples): if len(samples) == 0: return {} @@ -64,6 +50,7 @@ def collater(self, samples): batch['spk_ids'] = spk_ids return batch + class AcousticTask(BaseTask): def __init__(self): super().__init__() @@ -76,84 +63,19 @@ def __init__(self): self.stats = {} self.logged_gt_wav = set() - def setup(self, stage): - self.phone_encoder = self.build_phone_encoder() - self.model = self.build_model() - self.train_dataset = self.dataset_cls(hparams['train_set_name']) - self.valid_dataset = self.dataset_cls(hparams['valid_set_name']) - - @staticmethod - def build_phone_encoder(): - phone_list = build_phoneme_list() - return TokenTextEncoder(vocab_list=phone_list) - def build_model(self): model = DiffSingerAcoustic( vocab_size=len(self.phone_encoder), out_dims=hparams['audio_num_mel_bins'] ) + @rank_zero_only def print_arch(): utils.print_arch(model) + print_arch() return model - def build_optimizer(self, model): - optimizer = torch.optim.AdamW( - filter(lambda p: p.requires_grad, model.parameters()), - lr=hparams['lr'], - betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), - weight_decay=hparams['weight_decay']) - return optimizer - - def build_scheduler(self, optimizer): - # return WarmupCosineSchedule(optimizer, - # warmup_steps=hparams['warmup_updates'], - # t_total=hparams['max_updates'], - # eta_min=0) - return torch.optim.lr_scheduler.StepLR( - optimizer, step_size=hparams['lr_decay_steps'], gamma=hparams['lr_decay_gamma'] - ) - - def train_dataloader(self): - self.training_sampler = DsBatchSampler( - self.train_dataset, - max_batch_frames=self.max_batch_frames, - max_batch_size=self.max_batch_size, - num_replicas=(self.trainer.distributed_sampler_kwargs or {}).get('num_replicas', 1), - rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), - sort_by_similar_size=hparams['sort_by_len'], - required_batch_count_multiple=hparams['accumulate_grad_batches'], - shuffle_sample=True, - shuffle_batch=False, - seed=hparams['seed'] - ) - return torch.utils.data.DataLoader(self.train_dataset, - collate_fn=self.train_dataset.collater, - batch_sampler=self.training_sampler, - num_workers=hparams['ds_workers'], - prefetch_factor=hparams['dataloader_prefetch_factor'], - pin_memory=True, - persistent_workers=True) - - def val_dataloader(self): - sampler = DsEvalBatchSampler( - self.valid_dataset, - max_batch_frames=self.max_val_batch_frames, - max_batch_size=self.max_val_batch_size, - rank=(self.trainer.distributed_sampler_kwargs or {}).get('rank', 0), - batch_by_size=False - ) - return torch.utils.data.DataLoader(self.valid_dataset, - collate_fn=self.valid_dataset.collater, - batch_sampler=sampler, - num_workers=hparams['ds_workers'], - prefetch_factor=hparams['dataloader_prefetch_factor'], - shuffle=False) - - def test_dataloader(self): - return self.val_dataloader() - def run_model(self, sample, return_output=False, infer=False): """ steps: @@ -200,7 +122,8 
@@ def _validation_step(self, sample, batch_idx): 'total_loss': total_loss } - if batch_idx < hparams['num_valid_plots'] and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: + if batch_idx < hparams['num_valid_plots'] and (self.trainer.distributed_sampler_kwargs or {}).get('rank', + 0) == 0: _, mel_pred = self.run_model(sample, return_output=True, infer=True) if self.use_vocoder: self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) From ac9e867209ab0ef49e31f4fa74eeb3aab227fc5f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 18:31:45 +0800 Subject: [PATCH 230/475] Reuse more code, extract losses --- basics/base_dataset.py | 4 ++- basics/base_task.py | 26 ++++++++++----- modules/diffusion/ddpm.py | 26 +++------------ modules/losses/diff_loss.py | 21 +++++++++++++ modules/{commons => losses}/ssim.py | 0 modules/toplevel.py | 1 - training/acoustic_task.py | 49 ++++++++++++++--------------- 7 files changed, 69 insertions(+), 58 deletions(-) create mode 100644 modules/losses/diff_loss.py rename modules/{commons => losses}/ssim.py (100%) diff --git a/basics/base_dataset.py b/basics/base_dataset.py index 4cc5d2e8b..5aeab9ae5 100644 --- a/basics/base_dataset.py +++ b/basics/base_dataset.py @@ -48,4 +48,6 @@ def size(self, index): return self._sizes[index] def collater(self, samples): - raise NotImplementedError() + return { + 'size': len(samples) + } diff --git a/basics/base_task.py b/basics/base_task.py index 6273474bd..220e9d9e6 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -97,23 +97,33 @@ def build_phone_encoder(): return TokenTextEncoder(vocab_list=phone_list) def build_model(self): - raise NotImplementedError + raise NotImplementedError() + + def build_losses(self): + raise NotImplementedError() + + def run_model(self, sample, infer=False): + """ + steps: + 1. run the full model + 2. 
calculate losses if not infer + """ + raise NotImplementedError() def on_train_epoch_start(self): if self.training_sampler is not None: self.training_sampler.set_epoch(self.current_epoch) - def _training_step(self, sample, batch_idx, optimizer_idx): + def _training_step(self, sample): """ - - :param sample: - :param batch_idx: :return: total loss: torch.Tensor, loss_log: dict, other_log: dict """ - raise NotImplementedError + losses = self.run_model(sample) + total_loss = sum([v for v in losses.values() if isinstance(v, torch.Tensor) and v.requires_grad]) + return total_loss, {**losses, 'batch_size': sample['size']} def training_step(self, sample, batch_idx, optimizer_idx=-1): - total_loss, log_outputs = self._training_step(sample, batch_idx, optimizer_idx) + total_loss, log_outputs = self._training_step(sample) # logs to progress bar self.log_dict(log_outputs, prog_bar=True, logger=False, on_step=True, on_epoch=False) @@ -144,7 +154,7 @@ def _validation_step(self, sample, batch_idx): :param batch_idx: :return: loss_log: dict, weight: int """ - raise NotImplementedError + raise NotImplementedError() def validation_step(self, sample, batch_idx): """ diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index b2f1cd15e..706ea350e 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -4,7 +4,6 @@ import numpy as np import torch -import torch.nn.functional as F from torch import nn from tqdm import tqdm @@ -67,7 +66,7 @@ def cosine_beta_schedule(timesteps, s=0.008): class GaussianDiffusion(nn.Module): def __init__(self, out_dims, timesteps=1000, k_step=1000, - denoiser_type=None, loss_type=None, betas=None, + denoiser_type=None, betas=None, spec_min=None, spec_max=None): super().__init__() self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type](hparams) @@ -85,7 +84,6 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, timesteps, = betas.shape self.num_timesteps = int(timesteps) self.k_step = k_step - self.loss_type = loss_type self.noise_list = deque(maxlen=4) @@ -190,32 +188,19 @@ def get_x_pred(x, noise_t, t): return x_prev - def q_sample(self, x_start, t, noise=None): - noise = default(noise, lambda: torch.randn_like(x_start)) + def q_sample(self, x_start, t, noise): return ( extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise ) - def p_losses(self, x_start, t, cond, noise=None, nonpadding=None): + def p_losses(self, x_start, t, cond, noise=None): noise = default(noise, lambda: torch.randn_like(x_start)) x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) x_recon = self.denoise_fn(x_noisy, t, cond) - if self.loss_type == 'l1': - if nonpadding is not None: - loss = ((noise - x_recon).abs() * nonpadding.unsqueeze(1)).mean() - else: - # print('are you sure w/o nonpadding?') - loss = (noise - x_recon).abs().mean() - - elif self.loss_type == 'l2': - loss = F.mse_loss(noise, x_recon) - else: - raise NotImplementedError() - - return loss + return x_recon, noise def forward(self, condition, gt_spec=None, infer=True): """ @@ -285,8 +270,5 @@ def wrapped(x, t, **kwargs): x = x.squeeze(1).transpose(1, 2) # [B, T, M] return self.denorm_spec(x) - def norm_spec(self, x): - return (x - self.spec_min) / (self.spec_max - self.spec_min) * 2 - 1 - def denorm_spec(self, x): return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py new file mode 100644 index 000000000..36b36b648 --- /dev/null +++ 
b/modules/losses/diff_loss.py @@ -0,0 +1,21 @@ +import torch.nn as nn +from torch import Tensor + + +class DiffusionNoiseLoss(nn.Module): + def __init__(self, loss_type): + super().__init__() + self.loss_type = loss_type + if self.loss_type == 'l1': + self.loss = nn.L1Loss() + elif self.loss_type == 'l2': + self.loss = nn.MSELoss() + else: + raise NotImplementedError() + + def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> Tensor: + if nonpadding is not None: + nonpadding = nonpadding.unsqueeze(1) + x_recon *= nonpadding + noise *= nonpadding + return self.loss(x_recon, noise) diff --git a/modules/commons/ssim.py b/modules/losses/ssim.py similarity index 100% rename from modules/commons/ssim.py rename to modules/losses/ssim.py diff --git a/modules/toplevel.py b/modules/toplevel.py index 899b8c0e4..4f6348d5e 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -15,7 +15,6 @@ def __init__(self, vocab_size, out_dims): timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], - loss_type=hparams['diff_loss_type'], spec_min=hparams['spec_min'], spec_max=hparams['spec_max'] ) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index c7f9784d3..00428c550 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -17,6 +17,7 @@ from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder from modules.fastspeech.tts_modules import mel2ph_to_dur +from modules.losses.diff_loss import DiffusionNoiseLoss from modules.toplevel import DiffSingerAcoustic from modules.vocoders.registry import get_vocoder_cls from utils.binarizer_utils import get_pitch_parselmouth @@ -28,19 +29,18 @@ class AcousticDataset(BaseDataset): def collater(self, samples): - if len(samples) == 0: - return {} + batch = super().collater(samples) + tokens = utils.collate_nd([s['tokens'] for s in samples], 0) f0 = utils.collate_nd([s['f0'] for s in samples], 0.0) mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) mel = utils.collate_nd([s['mel'] for s in samples], 0.0) - batch = { - 'size': len(samples), + batch.update({ 'tokens': tokens, 'mel2ph': mel2ph, 'mel': mel, 'f0': f0, - } + }) if hparams.get('use_key_shift_embed', False): batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] if hparams.get('use_speed_embed', False): @@ -76,14 +76,13 @@ def print_arch(): print_arch() return model - def run_model(self, sample, return_output=False, infer=False): - """ - steps: - 1. run the full model, calc the main loss - 2. 
calculate loss for dur_predictor, pitch_predictor, energy_predictor - """ + # noinspection PyAttributeOutsideInit + def build_losses(self): + self.mel_loss = DiffusionNoiseLoss(loss_type=hparams['diff_loss_type']) + + def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_t] - target = sample['mel'] # [B, T_s, 80] + target = sample['mel'] # [B, T_s, M] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] key_shift = sample.get('key_shift') @@ -98,25 +97,23 @@ def run_model(self, sample, return_output=False, infer=False): spk_embed_id=spk_embed_id, gt_mel=target, infer=infer) - losses = {} - if not infer: - losses['mel'] = output - if not return_output: - return losses + if infer: + mel_pred = output + return mel_pred else: - return losses, output - - def _training_step(self, sample, batch_idx, _): - losses = self.run_model(sample) - total_loss = sum([v for v in losses.values() if isinstance(v, torch.Tensor) and v.requires_grad]) - return total_loss, {**losses, 'batch_size': sample['tokens'].size()[0]} + x_recon, noise = output + mel_loss = self.mel_loss(x_recon, noise) + losses = { + 'mel_loss': mel_loss + } + return losses def on_train_start(self): if self.use_vocoder: self.vocoder.to_device(self.device) def _validation_step(self, sample, batch_idx): - losses = self.run_model(sample, return_output=False, infer=False) + losses = self.run_model(sample, infer=False) total_loss = sum(losses.values()) outputs = { 'total_loss': total_loss @@ -124,7 +121,7 @@ def _validation_step(self, sample, batch_idx): if batch_idx < hparams['num_valid_plots'] and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: - _, mel_pred = self.run_model(sample, return_output=True, infer=True) + mel_pred = self.run_model(sample, infer=True) if self.use_vocoder: self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') @@ -163,7 +160,7 @@ def on_test_start(self): self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() def test_step(self, sample, batch_idx): - _, mel_pred = self.run_model(sample, return_output=True, infer=True) + mel_pred = self.run_model(sample, infer=True) sample['outputs'] = mel_pred return self.after_infer(sample) From 09e451b3f617c0f96e246f2534a8c24dbb21dcc4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 18:41:04 +0800 Subject: [PATCH 231/475] Add missing call to `self.build_losses()` --- basics/base_task.py | 9 ++++++++- training/acoustic_task.py | 10 +--------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 220e9d9e6..658ce6fb5 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -7,6 +7,7 @@ import matplotlib +import utils from utils.text_encoder import TokenTextEncoder matplotlib.use('Agg') @@ -88,6 +89,8 @@ def __init__(self, *args, **kwargs): def setup(self, stage): self.phone_encoder = self.build_phone_encoder() self.model = self.build_model() + self.print_arch() + self.build_losses() self.train_dataset = self.dataset_cls(hparams['train_set_name']) self.valid_dataset = self.dataset_cls(hparams['valid_set_name']) @@ -99,6 +102,10 @@ def build_phone_encoder(): def build_model(self): raise NotImplementedError() + @rank_zero_only + def print_arch(self): + utils.print_arch(self.model) + def build_losses(self): raise NotImplementedError() @@ -119,7 +126,7 @@ def _training_step(self, sample): :return: total loss: torch.Tensor, loss_log: dict, other_log: dict """ losses = 
self.run_model(sample) - total_loss = sum([v for v in losses.values() if isinstance(v, torch.Tensor) and v.requires_grad]) + total_loss = sum(losses.values()) return total_loss, {**losses, 'batch_size': sample['size']} def training_step(self, sample, batch_idx, optimizer_idx=-1): diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 00428c550..b1b41f1ce 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -8,7 +8,6 @@ import torch.distributions import torch.optim import torch.utils.data -from lightning.pytorch.utilities.rank_zero import rank_zero_only from tqdm import tqdm import utils @@ -64,18 +63,11 @@ def __init__(self): self.logged_gt_wav = set() def build_model(self): - model = DiffSingerAcoustic( + return DiffSingerAcoustic( vocab_size=len(self.phone_encoder), out_dims=hparams['audio_num_mel_bins'] ) - @rank_zero_only - def print_arch(): - utils.print_arch(model) - - print_arch() - return model - # noinspection PyAttributeOutsideInit def build_losses(self): self.mel_loss = DiffusionNoiseLoss(loss_type=hparams['diff_loss_type']) From 0d0b254b2675dc7fd549867dd5024eddab77e90c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 20:13:14 +0800 Subject: [PATCH 232/475] Reuse more code in binarizer --- basics/base_binarizer.py | 75 +++++++++++++++++++++++++-- preprocessing/acoustic_binarizer.py | 78 ++--------------------------- 2 files changed, 76 insertions(+), 77 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 7e8516a03..fdc812e34 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -3,12 +3,17 @@ import os import pathlib import random +import shutil from copy import deepcopy +import numpy as np import torch +from tqdm import tqdm from utils.hparams import hparams -from utils.phoneme_utils import build_phoneme_list +from utils.indexed_datasets import IndexedDatasetBuilder +from utils.multiprocess_utils import chunked_multiprocess_run +from utils.phoneme_utils import build_phoneme_list, locate_dictionary from utils.text_encoder import TokenTextEncoder @@ -38,7 +43,7 @@ class BaseBinarizer: the phoneme set. """ - def __init__(self, data_dir=None): + def __init__(self, data_dir=None, data_attrs=None): if data_dir is None: data_dir = hparams['raw_data_dir'] if not isinstance(data_dir, list): @@ -50,6 +55,8 @@ def __init__(self, data_dir=None): self.raw_data_dirs = [pathlib.Path(d) for d in data_dir] self.binary_data_dir = pathlib.Path(hparams['binary_data_dir']) + self.data_attrs = [] if data_attrs is None else data_attrs + if hparams['use_spk_id']: assert len(speakers) == len(self.raw_data_dirs), \ 'Number of raw data dirs must equal number of speaker names!' 
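# Orientation for process_dataset() in the hunk below: it writes items through
# IndexedDatasetBuilder, and the datasets later read them back via
# IndexedDataset. A minimal round-trip sketch (APIs as used elsewhere in this
# repo; the path and attribute names are illustrative):
#
#     builder = IndexedDatasetBuilder('data/binary', prefix='train',
#                                     allowed_attr=['tokens', 'ph_dur'])
#     builder.add_item({'tokens': ..., 'ph_dur': ...})
#     builder.finalize()
#     ds = IndexedDataset('data/binary', 'train')
#     first_item = ds[0]  # items are read back by integer index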
@@ -135,17 +142,77 @@ def meta_data_iterator(self, prefix): def process(self): os.makedirs(hparams['binary_data_dir'], exist_ok=True) + + # Copy spk_map and dictionary to binary data dir self.build_spk_map() print("| spk_map: ", self.spk_map) spk_map_fn = f"{hparams['binary_data_dir']}/spk_map.json" json.dump(self.spk_map, open(spk_map_fn, 'w', encoding='utf-8')) + shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') self.check_coverage() + # Process train set and valid set + self.process_dataset('valid') + self.process_dataset( + 'train', + num_workers=int(self.binarization_args['num_workers']), + apply_augmentation=len(self.augmentation_args) > 0 + ) + def check_coverage(self): raise NotImplementedError() - def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): - raise NotImplementedError() + def process_dataset(self, prefix, num_workers=0, apply_augmentation=False): + args = [] + builder = IndexedDatasetBuilder(self.binary_data_dir, prefix=prefix, allowed_attr=self.data_attrs) + lengths = [] + total_sec = 0 + total_raw_sec = 0 + + for item_name, meta_data in self.meta_data_iterator(prefix): + args.append([item_name, meta_data, self.binarization_args]) + + aug_map = self.arrange_data_augmentation(self.meta_data_iterator(prefix)) if apply_augmentation else {} + + def postprocess(_item): + nonlocal total_sec, total_raw_sec + if _item is None: + return + builder.add_item(_item) + lengths.append(_item['length']) + total_sec += _item['seconds'] + total_raw_sec += _item['seconds'] + + for task in aug_map.get(_item['name'], []): + aug_item = task['func'](_item, **task['kwargs']) + builder.add_item(aug_item) + lengths.append(aug_item['length']) + total_sec += aug_item['seconds'] + + if num_workers > 0: + # code for parallel processing + for item in tqdm( + chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), + total=len(list(self.meta_data_iterator(prefix))) + ): + postprocess(item) + else: + # code for single cpu processing + for a in tqdm(args): + item = self.process_item(*a) + postprocess(item) + + builder.finalize() + with open(self.binary_data_dir / f'{prefix}.lengths', 'wb') as f: + # noinspection PyTypeChecker + np.save(f, lengths) + + if apply_augmentation: + print(f'| {prefix} total duration (before augmentation): {total_raw_sec:.2f}s') + print( + f'| {prefix} total duration (after augmentation): {total_sec:.2f}s ({total_sec / total_raw_sec:.2f}x)') + else: + print(f'| {prefix} total duration: {total_raw_sec:.2f}s') def arrange_data_augmentation(self, prefix): """ diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 18de54659..e405e03f7 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -11,22 +11,18 @@ import os import pathlib import random -import shutil from copy import deepcopy import matplotlib.pyplot as plt import numpy as np import torch -from tqdm import tqdm from basics.base_binarizer import BaseBinarizer, BinarizationError -from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from modules.fastspeech.tts_modules import LengthRegulator from modules.vocoders.registry import VOCODERS +from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from utils.hparams import hparams -from utils.indexed_datasets import IndexedDatasetBuilder -from utils.multiprocess_utils import chunked_multiprocess_run -from utils.phoneme_utils import build_phoneme_list, locate_dictionary +from utils.phoneme_utils import 
build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" ACOUSTIC_ITEM_ATTRIBUTES = ['spk_id', 'mel', 'tokens', 'mel2ph', 'f0', 'key_shift', 'speed'] @@ -34,7 +30,7 @@ class AcousticBinarizer(BaseBinarizer): def __init__(self): - super().__init__() + super().__init__(data_attrs=ACOUSTIC_ITEM_ATTRIBUTES) self.lr = LengthRegulator() def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): @@ -81,15 +77,6 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict self.items.update(meta_data_dict) - def process(self): - super().process() - self.process_data_split('valid') - self.process_data_split( - 'train', - num_workers=int(self.binarization_args.get('num_workers', os.getenv('N_PROC', 0))), - apply_augmentation=len(self.augmentation_args) > 0 - ) - def check_coverage(self): # Group by phonemes in the dictionary. ph_required = set(build_phoneme_list()) @@ -144,61 +131,6 @@ def check_coverage(self): f' (+) {sorted(unrecognizable_phones)}\n' f' (-) {sorted(missing_phones)}') - # Copy dictionary to binary data dir - shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') - - def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): - args = [] - builder = IndexedDatasetBuilder(self.binary_data_dir, prefix=prefix, allowed_attr=ACOUSTIC_ITEM_ATTRIBUTES) - lengths = [] - total_sec = 0 - total_raw_sec = 0 - - for item_name, meta_data in self.meta_data_iterator(prefix): - args.append([item_name, meta_data, self.binarization_args]) - - aug_map = self.arrange_data_augmentation(prefix) if apply_augmentation else {} - - def postprocess(_item): - nonlocal total_sec, total_raw_sec - if _item is None: - return - builder.add_item(_item) - lengths.append(_item['length']) - total_sec += _item['seconds'] - total_raw_sec += _item['seconds'] - - for task in aug_map.get(_item['name'], []): - aug_item = task['func'](_item, **task['kwargs']) - builder.add_item(aug_item) - lengths.append(aug_item['length']) - total_sec += aug_item['seconds'] - - if num_workers > 0: - # code for parallel processing - for item in tqdm( - chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))) - ): - postprocess(item) - else: - # code for single cpu processing - for a in tqdm(args): - item = self.process_item(*a) - postprocess(item) - - builder.finalize() - with open(self.binary_data_dir / f'{prefix}.lengths', 'wb') as f: - # noinspection PyTypeChecker - np.save(f, lengths) - - if apply_augmentation: - print(f'| {prefix} total duration (before augmentation): {total_raw_sec:.2f}s') - print( - f'| {prefix} total duration (after augmentation): {total_sec:.2f}s ({total_sec / total_raw_sec:.2f}x)') - else: - print(f'| {prefix} total duration: {total_raw_sec:.2f}s') - def process_item(self, item_name, meta_data, binarization_args): if hparams['vocoder'] in VOCODERS: wav, mel = VOCODERS[hparams['vocoder']].wav2spec(meta_data['wav_fn']) @@ -239,10 +171,10 @@ def process_item(self, item_name, meta_data, binarization_args): return processed_input - def arrange_data_augmentation(self, prefix): + def arrange_data_augmentation(self, data_iterator): aug_map = {} aug_list = [] - all_item_names = [item_name for item_name, _ in self.meta_data_iterator(prefix)] + all_item_names = [item_name for item_name, _ in data_iterator] total_scale = 0 if self.augmentation_args.get('random_pitch_shifting') is not None: from augmentation.spec_stretch import SpectrogramStretchAugmentation From 
f547283608d2a557b078c9cedea8c8858d1c6ec3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 02:36:08 +0800 Subject: [PATCH 233/475] Finish `VarianceBinarizer`, create module and task for dur predictor training --- configs/variance.yaml | 61 ++++++++++ modules/fastspeech/variance_encoder.py | 64 ++++++++++ preprocessing/variance_binarizer.py | 156 +++++++++++++++++++++++++ training/variance_task.py | 60 ++++++++++ 4 files changed, 341 insertions(+) create mode 100644 configs/variance.yaml create mode 100644 modules/fastspeech/variance_encoder.py create mode 100644 preprocessing/variance_binarizer.py create mode 100644 training/variance_task.py diff --git a/configs/variance.yaml b/configs/variance.yaml new file mode 100644 index 000000000..a45c20b0c --- /dev/null +++ b/configs/variance.yaml @@ -0,0 +1,61 @@ +base_config: + - configs/base.yaml + +task_cls: training.variance_task.VarianceTask +num_spk: 1 +speakers: + - opencpop +test_prefixes: [ + '2044', + '2086', + '2092', + '2093', + '2100', +] + +audio_sample_rate: 44100 +hop_size: 512 # Hop size. + +binarization_args: + shuffle: true + num_workers: 0 + +raw_data_dir: 'data/opencpop_variance/raw' +binary_data_dir: 'data/opencpop_variance/binary' +binarizer_cls: preprocessing.variance_binarizer.VarianceBinarizer +dictionary: dictionaries/opencpop-extension.txt + +use_spk_id: false + +K_step: 1000 +timesteps: 1000 +max_beta: 0.02 +rel_pos: true +pndm_speedup: 10 + +hidden_size: 256 +predictor_hidden: 384 +predictor_dropout: 0.1 +dur_predictor_kernel: 3 +dur_predictor_layers: 2 + +residual_layers: 20 +residual_channels: 384 +dilation_cycle_length: 4 # * +diff_decoder_type: 'wavenet' +diff_loss_type: l2 +schedule_type: 'linear' + +# train and eval +num_sanity_val_steps: 1 +lr: 0.0004 +lr_decay_steps: 50000 +lr_decay_gamma: 0.5 +max_batch_frames: 80000 +max_batch_size: 48 +val_with_vocoder: true +val_check_interval: 2000 +num_valid_plots: 10 +max_updates: 320000 +permanent_ckpt_start: 120000 +permanent_ckpt_interval: 40000 diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py new file mode 100644 index 000000000..9a3a366cd --- /dev/null +++ b/modules/fastspeech/variance_encoder.py @@ -0,0 +1,64 @@ +import torch.nn as nn +from torch.nn import functional as F + +from modules.commons.common_layers import ( + NormalInitEmbedding as Embedding, + XavierUniformInitLinear as Linear, +) +from modules.fastspeech.tts_modules import FastSpeech2Encoder, DurationPredictor +from utils.hparams import hparams +from utils.text_encoder import PAD_INDEX + + +class FastSpeech2VarianceEncoder(FastSpeech2Encoder): + def forward_embedding(self, txt_tokens, midi_embed, midi_dur_embed): + # embed tokens and positions + x = self.embed_scale * self.embed_tokens(txt_tokens) + x = x + midi_embed + midi_dur_embed + if hparams['use_pos_embed']: + if hparams['rel_pos']: + x = self.embed_positions(x) + else: + positions = self.embed_positions(txt_tokens) + x = x + positions + x = F.dropout(x, p=self.dropout, training=self.training) + return x + + def forward(self, txt_tokens, midi_embed, midi_dur_embed): + """ + :param txt_tokens: [B, T] + :param midi_embed: [B, T, H] + :param midi_dur_embed: [B, T, H] + :return: [T x B x H] + """ + encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach() + x = self.forward_embedding(txt_tokens, midi_embed, midi_dur_embed) # [B, T, H] + x = super()._forward(x, encoder_padding_mask) + return x + + +class FastSpeech2Variance(nn.Module): + def __init__(self, vocab_size): + 
super().__init__() + self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) + self.midi_embed = Embedding(128, hparams['hidden_size'], PAD_INDEX) + self.midi_dur_embed = Linear(1, hparams['hidden_size']) + if hparams['use_spk_id']: + self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) + + self.encoder = FastSpeech2VarianceEncoder( + self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], + ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] + ) + + predictor_hidden = hparams['predictor_hidden'] if hparams['predictor_hidden'] > 0 else self.hidden_size + self.dur_predictor = DurationPredictor( + hparams['hidden_size'], + n_chans=predictor_hidden, + n_layers=hparams['dur_predictor_layers'], + dropout_rate=hparams['predictor_dropout'], padding=hparams['ffn_padding'], + kernel_size=hparams['dur_predictor_kernel'] + ) + + def forward(self, txt_tokens, midi, midi_dur, gt_ph_dur, **kwargs): + raise NotImplementedError() diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py new file mode 100644 index 000000000..3a1cb0414 --- /dev/null +++ b/preprocessing/variance_binarizer.py @@ -0,0 +1,156 @@ +""" + item: one piece of data + item_name: data id + wav_fn: wave file path + spk: dataset name + ph_seq: phoneme sequence + ph_dur: phoneme durations + midi_seq: midi note sequence + midi_dur: midi note durations +""" +import csv +import os +import pathlib +import shutil + +import librosa +import numpy as np +import torch +import torch.nn.functional as F +from tqdm import tqdm + +from basics.base_binarizer import BaseBinarizer +from modules.fastspeech.tts_modules import LengthRegulator +from utils.binarizer_utils import get_mel2ph_torch +from utils.indexed_datasets import IndexedDatasetBuilder +from utils.multiprocess_utils import chunked_multiprocess_run +from utils.phoneme_utils import locate_dictionary + +os.environ["OMP_NUM_THREADS"] = "1" +VARIANCE_ITEM_ATTRIBUTES = [ + 'spk_id', # index number of dataset/speaker + 'tokens', # index numbers of phonemes + 'ph_dur', # durations of phonemes, in seconds + 'ph_midi', # phoneme-level mean MIDI pitch + 'word_dur', # durations of words/syllables (vowel-consonant pattern) + # 'mel2ph', + # 'base_pitch', + # 'f0' +] + + +class VarianceBinarizer(BaseBinarizer): + def __init__(self): + super().__init__() + self.lr = LengthRegulator() + + def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): + meta_data_dict = {} + for utterance_label in csv.DictReader( + open(raw_data_dir / 'transcriptions.csv', 'r', encoding='utf8') + ): + item_name = utterance_label['name'] + temp_dict = { + 'spk_id': ds_id, + 'wav_fn': str(raw_data_dir / 'wav' / f'{item_name}.wav'), + 'ph_seq': utterance_label['ph_seq'].split(), + 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], + 'word_dur': [float(x) for x in utterance_label['word_dur'].split()], + 'note_seq': utterance_label['note_seq'].split(), + 'note_dur': [float(x) for x in utterance_label['note_dur'].split()], + } + assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']) == len(temp_dict['word_dur']), \ + f'Lengths of ph_seq, ph_dur and word_dur mismatch in \'{item_name}\'.' + assert len(temp_dict['note_seq']) == len(temp_dict['note_dur']), \ + f'Lengths of note_seq and note_dur mismatch in \'{item_name}\'.' 
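            # A hypothetical transcriptions.csv row consistent with the fields
            # parsed above (sequences are space-separated, durations in seconds,
            # ph_dur/word_dur carry one value per phoneme, and 'rest' marks an
            # unpitched note):
            #     name:     2044001
            #     ph_seq:   SP g an SP
            #     ph_dur:   0.24 0.08 0.35 0.17
            #     word_dur: 0.24 0.43 0.43 0.17
            #     note_seq: rest G4 rest
            #     note_dur: 0.24 0.43 0.17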
+ meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + self.items.update(meta_data_dict) + + def process(self): + super().process() + self.process_data_split('valid') + self.process_data_split('train', num_workers=self.binarization_args['num_workers']) + + def check_coverage(self): + shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') + print('Coverage checks are temporarily skipped.') + pass + + def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): + args = [] + builder = IndexedDatasetBuilder(self.binary_data_dir, prefix=prefix, allowed_attr=VARIANCE_ITEM_ATTRIBUTES) + lengths = [] + total_raw_sec = 0 + + for item_name, meta_data in self.meta_data_iterator(prefix): + args.append([item_name, meta_data, self.binarization_args]) + + def postprocess(_item): + nonlocal total_raw_sec + if _item is None: + return + builder.add_item(_item) + lengths.append(_item['length']) + total_raw_sec += _item['seconds'] + + if num_workers > 0: + # code for parallel processing + for item in tqdm( + chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), + total=len(list(self.meta_data_iterator(prefix))) + ): + postprocess(item) + else: + # code for single cpu processing + for a in tqdm(args): + item = self.process_item(*a) + postprocess(item) + + builder.finalize() + with open(self.binary_data_dir / f'{prefix}.lengths', 'wb') as f: + # noinspection PyTypeChecker + np.save(f, lengths) + + print(f'| {prefix} total duration: {total_raw_sec:.2f}s') + + def process_item(self, item_name, meta_data, binarization_args): + length = len(meta_data['ph_dur']) # temporarily use number of tokens as sample length + seconds = sum(meta_data['ph_dur']) + processed_input = { + 'name': item_name, + 'wav_fn': meta_data['wav_fn'], + 'spk_id': meta_data['spk_id'], + 'seconds': seconds, + 'length': length, + 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), + 'ph_dur': np.array(meta_data['ph_dur']).astype(np.float32), + 'word_dur': np.array(meta_data['word_dur']).astype(np.float32), + } + + # Below: calculate phoneme-level mean pitch for MIDI input + ph_dur = torch.from_numpy(processed_input['ph_dur']).to(self.device) + mel2ph = get_mel2ph_torch( + self.lr, ph_dur, round(seconds / self.timestep), self.timestep, device=self.device + ) + ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() + ph_dur_long = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) + mel2dur = torch.gather(F.pad(ph_dur_long, [1, 0], value=1), 0, mel2ph) # frame-level phone duration + note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) + mel2note = get_mel2ph_torch( + self.lr, note_dur, mel2ph.shape[0], self.timestep, device=self.device + ) + note_pitch = torch.FloatTensor( + [(librosa.note_to_midi(n) if n != 'rest' else 0) for n in meta_data['note_seq']] + ).to(self.device) + frame_step_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch + # Below: handle rest parts where pitch == 0 in frame_step_pitch + ph_dur_rest = mel2ph.new_zeros(len(ph_dur) + 1).scatter_add( + 0, mel2ph, (frame_step_pitch == 0).long() + )[1:] + mel2dur_rest = torch.gather(F.pad(ph_dur_rest, [1, 0], value=1), 0, mel2ph) # frame-level rest phone duration + + ph_midi = mel2ph.new_zeros(ph_dur.shape[0] + 1).float().scatter_add( + 0, mel2ph, frame_step_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero + )[1:] + processed_input['ph_midi'] = 
ph_midi.long().cpu().numpy() + return processed_input diff --git a/training/variance_task.py b/training/variance_task.py new file mode 100644 index 000000000..b4419153a --- /dev/null +++ b/training/variance_task.py @@ -0,0 +1,60 @@ +import os +from multiprocessing.pool import Pool + +import matplotlib +import matplotlib.pyplot as plt +import numpy as np +import torch +import torch.distributions +import torch.optim +import torch.utils.data +from lightning.pytorch.utilities.rank_zero import rank_zero_only +from tqdm import tqdm + +import utils +import utils.infer_utils +from basics.base_dataset import BaseDataset +from basics.base_task import BaseTask +from basics.base_vocoder import BaseVocoder +from modules.fastspeech.tts_modules import mel2ph_to_dur +from modules.toplevel import DiffSingerVariance +from utils.binarizer_utils import get_pitch_parselmouth +from utils.hparams import hparams +from utils.indexed_datasets import IndexedDataset +from utils.phoneme_utils import build_phoneme_list +from utils.plot import spec_to_figure +from utils.text_encoder import TokenTextEncoder +from utils.training_utils import DsBatchSampler, DsEvalBatchSampler + +matplotlib.use('Agg') + + +class VarianceDataset(BaseDataset): + def __init__(self, prefix): + super().__init__() + self.data_dir = hparams['binary_data_dir'] + self.prefix = prefix + self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) + self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) + + def __getitem__(self, index): + return self.indexed_ds[index] + + def collater(self, samples): + if len(samples) == 0: + return {} + tokens = utils.collate_nd([s['tokens'] for s in samples], 0) + ph_dur = utils.collate_nd([s['ph_dur'] for s in samples], 0) + ph_midi = utils.collate_nd([s['ph_midi'] for s in samples], 0) + midi_dur = utils.collate_nd([s['word_dur'] for s in samples], 0) + batch = { + 'size': len(samples), + 'tokens': tokens, + 'ph_dur': ph_dur, + 'midi': ph_midi, + 'midi_dur': midi_dur + } + if hparams['use_spk_id']: + spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) + batch['spk_ids'] = spk_ids + return batch From 36d464a65496a80498642f51f4cbb19407095ed4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 15:27:51 +0800 Subject: [PATCH 234/475] Init `VarianceTask` --- training/variance_task.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/training/variance_task.py b/training/variance_task.py index b4419153a..5b1ea751e 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -58,3 +58,9 @@ def collater(self, samples): spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids return batch + + +class VarianceTask(BaseTask): + def __init__(self): + super().__init__() + self.dataset_cls = VarianceDataset From b8ee9faffda301d61b867664851eb4dccdf6dc6c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 16:15:50 +0800 Subject: [PATCH 235/475] Remove overriding funcs in variance dataset/task --- training/variance_task.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 5b1ea751e..ae37d148d 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -17,29 +17,16 @@ from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder from modules.fastspeech.tts_modules import mel2ph_to_dur -from modules.toplevel import DiffSingerVariance +from modules.toplevel import DiffSingerAcoustic +from 
modules.vocoders.registry import get_vocoder_cls from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams -from utils.indexed_datasets import IndexedDataset -from utils.phoneme_utils import build_phoneme_list from utils.plot import spec_to_figure -from utils.text_encoder import TokenTextEncoder -from utils.training_utils import DsBatchSampler, DsEvalBatchSampler matplotlib.use('Agg') class VarianceDataset(BaseDataset): - def __init__(self, prefix): - super().__init__() - self.data_dir = hparams['binary_data_dir'] - self.prefix = prefix - self.sizes = np.load(os.path.join(self.data_dir, f'{self.prefix}.lengths')) - self.indexed_ds = IndexedDataset(self.data_dir, self.prefix) - - def __getitem__(self, index): - return self.indexed_ds[index] - def collater(self, samples): if len(samples) == 0: return {} From 230fc8a0de63e926d9e0e25e43f2d531366fe1e4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 18:46:26 +0800 Subject: [PATCH 236/475] Reuse `collater` --- training/variance_task.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index ae37d148d..93c78c301 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -8,7 +8,6 @@ import torch.distributions import torch.optim import torch.utils.data -from lightning.pytorch.utilities.rank_zero import rank_zero_only from tqdm import tqdm import utils @@ -17,7 +16,7 @@ from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder from modules.fastspeech.tts_modules import mel2ph_to_dur -from modules.toplevel import DiffSingerAcoustic +from modules.toplevel import DiffSingerVariance from modules.vocoders.registry import get_vocoder_cls from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams @@ -28,19 +27,18 @@ class VarianceDataset(BaseDataset): def collater(self, samples): - if len(samples) == 0: - return {} + batch = super().collater(samples) + tokens = utils.collate_nd([s['tokens'] for s in samples], 0) ph_dur = utils.collate_nd([s['ph_dur'] for s in samples], 0) ph_midi = utils.collate_nd([s['ph_midi'] for s in samples], 0) midi_dur = utils.collate_nd([s['word_dur'] for s in samples], 0) - batch = { - 'size': len(samples), + batch.update({ 'tokens': tokens, 'ph_dur': ph_dur, 'midi': ph_midi, 'midi_dur': midi_dur - } + }) if hparams['use_spk_id']: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids From 45d5dd55c70a016cc12eb49f7087f0e22ef623f8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 19:37:53 +0800 Subject: [PATCH 237/475] Refactor `DurationPredictor`, add `DurationLoss` --- configs/variance.yaml | 2 + modules/fastspeech/tts_modules.py | 88 +++++++++++--------------- modules/fastspeech/variance_encoder.py | 8 ++- modules/losses/dur_loss.py | 20 ++++++ preprocessing/variance_binarizer.py | 10 ++- training/variance_task.py | 13 ++++ 6 files changed, 85 insertions(+), 56 deletions(-) create mode 100644 modules/losses/dur_loss.py diff --git a/configs/variance.yaml b/configs/variance.yaml index a45c20b0c..465c297cc 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -38,6 +38,8 @@ predictor_hidden: 384 predictor_dropout: 0.1 dur_predictor_kernel: 3 dur_predictor_layers: 2 +dur_log_offset: 1.0 +dur_loss_type: mse residual_layers: 20 residual_channels: 384 diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 2aef458bd..664393057 100644 --- 
a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -64,10 +64,10 @@ class DurationPredictor(torch.nn.Module): the outputs are calculated in log domain but in `inference`, those are calculated in linear domain. """ - def __init__(self, idim, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0.1, offset=1.0, padding='SAME'): + def __init__(self, in_dims, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0.1, offset=1.0, padding='SAME'): """Initilize duration predictor module. Args: - idim (int): Input dimension. + in_dims (int): Input dimension. n_layers (int, optional): Number of convolutional layers. n_chans (int, optional): Number of channels of convolutional layers. kernel_size (int, optional): Kernel size of convolutional layers. @@ -80,7 +80,7 @@ def __init__(self, idim, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0. self.kernel_size = kernel_size self.padding = padding for idx in range(n_layers): - in_chans = idim if idx == 0 else n_chans + in_chans = in_dims if idx == 0 else n_chans self.conv += [torch.nn.Sequential( torch.nn.ConstantPad1d(((kernel_size - 1) // 2, (kernel_size - 1) // 2) if padding == 'SAME' @@ -90,69 +90,56 @@ def __init__(self, idim, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0. LayerNorm(n_chans, dim=1), torch.nn.Dropout(dropout_rate) )] - if hparams['dur_loss'] in ['mse', 'huber']: - odims = 1 - elif hparams['dur_loss'] == 'mog': - odims = 15 - elif hparams['dur_loss'] == 'crf': - odims = 32 - from torchcrf import CRF - self.crf = CRF(odims, batch_first=True) - self.linear = torch.nn.Linear(n_chans, odims) - - def _forward(self, xs, x_masks=None, is_inference=False): - xs = xs.transpose(1, -1) # (B, idim, Tmax) - for f in self.conv: - xs = f(xs) # (B, C, Tmax) - if x_masks is not None: - xs = xs * (1 - x_masks.float())[:, None, :] - - xs = self.linear(xs.transpose(1, -1)) # [B, T, C] - xs = xs * (1 - x_masks.float())[:, :, None] # (B, T, C) - if is_inference: - return self.out2dur(xs), xs + if hparams['dur_loss_type'] in ['mse', 'huber']: + self.out_dims = 1 + # elif hparams['dur_loss_type'] == 'mog': + # out_dims = 15 + # elif hparams['dur_loss_type'] == 'crf': + # out_dims = 32 + # from torchcrf import CRF + # self.crf = CRF(out_dims, batch_first=True) else: - if hparams['dur_loss'] in ['mse']: - xs = xs.squeeze(-1) # (B, Tmax) - return xs + raise NotImplementedError() + self.linear = torch.nn.Linear(n_chans, self.out_dims) def out2dur(self, xs): - if hparams['dur_loss'] in ['mse']: + if hparams['dur_loss_type'] in ['mse']: # NOTE: calculate in log domain xs = xs.squeeze(-1) # (B, Tmax) dur = torch.clamp(torch.round(xs.exp() - self.offset), min=0).long() # avoid negative value - elif hparams['dur_loss'] == 'mog': - return NotImplementedError - elif hparams['dur_loss'] == 'crf': - dur = torch.LongTensor(self.crf.decode(xs)).cuda() + # elif hparams['dur_loss_type'] == 'crf': + # dur = torch.LongTensor(self.crf.decode(xs)).cuda() + else: + raise NotImplementedError() return dur - def forward(self, xs, x_masks=None): + def forward(self, xs, x_masks=None, infer=True): """Calculate forward propagation. Args: xs (Tensor): Batch of input sequences (B, Tmax, idim). - x_masks (ByteTensor, optional): Batch of masks indicating padded part (B, Tmax). - Returns: - Tensor: Batch of predicted durations in log domain (B, Tmax). - """ - return self._forward(xs, x_masks, False) - - def inference(self, xs, x_masks=None): - """Inference duration. - Args: - xs (Tensor): Batch of input sequences (B, Tmax, idim). 
- x_masks (ByteTensor, optional): Batch of masks indicating padded part (B, Tmax). + x_masks (BoolTensor, optional): Batch of masks indicating padded part (B, Tmax). + infer (bool): Whether inference Returns: - LongTensor: Batch of predicted durations in linear domain (B, Tmax). + (train) FloatTensor: Batch of predicted durations in log domain (B, Tmax); + (infer) LongTensor: Batch of predicted durations in linear domain (B, Tmax). """ - return self._forward(xs, x_masks, True) + xs = xs.transpose(1, -1) # (B, idim, Tmax) + for f in self.conv: + xs = f(xs) # (B, C, Tmax) + if x_masks is not None: + xs = xs * (1 - x_masks.float())[:, None, :] + xs = self.linear(xs.transpose(1, -1)) # [B, T, C] + xs = xs * (1 - x_masks.float())[:, :, None] # (B, T, C) + if infer: + return self.out2dur(xs), xs + else: + if self.out_dims == 1: + xs = xs.squeeze(-1) # (B, Tmax) + return xs class LengthRegulator(torch.nn.Module): - def __init__(self, pad_value=0.0): - super(LengthRegulator, self).__init__() - self.pad_value = pad_value - + # noinspection PyMethodMayBeStatic def forward(self, dur, dur_padding=None, alpha=1.0): """ Example (no batch dim version): @@ -187,6 +174,7 @@ def forward(self, dur, dur_padding=None, alpha=1.0): class StretchRegulator(torch.nn.Module): + # noinspection PyMethodMayBeStatic def forward(self, dur, mel2ph): """ Example (no batch dim version): diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 9a3a366cd..44cf25056 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -53,11 +53,13 @@ def __init__(self, vocab_size): predictor_hidden = hparams['predictor_hidden'] if hparams['predictor_hidden'] > 0 else self.hidden_size self.dur_predictor = DurationPredictor( - hparams['hidden_size'], + in_dims=hparams['hidden_size'], n_chans=predictor_hidden, n_layers=hparams['dur_predictor_layers'], - dropout_rate=hparams['predictor_dropout'], padding=hparams['ffn_padding'], - kernel_size=hparams['dur_predictor_kernel'] + dropout_rate=hparams['predictor_dropout'], + padding=hparams['ffn_padding'], + kernel_size=hparams['dur_predictor_kernel'], + offset=hparams['dur_log_offset'] ) def forward(self, txt_tokens, midi, midi_dur, gt_ph_dur, **kwargs): diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py new file mode 100644 index 000000000..c2094fd93 --- /dev/null +++ b/modules/losses/dur_loss.py @@ -0,0 +1,20 @@ +import torch +import torch.nn as nn +from torch import Tensor + + +class DurationLoss(nn.Module): + def __init__(self, loss_type, offset=1.0): + super().__init__() + self.loss_type = loss_type + if self.loss_type == 'mse': + self.loss = nn.MSELoss() + elif self.loss_type == 'huber': + self.loss = nn.HuberLoss() + else: + raise NotImplementedError() + self.offset = offset + + def forward(self, xs_pred: Tensor, xs_gt: Tensor) -> Tensor: + xs_gt_log = torch.log(xs_gt + self.offset) # calculate in log domain + return self.loss(xs_pred, xs_gt_log) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 3a1cb0414..ff2d9f477 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -33,7 +33,7 @@ 'ph_dur', # durations of phonemes, in seconds 'ph_midi', # phoneme-level mean MIDI pitch 'word_dur', # durations of words/syllables (vowel-consonant pattern) - # 'mel2ph', + 'mel2ph', # mel2ph format representing gt ph_dur # 'base_pitch', # 'f0' ] @@ -123,17 +123,17 @@ def process_item(self, item_name, meta_data, 
binarization_args): 'seconds': seconds, 'length': length, 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), - 'ph_dur': np.array(meta_data['ph_dur']).astype(np.float32), 'word_dur': np.array(meta_data['word_dur']).astype(np.float32), } # Below: calculate phoneme-level mean pitch for MIDI input - ph_dur = torch.from_numpy(processed_input['ph_dur']).to(self.device) + ph_dur = torch.FloatTensor(meta_data['ph_dur']).to(self.device) mel2ph = get_mel2ph_torch( self.lr, ph_dur, round(seconds / self.timestep), self.timestep, device=self.device ) ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() ph_dur_long = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) + mel2dur = torch.gather(F.pad(ph_dur_long, [1, 0], value=1), 0, mel2ph) # frame-level phone duration note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) mel2note = get_mel2ph_torch( @@ -152,5 +152,9 @@ def process_item(self, item_name, meta_data, binarization_args): ph_midi = mel2ph.new_zeros(ph_dur.shape[0] + 1).float().scatter_add( 0, mel2ph, frame_step_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero )[1:] + + processed_input['ph_dur'] = ph_dur_long.cpu().numpy() # number of frames of each phone processed_input['ph_midi'] = ph_midi.long().cpu().numpy() + processed_input['mel2ph'] = mel2ph.cpu().numpy() + return processed_input diff --git a/training/variance_task.py b/training/variance_task.py index 93c78c301..edf87eea9 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -16,6 +16,7 @@ from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder from modules.fastspeech.tts_modules import mel2ph_to_dur +from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from modules.vocoders.registry import get_vocoder_cls from utils.binarizer_utils import get_pitch_parselmouth @@ -42,6 +43,7 @@ def collater(self, samples): if hparams['use_spk_id']: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids + return batch @@ -49,3 +51,14 @@ class VarianceTask(BaseTask): def __init__(self): super().__init__() self.dataset_cls = VarianceDataset + + def build_model(self): + # return DiffSingerVariance() + raise NotImplementedError() + + # noinspection PyAttributeOutsideInit + def build_losses(self): + self.dur_loss = DurationLoss( + loss_type=hparams['dur_loss_type'], + offset=hparams['dur_log_offset'] + ) From b02fb5d870986a3acd80f5d1e38f3d414167db41 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 19:47:23 +0800 Subject: [PATCH 238/475] Add docstring and transpose --- modules/losses/diff_loss.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 36b36b648..885a5968b 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -14,8 +14,13 @@ def __init__(self, loss_type): raise NotImplementedError() def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> Tensor: + """ + :param x_recon: [B, 1, M, T] + :param noise: [B, 1, M, T] + :param nonpadding: [B, T, M] + """ if nonpadding is not None: - nonpadding = nonpadding.unsqueeze(1) + nonpadding = nonpadding.transpose(1, 2).unsqueeze(1) x_recon *= nonpadding noise *= nonpadding return self.loss(x_recon, noise) From 5db92dda7055ba2177c7dd0d32cb22ddd2f4736e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 20:01:14 
+0800 Subject: [PATCH 239/475] Adjust comments --- preprocessing/variance_binarizer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index ff2d9f477..b45f313cb 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -126,7 +126,7 @@ def process_item(self, item_name, meta_data, binarization_args): 'word_dur': np.array(meta_data['word_dur']).astype(np.float32), } - # Below: calculate phoneme-level mean pitch for MIDI input + # Below: calculate frame-level MIDI pitch, which is a step function curve ph_dur = torch.FloatTensor(meta_data['ph_dur']).to(self.device) mel2ph = get_mel2ph_torch( self.lr, ph_dur, round(seconds / self.timestep), self.timestep, device=self.device @@ -143,7 +143,8 @@ def process_item(self, item_name, meta_data, binarization_args): [(librosa.note_to_midi(n) if n != 'rest' else 0) for n in meta_data['note_seq']] ).to(self.device) frame_step_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch - # Below: handle rest parts where pitch == 0 in frame_step_pitch + + # Below: calculate phoneme-level mean MIDI pitch, eliminating rest frames ph_dur_rest = mel2ph.new_zeros(len(ph_dur) + 1).scatter_add( 0, mel2ph, (frame_step_pitch == 0).long() )[1:] From cea94d4084db27f11287fee5f1e8e33a773fe0cc Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 20:17:55 +0800 Subject: [PATCH 240/475] Adapt `VarianceBinarizer` to reuse more code --- basics/base_binarizer.py | 2 +- preprocessing/variance_binarizer.py | 53 +++-------------------------- 2 files changed, 5 insertions(+), 50 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index fdc812e34..6bd076c04 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -214,7 +214,7 @@ def postprocess(_item): else: print(f'| {prefix} total duration: {total_raw_sec:.2f}s') - def arrange_data_augmentation(self, prefix): + def arrange_data_augmentation(self, data_iterator): """ Code for all types of data augmentation should be added here. 
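        Judging from how the acoustic binarizer consumes the result (see the
        `postprocess` closure above), the returned aug_map is assumed to map each
        item name to a list of task dicts, each carrying an augmentation `func`
        and its `kwargs`, e.g. aug_item = task['func'](item, **task['kwargs']).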
""" diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index b45f313cb..c203d2ef5 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -11,20 +11,15 @@ import csv import os import pathlib -import shutil import librosa import numpy as np import torch import torch.nn.functional as F -from tqdm import tqdm from basics.base_binarizer import BaseBinarizer from modules.fastspeech.tts_modules import LengthRegulator from utils.binarizer_utils import get_mel2ph_torch -from utils.indexed_datasets import IndexedDatasetBuilder -from utils.multiprocess_utils import chunked_multiprocess_run -from utils.phoneme_utils import locate_dictionary os.environ["OMP_NUM_THREADS"] = "1" VARIANCE_ITEM_ATTRIBUTES = [ @@ -41,7 +36,7 @@ class VarianceBinarizer(BaseBinarizer): def __init__(self): - super().__init__() + super().__init__(data_attrs=VARIANCE_ITEM_ATTRIBUTES) self.lr = LengthRegulator() def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): @@ -66,53 +61,10 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict self.items.update(meta_data_dict) - def process(self): - super().process() - self.process_data_split('valid') - self.process_data_split('train', num_workers=self.binarization_args['num_workers']) - def check_coverage(self): - shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') print('Coverage checks are temporarily skipped.') pass - def process_data_split(self, prefix, num_workers=0, apply_augmentation=False): - args = [] - builder = IndexedDatasetBuilder(self.binary_data_dir, prefix=prefix, allowed_attr=VARIANCE_ITEM_ATTRIBUTES) - lengths = [] - total_raw_sec = 0 - - for item_name, meta_data in self.meta_data_iterator(prefix): - args.append([item_name, meta_data, self.binarization_args]) - - def postprocess(_item): - nonlocal total_raw_sec - if _item is None: - return - builder.add_item(_item) - lengths.append(_item['length']) - total_raw_sec += _item['seconds'] - - if num_workers > 0: - # code for parallel processing - for item in tqdm( - chunked_multiprocess_run(self.process_item, args, num_workers=num_workers), - total=len(list(self.meta_data_iterator(prefix))) - ): - postprocess(item) - else: - # code for single cpu processing - for a in tqdm(args): - item = self.process_item(*a) - postprocess(item) - - builder.finalize() - with open(self.binary_data_dir / f'{prefix}.lengths', 'wb') as f: - # noinspection PyTypeChecker - np.save(f, lengths) - - print(f'| {prefix} total duration: {total_raw_sec:.2f}s') - def process_item(self, item_name, meta_data, binarization_args): length = len(meta_data['ph_dur']) # temporarily use number of tokens as sample length seconds = sum(meta_data['ph_dur']) @@ -159,3 +111,6 @@ def process_item(self, item_name, meta_data, binarization_args): processed_input['mel2ph'] = mel2ph.cpu().numpy() return processed_input + + def arrange_data_augmentation(self, data_iterator): + return {} From be6311b6b61941952c5a1f9d5ddd2462591d2b8e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 20:27:57 +0800 Subject: [PATCH 241/475] Drop 'f0_coarse' from `get_pitch_parselmouth` --- augmentation/spec_stretch.py | 2 +- inference/vocoder/val_nsf_hifigan.py | 2 +- preprocessing/acoustic_binarizer.py | 2 +- training/acoustic_task.py | 4 ++-- utils/binarizer_utils.py | 8 ++++---- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 
e9c0f8f9a..1e59a2749 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -41,7 +41,7 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['mel2ph'] = get_mel2ph_torch( self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], self.timestep, device=self.device ).cpu().numpy() - f0, _, _ = get_pitch_parselmouth( + f0, _ = get_pitch_parselmouth( wav, aug_item['length'], hparams, speed=speed, interp_uv=hparams['interp_uv'] ) aug_item['f0'] = f0.astype(np.float32) diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/vocoder/val_nsf_hifigan.py index b51e5de0f..b754ab69a 100644 --- a/inference/vocoder/val_nsf_hifigan.py +++ b/inference/vocoder/val_nsf_hifigan.py @@ -61,7 +61,7 @@ def get_pitch(wav_data, mel, hparams, threshold=0.3): if not filename.endswith('.wav'): continue wav, mel = vocoder.wav2spec(os.path.join(in_path, filename)) - f0, _, _ = get_pitch_parselmouth(wav, len(mel), hparams) + f0, _ = get_pitch_parselmouth(wav, len(mel), hparams) wav_out = vocoder.spec2wav(mel, f0=f0) save_wav(wav_out, os.path.join(out_path, filename), hparams['audio_sample_rate']) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index e405e03f7..a05b3337c 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -150,7 +150,7 @@ def process_item(self, item_name, meta_data, binarization_args): } # get ground truth f0 - gt_f0, _, uv = get_pitch_parselmouth( + gt_f0, uv = get_pitch_parselmouth( wav, length, hparams, interp_uv=hparams['interp_uv'] ) if uv.all(): # All unvoiced diff --git a/training/acoustic_task.py b/training/acoustic_task.py index b1b41f1ce..540c7a255 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -226,7 +226,7 @@ def after_infer(self, predictions): import matplotlib.pyplot as plt # f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams) f0_pred_ = f0_pred - f0_gt_, _, _ = get_pitch_parselmouth(wav_gt, len(mel_gt), hparams) + f0_gt_, _ = get_pitch_parselmouth(wav_gt, len(mel_gt), hparams) fig = plt.figure() plt.plot(f0_pred_, label=r'$f0_P$') plt.plot(f0_gt_, label=r'$f0_G$') @@ -257,7 +257,7 @@ def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, me spec_vmax = hparams['mel_vmax'] heatmap = plt.pcolor(mel.T, vmin=spec_vmin, vmax=spec_vmax) fig.colorbar(heatmap) - f0, _, _ = get_pitch_parselmouth(wav_out, len(mel), hparams) + f0, _ = get_pitch_parselmouth(wav_out, len(mel), hparams) f0 = (f0 - 100) / (800 - 100) * 80 * (f0 > 0) plt.plot(f0, c='white', linewidth=1, alpha=0.6) if mel2ph is not None and str_phs is not None: diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index ec1e4be24..a9ca14099 100644 --- a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -5,7 +5,7 @@ warnings.filterwarnings("ignore") import parselmouth -from utils.pitch_utils import f0_to_coarse, interp_f0 +from utils.pitch_utils import interp_f0 import numpy as np @@ -25,7 +25,8 @@ def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False): f0_min = 65 f0_max = 800 - f0 = parselmouth.Sound(wav_data, hparams['audio_sample_rate']).to_pitch_ac( + # noinspection PyArgumentList + f0 = parselmouth.Sound(wav_data, sampling_frequency=hparams['audio_sample_rate']).to_pitch_ac( time_step=time_step, voicing_threshold=0.6, pitch_floor=f0_min, pitch_ceiling=f0_max).selected_array['frequency'] len_f0 = f0.shape[0] @@ -34,8 +35,7 @@ def get_pitch_parselmouth(wav_data, length, hparams, speed=1, 
interp_uv=False): uv = f0 == 0 if interp_uv: f0, uv = interp_f0(f0, uv) - f0_coarse = f0_to_coarse(f0) - return f0, f0_coarse, uv + return f0, uv @torch.no_grad() From d230227868ebee1b8648b37ade0633ddccbfddf7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 23:23:56 +0800 Subject: [PATCH 242/475] Finish `VarianceBinarizer` for pitch predictor --- configs/variance.yaml | 1 + preprocessing/variance_binarizer.py | 76 ++++++++++++++++++++++------- utils/binarizer_utils.py | 12 ++++- 3 files changed, 70 insertions(+), 19 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 465c297cc..565030ec3 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -15,6 +15,7 @@ test_prefixes: [ audio_sample_rate: 44100 hop_size: 512 # Hop size. +midi_smooth_width: 0.2 # in seconds binarization_args: shuffle: true diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index c203d2ef5..fb94cba08 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -15,29 +15,47 @@ import librosa import numpy as np import torch +import torch.nn as nn import torch.nn.functional as F +from scipy import interpolate from basics.base_binarizer import BaseBinarizer from modules.fastspeech.tts_modules import LengthRegulator -from utils.binarizer_utils import get_mel2ph_torch +from utils.binarizer_utils import get_mel2ph_torch, get_pitch_parselmouth +from utils.hparams import hparams os.environ["OMP_NUM_THREADS"] = "1" VARIANCE_ITEM_ATTRIBUTES = [ 'spk_id', # index number of dataset/speaker 'tokens', # index numbers of phonemes - 'ph_dur', # durations of phonemes, in seconds + 'ph_dur', # durations of phonemes, in number of frames 'ph_midi', # phoneme-level mean MIDI pitch 'word_dur', # durations of words/syllables (vowel-consonant pattern) 'mel2ph', # mel2ph format representing gt ph_dur - # 'base_pitch', - # 'f0' + 'base_pitch', # interpolated and smoothed frame-level MIDI pitch + 'delta_pitch', # delta_pitch = actual_pitch - base_pitch, in semitones + 'uv', # flag of unvoiced frames where f0 == 0 ] class VarianceBinarizer(BaseBinarizer): def __init__(self): super().__init__(data_attrs=VARIANCE_ITEM_ATTRIBUTES) - self.lr = LengthRegulator() + self.lr = LengthRegulator().to(self.device) + smooth_kernel_size = round(hparams['midi_smooth_width'] / self.timestep) + self.smooth = nn.Conv1d( + in_channels=1, + out_channels=1, + kernel_size=smooth_kernel_size, + bias=False, + padding='same', + padding_mode='replicate' + ).eval().to(self.device) + smooth_kernel = torch.sin(torch.from_numpy( + np.linspace(0, 1, smooth_kernel_size).astype(np.float32) * np.pi + ).to(self.device)) + smooth_kernel /= smooth_kernel.sum() + self.smooth.weight.data = smooth_kernel[None, None] def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): meta_data_dict = {} @@ -47,7 +65,7 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): item_name = utterance_label['name'] temp_dict = { 'spk_id': ds_id, - 'wav_fn': str(raw_data_dir / 'wav' / f'{item_name}.wav'), + 'wav_fn': str(raw_data_dir / 'wavs' / f'{item_name}.wav'), 'ph_seq': utterance_label['ph_seq'].split(), 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], 'word_dur': [float(x) for x in utterance_label['word_dur'].split()], @@ -65,9 +83,14 @@ def check_coverage(self): print('Coverage checks are temporarily skipped.') pass + @torch.no_grad() def process_item(self, item_name, meta_data, binarization_args): - length = len(meta_data['ph_dur']) # temporarily use 
number of tokens as sample length seconds = sum(meta_data['ph_dur']) + ph_dur = torch.FloatTensor(meta_data['ph_dur']).to(self.device) + mel2ph = get_mel2ph_torch( + self.lr, ph_dur, round(seconds / self.timestep), self.timestep, device=self.device + ) + length = mel2ph.shape[0] processed_input = { 'name': item_name, 'wav_fn': meta_data['wav_fn'], @@ -79,10 +102,6 @@ def process_item(self, item_name, meta_data, binarization_args): } # Below: calculate frame-level MIDI pitch, which is a step function curve - ph_dur = torch.FloatTensor(meta_data['ph_dur']).to(self.device) - mel2ph = get_mel2ph_torch( - self.lr, ph_dur, round(seconds / self.timestep), self.timestep, device=self.device - ) ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() ph_dur_long = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) @@ -94,22 +113,45 @@ def process_item(self, item_name, meta_data, binarization_args): note_pitch = torch.FloatTensor( [(librosa.note_to_midi(n) if n != 'rest' else 0) for n in meta_data['note_seq']] ).to(self.device) - frame_step_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch + if (note_pitch == 0).all(): + print(f'Skipped \'{item_name}\': all rest notes') + return None + frame_midi_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch # Below: calculate phoneme-level mean MIDI pitch, eliminating rest frames - ph_dur_rest = mel2ph.new_zeros(len(ph_dur) + 1).scatter_add( - 0, mel2ph, (frame_step_pitch == 0).long() - )[1:] + rest = frame_midi_pitch == 0 + ph_dur_rest = mel2ph.new_zeros(len(ph_dur) + 1).scatter_add(0, mel2ph, rest.long())[1:] mel2dur_rest = torch.gather(F.pad(ph_dur_rest, [1, 0], value=1), 0, mel2ph) # frame-level rest phone duration - ph_midi = mel2ph.new_zeros(ph_dur.shape[0] + 1).float().scatter_add( - 0, mel2ph, frame_step_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero + 0, mel2ph, frame_midi_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero )[1:] processed_input['ph_dur'] = ph_dur_long.cpu().numpy() # number of frames of each phone processed_input['ph_midi'] = ph_midi.long().cpu().numpy() processed_input['mel2ph'] = mel2ph.cpu().numpy() + # Below: interpolate and smooth the pitch step curve as the base pitch curve + frame_midi_pitch = frame_midi_pitch.cpu().numpy() + rest = rest.cpu().numpy() + interp_func = interpolate.interp1d( + np.where(~rest)[0], frame_midi_pitch[~rest], + kind='nearest', fill_value='extrapolate' + ) + frame_midi_pitch[rest] = interp_func(np.where(rest)[0]) + smoothed_midi_pitch = self.smooth(torch.from_numpy(frame_midi_pitch).to(self.device)[None])[0] + + processed_input['base_pitch'] = smoothed_midi_pitch.cpu().numpy() + + # Below: extract actual f0, convert to pitch and calculate delta pitch + waveform, _ = librosa.load(meta_data['wav_fn'], sr=hparams['audio_sample_rate'], mono=True) + f0, uv = get_pitch_parselmouth(waveform, length, hparams, interp_uv=True) + if uv.all(): # All unvoiced + print(f'Skipped \'{item_name}\': empty gt f0') + return None + + processed_input['delta_pitch'] = librosa.hz_to_midi(f0.astype(np.float32)) - processed_input['base_pitch'] + processed_input['uv'] = uv + return processed_input def arrange_data_augmentation(self, data_iterator): diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index a9ca14099..0a80b1e34 100644 --- a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -30,8 +30,16 @@ 
def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False): time_step=time_step, voicing_threshold=0.6, pitch_floor=f0_min, pitch_ceiling=f0_max).selected_array['frequency'] len_f0 = f0.shape[0] - pad_size = (int(len(wav_data) // hop_size) - len_f0 + 1) // 2 - f0 = np.pad(f0, [[pad_size, length - len_f0 - pad_size]], mode='constant') + lpad = (int(len(wav_data) // hop_size) - len_f0 + 1) // 2 + rpad = length - len_f0 - lpad + if lpad < 0: + f0 = f0[-lpad:] + lpad = 0 + if rpad < 0: + f0 = f0[:rpad] + rpad = 0 + if lpad > 0 or rpad > 0: + f0 = np.pad(f0, [[lpad, rpad]], mode='constant') uv = f0 == 0 if interp_uv: f0, uv = interp_f0(f0, uv) From 204503f84acb84b49d0a81605f118110fe54257c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Apr 2023 23:37:03 +0800 Subject: [PATCH 243/475] Add attributes for pitch predictor to `collater` --- training/variance_task.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index edf87eea9..9ec6a4ca9 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -34,11 +34,19 @@ def collater(self, samples): ph_dur = utils.collate_nd([s['ph_dur'] for s in samples], 0) ph_midi = utils.collate_nd([s['ph_midi'] for s in samples], 0) midi_dur = utils.collate_nd([s['word_dur'] for s in samples], 0) + mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) + base_pitch = utils.collate_nd([s['base_pitch'] for s in samples], 0) + delta_pitch = utils.collate_nd([s['delta_pitch'] for s in samples], 0) + uv = utils.collate_nd([s['uv'] for s in samples], 0) batch.update({ 'tokens': tokens, 'ph_dur': ph_dur, 'midi': ph_midi, - 'midi_dur': midi_dur + 'midi_dur': midi_dur, + 'mel2ph': mel2ph, + 'base_pitch': base_pitch, + 'delta_pitch': delta_pitch, + 'uv': uv }) if hparams['use_spk_id']: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) From 57ab5131c792310f28339a58c21b037ef04e9331 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 14 Apr 2023 00:48:53 +0800 Subject: [PATCH 244/475] Swap attribute order --- preprocessing/variance_binarizer.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index fb94cba08..770146c74 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -86,11 +86,13 @@ def check_coverage(self): @torch.no_grad() def process_item(self, item_name, meta_data, binarization_args): seconds = sum(meta_data['ph_dur']) + length = round(seconds / self.timestep) ph_dur = torch.FloatTensor(meta_data['ph_dur']).to(self.device) mel2ph = get_mel2ph_torch( - self.lr, ph_dur, round(seconds / self.timestep), self.timestep, device=self.device + self.lr, ph_dur, length, self.timestep, device=self.device ) - length = mel2ph.shape[0] + ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() + ph_dur_long = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) processed_input = { 'name': item_name, 'wav_fn': meta_data['wav_fn'], @@ -99,12 +101,11 @@ def process_item(self, item_name, meta_data, binarization_args): 'length': length, 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), 'word_dur': np.array(meta_data['word_dur']).astype(np.float32), + 'ph_dur': ph_dur_long.cpu().numpy(), # number of frames of each phone + 'mel2ph': mel2ph.cpu().numpy(), } # Below: calculate frame-level MIDI pitch, which is a step function curve - ph_acc = 
torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() - ph_dur_long = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) - mel2dur = torch.gather(F.pad(ph_dur_long, [1, 0], value=1), 0, mel2ph) # frame-level phone duration note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) mel2note = get_mel2ph_torch( @@ -126,9 +127,7 @@ def process_item(self, item_name, meta_data, binarization_args): 0, mel2ph, frame_midi_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero )[1:] - processed_input['ph_dur'] = ph_dur_long.cpu().numpy() # number of frames of each phone processed_input['ph_midi'] = ph_midi.long().cpu().numpy() - processed_input['mel2ph'] = mel2ph.cpu().numpy() # Below: interpolate and smooth the pitch step curve as the base pitch curve frame_midi_pitch = frame_midi_pitch.cpu().numpy() From 24ad74b6f46ddc4656564b784fe67b3043c941ee Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 14 Apr 2023 01:16:12 +0800 Subject: [PATCH 245/475] Add back missing `norm_spec` --- modules/diffusion/ddpm.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 706ea350e..0a0146b03 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -270,5 +270,8 @@ def wrapped(x, t, **kwargs): x = x.squeeze(1).transpose(1, 2) # [B, T, M] return self.denorm_spec(x) + def norm_spec(self, x): + return (x - self.spec_min) / (self.spec_max - self.spec_min) * 2 - 1 + def denorm_spec(self, x): return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min From 51ffa345987bbc72fdffa531e089501719806dbf Mon Sep 17 00:00:00 2001 From: hrukalive Date: Thu, 13 Apr 2023 15:32:09 -0500 Subject: [PATCH 246/475] Fix typos for validation --- basics/base_task.py | 4 ++-- utils/training_utils.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 658ce6fb5..cd92a5dba 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -70,9 +70,9 @@ def __init__(self, *args, **kwargs): self.max_val_batch_frames = hparams['max_val_batch_frames'] if self.max_val_batch_frames == -1: hparams['max_val_batch_frames'] = self.max_val_batch_frames = self.max_batch_frames - self.max_val_batch_size = hparams['max_val_batch_frames'] + self.max_val_batch_size = hparams['max_val_batch_size'] if self.max_val_batch_size == -1: - hparams['max_val_batch_frames'] = self.max_val_batch_size = self.max_batch_size + hparams['max_val_batch_size'] = self.max_val_batch_size = self.max_batch_size self.training_sampler = None self.model = None diff --git a/utils/training_utils.py b/utils/training_utils.py index 87cf785d2..1563daca6 100644 --- a/utils/training_utils.py +++ b/utils/training_utils.py @@ -174,7 +174,7 @@ def set_epoch(self, epoch): class DsEvalBatchSampler(Sampler): def __init__(self, dataset, max_batch_frames, max_batch_size, rank=None, batch_by_size=True) -> None: self.dataset = dataset - self.max_batch_samples = max_batch_frames + self.max_batch_frames = max_batch_frames self.max_batch_size = max_batch_size self.rank = rank self.batch_by_size = batch_by_size @@ -187,7 +187,7 @@ def __init__(self, dataset, max_batch_frames, max_batch_size, rank=None, batch_b if self.batch_by_size: self.batches = utils.batch_by_size( indices, self.dataset.num_frames, - max_batch_frames=self.max_batch_samples, max_batch_size=self.max_batch_size + max_batch_frames=self.max_batch_frames, max_batch_size=self.max_batch_size ) else: self.batches = [ From 
dfb3e7da42f3b0ccc49b0ddb843ca5a2eca1723c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 14 Apr 2023 16:56:40 +0800 Subject: [PATCH 247/475] Add `CurveLoss2d` module --- modules/losses/curve_loss.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 modules/losses/curve_loss.py diff --git a/modules/losses/curve_loss.py b/modules/losses/curve_loss.py new file mode 100644 index 000000000..1d17bfab8 --- /dev/null +++ b/modules/losses/curve_loss.py @@ -0,0 +1,35 @@ +import torch +import torch.nn as nn +from torch import Tensor + + +class CurveLoss2d(nn.Module): + """ + Loss module for parameter curve represented by gaussian-blurred 2-d probability bins. + """ + def __init__(self, vmin, vmax, num_bins, deviation): + super().__init__() + self.vmin = vmin + self.vmax = vmax + self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins + self.sigma = deviation / self.interval + self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # (1, 1, N) + self.loss = nn.BCEWithLogitsLoss() + + def value_to_bin(self, value: Tensor) -> Tensor: + return (value - self.vmin) / self.interval + + def curve_to_probs(self, curve: Tensor) -> Tensor: + miu = self.value_to_bin(curve)[:, :, None] # (B, T, 1) + probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, (B, T, N) + return probs + + def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: + """ + Calculate BCE with logits loss between predicted probs and gaussian-blurred bins representing gt curve. + :param y_pred: predicted probs (B, T, N) + :param c_gt: ground truth curve (B, T) + :param mask: (bool) mask of valid parts in ground truth curve (B, T) + """ + y_gt = self.curve_to_probs(c_gt) + return self.loss(y_pred, y_gt * mask[:, :, None]) From 76fb972f704711dab4b4566047ed99145d1f0ae2 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 03:10:27 +0800 Subject: [PATCH 248/475] Introduce `ph2word`, add modules, implement task --- configs/variance.yaml | 13 +++- modules/fastspeech/variance_encoder.py | 85 +++++++++++++++++++++--- modules/losses/curve_loss.py | 19 +++--- modules/toplevel.py | 41 ++++++++++++ preprocessing/variance_binarizer.py | 65 +++++++++--------- training/acoustic_task.py | 4 +- training/variance_task.py | 92 ++++++++++++++++++++------ 7 files changed, 240 insertions(+), 79 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 565030ec3..c982885c7 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -35,12 +35,21 @@ rel_pos: true pndm_speedup: 10 hidden_size: 256 -predictor_hidden: 384 -predictor_dropout: 0.1 + +predict_dur: true +dur_predictor_hidden: 384 +dur_predictor_dropout: 0.1 dur_predictor_kernel: 3 dur_predictor_layers: 2 dur_log_offset: 1.0 dur_loss_type: mse +dur_cascade: false + +predict_pitch: true +pitch_delta_vmin: -12.75 +pitch_delta_vmax: 12.75 +pitch_delta_num_bins: 256 +pitch_delta_deviation: 0.25 residual_layers: 20 residual_channels: 384 diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 44cf25056..aa3e9bf00 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -1,3 +1,4 @@ +import torch import torch.nn as nn from torch.nn import functional as F @@ -11,10 +12,10 @@ class FastSpeech2VarianceEncoder(FastSpeech2Encoder): - def forward_embedding(self, txt_tokens, midi_embed, midi_dur_embed): + def forward_embedding(self, txt_tokens, midi_embed, word_dur_embed): # 
embed tokens and positions
         x = self.embed_scale * self.embed_tokens(txt_tokens)
-        x = x + midi_embed + midi_dur_embed
+        x = x + midi_embed + word_dur_embed
         if hparams['use_pos_embed']:
             if hparams['rel_pos']:
                 x = self.embed_positions(x)
@@ -24,15 +25,15 @@ def forward_embedding(self, txt_tokens, midi_embed, midi_dur_embed):
         x = F.dropout(x, p=self.dropout, training=self.training)
         return x
 
-    def forward(self, txt_tokens, midi_embed, midi_dur_embed):
+    def forward(self, txt_tokens, midi_embed, word_dur_embed):
         """
         :param txt_tokens: [B, T]
         :param midi_embed: [B, T, H]
-        :param midi_dur_embed: [B, T, H]
+        :param word_dur_embed: [B, T, H]
         :return: [T x B x H]
         """
         encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach()
-        x = self.forward_embedding(txt_tokens, midi_embed, midi_dur_embed)  # [B, T, H]
+        x = self.forward_embedding(txt_tokens, midi_embed, word_dur_embed)  # [B, T, H]
         x = super()._forward(x, encoder_padding_mask)
         return x
 
@@ -42,7 +43,9 @@ def __init__(self, vocab_size):
         super().__init__()
         self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX)
         self.midi_embed = Embedding(128, hparams['hidden_size'], PAD_INDEX)
-        self.midi_dur_embed = Linear(1, hparams['hidden_size'])
+        self.word_dur_embed = Linear(1, hparams['hidden_size'])
+        self.dur_log_offset = hparams['dur_log_offset']
+
         if hparams['use_spk_id']:
             self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size'])
 
@@ -51,16 +54,78 @@ def __init__(self, vocab_size):
             ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads']
         )
 
-        predictor_hidden = hparams['predictor_hidden'] if hparams['predictor_hidden'] > 0 else self.hidden_size
+        predictor_hidden = hparams['dur_predictor_hidden'] \
+            if hparams['dur_predictor_hidden'] > 0 else self.hidden_size
         self.dur_predictor = DurationPredictor(
             in_dims=hparams['hidden_size'],
             n_chans=predictor_hidden,
             n_layers=hparams['dur_predictor_layers'],
-            dropout_rate=hparams['predictor_dropout'],
+            dropout_rate=hparams['dur_predictor_dropout'],
             padding=hparams['ffn_padding'],
             kernel_size=hparams['dur_predictor_kernel'],
             offset=hparams['dur_log_offset']
         )
 
-    def forward(self, txt_tokens, midi, midi_dur, gt_ph_dur, **kwargs):
-        raise NotImplementedError()
+    def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=True):
+        """
+        :param txt_tokens: (train, infer) [B, T_ph]
+        :param midi: (train, infer) [B, T_ph]
+        :param ph2word: (train, infer) [B, T_ph]
+        :param ph_dur: (train) [B, T_ph]
+        :param word_dur: (infer) [B, T_w]
+        :param infer: whether inference
+        :return: (train) encoder_out, ph_dur_xs; (infer) encoder_out, ph_dur
+        """
+        midi_embed = self.midi_embed(midi)  # => [B, T_ph, H]
+        if word_dur is None or not infer:
+            b = txt_tokens.shape[0]
+            word_dur = ph_dur.new_zeros(b, ph2word.max() + 1).scatter_add(
+                1, ph2word, ph_dur
+            )[:, 1:]  # [B, T_ph] => [B, T_w]
+        word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word)  # [B, T_w] => [B, T_ph]
+        word_dur_embed = self.word_dur_embed(torch.log(word_dur.float() + self.dur_log_offset))
+        encoder_out = self.encoder(txt_tokens, midi_embed, word_dur_embed)
+
+        if not hparams['predict_dur']:
+            return encoder_out, None
+
+        if infer:
+            ph_dur, _ = self.dur_predictor(encoder_out, x_masks=txt_tokens == 0, infer=True)
+            return encoder_out, ph_dur
+        else:
+            ph_dur_xs = self.dur_predictor(encoder_out, x_masks=txt_tokens == 0, infer=False)
+            return encoder_out, ph_dur_xs
+
+
+class DummyPitchPredictor(nn.Module):
+    def __init__(self, vmin, vmax, num_bins, deviation):
+        super().__init__()
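        # For intuition, with the values set in configs/variance.yaml above
        # (pitch_delta_vmin=-12.75, pitch_delta_vmax=12.75,
        # pitch_delta_num_bins=256, pitch_delta_deviation=0.25):
        #     interval = (12.75 - (-12.75)) / (256 - 1) = 0.1 semitone per bin
        #     sigma    = 0.25 / 0.1 = 2.5 bins
        # so a delta-pitch value v maps to bin (v + 12.75) / 0.1, and out2pitch
        # below recovers a value as a sigmoid-weighted average over bin centers
        # before base_pitch is added back in forward.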
+ self.vmin = vmin + self.vmax = vmax + self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins + self.sigma = deviation / self.interval + self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] + + self.base_pitch_embed = Linear(1, hparams['hidden_size']) + self.net = nn.Sequential( + Linear(hparams['hidden_size'], hparams['pitch_predictor_hidden']), + Linear(hparams['pitch_predictor_hidden'], hparams['num_pitch_bins']) + ) + + def bins_to_values(self, bins): + return bins * self.interval + self.vmin + + def out2pitch(self, probs): + logits = probs.sigmoid() # [B, T, N] + bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] + return self.bins_to_values(bins) + + def forward(self, condition, base_pitch): + """ + :param condition: [B, T, H] + :param base_pitch: [B, T] + :return: pitch_pred [B, T], probs [B, T, N] + """ + condition = condition + self.base_pitch_embed(base_pitch[:, :, None]) + probs = self.net(condition) + return self.out2pitch(probs) + base_pitch, probs diff --git a/modules/losses/curve_loss.py b/modules/losses/curve_loss.py index 1d17bfab8..0cfb5e1e1 100644 --- a/modules/losses/curve_loss.py +++ b/modules/losses/curve_loss.py @@ -5,31 +5,32 @@ class CurveLoss2d(nn.Module): """ - Loss module for parameter curve represented by gaussian-blurred 2-d probability bins. + Loss module for parameter curve represented by gaussian-blurred 2-D probability bins. """ + def __init__(self, vmin, vmax, num_bins, deviation): super().__init__() self.vmin = vmin self.vmax = vmax self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins self.sigma = deviation / self.interval - self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # (1, 1, N) + self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] self.loss = nn.BCEWithLogitsLoss() - def value_to_bin(self, value: Tensor) -> Tensor: - return (value - self.vmin) / self.interval + def values_to_bins(self, values: Tensor) -> Tensor: + return (values - self.vmin) / self.interval def curve_to_probs(self, curve: Tensor) -> Tensor: - miu = self.value_to_bin(curve)[:, :, None] # (B, T, 1) - probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, (B, T, N) + miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] + probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] return probs def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: """ Calculate BCE with logits loss between predicted probs and gaussian-blurred bins representing gt curve. 
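        (Intuitively, each ground-truth value becomes a soft target over bins: a
        Gaussian bump of width `sigma` bins centered on its bin index, produced by
        curve_to_probs above, so neighboring bins also receive supervision.)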
- :param y_pred: predicted probs (B, T, N) - :param c_gt: ground truth curve (B, T) - :param mask: (bool) mask of valid parts in ground truth curve (B, T) + :param y_pred: predicted probs [B, T, N] + :param c_gt: ground truth curve [B, T] + :param mask: (bool) mask of valid parts in ground truth curve [B, T] """ y_gt = self.curve_to_probs(c_gt) return self.loss(y_pred, y_gt * mask[:, :, None]) diff --git a/modules/toplevel.py b/modules/toplevel.py index 4f6348d5e..d6eb9fdf1 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -1,6 +1,11 @@ +import torch +import torch.nn.functional as F + from basics.base_module import CategorizedModule from modules.diffusion.ddpm import GaussianDiffusion from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic +from modules.fastspeech.tts_modules import LengthRegulator +from modules.fastspeech.variance_encoder import FastSpeech2Variance, DummyPitchPredictor from utils.hparams import hparams @@ -37,6 +42,42 @@ def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, class DiffSingerVariance(CategorizedModule): + def __init__(self, vocab_size): + super().__init__() + self.fs2 = FastSpeech2Variance( + vocab_size=vocab_size + ) + self.lr = LengthRegulator() + self.pitch_predictor = DummyPitchPredictor( + vmin=hparams['pitch_delta_vmin'], + vmax=hparams['pitch_delta_vmax'], + num_bins=hparams['pitch_delta_num_bins'], + deviation=hparams['pitch_delta_deviation'] + ) + @property def category(self): return 'variance' + + def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, + mel2ph=None, base_pitch=None, infer=True): + encoder_out, dur_pred_out = self.fs2( + txt_tokens, midi=midi, ph2word=ph2word, + ph_dur=ph_dur, word_dur=word_dur, infer=infer + ) + + if not hparams['predict_pitch']: + return dur_pred_out, None + + if mel2ph is None or hparams['dur_cascade']: + # (extract mel2ph from dur_pred_out) + raise NotImplementedError() + + encoder_out = F.pad(encoder_out, [0, 0, 1, 0]) + mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']]) + condition = torch.gather(encoder_out, 1, mel2ph_) + pitch_pred, pitch_probs = self.pitch_predictor(condition, base_pitch) + if infer: + return dur_pred_out, pitch_pred + else: + return dur_pred_out, pitch_probs diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 770146c74..30b28642e 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -1,13 +1,3 @@ -""" - item: one piece of data - item_name: data id - wav_fn: wave file path - spk: dataset name - ph_seq: phoneme sequence - ph_dur: phoneme durations - midi_seq: midi note sequence - midi_dur: midi note durations -""" import csv import os import pathlib @@ -26,15 +16,15 @@ os.environ["OMP_NUM_THREADS"] = "1" VARIANCE_ITEM_ATTRIBUTES = [ - 'spk_id', # index number of dataset/speaker - 'tokens', # index numbers of phonemes - 'ph_dur', # durations of phonemes, in number of frames - 'ph_midi', # phoneme-level mean MIDI pitch - 'word_dur', # durations of words/syllables (vowel-consonant pattern) - 'mel2ph', # mel2ph format representing gt ph_dur - 'base_pitch', # interpolated and smoothed frame-level MIDI pitch - 'delta_pitch', # delta_pitch = actual_pitch - base_pitch, in semitones - 'uv', # flag of unvoiced frames where f0 == 0 + 'spk_id', # index number of dataset/speaker, int64 + 'tokens', # index numbers of phonemes, int64[T_ph,] + 'ph_dur', # durations of phonemes, in number of frames, int64[T_ph,] + 'midi', # phoneme-level mean MIDI pitch, 
int64[T_ph,] + 'ph2word', # similar to mel2ph format, representing number of phones within each note, int64[T_ph,] + 'mel2ph', # mel2ph format representing number of frames within each phone, int64[T_t,] + 'base_pitch', # interpolated and smoothed frame-level MIDI pitch, float32[T_t,] + 'delta_pitch', # delta_pitch = actual_pitch - base_pitch, in semitones, float32[T_t,] + 'uv', # flag of unvoiced frames where f0 == 0, bool[T_t,] ] @@ -68,14 +58,18 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): 'wav_fn': str(raw_data_dir / 'wavs' / f'{item_name}.wav'), 'ph_seq': utterance_label['ph_seq'].split(), 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], - 'word_dur': [float(x) for x in utterance_label['word_dur'].split()], + 'ph_num': [int(x) for x in utterance_label['ph_num'].split()], 'note_seq': utterance_label['note_seq'].split(), 'note_dur': [float(x) for x in utterance_label['note_dur'].split()], } - assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']) == len(temp_dict['word_dur']), \ - f'Lengths of ph_seq, ph_dur and word_dur mismatch in \'{item_name}\'.' + assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \ + f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' + assert len(temp_dict['ph_seq']) == sum(temp_dict['ph_num']), \ + f'Sum of ph_num does not equal length of ph_seq in \'{item_name}\'.' assert len(temp_dict['note_seq']) == len(temp_dict['note_dur']), \ f'Lengths of note_seq and note_dur mismatch in \'{item_name}\'.' + assert any([note != 'rest' for note in temp_dict['note_seq']]), \ + f'All notes are rest in \'{item_name}\'.' meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict self.items.update(meta_data_dict) @@ -87,12 +81,16 @@ def check_coverage(self): def process_item(self, item_name, meta_data, binarization_args): seconds = sum(meta_data['ph_dur']) length = round(seconds / self.timestep) - ph_dur = torch.FloatTensor(meta_data['ph_dur']).to(self.device) + t_txt = len(meta_data['ph_seq']) + ph_dur_sec = torch.FloatTensor(meta_data['ph_dur']).to(self.device) + ph_acc = torch.round(torch.cumsum(ph_dur_sec, dim=0) / self.timestep + 0.5).long() + ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) + ph_num = torch.LongTensor(meta_data['ph_num']).to(self.device) + ph2word = self.lr(ph_num[None])[0] mel2ph = get_mel2ph_torch( - self.lr, ph_dur, length, self.timestep, device=self.device + self.lr, ph_dur_sec, length, self.timestep, device=self.device ) - ph_acc = torch.round(torch.cumsum(ph_dur, dim=0) / self.timestep + 0.5).long() - ph_dur_long = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) + processed_input = { 'name': item_name, 'wav_fn': meta_data['wav_fn'], @@ -100,13 +98,13 @@ def process_item(self, item_name, meta_data, binarization_args): 'seconds': seconds, 'length': length, 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), - 'word_dur': np.array(meta_data['word_dur']).astype(np.float32), - 'ph_dur': ph_dur_long.cpu().numpy(), # number of frames of each phone + 'ph_dur': ph_dur.cpu().numpy(), + 'ph2word': ph2word.cpu().numpy(), 'mel2ph': mel2ph.cpu().numpy(), } # Below: calculate frame-level MIDI pitch, which is a step function curve - mel2dur = torch.gather(F.pad(ph_dur_long, [1, 0], value=1), 0, mel2ph) # frame-level phone duration + mel2dur = torch.gather(F.pad(ph_dur, [1, 0], value=1), 0, mel2ph) # frame-level phone duration note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) mel2note = get_mel2ph_torch( 
self.lr, note_dur, mel2ph.shape[0], self.timestep, device=self.device @@ -114,20 +112,17 @@ def process_item(self, item_name, meta_data, binarization_args): note_pitch = torch.FloatTensor( [(librosa.note_to_midi(n) if n != 'rest' else 0) for n in meta_data['note_seq']] ).to(self.device) - if (note_pitch == 0).all(): - print(f'Skipped \'{item_name}\': all rest notes') - return None frame_midi_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch # Below: calculate phoneme-level mean MIDI pitch, eliminating rest frames rest = frame_midi_pitch == 0 - ph_dur_rest = mel2ph.new_zeros(len(ph_dur) + 1).scatter_add(0, mel2ph, rest.long())[1:] + ph_dur_rest = mel2ph.new_zeros(t_txt + 1).scatter_add(0, mel2ph, rest.long())[1:] mel2dur_rest = torch.gather(F.pad(ph_dur_rest, [1, 0], value=1), 0, mel2ph) # frame-level rest phone duration - ph_midi = mel2ph.new_zeros(ph_dur.shape[0] + 1).float().scatter_add( + ph_midi = mel2ph.new_zeros(t_txt + 1).float().scatter_add( 0, mel2ph, frame_midi_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero )[1:] - processed_input['ph_midi'] = ph_midi.long().cpu().numpy() + processed_input['midi'] = ph_midi.long().cpu().numpy() # Below: interpolate and smooth the pitch step curve as the base pitch curve frame_midi_pitch = frame_midi_pitch.cpu().numpy() diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 540c7a255..f4e244d39 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -111,8 +111,8 @@ def _validation_step(self, sample, batch_idx): 'total_loss': total_loss } - if batch_idx < hparams['num_valid_plots'] and (self.trainer.distributed_sampler_kwargs or {}).get('rank', - 0) == 0: + if batch_idx < hparams['num_valid_plots'] \ + and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: mel_pred = self.run_model(sample, infer=True) if self.use_vocoder: self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) diff --git a/training/variance_task.py b/training/variance_task.py index 9ec6a4ca9..1d5e307f6 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -1,27 +1,17 @@ -import os -from multiprocessing.pool import Pool - import matplotlib -import matplotlib.pyplot as plt -import numpy as np import torch import torch.distributions import torch.optim import torch.utils.data -from tqdm import tqdm import utils import utils.infer_utils from basics.base_dataset import BaseDataset from basics.base_task import BaseTask -from basics.base_vocoder import BaseVocoder -from modules.fastspeech.tts_modules import mel2ph_to_dur +from modules.losses.curve_loss import CurveLoss2d from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance -from modules.vocoders.registry import get_vocoder_cls -from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams -from utils.plot import spec_to_figure matplotlib.use('Agg') @@ -32,8 +22,8 @@ def collater(self, samples): tokens = utils.collate_nd([s['tokens'] for s in samples], 0) ph_dur = utils.collate_nd([s['ph_dur'] for s in samples], 0) - ph_midi = utils.collate_nd([s['ph_midi'] for s in samples], 0) - midi_dur = utils.collate_nd([s['word_dur'] for s in samples], 0) + midi = utils.collate_nd([s['midi'] for s in samples], 0) + ph2word = utils.collate_nd([s['ph2word'] for s in samples], 0) mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) base_pitch = utils.collate_nd([s['base_pitch'] for s in samples], 0) 
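(utils.collate_nd is used throughout this collater to batch variable-length per-item arrays. Its source is not shown in this series; the sketch below is an assumption of its behavior for 1-D inputs, inferred from the call sites — pad every item to the longest one with the given pad value, then stack. The real helper presumably also handles multi-dimensional inputs.)

    import torch

    def collate_1d_sketch(items, pad_value=0):
        # Pad variable-length 1-D tensors to a common length and stack: [B, T_max].
        max_len = max(item.shape[0] for item in items)
        batch = torch.full((len(items), max_len), pad_value, dtype=items[0].dtype)
        for i, item in enumerate(items):
            batch[i, :item.shape[0]] = item
        return batch

    collate_1d_sketch([torch.tensor([5, 9, 13]), torch.tensor([7, 2])])
    # tensor([[ 5,  9, 13],
    #         [ 7,  2,  0]])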
delta_pitch = utils.collate_nd([s['delta_pitch'] for s in samples], 0) @@ -41,8 +31,8 @@ def collater(self, samples): batch.update({ 'tokens': tokens, 'ph_dur': ph_dur, - 'midi': ph_midi, - 'midi_dur': midi_dur, + 'midi': midi, + 'ph2word': ph2word, 'mel2ph': mel2ph, 'base_pitch': base_pitch, 'delta_pitch': delta_pitch, @@ -61,12 +51,72 @@ def __init__(self): self.dataset_cls = VarianceDataset def build_model(self): - # return DiffSingerVariance() - raise NotImplementedError() + return DiffSingerVariance( + vocab_size=len(self.phone_encoder), + ) # noinspection PyAttributeOutsideInit def build_losses(self): - self.dur_loss = DurationLoss( - loss_type=hparams['dur_loss_type'], - offset=hparams['dur_log_offset'] - ) + if hparams['predict_dur']: + self.dur_loss = DurationLoss( + loss_type=hparams['dur_loss_type'], + offset=hparams['dur_log_offset'] + ) + if hparams['predict_pitch']: + self.pitch_loss = CurveLoss2d( + vmin=hparams['pitch_delta_vmin'], + vmax=hparams['pitch_delta_vmax'], + num_bins=hparams['pitch_delta_num_bins'], + deviation=hparams['pitch_delta_deviation'] + ) + + def run_model(self, sample, infer=False): + txt_tokens = sample['tokens'] # [B, T_ph] + midi = sample['midi'] # [B, T_ph] + ph2word = sample['ph2word'] # [B, T_ph] + ph_dur = sample['ph_dur'] # [B, T_ph] + mel2ph = sample['mel2ph'] # [B, T_t] + base_pitch = sample['base_pitch'] # [B, T_t] + + output = self.model(txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, + mel2ph=mel2ph, base_pitch=base_pitch, infer=infer) + + if infer: + dur_pred, pitch_pred = output + return dur_pred, pitch_pred + else: + dur_pred_xs, pitch_prob = output + losses = {} + if dur_pred_xs is not None: + losses['dur_loss'] = self.dur_loss.forward(dur_pred_xs, ph_dur) + if pitch_prob is not None: + pitch_delta = sample['pitch_delta'] + uv = sample['uv'] + losses['pitch_loss'] = self.pitch_loss.forward(pitch_prob, pitch_delta, ~uv) + return losses + + def _validation_step(self, sample, batch_idx): + losses = self.run_model(sample, infer=False) + total_loss = sum(losses.values()) + outputs = { + 'total_loss': total_loss + } + + if batch_idx < hparams['num_valid_plots'] \ + and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: + dur_pred, pitch_pred = self.run_model(sample, infer=True) + self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, ph2word=sample['ph2word']) + self.plot_curve(batch_idx, sample['base_pitch'] + sample['pitch_delta'], pitch_pred, curve_name='pitch') + + return outputs, sample['size'] + + ############ + # validation plots + ############ + def plot_dur(self, batch_idx, gt_dur, pred_dur, ph2word=None): + # TODO: plot dur to TensorBoard + pass + + def plot_curve(self, batch_idx, gt_curve, pred_curve, curve_name='curve'): + # TODO: plot curve to TensorBoard + pass From 2dbe2a9c7531f903e189c2c5516872b36cb13db7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 11:00:40 +0800 Subject: [PATCH 249/475] Re-organize variance prediction hparams --- configs/variance.yaml | 24 ++++++++++-------- modules/fastspeech/tts_modules.py | 26 +++++++++---------- modules/fastspeech/variance_encoder.py | 35 ++++++++++++-------------- modules/toplevel.py | 12 ++++++--- training/variance_task.py | 14 ++++++----- 5 files changed, 59 insertions(+), 52 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index c982885c7..3354a9f92 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -37,19 +37,23 @@ pndm_speedup: 10 hidden_size: 256 predict_dur: true -dur_predictor_hidden: 384 
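(The flat dur_* keys removed in this hunk are regrouped under a nested dur_prediction_args mapping below; pitch keys get the same treatment. For call sites the change amounts to one extra lookup — a minimal illustration with dictionary shapes taken from this YAML, not runnable repo code:)

    # Before patch 249: flat hparams keys.
    hparams = {'dur_predictor_hidden': 384, 'dur_predictor_dropout': 0.1}
    hidden = hparams['dur_predictor_hidden']

    # After patch 249: one nested sub-dict per predictor, as read in
    # FastSpeech2Variance.__init__ further down in this patch.
    hparams = {'dur_prediction_args': {'hidden_size': 384, 'dropout': 0.1}}
    dur_hparams = hparams['dur_prediction_args']
    hidden = dur_hparams['hidden_size']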
-dur_predictor_dropout: 0.1 -dur_predictor_kernel: 3 -dur_predictor_layers: 2 -dur_log_offset: 1.0 -dur_loss_type: mse dur_cascade: false +dur_prediction_args: + hidden_size: 384 + dropout: 0.1 + num_layers: 2 + kernel_size: 3 + log_offset: 1.0 + loss_type: mse predict_pitch: true -pitch_delta_vmin: -12.75 -pitch_delta_vmax: 12.75 -pitch_delta_num_bins: 256 -pitch_delta_deviation: 0.25 +pitch_cascade: true +pitch_prediction_args: + pitch_delta_vmin: -12.75 + pitch_delta_vmax: 12.75 + num_pitch_bins: 256 + deviation: 0.25 + hidden_size: 512 residual_layers: 20 residual_channels: 384 diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 664393057..f7ebdf766 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -64,7 +64,8 @@ class DurationPredictor(torch.nn.Module): the outputs are calculated in log domain but in `inference`, those are calculated in linear domain. """ - def __init__(self, in_dims, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0.1, offset=1.0, padding='SAME'): + def __init__(self, in_dims, n_layers=2, n_chans=384, kernel_size=3, + dropout_rate=0.1, offset=1.0, padding='SAME', dur_loss_type='mse'): """Initilize duration predictor module. Args: in_dims (int): Input dimension. @@ -90,7 +91,9 @@ def __init__(self, in_dims, n_layers=2, n_chans=384, kernel_size=3, dropout_rate LayerNorm(n_chans, dim=1), torch.nn.Dropout(dropout_rate) )] - if hparams['dur_loss_type'] in ['mse', 'huber']: + + self.loss_type = dur_loss_type + if self.loss_type in ['mse', 'huber']: self.out_dims = 1 # elif hparams['dur_loss_type'] == 'mog': # out_dims = 15 @@ -103,10 +106,9 @@ def __init__(self, in_dims, n_layers=2, n_chans=384, kernel_size=3, dropout_rate self.linear = torch.nn.Linear(n_chans, self.out_dims) def out2dur(self, xs): - if hparams['dur_loss_type'] in ['mse']: - # NOTE: calculate in log domain - xs = xs.squeeze(-1) # (B, Tmax) - dur = torch.clamp(torch.round(xs.exp() - self.offset), min=0).long() # avoid negative value + if self.loss_type in ['mse', 'huber']: + # NOTE: calculate loss in log domain + dur = xs.squeeze(-1).exp() - self.offset # (B, Tmax) # elif hparams['dur_loss_type'] == 'crf': # dur = torch.LongTensor(self.crf.decode(xs)).cuda() else: @@ -120,8 +122,7 @@ def forward(self, xs, x_masks=None, infer=True): x_masks (BoolTensor, optional): Batch of masks indicating padded part (B, Tmax). infer (bool): Whether inference Returns: - (train) FloatTensor: Batch of predicted durations in log domain (B, Tmax); - (infer) LongTensor: Batch of predicted durations in linear domain (B, Tmax). + (train) FloatTensor, (infer) LongTensor: Batch of predicted durations in linear domain (B, Tmax). 
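A tiny numeric illustration of the log-domain convention documented above (offset is dur_prediction_args.log_offset, 1.0 by default); this is only a sketch of what out2dur and the training target amount to, not code from the patch:

    import torch

    offset = 1.0
    dur_gt = torch.tensor([3., 7., 0.])        # ground-truth frames per phone
    target = torch.log(dur_gt + offset)        # regression target in the log domain
    dur_linear = target.exp() - offset         # out2dur: back to the linear domain
    dur_infer = torch.clamp(torch.round(dur_linear), min=0).long()
    # dur_infer == tensor([3, 7, 0])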
""" xs = xs.transpose(1, -1) # (B, idim, Tmax) for f in self.conv: @@ -130,12 +131,11 @@ def forward(self, xs, x_masks=None, infer=True): xs = xs * (1 - x_masks.float())[:, None, :] xs = self.linear(xs.transpose(1, -1)) # [B, T, C] xs = xs * (1 - x_masks.float())[:, :, None] # (B, T, C) + + dur_pred = self.out2dur(xs) if infer: - return self.out2dur(xs), xs - else: - if self.out_dims == 1: - xs = xs.squeeze(-1) # (B, Tmax) - return xs + dur_pred = torch.clamp(torch.round(dur_pred), min=0).long() # avoid negative value + return dur_pred class LengthRegulator(torch.nn.Module): diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index aa3e9bf00..434d9da72 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -44,7 +44,6 @@ def __init__(self, vocab_size): self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) self.midi_embed = Embedding(128, hparams['hidden_size'], PAD_INDEX) self.word_dur_embed = Linear(1, hparams['hidden_size']) - self.dur_log_offset = hparams['dur_log_offset'] if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) @@ -54,16 +53,17 @@ def __init__(self, vocab_size): ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] ) - predictor_hidden = hparams['dur_predictor_hidden'] \ - if hparams['dur_predictor_hidden'] > 0 else self.hidden_size + dur_hparams = hparams['dur_prediction_args'] + self.wdur_log_offset = dur_hparams['log_offset'] self.dur_predictor = DurationPredictor( in_dims=hparams['hidden_size'], - n_chans=predictor_hidden, - n_layers=hparams['dur_predictor_layers'], - dropout_rate=hparams['dur_predictor_dropout'], + n_chans=dur_hparams['hidden_size'], + n_layers=dur_hparams['num_layers'], + dropout_rate=dur_hparams['dropout'], padding=hparams['ffn_padding'], - kernel_size=hparams['dur_predictor_kernel'], - offset=hparams['dur_log_offset'] + kernel_size=dur_hparams['kernel_size'], + offset=dur_hparams['log_offset'], + dur_loss_type=dur_hparams['loss_type'] ) def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=True): @@ -83,22 +83,19 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T 1, ph2word, ph_dur )[:, 1:] # [B, T_ph] => [B, T_w] word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word) # [B, T_w] => [B, T_ph] - word_dur_embed = self.word_dur_embed(torch.log(word_dur.float() + self.dur_log_offset)) + word_dur_embed = self.word_dur_embed(torch.log(word_dur.float() + self.wdur_log_offset)) encoder_out = self.encoder(txt_tokens, midi_embed, word_dur_embed) if not hparams['predict_dur']: return encoder_out, None - if infer: - ph_dur, _ = self.dur_predictor(encoder_out, x_mask=txt_tokens == 0, infer=True) - return encoder_out, ph_dur - else: - ph_dur_xs = self.dur_predictor(encoder_out, x_mask=txt_tokens == 0, infer=False) - return encoder_out, ph_dur_xs + ph_dur_pred = self.dur_predictor(encoder_out, x_mask=txt_tokens == 0, infer=infer) + + return encoder_out, ph_dur_pred class DummyPitchPredictor(nn.Module): - def __init__(self, vmin, vmax, num_bins, deviation): + def __init__(self, vmin, vmax, num_bins, deviation, in_dims=256, hidden_size=512): super().__init__() self.vmin = vmin self.vmax = vmax @@ -106,10 +103,10 @@ def __init__(self, vmin, vmax, num_bins, deviation): self.sigma = deviation / self.interval self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] - self.base_pitch_embed = 
Linear(1, hparams['hidden_size']) + self.base_pitch_embed = Linear(1, in_dims) self.net = nn.Sequential( - Linear(hparams['hidden_size'], hparams['pitch_predictor_hidden']), - Linear(hparams['pitch_predictor_hidden'], hparams['num_pitch_bins']) + Linear(hparams['hidden_size'], hidden_size), + Linear(hidden_size, num_bins) ) def bins_to_values(self, bins): diff --git a/modules/toplevel.py b/modules/toplevel.py index d6eb9fdf1..3b0200503 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -48,11 +48,15 @@ def __init__(self, vocab_size): vocab_size=vocab_size ) self.lr = LengthRegulator() + + pitch_hparams = hparams['pitch_prediction_args'] self.pitch_predictor = DummyPitchPredictor( - vmin=hparams['pitch_delta_vmin'], - vmax=hparams['pitch_delta_vmax'], - num_bins=hparams['pitch_delta_num_bins'], - deviation=hparams['pitch_delta_deviation'] + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + num_bins=pitch_hparams['num_pitch_bins'], + deviation=pitch_hparams['deviation'], + in_dims=hparams['hidden_size'], + hidden_size=pitch_hparams['hidden_size'] ) @property diff --git a/training/variance_task.py b/training/variance_task.py index 1d5e307f6..c12764ddb 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -58,16 +58,18 @@ def build_model(self): # noinspection PyAttributeOutsideInit def build_losses(self): if hparams['predict_dur']: + dur_hparams = hparams['dur_prediction_args'] self.dur_loss = DurationLoss( - loss_type=hparams['dur_loss_type'], - offset=hparams['dur_log_offset'] + loss_type=dur_hparams['loss_type'], + offset=dur_hparams['log_offset'] ) if hparams['predict_pitch']: + pitch_hparams = hparams['pitch_prediction_args'] self.pitch_loss = CurveLoss2d( - vmin=hparams['pitch_delta_vmin'], - vmax=hparams['pitch_delta_vmax'], - num_bins=hparams['pitch_delta_num_bins'], - deviation=hparams['pitch_delta_deviation'] + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + num_bins=pitch_hparams['num_pitch_bins'], + deviation=pitch_hparams['deviation'] ) def run_model(self, sample, infer=False): From 0568bb18e5cbf56073c56ad92457e82ae3caa174 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 12:12:52 +0800 Subject: [PATCH 250/475] Finish pdur, wdur, sdur losses and lambda --- configs/variance.yaml | 7 ++++++- modules/losses/dur_loss.py | 43 ++++++++++++++++++++++++++++++++++---- training/variance_task.py | 17 ++++++++++----- 3 files changed, 57 insertions(+), 10 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 3354a9f92..d8b756c5f 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -38,16 +38,21 @@ hidden_size: 256 predict_dur: true dur_cascade: false +lambda_dur_loss: 1.0 dur_prediction_args: hidden_size: 384 dropout: 0.1 - num_layers: 2 + num_layers: 4 kernel_size: 3 log_offset: 1.0 loss_type: mse + lambda_pdur_loss: 0.6 + lambda_wdur_loss: 0.3 + lambda_sdur_loss: 0.1 predict_pitch: true pitch_cascade: true +lambda_pitch_loss: 1.0 pitch_prediction_args: pitch_delta_vmin: -12.75 pitch_delta_vmax: 12.75 diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py index c2094fd93..5059a8644 100644 --- a/modules/losses/dur_loss.py +++ b/modules/losses/dur_loss.py @@ -4,7 +4,12 @@ class DurationLoss(nn.Module): - def __init__(self, loss_type, offset=1.0): + """ + Loss module as combination of phone duration loss, word duration loss and sentence duration loss. 
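The word-level term below relies on the same ph2word scatter_add trick used in FastSpeech2Variance.forward: phone durations are summed into their parent word by index, with index 0 reserved for padding. A minimal worked example (illustrative values only):

    import torch

    ph_dur = torch.tensor([[3., 2., 4., 2.]])   # [B=1, T_ph], frames per phone
    ph2word = torch.tensor([[1, 1, 2, 2]])      # word index of each phone (0 = padding)
    word_dur = ph_dur.new_zeros(1, ph2word.max() + 1).scatter_add(
        1, ph2word, ph_dur
    )[:, 1:]                                    # drop the padding slot
    # word_dur == tensor([[5., 6.]])            # 3+2 and 4+2

With the constructor defaults in this patch (lambda_pdur=0.6, lambda_wdur=0.3, lambda_sdur=0.1, matching the YAML above), the phone-level term stays dominant while the word- and sentence-level terms regularize aggregate timing.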
+ """ + + def __init__(self, offset, loss_type, + lambda_pdur=0.6, lambda_wdur=0.3, lambda_sdur=0.1): super().__init__() self.loss_type = loss_type if self.loss_type == 'mse': @@ -15,6 +20,36 @@ def __init__(self, loss_type, offset=1.0): raise NotImplementedError() self.offset = offset - def forward(self, xs_pred: Tensor, xs_gt: Tensor) -> Tensor: - xs_gt_log = torch.log(xs_gt + self.offset) # calculate in log domain - return self.loss(xs_pred, xs_gt_log) + self.lambda_pdur = lambda_pdur + self.lambda_wdur = lambda_wdur + self.lambda_sdur = lambda_sdur + + def linear2log(self, any_dur): + return torch.log(any_dur + self.offset) + + # noinspection PyMethodMayBeStatic + def pdur2wdur(self, ph_dur, ph2word): + b = ph_dur.shape[0] + word_dur = ph_dur.new_zeros(b, ph2word.max() + 1).scatter_add( + 1, ph2word, ph_dur + )[:, 1:] # [B, T_ph] => [B, T_w] + return word_dur + + def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: + # pdur_loss + pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) + + # wdur loss + wdur_pred = self.pdur2wdur(dur_pred, ph2word) + wdur_gt = self.pdur2wdur(dur_gt, ph2word) + wdur_loss = self.lambda_wdur * self.loss(self.linear2log(wdur_pred), self.linear2log(wdur_gt)) + + # sdur loss + sdur_pred = dur_pred.sum(dim=1) + sdur_gt = dur_gt.sum(dim=1) + sdur_loss = self.lambda_sdur * self.loss(self.linear2log(sdur_pred), self.linear2log(sdur_gt)) + + # combine + dur_loss = pdur_loss + wdur_loss + sdur_loss + + return dur_loss diff --git a/training/variance_task.py b/training/variance_task.py index c12764ddb..349f869f7 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -49,6 +49,10 @@ class VarianceTask(BaseTask): def __init__(self): super().__init__() self.dataset_cls = VarianceDataset + if hparams['predict_dur']: + self.lambda_dur_loss = hparams['lambda_dur_loss'] + if hparams['predict_pitch']: + self.lambda_pitch_loss = hparams['lambda_pitch_loss'] def build_model(self): return DiffSingerVariance( @@ -60,8 +64,11 @@ def build_losses(self): if hparams['predict_dur']: dur_hparams = hparams['dur_prediction_args'] self.dur_loss = DurationLoss( + offset=dur_hparams['log_offset'], loss_type=dur_hparams['loss_type'], - offset=dur_hparams['log_offset'] + lambda_pdur=dur_hparams['lambda_pdur_loss'], + lambda_wdur=dur_hparams['lambda_wdur_loss'], + lambda_sdur=dur_hparams['lambda_sdur_loss'] ) if hparams['predict_pitch']: pitch_hparams = hparams['pitch_prediction_args'] @@ -87,14 +94,14 @@ def run_model(self, sample, infer=False): dur_pred, pitch_pred = output return dur_pred, pitch_pred else: - dur_pred_xs, pitch_prob = output + dur_pred, pitch_prob = output losses = {} - if dur_pred_xs is not None: - losses['dur_loss'] = self.dur_loss.forward(dur_pred_xs, ph_dur) + if dur_pred is not None: + losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) if pitch_prob is not None: pitch_delta = sample['pitch_delta'] uv = sample['uv'] - losses['pitch_loss'] = self.pitch_loss.forward(pitch_prob, pitch_delta, ~uv) + losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, pitch_delta, ~uv) return losses def _validation_step(self, sample, batch_idx): From 5ae14f22b5e0a42b3d3040d98e43d6f9bafa96f2 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 13:55:28 +0800 Subject: [PATCH 251/475] Fix key error, dimension error and nan loss --- configs/variance.yaml | 4 ++-- modules/fastspeech/tts_modules.py | 4 ++-- 
modules/fastspeech/variance_encoder.py | 25 +++++++++++++------------ modules/toplevel.py | 19 ++++++++++--------- training/variance_task.py | 12 ++++++++---- 5 files changed, 35 insertions(+), 29 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index d8b756c5f..7c86f11c5 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -46,8 +46,8 @@ dur_prediction_args: kernel_size: 3 log_offset: 1.0 loss_type: mse - lambda_pdur_loss: 0.6 - lambda_wdur_loss: 0.3 + lambda_pdur_loss: 0.7 + lambda_wdur_loss: 0.2 lambda_sdur_loss: 0.1 predict_pitch: true diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index f7ebdf766..7bbd240dd 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -132,9 +132,9 @@ def forward(self, xs, x_masks=None, infer=True): xs = self.linear(xs.transpose(1, -1)) # [B, T, C] xs = xs * (1 - x_masks.float())[:, :, None] # (B, T, C) - dur_pred = self.out2dur(xs) + dur_pred = torch.clamp(self.out2dur(xs), min=0.) # avoid negative value if infer: - dur_pred = torch.clamp(torch.round(dur_pred), min=0).long() # avoid negative value + dur_pred = torch.round(dur_pred).long() return dur_pred diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 434d9da72..13e8167ee 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -55,16 +55,17 @@ def __init__(self, vocab_size): dur_hparams = hparams['dur_prediction_args'] self.wdur_log_offset = dur_hparams['log_offset'] - self.dur_predictor = DurationPredictor( - in_dims=hparams['hidden_size'], - n_chans=dur_hparams['hidden_size'], - n_layers=dur_hparams['num_layers'], - dropout_rate=dur_hparams['dropout'], - padding=hparams['ffn_padding'], - kernel_size=dur_hparams['kernel_size'], - offset=dur_hparams['log_offset'], - dur_loss_type=dur_hparams['loss_type'] - ) + if hparams['predict_dur']: + self.dur_predictor = DurationPredictor( + in_dims=hparams['hidden_size'], + n_chans=dur_hparams['hidden_size'], + n_layers=dur_hparams['num_layers'], + dropout_rate=dur_hparams['dropout'], + padding=hparams['ffn_padding'], + kernel_size=dur_hparams['kernel_size'], + offset=dur_hparams['log_offset'], + dur_loss_type=dur_hparams['loss_type'] + ) def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=True): """ @@ -83,13 +84,13 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T 1, ph2word, ph_dur )[:, 1:] # [B, T_ph] => [B, T_w] word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word) # [B, T_w] => [B, T_ph] - word_dur_embed = self.word_dur_embed(torch.log(word_dur.float() + self.wdur_log_offset)) + word_dur_embed = self.word_dur_embed(torch.log(word_dur.float() + self.wdur_log_offset)[:, :, None]) encoder_out = self.encoder(txt_tokens, midi_embed, word_dur_embed) if not hparams['predict_dur']: return encoder_out, None - ph_dur_pred = self.dur_predictor(encoder_out, x_mask=txt_tokens == 0, infer=infer) + ph_dur_pred = self.dur_predictor(encoder_out, x_masks=txt_tokens == 0, infer=infer) return encoder_out, ph_dur_pred diff --git a/modules/toplevel.py b/modules/toplevel.py index 3b0200503..063a9083c 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -49,15 +49,16 @@ def __init__(self, vocab_size): ) self.lr = LengthRegulator() - pitch_hparams = hparams['pitch_prediction_args'] - self.pitch_predictor = DummyPitchPredictor( - vmin=pitch_hparams['pitch_delta_vmin'], - 
vmax=pitch_hparams['pitch_delta_vmax'], - num_bins=pitch_hparams['num_pitch_bins'], - deviation=pitch_hparams['deviation'], - in_dims=hparams['hidden_size'], - hidden_size=pitch_hparams['hidden_size'] - ) + if hparams['predict_pitch']: + pitch_hparams = hparams['pitch_prediction_args'] + self.pitch_predictor = DummyPitchPredictor( + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + num_bins=pitch_hparams['num_pitch_bins'], + deviation=pitch_hparams['deviation'], + in_dims=hparams['hidden_size'], + hidden_size=pitch_hparams['hidden_size'] + ) @property def category(self): diff --git a/training/variance_task.py b/training/variance_task.py index 349f869f7..03b38896a 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -99,9 +99,9 @@ def run_model(self, sample, infer=False): if dur_pred is not None: losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) if pitch_prob is not None: - pitch_delta = sample['pitch_delta'] + delta_pitch = sample['delta_pitch'] uv = sample['uv'] - losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, pitch_delta, ~uv) + losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) return losses def _validation_step(self, sample, batch_idx): @@ -114,8 +114,12 @@ def _validation_step(self, sample, batch_idx): if batch_idx < hparams['num_valid_plots'] \ and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: dur_pred, pitch_pred = self.run_model(sample, infer=True) - self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, ph2word=sample['ph2word']) - self.plot_curve(batch_idx, sample['base_pitch'] + sample['pitch_delta'], pitch_pred, curve_name='pitch') + if dur_pred is not None: + self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, ph2word=sample['ph2word']) + if pitch_pred is not None: + self.plot_curve( + batch_idx, sample['base_pitch'] + sample['delta_pitch'], pitch_pred, curve_name='pitch' + ) return outputs, sample['size'] From 196dec7ce3ab5db641ea1dcba3c4f9426477f3f8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 14:29:58 +0800 Subject: [PATCH 252/475] Reuse shape --- modules/losses/dur_loss.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py index 5059a8644..a97be2014 100644 --- a/modules/losses/dur_loss.py +++ b/modules/losses/dur_loss.py @@ -27,21 +27,18 @@ def __init__(self, offset, loss_type, def linear2log(self, any_dur): return torch.log(any_dur + self.offset) - # noinspection PyMethodMayBeStatic - def pdur2wdur(self, ph_dur, ph2word): - b = ph_dur.shape[0] - word_dur = ph_dur.new_zeros(b, ph2word.max() + 1).scatter_add( - 1, ph2word, ph_dur - )[:, 1:] # [B, T_ph] => [B, T_w] - return word_dur - def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: # pdur_loss pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) # wdur loss - wdur_pred = self.pdur2wdur(dur_pred, ph2word) - wdur_gt = self.pdur2wdur(dur_gt, ph2word) + shape = dur_pred.shape[0], ph2word.max() + 1 + wdur_pred = dur_pred.new_zeros(*shape).scatter_add( + 1, ph2word, dur_pred + )[:, 1:] # [B, T_ph] => [B, T_w] + wdur_gt = dur_gt.new_zeros(*shape).scatter_add( + 1, ph2word, dur_gt + )[:, 1:] # [B, T_ph] => [B, T_w] wdur_loss = self.lambda_wdur * self.loss(self.linear2log(wdur_pred), self.linear2log(wdur_gt)) # sdur loss From 4868b9b3964a9ff1b20a4b0c7dc784c3a95d1dc0 
Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 21:16:16 +0800 Subject: [PATCH 253/475] Add `plot_dur` --- training/variance_task.py | 12 ++++++++---- utils/plot.py | 27 +++++++++++++++++++-------- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 03b38896a..ab00f5d9d 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -12,6 +12,7 @@ from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams +from utils.plot import dur_to_figure matplotlib.use('Agg') @@ -115,7 +116,7 @@ def _validation_step(self, sample, batch_idx): and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: dur_pred, pitch_pred = self.run_model(sample, infer=True) if dur_pred is not None: - self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, ph2word=sample['ph2word']) + self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens']) if pitch_pred is not None: self.plot_curve( batch_idx, sample['base_pitch'] + sample['delta_pitch'], pitch_pred, curve_name='pitch' @@ -126,9 +127,12 @@ def _validation_step(self, sample, batch_idx): ############ # validation plots ############ - def plot_dur(self, batch_idx, gt_dur, pred_dur, ph2word=None): - # TODO: plot dur to TensorBoard - pass + def plot_dur(self, batch_idx, gt_dur, pred_dur, txt=None): + name = f'dur_{batch_idx}' + gt_dur = gt_dur[0].cpu().numpy() + pred_dur = pred_dur[0].cpu().numpy() + txt = self.phone_encoder.decode(txt[0].cpu().numpy()).split() + self.logger.experiment.add_figure(name, dur_to_figure(gt_dur, pred_dur, txt), self.global_step) def plot_curve(self, batch_idx, gt_curve, pred_curve, curve_name='curve'): # TODO: plot curve to TensorBoard diff --git a/utils/plot.py b/utils/plot.py index 0e2a9ec82..752440eaf 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -29,17 +29,28 @@ def spec_f0_to_figure(spec, f0s, figsize=None): def dur_to_figure(dur_gt, dur_pred, txt): - dur_gt = dur_gt.long().cpu().numpy() - dur_pred = dur_pred.long().cpu().numpy() + if isinstance(dur_gt, torch.Tensor): + dur_gt = dur_gt.cpu().numpy() + if isinstance(dur_pred, torch.Tensor): + dur_pred = dur_pred.cpu().numpy() + dur_gt = dur_gt.astype(np.int64) + dur_pred = dur_pred.astype(np.int64) dur_gt = np.cumsum(dur_gt) dur_pred = np.cumsum(dur_pred) - fig = plt.figure(figsize=(12, 6)) - for i in range(len(dur_gt)): + width = max(12, min(48, len(txt) // 2)) + fig = plt.figure(figsize=(width, 6)) + plt.vlines(dur_pred, 10, 20, colors='r', label='pred') + plt.vlines(dur_gt, 0, 10, colors='b', label='gt') + for i in range(len(txt)): shift = (i % 8) + 1 - plt.text(dur_gt[i], shift, txt[i]) - plt.text(dur_pred[i], 10 + shift, txt[i]) - plt.vlines(dur_gt[i], 0, 10, colors='b') # blue is gt - plt.vlines(dur_pred[i], 10, 20, colors='r') # red is pred + plt.text((dur_pred[i-1] + dur_pred[i]) / 2 if i > 0 else dur_pred[i] / 2, 10 + shift, txt[i], + size=16, horizontalalignment='center') + plt.text((dur_gt[i-1] + dur_gt[i]) / 2 if i > 0 else dur_gt[i] / 2, shift, txt[i], + size=16, horizontalalignment='center') + plt.yticks([]) + plt.xlim(0, max(dur_pred[-1], dur_gt[-1])) + fig.legend() + fig.tight_layout() return fig From 7d4c31e77576e31755ba9a1b958adc1f10c24222 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 22:45:49 +0800 Subject: [PATCH 254/475] Add alignment dashed lines --- utils/plot.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/utils/plot.py 
b/utils/plot.py index 752440eaf..da2d09803 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -38,15 +38,16 @@ def dur_to_figure(dur_gt, dur_pred, txt): dur_gt = np.cumsum(dur_gt) dur_pred = np.cumsum(dur_pred) width = max(12, min(48, len(txt) // 2)) - fig = plt.figure(figsize=(width, 6)) - plt.vlines(dur_pred, 10, 20, colors='r', label='pred') + fig = plt.figure(figsize=(width, 8)) + plt.vlines(dur_pred, 12, 22, colors='r', label='pred') plt.vlines(dur_gt, 0, 10, colors='b', label='gt') for i in range(len(txt)): shift = (i % 8) + 1 - plt.text((dur_pred[i-1] + dur_pred[i]) / 2 if i > 0 else dur_pred[i] / 2, 10 + shift, txt[i], + plt.text((dur_pred[i-1] + dur_pred[i]) / 2 if i > 0 else dur_pred[i] / 2, 12 + shift, txt[i], size=16, horizontalalignment='center') plt.text((dur_gt[i-1] + dur_gt[i]) / 2 if i > 0 else dur_gt[i] / 2, shift, txt[i], size=16, horizontalalignment='center') + plt.plot([dur_pred[i], dur_gt[i]], [12, 10], color='black', linewidth=2, linestyle=':') plt.yticks([]) plt.xlim(0, max(dur_pred[-1], dur_gt[-1])) fig.legend() From a5e17c409d76a5bb7ab4b820d43c2ebcbaecb7bf Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Apr 2023 23:20:22 +0800 Subject: [PATCH 255/475] Update configs and comments --- configs/acoustic.yaml | 1 + configs/base.yaml | 1 - configs/variance.yaml | 17 +++++++++-------- modules/fastspeech/tts_modules.py | 2 +- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index edf612ede..e1d6ae1fa 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -79,5 +79,6 @@ val_with_vocoder: true val_check_interval: 2000 num_valid_plots: 10 max_updates: 320000 +num_ckpt_keep: 5 permanent_ckpt_start: 120000 permanent_ckpt_interval: 40000 diff --git a/configs/base.yaml b/configs/base.yaml index 58db5ce89..4909fabd3 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -57,7 +57,6 @@ optimizer_adam_beta1: 0.9 optimizer_adam_beta2: 0.98 weight_decay: 0 clip_grad_norm: 1 -dur_loss: mse # huber|mol ########### # train and eval diff --git a/configs/variance.yaml b/configs/variance.yaml index 7c86f11c5..700015fe5 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -40,15 +40,15 @@ predict_dur: true dur_cascade: false lambda_dur_loss: 1.0 dur_prediction_args: - hidden_size: 384 + hidden_size: 256 dropout: 0.1 - num_layers: 4 + num_layers: 5 kernel_size: 3 log_offset: 1.0 loss_type: mse - lambda_pdur_loss: 0.7 - lambda_wdur_loss: 0.2 - lambda_sdur_loss: 0.1 + lambda_pdur_loss: 0.3 + lambda_wdur_loss: 1.0 + lambda_sdur_loss: 3.0 predict_pitch: true pitch_cascade: true @@ -69,14 +69,15 @@ schedule_type: 'linear' # train and eval num_sanity_val_steps: 1 -lr: 0.0004 -lr_decay_steps: 50000 +lr: 0.0001 +lr_decay_steps: 5000 lr_decay_gamma: 0.5 max_batch_frames: 80000 max_batch_size: 48 val_with_vocoder: true -val_check_interval: 2000 +val_check_interval: 500 num_valid_plots: 10 max_updates: 320000 +num_ckpt_keep: 5 permanent_ckpt_start: 120000 permanent_ckpt_interval: 40000 diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 7bbd240dd..bc25594ac 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -66,7 +66,7 @@ class DurationPredictor(torch.nn.Module): def __init__(self, in_dims, n_layers=2, n_chans=384, kernel_size=3, dropout_rate=0.1, offset=1.0, padding='SAME', dur_loss_type='mse'): - """Initilize duration predictor module. + """Initialize duration predictor module. Args: in_dims (int): Input dimension. 
n_layers (int, optional): Number of convolutional layers. From c42f9ae304c0c7105145b10b8755518e7d7f0975 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 01:48:43 +0800 Subject: [PATCH 256/475] Add onset embedding --- modules/fastspeech/variance_encoder.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 13e8167ee..050fe1237 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -12,10 +12,10 @@ class FastSpeech2VarianceEncoder(FastSpeech2Encoder): - def forward_embedding(self, txt_tokens, midi_embed, word_dur_embed): + def forward_embedding(self, txt_tokens, midi_embed, onset_embed, word_dur_embed): # embed tokens and positions x = self.embed_scale * self.embed_tokens(txt_tokens) - x = x + midi_embed + word_dur_embed + x = x + midi_embed + onset_embed + word_dur_embed if hparams['use_pos_embed']: if hparams['rel_pos']: x = self.embed_positions(x) @@ -25,15 +25,16 @@ def forward_embedding(self, txt_tokens, midi_embed, word_dur_embed): x = F.dropout(x, p=self.dropout, training=self.training) return x - def forward(self, txt_tokens, midi_embed, word_dur_embed): + def forward(self, txt_tokens, midi_embed, onset_embed, word_dur_embed): """ :param txt_tokens: [B, T] :param midi_embed: [B, T, H] + :param onset_embed: [B, T, H] :param word_dur_embed: [B, T, H] :return: [T x B x H] """ encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach() - x = self.forward_embedding(txt_tokens, midi_embed, word_dur_embed) # [B, T, H] + x = self.forward_embedding(txt_tokens, midi_embed, onset_embed, word_dur_embed) # [B, T, H] x = super()._forward(x, encoder_padding_mask) return x @@ -43,6 +44,7 @@ def __init__(self, vocab_size): super().__init__() self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) self.midi_embed = Embedding(128, hparams['hidden_size'], PAD_INDEX) + self.onset_embed = Embedding(2, hparams['hidden_size']) self.word_dur_embed = Linear(1, hparams['hidden_size']) if hparams['use_spk_id']: @@ -77,20 +79,22 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T :param infer: whether inference :return: (train) encoder_out, ph_dur_xs; (infer) encoder_out, ph_dur """ + b = txt_tokens.shape[0] midi_embed = self.midi_embed(midi) # => [B, T_ph, H] + onset = torch.diff(ph2word, dim=1, prepend=ph2word.new_zeros(b, 1)) > 0 + onset_embed = self.onset_embed(onset.long()) # [B, T_ph, H] if word_dur is None or not infer: - b = txt_tokens.shape[0] word_dur = ph_dur.new_zeros(b, ph2word.max() + 1).scatter_add( 1, ph2word, ph_dur )[:, 1:] # [B, T_ph] => [B, T_w] word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word) # [B, T_w] => [B, T_ph] - word_dur_embed = self.word_dur_embed(torch.log(word_dur.float() + self.wdur_log_offset)[:, :, None]) - encoder_out = self.encoder(txt_tokens, midi_embed, word_dur_embed) + word_dur_embed = self.word_dur_embed(word_dur.float()[:, :, None]) + encoder_out = self.encoder(txt_tokens, midi_embed, onset_embed, word_dur_embed) if not hparams['predict_dur']: return encoder_out, None - ph_dur_pred = self.dur_predictor(encoder_out, x_masks=txt_tokens == 0, infer=infer) + ph_dur_pred = self.dur_predictor(encoder_out, x_masks=txt_tokens == PAD_INDEX, infer=infer) return encoder_out, ph_dur_pred From ca5752b6d285a3b71609c5b2b8322266365a2b3a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 12:27:02 +0800 Subject: [PATCH 
257/475] Add casting --- modules/losses/dur_loss.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py index a97be2014..5e2f4b056 100644 --- a/modules/losses/dur_loss.py +++ b/modules/losses/dur_loss.py @@ -28,6 +28,8 @@ def linear2log(self, any_dur): return torch.log(any_dur + self.offset) def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: + dur_gt = dur_gt.to(dtype=dur_pred.dtype) + # pdur_loss pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) From 9834a0887c32b7ba2ae06868a62a78ae099116b4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 13:56:28 +0800 Subject: [PATCH 258/475] Add fs2 pitch predictor and pitch plot --- configs/variance.yaml | 2 +- modules/fastspeech/tts_modules.py | 67 +++++++++++++++++++++++++++++++ modules/toplevel.py | 13 +++++- training/variance_task.py | 11 +++-- utils/plot.py | 14 +++---- 5 files changed, 93 insertions(+), 14 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 700015fe5..675371e0b 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -40,7 +40,7 @@ predict_dur: true dur_cascade: false lambda_dur_loss: 1.0 dur_prediction_args: - hidden_size: 256 + hidden_size: 384 dropout: 0.1 num_layers: 5 kernel_size: 3 diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index bc25594ac..a4550d423 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -138,6 +138,73 @@ def forward(self, xs, x_masks=None, infer=True): return dur_pred +class PitchPredictor(torch.nn.Module): + def __init__(self, vmin, vmax, num_bins, deviation, + in_dims, n_layers=5, n_chans=384, kernel_size=5, + dropout_rate=0.1, padding='SAME'): + """Initialize pitch predictor module. + Args: + in_dims (int): Input dimension. + n_layers (int, optional): Number of convolutional layers. + n_chans (int, optional): Number of channels of convolutional layers. + kernel_size (int, optional): Kernel size of convolutional layers. + dropout_rate (float, optional): Dropout rate. 
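Example of the tensor flow through this predictor — an illustrative sketch only, reduced to one 'SAME'-padded conv layer with the default sizes named above rather than the full module:

    import torch

    B, T, H, N = 2, 100, 256, 256              # batch, frames, hidden, pitch bins
    xs = torch.randn(B, T, H)                  # encoder condition
    conv = torch.nn.Conv1d(H, 384, kernel_size=5, padding=2)  # one 'SAME' conv
    linear = torch.nn.Linear(384, N)
    h = conv(xs.transpose(1, -1))              # [B, n_chans, T]
    out = linear(h.transpose(1, -1))           # [B, T, N]: one logit per pitch bin
    # out2pitch then sigmoids these logits and takes the probability-weighted
    # mean over the bin axis to obtain a frame-level pitch value, [B, T].
    assert out.shape == (B, T, N)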
+ """ + super(PitchPredictor, self).__init__() + self.vmin = vmin + self.vmax = vmax + self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins + self.sigma = deviation / self.interval + self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] + + self.base_pitch_embed = torch.nn.Linear(1, in_dims) + self.conv = torch.nn.ModuleList() + self.kernel_size = kernel_size + self.padding = padding + for idx in range(n_layers): + in_chans = in_dims if idx == 0 else n_chans + self.conv += [torch.nn.Sequential( + torch.nn.ConstantPad1d(((kernel_size - 1) // 2, (kernel_size - 1) // 2) + if padding == 'SAME' + else (kernel_size - 1, 0), 0), + torch.nn.Conv1d(in_chans, n_chans, kernel_size, stride=1, padding=0), + torch.nn.ReLU(), + LayerNorm(n_chans, dim=1), + torch.nn.Dropout(dropout_rate) + )] + self.linear = torch.nn.Linear(n_chans, num_bins) + self.embed_positions = SinusoidalPositionalEmbedding(in_dims, 0, init_size=4096) + self.pos_embed_alpha = nn.Parameter(torch.Tensor([1])) + + def bins_to_values(self, bins): + return bins * self.interval + self.vmin + + def out2pitch(self, probs): + logits = probs.sigmoid() # [B, T, N] + # return logits + # logits_sum = logits.sum(dim=2) # [B, T] + bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] + pitch = self.bins_to_values(bins) + # uv = logits_sum / (self.sigma * math.sqrt(2 * math.pi)) < 0.3 + # pitch[uv] = torch.nan + return pitch + + def forward(self, xs, base): + """ + :param xs: [B, T, H] + :param base: [B, T] + :return: [B, T, N] + """ + xs = xs + self.base_pitch_embed(base[..., None]) + positions = self.pos_embed_alpha * self.embed_positions(xs[..., 0]) + xs = xs + positions + xs = xs.transpose(1, -1) # (B, idim, Tmax) + for f in self.conv: + xs = f(xs) # (B, C, Tmax) + xs = self.linear(xs.transpose(1, -1)) # (B, Tmax, H) + return self.out2pitch(xs) + base, xs + + class LengthRegulator(torch.nn.Module): # noinspection PyMethodMayBeStatic def forward(self, dur, dur_padding=None, alpha=1.0): diff --git a/modules/toplevel.py b/modules/toplevel.py index 063a9083c..30b8414b7 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -50,15 +50,24 @@ def __init__(self, vocab_size): self.lr = LengthRegulator() if hparams['predict_pitch']: + from modules.fastspeech.tts_modules import PitchPredictor pitch_hparams = hparams['pitch_prediction_args'] - self.pitch_predictor = DummyPitchPredictor( + self.pitch_predictor = PitchPredictor( vmin=pitch_hparams['pitch_delta_vmin'], vmax=pitch_hparams['pitch_delta_vmax'], num_bins=pitch_hparams['num_pitch_bins'], deviation=pitch_hparams['deviation'], in_dims=hparams['hidden_size'], - hidden_size=pitch_hparams['hidden_size'] + n_chans=pitch_hparams['hidden_size'] ) + # self.pitch_predictor = DummyPitchPredictor( + # vmin=pitch_hparams['pitch_delta_vmin'], + # vmax=pitch_hparams['pitch_delta_vmax'], + # num_bins=pitch_hparams['num_pitch_bins'], + # deviation=pitch_hparams['deviation'], + # in_dims=hparams['hidden_size'], + # hidden_size=pitch_hparams['hidden_size'] + # ) @property def category(self): diff --git a/training/variance_task.py b/training/variance_task.py index ab00f5d9d..2b02baa8e 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -12,7 +12,7 @@ from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams -from utils.plot import dur_to_figure +from utils.plot import dur_to_figure, f0_to_figure matplotlib.use('Agg') @@ -28,7 +28,7 @@ def 
collater(self, samples): mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) base_pitch = utils.collate_nd([s['base_pitch'] for s in samples], 0) delta_pitch = utils.collate_nd([s['delta_pitch'] for s in samples], 0) - uv = utils.collate_nd([s['uv'] for s in samples], 0) + uv = utils.collate_nd([s['uv'] for s in samples], True) batch.update({ 'tokens': tokens, 'ph_dur': ph_dur, @@ -135,5 +135,8 @@ def plot_dur(self, batch_idx, gt_dur, pred_dur, txt=None): self.logger.experiment.add_figure(name, dur_to_figure(gt_dur, pred_dur, txt), self.global_step) def plot_curve(self, batch_idx, gt_curve, pred_curve, curve_name='curve'): - # TODO: plot curve to TensorBoard - pass + name = f'{curve_name}_{batch_idx}' + gt_curve = gt_curve[0].cpu().numpy() + pred_curve = pred_curve[0].cpu().numpy() + # self.logger.experiment.add_figure(name, spec_to_figure(pred_curve, vmin=0, vmax=1), self.global_step) + self.logger.experiment.add_figure(name, f0_to_figure(gt_curve, pred_curve), self.global_step) diff --git a/utils/plot.py b/utils/plot.py index da2d09803..4c1a900ab 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -55,15 +55,15 @@ def dur_to_figure(dur_gt, dur_pred, txt): return fig -def f0_to_figure(f0_gt, f0_cwt=None, f0_pred=None): +def f0_to_figure(f0_gt, f0_pred=None): + if isinstance(f0_gt, torch.Tensor): + f0_gt = f0_gt.cpu().numpy() + if isinstance(f0_pred, torch.Tensor): + f0_pred = f0_pred.cpu().numpy() fig = plt.figure() - f0_gt = f0_gt.cpu().numpy() - plt.plot(f0_gt, color='r', label='gt') - if f0_cwt is not None: - f0_cwt = f0_cwt.cpu().numpy() - plt.plot(f0_cwt, color='b', label='cwt') if f0_pred is not None: - f0_pred = f0_pred.cpu().numpy() plt.plot(f0_pred, color='green', label='pred') + plt.plot(f0_gt, color='r', label='gt') plt.legend() + plt.tight_layout() return fig From 3bc1dd8c2433bd590d02a0b4187baefe7141a9f0 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 16:59:03 +0800 Subject: [PATCH 259/475] Adapt to diffusion pitch predictor --- configs/variance.yaml | 4 +-- modules/diffusion/ddpm.py | 46 ++++++++++++++++++++++---- modules/fastspeech/variance_encoder.py | 34 ------------------- modules/toplevel.py | 37 +++++++++++++-------- training/variance_task.py | 32 +++++++++++------- 5 files changed, 85 insertions(+), 68 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 675371e0b..35331d112 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -60,8 +60,8 @@ pitch_prediction_args: deviation: 0.25 hidden_size: 512 -residual_layers: 20 -residual_channels: 384 +residual_layers: 10 +residual_channels: 512 dilation_cycle_length: 4 # * diff_decoder_type: 'wavenet' diff_loss_type: l2 diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 0a0146b03..ab1502ccb 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -152,7 +152,7 @@ def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False): # no noise when t == 0 nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise - + @torch.no_grad() def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=False): """ @@ -161,10 +161,11 @@ def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=F def get_x_pred(x, noise_t, t): a_t = extract(self.alphas_cumprod, t, x.shape) - a_prev = extract(self.alphas_cumprod, torch.max(t-interval, torch.zeros_like(t)), x.shape) + a_prev = extract(self.alphas_cumprod, 
torch.max(t - interval, torch.zeros_like(t)), x.shape) a_t_sq, a_prev_sq = a_t.sqrt(), a_prev.sqrt() - x_delta = (a_prev - a_t) * ((1 / (a_t_sq * (a_t_sq + a_prev_sq))) * x - 1 / (a_t_sq * (((1 - a_prev) * a_t).sqrt() + ((1 - a_t) * a_prev).sqrt())) * noise_t) + x_delta = (a_prev - a_t) * ((1 / (a_t_sq * (a_t_sq + a_prev_sq))) * x - 1 / ( + a_t_sq * (((1 - a_prev) * a_t).sqrt() + ((1 - a_t) * a_prev).sqrt())) * noise_t) x_pred = x + x_delta return x_pred @@ -174,7 +175,7 @@ def get_x_pred(x, noise_t, t): if len(noise_list) == 0: x_pred = get_x_pred(x, noise_pred, t) - noise_pred_prev = self.denoise_fn(x_pred, max(t-interval, 0), cond=cond) + noise_pred_prev = self.denoise_fn(x_pred, max(t - interval, 0), cond=cond) noise_pred_prime = (noise_pred + noise_pred_prev) / 2 elif len(noise_list) == 1: noise_pred_prime = (3 * noise_pred - noise_list[-1]) / 2 @@ -226,7 +227,7 @@ def forward(self, condition, gt_spec=None, infer=True): # total=t // iteration_interval): # x = self.p_sample_plms(x, torch.full((b,), i, device=device, dtype=torch.long), iteration_interval, # cond) - + from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver ## 1. Define the noise schedule. noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) @@ -239,8 +240,9 @@ def wrapped(x, t, **kwargs): ret = fn(x, t, **kwargs) self.bar.update(1) return ret + return wrapped - + model_fn = model_wrapper( my_wrapper(self.denoise_fn), noise_schedule, @@ -275,3 +277,35 @@ def norm_spec(self, x): def denorm_spec(self, x): return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min + + +class CurveDiffusion(GaussianDiffusion): + def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, + denoiser_type=None, betas=None): + super().__init__( + num_bins, timesteps=timesteps, k_step=k_step, + denoiser_type=denoiser_type, betas=betas, + spec_min=0., spec_max=1. 
+ ) + self.vmin = vmin + self.vmax = vmax + self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins + self.sigma = deviation / self.interval + self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] + + def values_to_bins(self, values): + return (values - self.vmin) / self.interval + + def bins_to_values(self, bins): + return bins * self.interval + self.vmin + + def norm_spec(self, curve): + miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] + probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] + return super().norm_spec(probs) + + def denorm_spec(self, probs): + probs = super().denorm_spec(probs) + logits = probs.sigmoid() # [B, T, N] + bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] + return self.bins_to_values(bins) diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 050fe1237..2dc4bdcbe 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -97,37 +97,3 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T ph_dur_pred = self.dur_predictor(encoder_out, x_masks=txt_tokens == PAD_INDEX, infer=infer) return encoder_out, ph_dur_pred - - -class DummyPitchPredictor(nn.Module): - def __init__(self, vmin, vmax, num_bins, deviation, in_dims=256, hidden_size=512): - super().__init__() - self.vmin = vmin - self.vmax = vmax - self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins - self.sigma = deviation / self.interval - self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] - - self.base_pitch_embed = Linear(1, in_dims) - self.net = nn.Sequential( - Linear(hparams['hidden_size'], hidden_size), - Linear(hidden_size, num_bins) - ) - - def bins_to_values(self, bins): - return bins * self.interval + self.vmin - - def out2pitch(self, probs): - logits = probs.sigmoid() # [B, T, N] - bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] - return self.bins_to_values(bins) - - def forward(self, condition, base_pitch): - """ - :param condition: [B, T, H] - :param base_pitch: [B, T] - :return: pitch_pred [B, T], probs [B, T, N] - """ - condition = condition + self.base_pitch_embed(base_pitch[:, :, None]) - probs = self.net(condition) - return self.out2pitch(probs) + base_pitch, probs diff --git a/modules/toplevel.py b/modules/toplevel.py index 30b8414b7..39d750870 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -2,10 +2,13 @@ import torch.nn.functional as F from basics.base_module import CategorizedModule -from modules.diffusion.ddpm import GaussianDiffusion +from modules.commons.common_layers import ( + XavierUniformInitLinear as Linear, +) +from modules.diffusion.ddpm import GaussianDiffusion, CurveDiffusion from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from modules.fastspeech.tts_modules import LengthRegulator -from modules.fastspeech.variance_encoder import FastSpeech2Variance, DummyPitchPredictor +from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams @@ -50,23 +53,25 @@ def __init__(self, vocab_size): self.lr = LengthRegulator() if hparams['predict_pitch']: - from modules.fastspeech.tts_modules import PitchPredictor pitch_hparams = hparams['pitch_prediction_args'] - self.pitch_predictor = PitchPredictor( + self.base_pitch_embed = Linear(1, hparams['hidden_size']) + self.pitch_predictor = CurveDiffusion( 
vmin=pitch_hparams['pitch_delta_vmin'], vmax=pitch_hparams['pitch_delta_vmax'], num_bins=pitch_hparams['num_pitch_bins'], deviation=pitch_hparams['deviation'], - in_dims=hparams['hidden_size'], - n_chans=pitch_hparams['hidden_size'] + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], ) - # self.pitch_predictor = DummyPitchPredictor( + # from modules.fastspeech.tts_modules import PitchPredictor + # self.pitch_predictor = PitchPredictor( # vmin=pitch_hparams['pitch_delta_vmin'], # vmax=pitch_hparams['pitch_delta_vmax'], # num_bins=pitch_hparams['num_pitch_bins'], # deviation=pitch_hparams['deviation'], # in_dims=hparams['hidden_size'], - # hidden_size=pitch_hparams['hidden_size'] + # n_chans=pitch_hparams['hidden_size'] # ) @property @@ -74,7 +79,7 @@ def category(self): return 'variance' def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, - mel2ph=None, base_pitch=None, infer=True): + mel2ph=None, base_pitch=None, delta_pitch=None, infer=True): encoder_out, dur_pred_out = self.fs2( txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, word_dur=word_dur, infer=infer @@ -90,8 +95,12 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, encoder_out = F.pad(encoder_out, [0, 0, 1, 0]) mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']]) condition = torch.gather(encoder_out, 1, mel2ph_) - pitch_pred, pitch_probs = self.pitch_predictor(condition, base_pitch) - if infer: - return dur_pred_out, pitch_pred - else: - return dur_pred_out, pitch_probs + pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) + + pitch_pred_out = self.pitch_predictor.forward(pitch_cond, delta_pitch, infer) + return dur_pred_out, pitch_pred_out + # pitch_pred, pitch_probs = self.pitch_predictor(condition, base_pitch) + # if infer: + # return dur_pred_out, pitch_pred + # else: + # return dur_pred_out, pitch_probs diff --git a/training/variance_task.py b/training/variance_task.py index 2b02baa8e..395011ec2 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,6 +9,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.curve_loss import CurveLoss2d +from modules.losses.diff_loss import DiffusionNoiseLoss from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams @@ -72,13 +73,16 @@ def build_losses(self): lambda_sdur=dur_hparams['lambda_sdur_loss'] ) if hparams['predict_pitch']: - pitch_hparams = hparams['pitch_prediction_args'] - self.pitch_loss = CurveLoss2d( - vmin=pitch_hparams['pitch_delta_vmin'], - vmax=pitch_hparams['pitch_delta_vmax'], - num_bins=pitch_hparams['num_pitch_bins'], - deviation=pitch_hparams['deviation'] + self.pitch_loss = DiffusionNoiseLoss( + loss_type=hparams['diff_loss_type'] ) + # pitch_hparams = hparams['pitch_prediction_args'] + # self.pitch_loss = CurveLoss2d( + # vmin=pitch_hparams['pitch_delta_vmin'], + # vmax=pitch_hparams['pitch_delta_vmax'], + # num_bins=pitch_hparams['num_pitch_bins'], + # deviation=pitch_hparams['deviation'] + # ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] @@ -87,22 +91,26 @@ def run_model(self, sample, infer=False): ph_dur = sample['ph_dur'] # [B, T_ph] mel2ph = sample['mel2ph'] # [B, T_t] base_pitch = sample['base_pitch'] # [B, T_t] + delta_pitch = sample['delta_pitch'] # [B, T_t] output = self.model(txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, - 
mel2ph=mel2ph, base_pitch=base_pitch, infer=infer) + mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, + infer=infer) if infer: dur_pred, pitch_pred = output return dur_pred, pitch_pred else: - dur_pred, pitch_prob = output + dur_pred, pitch_pred_out = output losses = {} if dur_pred is not None: losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) - if pitch_prob is not None: - delta_pitch = sample['delta_pitch'] - uv = sample['uv'] - losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) + if pitch_pred_out is not None: + pitch_x_recon, pitch_noise = pitch_pred_out + losses['pitch_loss'] = self.pitch_loss.forward(pitch_x_recon, pitch_noise) + # delta_pitch = sample['delta_pitch'] + # uv = sample['uv'] + # losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) return losses def _validation_step(self, sample, batch_idx): From 2e50800a1fa0cedacdfb539f531d697e33dd0384 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 17:08:44 +0800 Subject: [PATCH 260/475] Fix TypeError --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index ab1502ccb..322debde8 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -285,7 +285,7 @@ def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, super().__init__( num_bins, timesteps=timesteps, k_step=k_step, denoiser_type=denoiser_type, betas=betas, - spec_min=0., spec_max=1. + spec_min=[0.], spec_max=[1.] ) self.vmin = vmin self.vmax = vmax From 5910fcb5105e1d800aca527c685f3b7688a1b52b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 17:10:47 +0800 Subject: [PATCH 261/475] Remove `keep_bins` --- configs/acoustic.yaml | 1 - modules/diffusion/ddpm.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index e1d6ae1fa..8704298ef 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -44,7 +44,6 @@ binarizer_cls: preprocessing.acoustic_binarizer.AcousticBinarizer dictionary: dictionaries/opencpop-extension.txt spec_min: [-5] spec_max: [0] -keep_bins: 128 mel_vmin: -6. #-6. mel_vmax: 1.5 interp_uv: true diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 322debde8..a9561c3cf 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -111,8 +111,8 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, self.register_buffer('posterior_mean_coef2', to_torch( (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. 
- alphas_cumprod))) - self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']]) - self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']]) + self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :out_dims]) + self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :out_dims]) def q_mean_variance(self, x_start, t): mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start From 1b60a011b771073e8b339e1b9b44c289a0145ed7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 17:15:31 +0800 Subject: [PATCH 262/475] Adapt wavenet denoiser --- modules/diffusion/ddpm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index a9561c3cf..0b3de3a16 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -11,7 +11,7 @@ from utils.hparams import hparams DIFF_DENOISERS = { - 'wavenet': lambda hp: WaveNet(hp['audio_num_mel_bins']), + 'wavenet': lambda attr: WaveNet(attr['num_bins']), } @@ -69,7 +69,7 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, denoiser_type=None, betas=None, spec_min=None, spec_max=None): super().__init__() - self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type](hparams) + self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type]({'num_bins': out_dims}) self.out_dims = out_dims if exists(betas): From 81f37aac04cd78b7e47644668f310c20c194f9e6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 17:19:57 +0800 Subject: [PATCH 263/475] Add base pitch --- training/variance_task.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index 395011ec2..189b4421d 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -126,8 +126,9 @@ def _validation_step(self, sample, batch_idx): if dur_pred is not None: self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens']) if pitch_pred is not None: + base_pitch = sample['base_pitch'] self.plot_curve( - batch_idx, sample['base_pitch'] + sample['delta_pitch'], pitch_pred, curve_name='pitch' + batch_idx, base_pitch + sample['delta_pitch'], base_pitch + pitch_pred, curve_name='pitch' ) return outputs, sample['size'] From c2710591517f6babb1243a8bdcfaed25882b1d9e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 20:05:00 +0800 Subject: [PATCH 264/475] Reuse wavenet, add 1d diffusion --- configs/variance.yaml | 4 ++-- deployment/modules/toplevel.py | 4 ++++ modules/diffusion/ddpm.py | 30 +++++++++++++++++++++++------- modules/diffusion/wavenet.py | 19 +++++++++---------- modules/toplevel.py | 30 +++++++++++++++++++++++++----- 5 files changed, 63 insertions(+), 24 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 35331d112..49876b0d8 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -61,8 +61,8 @@ pitch_prediction_args: hidden_size: 512 residual_layers: 10 -residual_channels: 512 -dilation_cycle_length: 4 # * +residual_channels: 256 +dilation_cycle_length: 5 # * diff_decoder_type: 'wavenet' diff_loss_type: l2 schedule_type: 'linear' diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 4411ad3d2..23444d928 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -23,6 +23,10 @@ def __init__(self, vocab_size, out_dims): timesteps=hparams['timesteps'], k_step=hparams['K_step'], 
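Note on patches 262 and 264: together they pull construction parameters out of `hparams` — the `DIFF_DENOISERS` registry lambda now receives an explicit tuple, and callers thread `(residual_layers, residual_channels)` through as `denoiser_args`, which is what lets the acoustic and variance models reuse the same WaveNet. A minimal sketch of the pattern with a hypothetical `TinyDenoiser` standing in for `WaveNet` (not part of this codebase):

    import torch
    import torch.nn as nn

    class TinyDenoiser(nn.Module):  # placeholder; WaveNet takes the same triple
        def __init__(self, in_dims, n_layers, n_chans):
            super().__init__()
            self.net = nn.Sequential(
                nn.Conv1d(in_dims, n_chans, 1),
                *[nn.Conv1d(n_chans, n_chans, 3, padding=1) for _ in range(n_layers)],
                nn.Conv1d(n_chans, in_dims, 1),
            )

    DENOISERS = {
        # each entry unpacks one args tuple, so no hparams access is needed here
        'tiny': lambda args: TinyDenoiser(*args),
    }

    def build_denoiser(denoiser_type, out_dims, denoiser_args):
        return DENOISERS[denoiser_type]((out_dims, *denoiser_args))

    print(build_denoiser('tiny', out_dims=128, denoiser_args=(2, 64)))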
denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + hparams['residual_layers'], + hparams['residual_channels'] + ), spec_min=hparams['spec_min'], spec_max=hparams['spec_max'] ) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 0b3de3a16..7e13eb79d 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -11,7 +11,7 @@ from utils.hparams import hparams DIFF_DENOISERS = { - 'wavenet': lambda attr: WaveNet(attr['num_bins']), + 'wavenet': lambda args: WaveNet(*args), } @@ -66,10 +66,10 @@ def cosine_beta_schedule(timesteps, s=0.008): class GaussianDiffusion(nn.Module): def __init__(self, out_dims, timesteps=1000, k_step=1000, - denoiser_type=None, betas=None, + denoiser_type=None, denoiser_args=None, betas=None, spec_min=None, spec_max=None): super().__init__() - self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type]({'num_bins': out_dims}) + self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type]((out_dims, *denoiser_args)) self.out_dims = out_dims if exists(betas): @@ -279,13 +279,29 @@ def denorm_spec(self, x): return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min -class CurveDiffusion(GaussianDiffusion): +class CurveDiffusion1d(GaussianDiffusion): + def __init__(self, vmin, vmax, timesteps=1000, k_step=1000, + denoiser_type=None, denoiser_args=None, betas=None): + super().__init__( + 1, timesteps=timesteps, k_step=k_step, + denoiser_type=denoiser_type, denoiser_args=denoiser_args, + betas=betas, spec_min=[vmin], spec_max=[vmax] + ) + + def norm_spec(self, x): + return super().norm_spec(x.unsqueeze(-1)) + + def denorm_spec(self, x): + return super().denorm_spec(x).squeeze(-1) + + +class CurveDiffusion2d(GaussianDiffusion): def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, - denoiser_type=None, betas=None): + denoiser_type=None, denoiser_args=None, betas=None): super().__init__( num_bins, timesteps=timesteps, k_step=k_step, - denoiser_type=denoiser_type, betas=betas, - spec_min=[0.], spec_max=[1.] + denoiser_type=denoiser_type, denoiser_args=denoiser_args, + betas=betas, spec_min=[0.], spec_max=[1.] 
) self.vmin = vmin self.vmax = vmax diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py index ff7a3feb5..6c3160bc5 100644 --- a/modules/diffusion/wavenet.py +++ b/modules/diffusion/wavenet.py @@ -64,26 +64,25 @@ def forward(self, x, conditioner, diffusion_step): class WaveNet(nn.Module): - def __init__(self, in_dims): + def __init__(self, in_dims, n_layers, n_chans): super().__init__() - dim = hparams['residual_channels'] - self.input_projection = Conv1d(in_dims, dim, 1) - self.diffusion_embedding = SinusoidalPosEmb(dim) + self.input_projection = Conv1d(in_dims, n_chans, 1) + self.diffusion_embedding = SinusoidalPosEmb(n_chans) self.mlp = nn.Sequential( - nn.Linear(dim, dim * 4), + nn.Linear(n_chans, n_chans * 4), Mish(), - nn.Linear(dim * 4, dim) + nn.Linear(n_chans * 4, n_chans) ) self.residual_layers = nn.ModuleList([ ResidualBlock( encoder_hidden=hparams['hidden_size'], - residual_channels=dim, + residual_channels=n_chans, dilation=2 ** (i % hparams['dilation_cycle_length']) ) - for i in range(hparams['residual_layers']) + for i in range(n_layers) ]) - self.skip_projection = Conv1d(dim, dim, 1) - self.output_projection = Conv1d(dim, in_dims, 1) + self.skip_projection = Conv1d(n_chans, n_chans, 1) + self.output_projection = Conv1d(n_chans, in_dims, 1) nn.init.zeros_(self.output_projection.weight) def forward(self, spec, diffusion_step, cond): diff --git a/modules/toplevel.py b/modules/toplevel.py index 39d750870..101619a8d 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -5,7 +5,7 @@ from modules.commons.common_layers import ( XavierUniformInitLinear as Linear, ) -from modules.diffusion.ddpm import GaussianDiffusion, CurveDiffusion +from modules.diffusion.ddpm import GaussianDiffusion, CurveDiffusion1d, CurveDiffusion2d from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from modules.fastspeech.tts_modules import LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance @@ -23,6 +23,10 @@ def __init__(self, vocab_size, out_dims): timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + hparams['residual_layers'], + hparams['residual_channels'] + ), spec_min=hparams['spec_min'], spec_max=hparams['spec_max'] ) @@ -55,15 +59,31 @@ def __init__(self, vocab_size): if hparams['predict_pitch']: pitch_hparams = hparams['pitch_prediction_args'] self.base_pitch_embed = Linear(1, hparams['hidden_size']) - self.pitch_predictor = CurveDiffusion( + self.pitch_predictor = CurveDiffusion1d( vmin=pitch_hparams['pitch_delta_vmin'], vmax=pitch_hparams['pitch_delta_vmax'], - num_bins=pitch_hparams['num_pitch_bins'], - deviation=pitch_hparams['deviation'], timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + hparams['residual_layers'], + hparams['residual_channels'] + ) ) + # self.pitch_predictor = CurveDiffusion2d( + # vmin=pitch_hparams['pitch_delta_vmin'], + # vmax=pitch_hparams['pitch_delta_vmax'], + # num_bins=pitch_hparams['num_pitch_bins'], + # deviation=pitch_hparams['deviation'], + # timesteps=hparams['timesteps'], + # k_step=hparams['K_step'], + # denoiser_type=hparams['diff_decoder_type'], + # denoiser_args=( + # hparams['audio_num_mel_bins'], + # hparams['residual_layers'], + # hparams['residual_channels'] + # ) + # ) # from modules.fastspeech.tts_modules import PitchPredictor # self.pitch_predictor = PitchPredictor( # vmin=pitch_hparams['pitch_delta_vmin'], @@ -97,7 +117,7 @@ def 
forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, condition = torch.gather(encoder_out, 1, mel2ph_) pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) - pitch_pred_out = self.pitch_predictor.forward(pitch_cond, delta_pitch, infer) + pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer) return dur_pred_out, pitch_pred_out # pitch_pred, pitch_probs = self.pitch_predictor(condition, base_pitch) # if infer: From a46977f14db0d10108396fb2a0d5b5a738807cda Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 20:14:21 +0800 Subject: [PATCH 265/475] Clamp curve --- modules/diffusion/ddpm.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 7e13eb79d..2a4c0a3fd 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -282,6 +282,8 @@ def denorm_spec(self, x): class CurveDiffusion1d(GaussianDiffusion): def __init__(self, vmin, vmax, timesteps=1000, k_step=1000, denoiser_type=None, denoiser_args=None, betas=None): + self.vmin = vmin + self.vmax = vmax super().__init__( 1, timesteps=timesteps, k_step=k_step, denoiser_type=denoiser_type, denoiser_args=denoiser_args, @@ -289,10 +291,10 @@ def __init__(self, vmin, vmax, timesteps=1000, k_step=1000, ) def norm_spec(self, x): - return super().norm_spec(x.unsqueeze(-1)) + return super().norm_spec(x.unsqueeze(-1).clamp(min=self.vmin, max=self.vmax)) def denorm_spec(self, x): - return super().denorm_spec(x).squeeze(-1) + return super().denorm_spec(x).clamp(min=self.vmin, max=self.vmax).squeeze(-1) class CurveDiffusion2d(GaussianDiffusion): From b7b927f544d605c8009ad63c9a6e4664bb011e51 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 20:33:01 +0800 Subject: [PATCH 266/475] Change color --- utils/plot.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/plot.py b/utils/plot.py index 4c1a900ab..73d33fe54 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -62,8 +62,8 @@ def f0_to_figure(f0_gt, f0_pred=None): f0_pred = f0_pred.cpu().numpy() fig = plt.figure() if f0_pred is not None: - plt.plot(f0_pred, color='green', label='pred') - plt.plot(f0_gt, color='r', label='gt') + plt.plot(f0_pred, color='r', label='pred') + plt.plot(f0_gt, color='b', label='gt') plt.legend() plt.tight_layout() return fig From 52c97152c213a3e0744addaf4c750fda55eb117e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 21:29:25 +0800 Subject: [PATCH 267/475] Try fix `CurveDiffusion2d` losses --- configs/variance.yaml | 2 +- modules/diffusion/ddpm.py | 9 +++++++++ modules/losses/diff_loss.py | 2 +- training/variance_task.py | 2 +- 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 49876b0d8..78af7ee34 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -69,7 +69,7 @@ schedule_type: 'linear' # train and eval num_sanity_val_steps: 1 -lr: 0.0001 +lr: 0.0004 lr_decay_steps: 5000 lr_decay_gamma: 0.5 max_batch_frames: 80000 diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 2a4c0a3fd..a38911273 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -317,6 +317,15 @@ def values_to_bins(self, values): def bins_to_values(self, bins): return bins * self.interval + self.vmin + def p_losses(self, x_start, t, cond, noise=None): + x_recon, noise = super().p_losses(x_start, t, cond, noise=noise) # [B, 1, M, T] + x_recon = self.denorm_spec(x_recon.squeeze(1).transpose(1, 
2)).unsqueeze(-1) # [B, T, M=1] + noise = self.denorm_spec(noise.squeeze(1).transpose(1, 2)).unsqueeze(-1) # [B, T, M=1] + return ( + self.denorm_spec(x_recon).transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] + self.denorm_spec(noise).transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] + ) + def norm_spec(self, curve): miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 885a5968b..2adc1e4e8 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -21,6 +21,6 @@ def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> """ if nonpadding is not None: nonpadding = nonpadding.transpose(1, 2).unsqueeze(1) - x_recon *= nonpadding + x_recon = x_recon * nonpadding noise *= nonpadding return self.loss(x_recon, noise) diff --git a/training/variance_task.py b/training/variance_task.py index 189b4421d..f33fabaf5 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -107,7 +107,7 @@ def run_model(self, sample, infer=False): losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) if pitch_pred_out is not None: pitch_x_recon, pitch_noise = pitch_pred_out - losses['pitch_loss'] = self.pitch_loss.forward(pitch_x_recon, pitch_noise) + losses['pitch_loss'] = self.pitch_loss(pitch_x_recon, pitch_noise, (mel2ph > 0).float()) # delta_pitch = sample['delta_pitch'] # uv = sample['uv'] # losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) From 253ce52d4bd70c3d05c18663b37088d3e5c44208 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 21:44:54 +0800 Subject: [PATCH 268/475] Add `SSIMLoss` to diffusion loss --- modules/losses/diff_loss.py | 4 ++++ modules/losses/ssim.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 2adc1e4e8..9fa104c0c 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -1,6 +1,8 @@ import torch.nn as nn from torch import Tensor +from modules.losses.ssim import SSIMLoss + class DiffusionNoiseLoss(nn.Module): def __init__(self, loss_type): @@ -10,6 +12,8 @@ def __init__(self, loss_type): self.loss = nn.L1Loss() elif self.loss_type == 'l2': self.loss = nn.MSELoss() + elif self.loss_type == 'ssim': + self.loss = SSIMLoss() else: raise NotImplementedError() diff --git a/modules/losses/ssim.py b/modules/losses/ssim.py index 7f72f68f0..e0a9a8164 100644 --- a/modules/losses/ssim.py +++ b/modules/losses/ssim.py @@ -44,9 +44,9 @@ def _ssim(img1, img2, window, window_size, channel, size_average=True): return ssim_map.mean(1) -class SSIM(torch.nn.Module): +class SSIMLoss(torch.nn.Module): def __init__(self, window_size=11, size_average=True): - super(SSIM, self).__init__() + super(SSIMLoss, self).__init__() self.window_size = window_size self.size_average = size_average self.channel = 1 From 3b6b0a80f841b3d21c23f04081c66abca0b53fa4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 21:59:43 +0800 Subject: [PATCH 269/475] Fix dimension error --- modules/diffusion/ddpm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index a38911273..c771127ed 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -322,8 +322,8 @@ def p_losses(self, x_start, t, cond, noise=None): x_recon = 
self.denorm_spec(x_recon.squeeze(1).transpose(1, 2)).unsqueeze(-1) # [B, T, M=1] noise = self.denorm_spec(noise.squeeze(1).transpose(1, 2)).unsqueeze(-1) # [B, T, M=1] return ( - self.denorm_spec(x_recon).transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] - self.denorm_spec(noise).transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] + x_recon.transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] + noise.transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] ) def norm_spec(self, curve): From c9396069871a28f9bee42d78c25cfb7c8fd3aef6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 22:01:06 +0800 Subject: [PATCH 270/475] Fix dimension error --- training/variance_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index f33fabaf5..5fa479b0f 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -107,7 +107,7 @@ def run_model(self, sample, infer=False): losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) if pitch_pred_out is not None: pitch_x_recon, pitch_noise = pitch_pred_out - losses['pitch_loss'] = self.pitch_loss(pitch_x_recon, pitch_noise, (mel2ph > 0).float()) + losses['pitch_loss'] = self.pitch_loss(pitch_x_recon, pitch_noise, (mel2ph > 0).float().unsqueeze(-1)) # delta_pitch = sample['delta_pitch'] # uv = sample['uv'] # losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) From 83507fbcb176b8dfcfa5ab36c470d3e2b47faf56 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 16 Apr 2023 22:27:01 +0800 Subject: [PATCH 271/475] Add missing `lambda_pitch_loss` --- training/variance_task.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index 5fa479b0f..7a8560fac 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -107,7 +107,9 @@ def run_model(self, sample, infer=False): losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) if pitch_pred_out is not None: pitch_x_recon, pitch_noise = pitch_pred_out - losses['pitch_loss'] = self.pitch_loss(pitch_x_recon, pitch_noise, (mel2ph > 0).float().unsqueeze(-1)) + losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss( + pitch_x_recon, pitch_noise, (mel2ph > 0).float().unsqueeze(-1) + ) # delta_pitch = sample['delta_pitch'] # uv = sample['uv'] # losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) From 130b2072afc57e1385dcb3ad551a2a1493551396 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 00:37:10 +0800 Subject: [PATCH 272/475] Add grid --- training/variance_task.py | 4 ++-- utils/plot.py | 17 +++++++++-------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 7a8560fac..2074a1ec6 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -13,7 +13,7 @@ from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams -from utils.plot import dur_to_figure, f0_to_figure +from utils.plot import dur_to_figure, curve_to_figure matplotlib.use('Agg') @@ -150,4 +150,4 @@ def plot_curve(self, batch_idx, gt_curve, pred_curve, curve_name='curve'): gt_curve = gt_curve[0].cpu().numpy() pred_curve = pred_curve[0].cpu().numpy() # self.logger.experiment.add_figure(name, spec_to_figure(pred_curve, vmin=0, vmax=1), self.global_step) - 
self.logger.experiment.add_figure(name, f0_to_figure(gt_curve, pred_curve), self.global_step) + self.logger.experiment.add_figure(name, curve_to_figure(gt_curve, pred_curve), self.global_step) diff --git a/utils/plot.py b/utils/plot.py index 73d33fe54..3db1142ec 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -55,15 +55,16 @@ def dur_to_figure(dur_gt, dur_pred, txt): return fig -def f0_to_figure(f0_gt, f0_pred=None): - if isinstance(f0_gt, torch.Tensor): - f0_gt = f0_gt.cpu().numpy() - if isinstance(f0_pred, torch.Tensor): - f0_pred = f0_pred.cpu().numpy() +def curve_to_figure(curve_gt, curve_pred=None): + if isinstance(curve_gt, torch.Tensor): + curve_gt = curve_gt.cpu().numpy() + if isinstance(curve_pred, torch.Tensor): + curve_pred = curve_pred.cpu().numpy() fig = plt.figure() - if f0_pred is not None: - plt.plot(f0_pred, color='r', label='pred') - plt.plot(f0_gt, color='b', label='gt') + if curve_pred is not None: + plt.plot(curve_pred, color='r', label='pred') + plt.plot(curve_gt, color='b', label='gt') + plt.grid(axis='y') plt.legend() plt.tight_layout() return fig From 260a9913a9e39154daa0ef902ff022c151d8c630 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 00:37:43 +0800 Subject: [PATCH 273/475] Remove arg --- modules/toplevel.py | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/toplevel.py b/modules/toplevel.py index 101619a8d..6ba51e8d5 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -79,7 +79,6 @@ def __init__(self, vocab_size): # k_step=hparams['K_step'], # denoiser_type=hparams['diff_decoder_type'], # denoiser_args=( - # hparams['audio_num_mel_bins'], # hparams['residual_layers'], # hparams['residual_channels'] # ) From 32fc061f8215d0ac9ac4c8935d8e9cca4ede2fe8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 01:10:14 +0800 Subject: [PATCH 274/475] Switch between diffusion 1d/2d, add weighted argmax --- modules/diffusion/ddpm.py | 21 +++++++--------- modules/toplevel.py | 51 +++++++++++++++++++++------------------ 2 files changed, 37 insertions(+), 35 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index c771127ed..01a523ecd 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -307,9 +307,11 @@ def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, ) self.vmin = vmin self.vmax = vmax + self.num_bins = num_bins self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins self.sigma = deviation / self.interval - self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] + self.width = int(3 * self.sigma) + self.register_buffer('x', torch.arange(self.num_bins).float().reshape(1, 1, -1)) # [1, 1, N] def values_to_bins(self, values): return (values - self.vmin) / self.interval @@ -317,22 +319,17 @@ def values_to_bins(self, values): def bins_to_values(self, bins): return bins * self.interval + self.vmin - def p_losses(self, x_start, t, cond, noise=None): - x_recon, noise = super().p_losses(x_start, t, cond, noise=noise) # [B, 1, M, T] - x_recon = self.denorm_spec(x_recon.squeeze(1).transpose(1, 2)).unsqueeze(-1) # [B, T, M=1] - noise = self.denorm_spec(noise.squeeze(1).transpose(1, 2)).unsqueeze(-1) # [B, T, M=1] - return ( - x_recon.transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] - noise.transpose(1, 2).unsqueeze(1), # [B, 1, M=1, T] - ) - def norm_spec(self, curve): miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] return 
super().norm_spec(probs) def denorm_spec(self, probs): - probs = super().denorm_spec(probs) - logits = probs.sigmoid() # [B, T, N] + probs = super().denorm_spec(probs) # [B, T, N] + logits = probs.sigmoid() + peaks = logits.argmax(dim=2, keepdim=True) # [B, T, 1] + start = torch.max(torch.tensor(0, device=logits.device), peaks - self.width) + end = torch.min(torch.tensor(self.num_bins, device=logits.device), peaks + self.width) + logits[(self.x < start) | (self.x > end)] = 0. bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] return self.bins_to_values(bins) diff --git a/modules/toplevel.py b/modules/toplevel.py index 6ba51e8d5..78fceefa7 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -59,30 +59,35 @@ def __init__(self, vocab_size): if hparams['predict_pitch']: pitch_hparams = hparams['pitch_prediction_args'] self.base_pitch_embed = Linear(1, hparams['hidden_size']) - self.pitch_predictor = CurveDiffusion1d( - vmin=pitch_hparams['pitch_delta_vmin'], - vmax=pitch_hparams['pitch_delta_vmax'], - timesteps=hparams['timesteps'], - k_step=hparams['K_step'], - denoiser_type=hparams['diff_decoder_type'], - denoiser_args=( - hparams['residual_layers'], - hparams['residual_channels'] + diff_predictor_mode = pitch_hparams['diff_predictor_mode'] + if diff_predictor_mode == '1d': + self.pitch_predictor = CurveDiffusion1d( + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + hparams['residual_layers'], + hparams['residual_channels'] + ) ) - ) - # self.pitch_predictor = CurveDiffusion2d( - # vmin=pitch_hparams['pitch_delta_vmin'], - # vmax=pitch_hparams['pitch_delta_vmax'], - # num_bins=pitch_hparams['num_pitch_bins'], - # deviation=pitch_hparams['deviation'], - # timesteps=hparams['timesteps'], - # k_step=hparams['K_step'], - # denoiser_type=hparams['diff_decoder_type'], - # denoiser_args=( - # hparams['residual_layers'], - # hparams['residual_channels'] - # ) - # ) + elif diff_predictor_mode == '2d': + self.pitch_predictor = CurveDiffusion2d( + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + num_bins=pitch_hparams['num_pitch_bins'], + deviation=pitch_hparams['deviation'], + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + hparams['residual_layers'], + hparams['residual_channels'] + ) + ) + else: + raise NotImplementedError() # from modules.fastspeech.tts_modules import PitchPredictor # self.pitch_predictor = PitchPredictor( # vmin=pitch_hparams['pitch_delta_vmin'], From bdc0bdd0eeb8f700c8bbb00bed1accc9755b63b6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 01:22:37 +0800 Subject: [PATCH 275/475] Add config key --- configs/variance.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/configs/variance.yaml b/configs/variance.yaml index 78af7ee34..c006ffaf2 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -54,6 +54,7 @@ predict_pitch: true pitch_cascade: true lambda_pitch_loss: 1.0 pitch_prediction_args: + diff_predictor_mode: 1d pitch_delta_vmin: -12.75 pitch_delta_vmax: 12.75 num_pitch_bins: 256 From a4fdce7f027eee8f76208d20bea69f09b15e7f63 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 01:41:23 +0800 Subject: [PATCH 276/475] Set grid width to 1 --- utils/plot.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/utils/plot.py b/utils/plot.py 
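Note on the `diff_predictor_mode` switch above: it keeps both curve-diffusion variants selectable from the config and fails fast on unknown values. A miniature of the same dispatch, with `Predictor1d`/`Predictor2d` as hypothetical stand-ins for `CurveDiffusion1d`/`CurveDiffusion2d`:

    class Predictor1d:
        def __init__(self, vmin, vmax):
            self.mode = '1d'

    class Predictor2d:
        def __init__(self, vmin, vmax, num_bins, deviation):
            self.mode = '2d'

    def build_pitch_predictor(cfg):
        mode = cfg['diff_predictor_mode']
        if mode == '1d':
            return Predictor1d(cfg['pitch_delta_vmin'], cfg['pitch_delta_vmax'])
        elif mode == '2d':
            return Predictor2d(cfg['pitch_delta_vmin'], cfg['pitch_delta_vmax'],
                               cfg['num_pitch_bins'], cfg['deviation'])
        else:
            raise NotImplementedError(mode)  # mirrors the else branch above

    print(build_pitch_predictor({'diff_predictor_mode': '1d',
                                 'pitch_delta_vmin': -12.75,
                                 'pitch_delta_vmax': 12.75}).mode)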
index 3db1142ec..b0edafc26 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -1,6 +1,7 @@ import matplotlib.pyplot as plt import numpy as np import torch +from matplotlib.ticker import MultipleLocator LINE_COLORS = ['w', 'r', 'y', 'cyan', 'm', 'b', 'lime'] @@ -64,6 +65,7 @@ def curve_to_figure(curve_gt, curve_pred=None): if curve_pred is not None: plt.plot(curve_pred, color='r', label='pred') plt.plot(curve_gt, color='b', label='gt') + plt.gca().yaxis.set_major_locator(MultipleLocator(1)) plt.grid(axis='y') plt.legend() plt.tight_layout() From 4e2c54570dbe9ff1f6cf316bf911ea48cc49ed74 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 16 Apr 2023 12:41:24 -0500 Subject: [PATCH 277/475] Allow optimizer config change when loading ckpts --- basics/base_task.py | 51 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 10 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index cd92a5dba..e9e581fc0 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -17,7 +17,7 @@ import lightning.pytorch as pl from lightning.pytorch.callbacks import LearningRateMonitor from lightning.pytorch.loggers import TensorBoardLogger -from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_only +from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_info, rank_zero_only from basics.base_module import CategorizedModule from utils.hparams import hparams @@ -83,6 +83,16 @@ def __init__(self, *args, **kwargs): 'total_loss': MeanMetric() } + self.optimizer_params = { + 'lr': hparams['lr'], + 'betas': (hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), + 'weight_decay': hparams['weight_decay'], + } + self.scheduler_params = { + 'step_size': hparams['lr_decay_steps'], + 'gamma': hparams['lr_decay_gamma'], + } + ########### # Training, validation and testing ########### @@ -192,21 +202,17 @@ def on_validation_epoch_end(self): # noinspection PyMethodMayBeStatic def build_scheduler(self, optimizer): - # return WarmupCosineSchedule(optimizer, - # warmup_steps=hparams['warmup_updates'], - # t_total=hparams['max_updates'], - # eta_min=0) - return torch.optim.lr_scheduler.StepLR( - optimizer, step_size=hparams['lr_decay_steps'], gamma=hparams['lr_decay_gamma'] + scheduler = torch.optim.lr_scheduler.StepLR( + optimizer, **self.scheduler_params ) + return scheduler # noinspection PyMethodMayBeStatic def build_optimizer(self, model): optimizer = torch.optim.AdamW( filter(lambda p: p.requires_grad, model.parameters()), - lr=hparams['lr'], - betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), - weight_decay=hparams['weight_decay']) + **self.optimizer_params + ) return optimizer def configure_optimizers(self): @@ -365,3 +371,28 @@ def on_load_checkpoint(self, checkpoint): from lightning.pytorch.trainer.states import RunningStage if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: self.skip_immediate_validation = True + + if 'optimizer_states' in checkpoint and checkpoint['optimizer_states']: + opt_states = checkpoint['optimizer_states'] + assert len(opt_states) == 1 + opt_state = opt_states[0] + for param_group in opt_state['param_groups']: + for k, v in self.optimizer_params.items(): + if k in param_group and param_group[k] != v: + rank_zero_info(f'| Overriding optimizer parameter {k} from checkpoint: {param_group[k]} -> {v}') + param_group.update(self.optimizer_params) + if 'initial_lr' in param_group: + param_group['initial_lr'] = self.optimizer_params['lr'] + + if 'lr_schedulers' in 
checkpoint and checkpoint['lr_schedulers']: + assert 'optimizer_states' in checkpoint and checkpoint['optimizer_states'] + schedulers = checkpoint['lr_schedulers'] + assert len(schedulers) == 1 + scheduler = schedulers[0] + for k, v in self.scheduler_params.items(): + if k in scheduler and scheduler[k] != v: + rank_zero_info(f'| Overriding scheduler parameter {k} from checkpoint: {scheduler[k]} -> {v}') + scheduler.update(self.scheduler_params) + scheduler['base_lrs'] = [group['initial_lr'] for group in checkpoint['optimizer_states'][0]['param_groups']] + if '_last_lr' in scheduler: + scheduler.pop('_last_lr') From 70cc5b12758d71b1654509a25b07d9fb7587ebe1 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 16 Apr 2023 13:00:18 -0500 Subject: [PATCH 278/475] Move vocoder to device as soon as possible --- basics/base_vocoder.py | 7 +++++++ modules/vocoders/ddsp.py | 3 +++ modules/vocoders/nsf_hifigan.py | 3 +++ training/acoustic_task.py | 6 +++++- 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/basics/base_vocoder.py b/basics/base_vocoder.py index 9613b7b8a..d6d003904 100644 --- a/basics/base_vocoder.py +++ b/basics/base_vocoder.py @@ -5,6 +5,13 @@ def to_device(self, device): :param device: torch.device or str """ raise NotImplementedError() + + def get_device(self): + """ + + :return: device: torch.device or str + """ + raise NotImplementedError() def spec2wav(self, mel, **kwargs): """ diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index 3693daf67..c985ad21b 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -122,6 +122,9 @@ def __init__(self, device='cpu'): def to_device(self, device): pass + def get_device(self): + return 'cpu' + def spec2wav_torch(self, mel, f0): # mel: [B, T, bins] f0: [B, T] if self.args.data.sampling_rate != hparams['audio_sample_rate']: print('Mismatch parameters: hparams[\'audio_sample_rate\']=', hparams['audio_sample_rate'], '!=', diff --git a/modules/vocoders/nsf_hifigan.py b/modules/vocoders/nsf_hifigan.py index c4f48b3e9..229c750e9 100644 --- a/modules/vocoders/nsf_hifigan.py +++ b/modules/vocoders/nsf_hifigan.py @@ -29,6 +29,9 @@ def device(self): def to_device(self, device): self.model.to(device) + def get_device(self): + return self.device + def spec2wav_torch(self, mel, **kwargs): # mel: [B, T, bins] if self.h.sampling_rate != hparams['audio_sample_rate']: print('Mismatch parameters: hparams[\'audio_sample_rate\']=', hparams['audio_sample_rate'], '!=', diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 540c7a255..a6e577406 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -101,7 +101,11 @@ def run_model(self, sample, infer=False): return losses def on_train_start(self): - if self.use_vocoder: + if self.use_vocoder and self.vocoder.get_device() != self.device: + self.vocoder.to_device(self.device) + + def _on_validation_start(self): + if self.use_vocoder and self.vocoder.get_device() != self.device: self.vocoder.to_device(self.device) def _validation_step(self, sample, batch_idx): From 20d0786b821c48fe3277677885597ef38f1745ec Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 02:04:53 +0800 Subject: [PATCH 279/475] `self.num_bins - 1` --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 01a523ecd..300643a4d 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -329,7 +329,7 @@ def denorm_spec(self, probs): logits = 
probs.sigmoid() peaks = logits.argmax(dim=2, keepdim=True) # [B, T, 1] start = torch.max(torch.tensor(0, device=logits.device), peaks - self.width) - end = torch.min(torch.tensor(self.num_bins, device=logits.device), peaks + self.width) + end = torch.min(torch.tensor(self.num_bins - 1, device=logits.device), peaks + self.width) logits[(self.x < start) | (self.x > end)] = 0. bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] return self.bins_to_values(bins) From bda326a5b692d6f6f515386fe32df5ad0ae51807 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Sun, 16 Apr 2023 13:11:27 -0500 Subject: [PATCH 280/475] Allow mixed kwargs to optimizer and scheduler cls --- basics/base_task.py | 12 ++++++++---- utils/__init__.py | 8 ++++++++ 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index e9e581fc0..7c646d081 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -20,6 +20,7 @@ from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_info, rank_zero_only from basics.base_module import CategorizedModule +from utils import filter_kwargs from utils.hparams import hparams from utils.training_utils import ( DsModelCheckpoint, DsTQDMProgressBar, @@ -202,16 +203,19 @@ def on_validation_epoch_end(self): # noinspection PyMethodMayBeStatic def build_scheduler(self, optimizer): - scheduler = torch.optim.lr_scheduler.StepLR( - optimizer, **self.scheduler_params + scheduler_cls = torch.optim.lr_scheduler.StepLR + scheduler = scheduler_cls( + optimizer, + **filter_kwargs(self.scheduler_params, scheduler_cls) ) return scheduler # noinspection PyMethodMayBeStatic def build_optimizer(self, model): - optimizer = torch.optim.AdamW( + optimizer_cls = torch.optim.AdamW + optimizer = optimizer_cls( filter(lambda p: p.requires_grad, model.parameters()), - **self.optimizer_params + **filter_kwargs(self.optimizer_params, optimizer_cls) ) return optimizer diff --git a/utils/__init__.py b/utils/__init__.py index 921e261d8..b98834735 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,3 +1,4 @@ +import inspect import pathlib import re import time @@ -129,6 +130,13 @@ def unpack_dict_to_list(samples): return samples_ +def filter_kwargs(dict_to_filter, kwarg_obj): + sig = inspect.signature(kwarg_obj) + filter_keys = [param.name for param in sig.parameters.values() if param.kind == param.POSITIONAL_OR_KEYWORD] + filtered_dict = {filter_key: dict_to_filter[filter_key] for filter_key in filter_keys if filter_key in dict_to_filter} + return filtered_dict + + def load_ckpt( cur_model, ckpt_base_dir, ckpt_steps=None, required_category=None, prefix_in_ckpt='model', key_in_ckpt='state_dict', From 48e9474705ce4b2ee0e23ff0f241d75223b08b70 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 02:55:28 +0800 Subject: [PATCH 281/475] Fix incorrect `spec_min` and `spec_max` --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 300643a4d..d7b2d6592 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -303,7 +303,7 @@ def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, super().__init__( num_bins, timesteps=timesteps, k_step=k_step, denoiser_type=denoiser_type, denoiser_args=denoiser_args, - betas=betas, spec_min=[0.], spec_max=[1.] 
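Note on `filter_kwargs` above: it lets a single mixed dict feed both constructors — each class only receives the keys its `__init__` signature declares, so optimizer and scheduler settings can live side by side. A usage sketch (the hyperparameter values here are illustrative):

    import inspect

    import torch

    def filter_kwargs(dict_to_filter, kwarg_obj):
        sig = inspect.signature(kwarg_obj)
        keys = [p.name for p in sig.parameters.values()
                if p.kind == p.POSITIONAL_OR_KEYWORD]
        return {k: dict_to_filter[k] for k in keys if k in dict_to_filter}

    params = {'lr': 1e-4, 'betas': (0.9, 0.98), 'weight_decay': 0.0,
              'step_size': 5000, 'gamma': 0.5}  # optimizer + scheduler mixed
    opt = torch.optim.AdamW([torch.nn.Parameter(torch.zeros(1))],
                            **filter_kwargs(params, torch.optim.AdamW))
    sched = torch.optim.lr_scheduler.StepLR(
        opt, **filter_kwargs(params, torch.optim.lr_scheduler.StepLR))
    print(opt.defaults['lr'], sched.step_size, sched.gamma)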
+ betas=betas, spec_min=[vmin], spec_max=[vmax] ) self.vmin = vmin self.vmax = vmax From ebc31b3ec64bfc672115b5fa014f441a4b2eaae9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 02:57:23 +0800 Subject: [PATCH 282/475] Revert "Fix incorrect `spec_min` and `spec_max`" This reverts commit 48e9474705ce4b2ee0e23ff0f241d75223b08b70. --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index d7b2d6592..300643a4d 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -303,7 +303,7 @@ def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, super().__init__( num_bins, timesteps=timesteps, k_step=k_step, denoiser_type=denoiser_type, denoiser_args=denoiser_args, - betas=betas, spec_min=[vmin], spec_max=[vmax] + betas=betas, spec_min=[0.], spec_max=[1.] ) self.vmin = vmin self.vmax = vmax From da16fa7bf684eeef9e47eb5192ab7efc84d76eaa Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 03:02:52 +0800 Subject: [PATCH 283/475] Remove sigmoid --- modules/diffusion/ddpm.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 300643a4d..75b411024 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -326,10 +326,9 @@ def norm_spec(self, curve): def denorm_spec(self, probs): probs = super().denorm_spec(probs) # [B, T, N] - logits = probs.sigmoid() - peaks = logits.argmax(dim=2, keepdim=True) # [B, T, 1] - start = torch.max(torch.tensor(0, device=logits.device), peaks - self.width) - end = torch.min(torch.tensor(self.num_bins - 1, device=logits.device), peaks + self.width) - logits[(self.x < start) | (self.x > end)] = 0. - bins = torch.sum(self.x * logits, dim=2) / torch.sum(logits, dim=2) # [B, T] + peaks = probs.argmax(dim=2, keepdim=True) # [B, T, 1] + start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width) + end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width) + probs[(self.x < start) | (self.x > end)] = 0. + bins = torch.sum(self.x * probs, dim=2) / torch.sum(probs, dim=2) # [B, T] return self.bins_to_values(bins) From 49f38530adc9994f18d98a8bd542e4ba930b369c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 03:06:33 +0800 Subject: [PATCH 284/475] Add clamp --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 75b411024..eb9d044b5 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -331,4 +331,4 @@ def denorm_spec(self, probs): end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width) probs[(self.x < start) | (self.x > end)] = 0. 
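Note on the decoder as it stands after patch 274's weighted argmax, patch 283 (no sigmoid) and patch 284 (output clamp) — plus the `probs *= probs > 0` guard that patch 287 adds below: the 2d variant zeroes negative mass, restricts attention to a ±width window around each frame's peak bin, takes the expectation over bin indices, and clamps into [vmin, vmax]. A standalone sketch, written with `masked_fill` and `Tensor.clamp` in place of the in-place boolean assignment, and `num_bins=128` as the config later settles on:

    import torch

    vmin, vmax, num_bins, deviation = -12.75, 12.75, 128, 0.25
    interval = (vmax - vmin) / (num_bins - 1)
    sigma = deviation / interval
    width = int(3 * sigma)
    x = torch.arange(num_bins).float().reshape(1, 1, -1)  # [1, 1, N]

    def decode(probs):                            # [B, T, N] -> [B, T]
        probs = probs * (probs > 0)               # drop negative mass
        peaks = probs.argmax(dim=2, keepdim=True)             # [B, T, 1]
        start = (peaks - width).clamp(min=0)
        end = (peaks + width).clamp(max=num_bins - 1)
        probs = probs.masked_fill((x < start) | (x > end), 0.)
        bins = (x * probs).sum(dim=2) / probs.sum(dim=2)      # windowed expectation
        return (bins * interval + vmin).clamp(min=vmin, max=vmax)

    print(decode(torch.rand(2, 10, num_bins)).shape)  # torch.Size([2, 10])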
bins = torch.sum(self.x * probs, dim=2) / torch.sum(probs, dim=2) # [B, T] - return self.bins_to_values(bins) + return self.bins_to_values(bins).clamp(min=self.vmin, max=self.vmax) From 6e0151ffee6083d5c2cffeceb85d25dd2e06e3f5 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 12:54:30 +0800 Subject: [PATCH 285/475] Add total variance loss --- modules/losses/common_losses.py | 24 ++++++++++++++++++++++++ modules/losses/diff_loss.py | 19 ++++++++++++------- training/variance_task.py | 3 ++- 3 files changed, 38 insertions(+), 8 deletions(-) create mode 100644 modules/losses/common_losses.py diff --git a/modules/losses/common_losses.py b/modules/losses/common_losses.py new file mode 100644 index 000000000..5251965c2 --- /dev/null +++ b/modules/losses/common_losses.py @@ -0,0 +1,24 @@ +import torch.nn as nn + + +class TVLoss(nn.Module): + """ + Adapted from https://github.com/jxgu1016/Total_Variation_Loss.pytorch + """ + + def __init__(self, weight=1): + super(TVLoss, self).__init__() + self.weight = weight + + def forward(self, x): + """ + :param x: [B, C, H, W] + """ + b, c, h_x, w_x, *_ = x.shape + count_h = c * (h_x - 1) * w_x + count_w = c * h_x * (w_x - 1) + h_tv = (x[:, :, 1:, :] - x[:, :, :-1, :]).pow(2).sum() + w_tv = (x[:, :, :, 1:] - x[:, :, :, :-1]).pow(2).sum() + return self.weight * 2 * ( + (h_tv / count_h if count_h > 0 else 0) + (w_tv / count_w if count_w > 0 else 0) + ) / b diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 9fa104c0c..2180b650c 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -1,21 +1,22 @@ import torch.nn as nn from torch import Tensor -from modules.losses.ssim import SSIMLoss +from modules.losses.common_losses import TVLoss class DiffusionNoiseLoss(nn.Module): - def __init__(self, loss_type): + def __init__(self, loss_type, lambda_tv=0): super().__init__() self.loss_type = loss_type if self.loss_type == 'l1': - self.loss = nn.L1Loss() + self.main_loss = nn.L1Loss() elif self.loss_type == 'l2': - self.loss = nn.MSELoss() - elif self.loss_type == 'ssim': - self.loss = SSIMLoss() + self.main_loss = nn.MSELoss() else: raise NotImplementedError() + self.lambda_tv = lambda_tv + if lambda_tv > 0: + self.tv_loss = TVLoss() def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> Tensor: """ @@ -27,4 +28,8 @@ def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> nonpadding = nonpadding.transpose(1, 2).unsqueeze(1) x_recon = x_recon * nonpadding noise *= nonpadding - return self.loss(x_recon, noise) + main_loss = self.main_loss(x_recon, noise) + if self.lambda_tv > 0: + return main_loss + self.lambda_tv * self.tv_loss(x_recon) + else: + return main_loss diff --git a/training/variance_task.py b/training/variance_task.py index 2074a1ec6..f013ce409 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -74,7 +74,8 @@ def build_losses(self): ) if hparams['predict_pitch']: self.pitch_loss = DiffusionNoiseLoss( - loss_type=hparams['diff_loss_type'] + loss_type=hparams['diff_loss_type'], + lambda_tv=hparams['lambda_tv_loss'] ) # pitch_hparams = hparams['pitch_prediction_args'] # self.pitch_loss = CurveLoss2d( From 8c565c03b07afe66788f9bc67a4cf01a4d4b329c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 12:58:57 +0800 Subject: [PATCH 286/475] Fix key error --- training/variance_task.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index 
f013ce409..25e7d2748 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -73,9 +73,10 @@ def build_losses(self): lambda_sdur=dur_hparams['lambda_sdur_loss'] ) if hparams['predict_pitch']: + pitch_hparams = hparams['pitch_prediction_args'] self.pitch_loss = DiffusionNoiseLoss( loss_type=hparams['diff_loss_type'], - lambda_tv=hparams['lambda_tv_loss'] + lambda_tv=pitch_hparams['lambda_tv_loss'] ) # pitch_hparams = hparams['pitch_prediction_args'] # self.pitch_loss = CurveLoss2d( From 162517a110913ca672b53c181be20370224f4b15 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 13:03:15 +0800 Subject: [PATCH 287/475] Cancel tv loss --- modules/diffusion/ddpm.py | 1 + modules/losses/diff_loss.py | 17 ++++------------- training/variance_task.py | 4 +--- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index eb9d044b5..071c7ba75 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -326,6 +326,7 @@ def norm_spec(self, curve): def denorm_spec(self, probs): probs = super().denorm_spec(probs) # [B, T, N] + probs *= probs > 0 peaks = probs.argmax(dim=2, keepdim=True) # [B, T, 1] start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width) end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width) diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 2180b650c..2adc1e4e8 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -1,22 +1,17 @@ import torch.nn as nn from torch import Tensor -from modules.losses.common_losses import TVLoss - class DiffusionNoiseLoss(nn.Module): - def __init__(self, loss_type, lambda_tv=0): + def __init__(self, loss_type): super().__init__() self.loss_type = loss_type if self.loss_type == 'l1': - self.main_loss = nn.L1Loss() + self.loss = nn.L1Loss() elif self.loss_type == 'l2': - self.main_loss = nn.MSELoss() + self.loss = nn.MSELoss() else: raise NotImplementedError() - self.lambda_tv = lambda_tv - if lambda_tv > 0: - self.tv_loss = TVLoss() def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> Tensor: """ @@ -28,8 +23,4 @@ def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> nonpadding = nonpadding.transpose(1, 2).unsqueeze(1) x_recon = x_recon * nonpadding noise *= nonpadding - main_loss = self.main_loss(x_recon, noise) - if self.lambda_tv > 0: - return main_loss + self.lambda_tv * self.tv_loss(x_recon) - else: - return main_loss + return self.loss(x_recon, noise) diff --git a/training/variance_task.py b/training/variance_task.py index 25e7d2748..2074a1ec6 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -73,10 +73,8 @@ def build_losses(self): lambda_sdur=dur_hparams['lambda_sdur_loss'] ) if hparams['predict_pitch']: - pitch_hparams = hparams['pitch_prediction_args'] self.pitch_loss = DiffusionNoiseLoss( - loss_type=hparams['diff_loss_type'], - lambda_tv=pitch_hparams['lambda_tv_loss'] + loss_type=hparams['diff_loss_type'] ) # pitch_hparams = hparams['pitch_prediction_args'] # self.pitch_loss = CurveLoss2d( From e9b0af0f1c51531db43c25a8b5592bb7d6f4b7a2 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 18:12:55 +0800 Subject: [PATCH 288/475] Update code and configs --- configs/variance.yaml | 4 +++- modules/diffusion/ddpm.py | 2 +- modules/toplevel.py | 4 ++-- training/variance_task.py | 10 +++------- 4 files changed, 9 insertions(+), 11 deletions(-) diff --git 
a/configs/variance.yaml b/configs/variance.yaml index c006ffaf2..0b8f59f63 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -57,9 +57,11 @@ pitch_prediction_args: diff_predictor_mode: 1d pitch_delta_vmin: -12.75 pitch_delta_vmax: 12.75 - num_pitch_bins: 256 + num_pitch_bins: 128 deviation: 0.25 hidden_size: 512 + residual_layers: 20 + residual_channels: 512 residual_layers: 10 residual_channels: 256 diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 071c7ba75..951de2dca 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -321,7 +321,7 @@ def bins_to_values(self, bins): def norm_spec(self, curve): miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] - probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] + probs = ((self.x - miu) / self.sigma).pow(2).div(-2).exp() # gaussian blur, [B, T, N] return super().norm_spec(probs) def denorm_spec(self, probs): diff --git a/modules/toplevel.py b/modules/toplevel.py index 78fceefa7..d5d6ecfa8 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -82,8 +82,8 @@ def __init__(self, vocab_size): k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], denoiser_args=( - hparams['residual_layers'], - hparams['residual_channels'] + pitch_hparams['residual_layers'], + pitch_hparams['residual_channels'] ) ) else: diff --git a/training/variance_task.py b/training/variance_task.py index 2074a1ec6..c55dc6769 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -97,22 +97,18 @@ def run_model(self, sample, infer=False): mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, infer=infer) + dur_pred, pitch_pred = output if infer: - dur_pred, pitch_pred = output return dur_pred, pitch_pred else: - dur_pred, pitch_pred_out = output losses = {} if dur_pred is not None: losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) - if pitch_pred_out is not None: - pitch_x_recon, pitch_noise = pitch_pred_out + if pitch_pred is not None: + (pitch_x_recon, pitch_noise) = pitch_pred losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss( pitch_x_recon, pitch_noise, (mel2ph > 0).float().unsqueeze(-1) ) - # delta_pitch = sample['delta_pitch'] - # uv = sample['uv'] - # losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss(pitch_prob, delta_pitch, ~uv) return losses def _validation_step(self, sample, batch_idx): From feb72ba242a8e38a916c2ba54ac87ba63def1d3e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 18:23:09 +0800 Subject: [PATCH 289/475] Bring back support of pndm --- configs/acoustic.yaml | 1 + configs/variance.yaml | 1 + modules/diffusion/ddpm.py | 102 ++++++++++++++++++++------------------ 3 files changed, 57 insertions(+), 47 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 8704298ef..7d464c82b 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -58,6 +58,7 @@ K_step: 1000 timesteps: 1000 max_beta: 0.02 rel_pos: true +diff_accelerator: dpm-solver pndm_speedup: 10 hidden_size: 256 residual_layers: 20 diff --git a/configs/variance.yaml b/configs/variance.yaml index 0b8f59f63..38073f8f6 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -32,6 +32,7 @@ K_step: 1000 timesteps: 1000 max_beta: 0.02 rel_pos: true +diff_accelerator: dpm-solver pndm_speedup: 10 hidden_size: 256 diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 951de2dca..88fbf15a5 100644 --- a/modules/diffusion/ddpm.py +++ 
b/modules/diffusion/ddpm.py @@ -156,7 +156,8 @@ def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False): @torch.no_grad() def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=False): """ - Use the PLMS method from [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778). + Use the PLMS method from + [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778). """ def get_x_pred(x, noise_t, t): @@ -220,52 +221,59 @@ def forward(self, condition, gt_spec=None, infer=True): shape = (cond.shape[0], 1, self.out_dims, cond.shape[2]) x = torch.randn(shape, device=device) if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1: - # obsolete: pndm_speedup, now use dpm_solver. - # self.noise_list = deque(maxlen=4) - # iteration_interval = hparams['pndm_speedup'] - # for i in tqdm(reversed(range(0, t, iteration_interval)), desc='sample time step', - # total=t // iteration_interval): - # x = self.p_sample_plms(x, torch.full((b,), i, device=device, dtype=torch.long), iteration_interval, - # cond) - - from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver - ## 1. Define the noise schedule. - noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) - - ## 2. Convert your discrete-time `model` to the continuous-time - # noise prediction model. Here is an example for a diffusion model - ## `model` with the noise prediction type ("noise") . - def my_wrapper(fn): - def wrapped(x, t, **kwargs): - ret = fn(x, t, **kwargs) - self.bar.update(1) - return ret - - return wrapped - - model_fn = model_wrapper( - my_wrapper(self.denoise_fn), - noise_schedule, - model_type="noise", # or "x_start" or "v" or "score" - model_kwargs={"cond": cond} - ) - - ## 3. Define dpm-solver and sample by singlestep DPM-Solver. - ## (We recommend singlestep DPM-Solver for unconditional sampling) - ## You can adjust the `steps` to balance the computation - ## costs and the sample quality. - dpm_solver = DPM_Solver(model_fn, noise_schedule) - - steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer']) - x = dpm_solver.sample( - x, - steps=steps, - order=3, - skip_type="time_uniform", - method="singlestep", - ) - self.bar.close() + algorithm = hparams.get('diff_accelerator', 'dpm-solver') + if algorithm == 'dpm-solver': + from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver + # 1. Define the noise schedule. + noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) + + # 2. Convert your discrete-time `model` to the continuous-time + # noise prediction model. Here is an example for a diffusion model + # `model` with the noise prediction type ("noise") . + def my_wrapper(fn): + def wrapped(x, t, **kwargs): + ret = fn(x, t, **kwargs) + self.bar.update(1) + return ret + + return wrapped + + model_fn = model_wrapper( + my_wrapper(self.denoise_fn), + noise_schedule, + model_type="noise", # or "x_start" or "v" or "score" + model_kwargs={"cond": cond} + ) + + # 3. Define dpm-solver and sample by singlestep DPM-Solver. + # (We recommend singlestep DPM-Solver for unconditional sampling) + # You can adjust the `steps` to balance the computation + # costs and the sample quality. 
+ dpm_solver = DPM_Solver(model_fn, noise_schedule) + + steps = t // hparams["pndm_speedup"] + self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer']) + x = dpm_solver.sample( + x, + steps=steps, + order=3, + skip_type="time_uniform", + method="singlestep", + ) + self.bar.close() + elif algorithm == 'pndm': + self.noise_list = deque(maxlen=4) + iteration_interval = hparams['pndm_speedup'] + for i in tqdm( + reversed(range(0, t, iteration_interval)), desc='sample time step', + total=t // iteration_interval + ): + x = self.p_sample_plms( + x, torch.full((b,), i, device=device, dtype=torch.long), + iteration_interval, cond=cond + ) + else: + raise NotImplementedError(algorithm) else: for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=not hparams['infer']): x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) From 713b51bd05a213f0ec7623a1c5e2345b1da9151e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 18:53:11 +0800 Subject: [PATCH 290/475] Disable tqdm --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 88fbf15a5..e38ccb963 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -266,7 +266,7 @@ def wrapped(x, t, **kwargs): iteration_interval = hparams['pndm_speedup'] for i in tqdm( reversed(range(0, t, iteration_interval)), desc='sample time step', - total=t // iteration_interval + total=t // iteration_interval, disable=not hparams['infer'] ): x = self.p_sample_plms( x, torch.full((b,), i, device=device, dtype=torch.long), From 1b9cb7674a967e6923dc6fc4926b5f77cf05fc5d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 20:13:57 +0800 Subject: [PATCH 291/475] Add new losses --- modules/losses/diff_loss.py | 50 +++++++++++++++++++++++++++++++------ training/variance_task.py | 11 +++++--- 2 files changed, 50 insertions(+), 11 deletions(-) diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 2adc1e4e8..9311066a7 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -1,26 +1,62 @@ import torch.nn as nn from torch import Tensor +from modules.losses.common_losses import TVLoss + class DiffusionNoiseLoss(nn.Module): def __init__(self, loss_type): super().__init__() self.loss_type = loss_type if self.loss_type == 'l1': - self.loss = nn.L1Loss() + self.loss = nn.L1Loss(reduction='none') elif self.loss_type == 'l2': - self.loss = nn.MSELoss() + self.loss = nn.MSELoss(reduction='none') else: raise NotImplementedError() + @staticmethod + def _mask_nonpadding(x_recon, noise, nonpadding=None): + if nonpadding is not None: + nonpadding = nonpadding.transpose(1, 2).unsqueeze(1) + return x_recon * nonpadding, noise * nonpadding + else: + return x_recon, noise + + def _forward(self, x_recon, noise): + return self.loss(x_recon, noise) + def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> Tensor: """ :param x_recon: [B, 1, M, T] :param noise: [B, 1, M, T] :param nonpadding: [B, T, M] """ - if nonpadding is not None: - nonpadding = nonpadding.transpose(1, 2).unsqueeze(1) - x_recon = x_recon * nonpadding - noise *= nonpadding - return self.loss(x_recon, noise) + x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) + return self._forward(x_recon, noise).mean() + + +class DiffusionNoiseWithSmoothnessLoss(DiffusionNoiseLoss): + def __init__(self, loss_type, lambda_tv=0.5): + 
super().__init__(loss_type) + self.lambda_tv = lambda_tv + self.tv_loss = TVLoss() + + def forward(self, x_recon, noise, nonpadding=None): + x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) + return self._forward(x_recon, noise).mean() + self.lambda_tv * self.tv_loss(x_recon - noise) + + +class DiffusionNoiseWithSensitivityLoss(DiffusionNoiseLoss): + def __init__(self, loss_type, alpha=1): + super().__init__(loss_type) + self.alpha = alpha + + def forward(self, x_recon, noise, nonpadding=None, reference=None): + x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) + loss = self._forward(x_recon, noise) + if reference is not None: + difference = reference.diff(dim=1, prepend=reference[:, 0]).abs() + sensitivity = 1 / (1 + self.alpha * difference) + loss = loss * sensitivity.transpose(1, 2).unsqueeze(1) + return loss.mean() diff --git a/training/variance_task.py b/training/variance_task.py index c55dc6769..6ba982138 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,7 +9,9 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.curve_loss import CurveLoss2d -from modules.losses.diff_loss import DiffusionNoiseLoss +from modules.losses.diff_loss import ( + DiffusionNoiseLoss, DiffusionNoiseWithSmoothnessLoss +) from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams @@ -73,10 +75,11 @@ def build_losses(self): lambda_sdur=dur_hparams['lambda_sdur_loss'] ) if hparams['predict_pitch']: - self.pitch_loss = DiffusionNoiseLoss( - loss_type=hparams['diff_loss_type'] + pitch_hparams = hparams['pitch_prediction_args'] + self.pitch_loss = DiffusionNoiseWithSmoothnessLoss( + loss_type=hparams['diff_loss_type'], + lambda_tv=pitch_hparams['lambda_tv_loss'] ) - # pitch_hparams = hparams['pitch_prediction_args'] # self.pitch_loss = CurveLoss2d( # vmin=pitch_hparams['pitch_delta_vmin'], # vmax=pitch_hparams['pitch_delta_vmax'], From a474300f011739818fa820f01e2b2332553d3ba1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 20:56:16 +0800 Subject: [PATCH 292/475] Add viterbi decoder --- modules/diffusion/ddpm.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index e38ccb963..f04744760 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -2,6 +2,7 @@ from functools import partial from inspect import isfunction +import librosa.sequence import numpy as np import torch from torch import nn @@ -320,6 +321,9 @@ def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, self.sigma = deviation / self.interval self.width = int(3 * self.sigma) self.register_buffer('x', torch.arange(self.num_bins).float().reshape(1, 1, -1)) # [1, 1, N] + xx, yy = np.meshgrid(range(num_bins), range(num_bins)) + transition = np.maximum(self.width * 2 - abs(xx - yy), 0) + self.transition = transition / transition.sum(axis=1, keepdims=True) def values_to_bins(self, values): return (values - self.vmin) / self.interval @@ -334,8 +338,11 @@ def norm_spec(self, curve): def denorm_spec(self, probs): probs = super().denorm_spec(probs) # [B, T, N] - probs *= probs > 0 - peaks = probs.argmax(dim=2, keepdim=True) # [B, T, 1] + probs = probs.softmax(dim=2) + sequence = probs.cpu().numpy() + peaks = torch.from_numpy( + librosa.sequence.viterbi(sequence.transpose(1, 2), self.transition) + ).to(probs.device).long().unsqueeze(-1) start = 
torch.max(torch.tensor(0, device=probs.device), peaks - self.width)
         end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width)
         probs[(self.x < start) | (self.x > end)] = 0.

From 864565bee003665980da70f72659ce29c58d1ebb Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 17 Apr 2023 21:12:38 +0800
Subject: [PATCH 293/475] Fix `axes don't match array` error

---
 modules/diffusion/ddpm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index f04744760..e48e31dfa 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -339,9 +339,9 @@ def norm_spec(self, curve):
     def denorm_spec(self, probs):
         probs = super().denorm_spec(probs)  # [B, T, N]
         probs = probs.softmax(dim=2)
-        sequence = probs.cpu().numpy()
+        sequences = probs.transpose(1, 2).cpu().numpy()
         peaks = torch.from_numpy(
-            librosa.sequence.viterbi(sequence.transpose(1, 2), self.transition)
+            librosa.sequence.viterbi(sequences, self.transition)
         ).to(probs.device).long().unsqueeze(-1)
         start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width)
         end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width)

From 536f571f6ea38b0b7406f0db3e45d584ff5e016c Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 17 Apr 2023 21:20:11 +0800
Subject: [PATCH 294/475] Fix dtype

---
 modules/diffusion/ddpm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index e48e31dfa..ffeb00e12 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -341,8 +341,8 @@ def denorm_spec(self, probs):
         probs = probs.softmax(dim=2)
         sequences = probs.transpose(1, 2).cpu().numpy()
         peaks = torch.from_numpy(
-            librosa.sequence.viterbi(sequences, self.transition)
-        ).to(probs.device).long().unsqueeze(-1)
+            librosa.sequence.viterbi(sequences, self.transition).astype(np.int64)
+        ).to(probs.device).unsqueeze(-1)
         start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width)
         end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width)
         probs[(self.x < start) | (self.x > end)] = 0.

From 249c58c62d57cc44e62d14006a85477dd78d55d8 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 17 Apr 2023 21:29:12 +0800
Subject: [PATCH 295/475] Replace softmax with relu

---
 modules/diffusion/ddpm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index ffeb00e12..b48a17efd 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -338,11 +338,11 @@ def norm_spec(self, curve):
 
     def denorm_spec(self, probs):
         probs = super().denorm_spec(probs)  # [B, T, N]
-        probs = probs.softmax(dim=2)
+        probs *= probs > 0
         sequences = probs.transpose(1, 2).cpu().numpy()
         peaks = torch.from_numpy(
             librosa.sequence.viterbi(sequences, self.transition).astype(np.int64)
         ).to(probs.device).unsqueeze(-1)  # [B, T, 1]
         start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width)
         end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width)
         probs[(self.x < start) | (self.x > end)] = 0.
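Taken together with the follow-up right below ("Use softmax for viterbi and relu for weighted mean"), these patches settle the decode path for the binned pitch representation: softmax the bin probabilities and track the ridge with `librosa.sequence.viterbi` over a banded, row-normalized transition matrix, then apply ReLU and keep only the bins within `width` of each decoded peak for a weighted mean. Below is a minimal NumPy sketch of that settled decode. It is illustrative rather than repository code: the constants mirror `configs/variance.yaml`, librosa >= 0.10 is assumed for this `viterbi` call signature, and the final weighted-mean reduction is inferred from the commit titles since it lies outside the hunks shown here.

```python
import librosa
import numpy as np

vmin, vmax, num_bins, deviation = -12.0, 12.0, 64, 0.25    # mirrors configs/variance.yaml
interval = (vmax - vmin) / (num_bins - 1)                   # assumed bin spacing
sigma = deviation / interval
width = int(3 * sigma)
x = np.arange(num_bins, dtype=np.float32)                   # bin axis, [N]

# Encoding (norm_spec): blur each target value into a Gaussian bump over the bins.
values_gt = np.array([0.5, 0.7, 1.1], dtype=np.float32)     # toy pitch-delta curve, [T]
miu = (values_gt - vmin) / interval                         # values -> fractional bin indices
probs = np.exp(-(((x[None, :] - miu[:, None]) / sigma) ** 2) / 2)  # [T, N]

# Banded, row-normalized transition matrix: only jumps within 2 * width bins
# per frame get nonzero probability.
xx, yy = np.meshgrid(range(num_bins), range(num_bins))
transition = np.maximum(width * 2 - np.abs(xx - yy), 0).astype(np.float64)
transition /= transition.sum(axis=1, keepdims=True)

# Decoding (denorm_spec): softmax feeds Viterbi; ReLU plus a +-width band feed the mean.
p = np.exp(probs - probs.max(axis=1, keepdims=True))
p /= p.sum(axis=1, keepdims=True)                           # softmax over bins
peaks = librosa.sequence.viterbi(p.T.astype(np.float64), transition)  # [T]; expects [N, T]
masked = np.maximum(probs, 0)                               # relu (real model output can dip below 0)
masked *= np.abs(x[None, :] - peaks[:, None]) <= width      # zero out bins far from the peak
values = vmin + (masked * x).sum(axis=1) / masked.sum(axis=1) * interval
print(values)  # recovers values_gt up to quantization error
```

The banded transition matrix is what distinguishes this from a plain per-frame argmax: transitions larger than `2 * width` bins between adjacent frames get zero probability, so isolated octave-style glitches in the probability map are suppressed.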
From c8dff124909dd0f90084c1d581b30f60ba5979dc Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 21:33:13 +0800 Subject: [PATCH 296/475] Use softmax for viterbi and relu for weighted mean --- modules/diffusion/ddpm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index b48a17efd..98ff20058 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -338,11 +338,11 @@ def norm_spec(self, curve): def denorm_spec(self, probs): probs = super().denorm_spec(probs) # [B, T, N] - probs *= probs > 0 - sequences = probs.transpose(1, 2).cpu().numpy() + sequences = probs.softmax(dim=2).transpose(1, 2).cpu().numpy() peaks = torch.from_numpy( librosa.sequence.viterbi(sequences, self.transition).astype(np.int64) ).to(probs.device).unsqueeze(-1) # [B, T, 1] + probs *= probs > 0 start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width) end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width) probs[(self.x < start) | (self.x > end)] = 0. From db1ce60f621af30ce06ac8ad5005459aa31c8bc1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 22:32:07 +0800 Subject: [PATCH 297/475] Fix dimension error --- modules/losses/diff_loss.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index 9311066a7..a669d2275 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -56,7 +56,7 @@ def forward(self, x_recon, noise, nonpadding=None, reference=None): x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) loss = self._forward(x_recon, noise) if reference is not None: - difference = reference.diff(dim=1, prepend=reference[:, 0]).abs() + difference = reference.diff(dim=1, prepend=reference[:, :1]).abs() sensitivity = 1 / (1 + self.alpha * difference) loss = loss * sensitivity.transpose(1, 2).unsqueeze(1) return loss.mean() From fcce2a14acd3a40c332612d1bd810e6c7666b1a1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 17 Apr 2023 22:48:52 +0800 Subject: [PATCH 298/475] Add base pitch to plot --- training/variance_task.py | 12 ++++++++---- utils/plot.py | 6 +++++- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 6ba982138..7fdcd4869 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -128,8 +128,13 @@ def _validation_step(self, sample, batch_idx): self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens']) if pitch_pred is not None: base_pitch = sample['base_pitch'] + delta_pitch = sample['delta_pitch'] self.plot_curve( - batch_idx, base_pitch + sample['delta_pitch'], base_pitch + pitch_pred, curve_name='pitch' + batch_idx, + gt_curve=base_pitch + delta_pitch, + pred_curve=base_pitch + pitch_pred, + base_curve=base_pitch, + curve_name='pitch' ) return outputs, sample['size'] @@ -144,9 +149,8 @@ def plot_dur(self, batch_idx, gt_dur, pred_dur, txt=None): txt = self.phone_encoder.decode(txt[0].cpu().numpy()).split() self.logger.experiment.add_figure(name, dur_to_figure(gt_dur, pred_dur, txt), self.global_step) - def plot_curve(self, batch_idx, gt_curve, pred_curve, curve_name='curve'): + def plot_curve(self, batch_idx, gt_curve, pred_curve, base_curve=None, curve_name='curve'): name = f'{curve_name}_{batch_idx}' gt_curve = gt_curve[0].cpu().numpy() pred_curve = pred_curve[0].cpu().numpy() - # self.logger.experiment.add_figure(name, 
spec_to_figure(pred_curve, vmin=0, vmax=1), self.global_step)
-        self.logger.experiment.add_figure(name, curve_to_figure(gt_curve, pred_curve), self.global_step)
+        self.logger.experiment.add_figure(name, curve_to_figure(gt_curve, pred_curve, base_curve), self.global_step)
diff --git a/utils/plot.py b/utils/plot.py
index b0edafc26..8341a26be 100644
--- a/utils/plot.py
+++ b/utils/plot.py
@@ -56,12 +56,16 @@ def dur_to_figure(dur_gt, dur_pred, txt):
     return fig
 
 
-def curve_to_figure(curve_gt, curve_pred=None):
+def curve_to_figure(curve_gt, curve_pred=None, curve_base=None):
     if isinstance(curve_gt, torch.Tensor):
         curve_gt = curve_gt.cpu().numpy()
     if isinstance(curve_pred, torch.Tensor):
         curve_pred = curve_pred.cpu().numpy()
+    if isinstance(curve_base, torch.Tensor):
+        curve_base = curve_pred.cpu().numpy()
     fig = plt.figure()
+    if curve_base is not None:
+        plt.plot(curve_base, color='g', label='base')
     if curve_pred is not None:
         plt.plot(curve_pred, color='r', label='pred')
     plt.plot(curve_gt, color='b', label='gt')

From 872f2bcc346a613b5840dc65ed8c6947f91e5cf4 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 17 Apr 2023 22:55:42 +0800
Subject: [PATCH 299/475] Fix base curve plotting bugs

---
 training/variance_task.py | 4 +++-
 utils/plot.py             | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/training/variance_task.py b/training/variance_task.py
index 7fdcd4869..ced5fd817 100644
--- a/training/variance_task.py
+++ b/training/variance_task.py
@@ -10,7 +10,7 @@
 from basics.base_task import BaseTask
 from modules.losses.curve_loss import CurveLoss2d
 from modules.losses.diff_loss import (
-    DiffusionNoiseLoss, DiffusionNoiseWithSmoothnessLoss
+    DiffusionNoiseLoss, DiffusionNoiseWithSmoothnessLoss, DiffusionNoiseWithSensitivityLoss
 )
 from modules.losses.dur_loss import DurationLoss
 from modules.toplevel import DiffSingerVariance
@@ -153,4 +153,6 @@ def plot_curve(self, batch_idx, gt_curve, pred_curve, base_curve=None, curve_nam
         name = f'{curve_name}_{batch_idx}'
         gt_curve = gt_curve[0].cpu().numpy()
         pred_curve = pred_curve[0].cpu().numpy()
+        if base_curve is not None:
+            base_curve = base_curve[0].cpu().numpy()
         self.logger.experiment.add_figure(name, curve_to_figure(gt_curve, pred_curve, base_curve), self.global_step)
diff --git a/utils/plot.py b/utils/plot.py
index 8341a26be..7e770f947 100644
--- a/utils/plot.py
+++ b/utils/plot.py
@@ -62,7 +62,7 @@ def curve_to_figure(curve_gt, curve_pred=None, curve_base=None):
     if isinstance(curve_pred, torch.Tensor):
         curve_pred = curve_pred.cpu().numpy()
     if isinstance(curve_base, torch.Tensor):
-        curve_base = curve_pred.cpu().numpy()
+        curve_base = curve_base.cpu().numpy()
     fig = plt.figure()
     if curve_base is not None:
         plt.plot(curve_base, color='g', label='base')

From 89e2867e0388e3ba42cf658382598a21bd1f1101 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Tue, 18 Apr 2023 01:09:13 +0800
Subject: [PATCH 300/475] Allow dur to be None in `StretchRegulator`

---
 modules/fastspeech/tts_modules.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py
index a4550d423..071494b37 100644
--- a/modules/fastspeech/tts_modules.py
+++ b/modules/fastspeech/tts_modules.py
@@ -242,7 +242,7 @@ def forward(self, dur, dur_padding=None, alpha=1.0):
 
 class StretchRegulator(torch.nn.Module):
     # noinspection PyMethodMayBeStatic
-    def forward(self, dur, mel2ph):
+    def forward(self, mel2ph, dur=None):
         """
         Example (no batch dim version):
             1. 
dur = [2,4,3] @@ -257,6 +257,8 @@ def forward(self, dur, mel2ph): :return: stretch (B, T_speech) """ + if dur is None: + dur = mel2ph_to_dur(mel2ph, mel2ph.max()) dur = F.pad(dur, [1, 0], value=1) # Avoid dividing by zero mel2dur = torch.gather(dur, 1, mel2ph) bound_mask = torch.gt(mel2ph[:, 1:], mel2ph[:, :-1]) From b804a4fd38d63950f592ba84ba0e5a2e89512414 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Mon, 17 Apr 2023 12:26:30 -0500 Subject: [PATCH 301/475] Fix overriding opt and scheduler ckp setting --- basics/base_task.py | 74 +++++++++++++++----------- configs/acoustic.yaml | 8 +-- configs/base.yaml | 18 ++++--- preparation/acoustic_preparation.ipynb | 20 ++++--- utils/__init__.py | 24 ++++++++- 5 files changed, 98 insertions(+), 46 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 7c646d081..5d2c1d199 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -84,16 +84,6 @@ def __init__(self, *args, **kwargs): 'total_loss': MeanMetric() } - self.optimizer_params = { - 'lr': hparams['lr'], - 'betas': (hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']), - 'weight_decay': hparams['weight_decay'], - } - self.scheduler_params = { - 'step_size': hparams['lr_decay_steps'], - 'gamma': hparams['lr_decay_gamma'], - } - ########### # Training, validation and testing ########### @@ -203,19 +193,29 @@ def on_validation_epoch_end(self): # noinspection PyMethodMayBeStatic def build_scheduler(self, optimizer): - scheduler_cls = torch.optim.lr_scheduler.StepLR - scheduler = scheduler_cls( + from utils import build_object_from_config + + scheduler_args = hparams['lr_scheduler_args'] + assert scheduler_args['scheduler_cls'] != '' + scheduler = build_object_from_config( + scheduler_args['scheduler_cls'], optimizer, - **filter_kwargs(self.scheduler_params, scheduler_cls) + **scheduler_args ) return scheduler # noinspection PyMethodMayBeStatic def build_optimizer(self, model): - optimizer_cls = torch.optim.AdamW - optimizer = optimizer_cls( + from utils import build_object_from_config + + optimizer_args = hparams['optimizer_args'] + assert optimizer_args['optimizer_cls'] != '' + if 'beta1' in optimizer_args and 'beta2' in optimizer_args and 'betas' not in optimizer_args: + optimizer_args['betas'] = (optimizer_args['beta1'], optimizer_args['beta2']) + optimizer = build_object_from_config( + optimizer_args['optimizer_cls'], filter(lambda p: p.requires_grad, model.parameters()), - **filter_kwargs(self.optimizer_params, optimizer_cls) + **optimizer_args ) return optimizer @@ -373,30 +373,44 @@ def on_save_checkpoint(self, checkpoint): def on_load_checkpoint(self, checkpoint): from lightning.pytorch.trainer.states import RunningStage + from utils import simulate_lr_scheduler if checkpoint.get('trainer_stage', '') == RunningStage.VALIDATING.value: self.skip_immediate_validation = True - - if 'optimizer_states' in checkpoint and checkpoint['optimizer_states']: + + optimizer_args = hparams['optimizer_args'] + scheduler_args = hparams['lr_scheduler_args'] + + if 'beta1' in optimizer_args and 'beta2' in optimizer_args and 'betas' not in optimizer_args: + optimizer_args['betas'] = (optimizer_args['beta1'], optimizer_args['beta2']) + + if checkpoint.get('optimizer_states', None): opt_states = checkpoint['optimizer_states'] - assert len(opt_states) == 1 + assert len(opt_states) == 1 # only support one optimizer opt_state = opt_states[0] for param_group in opt_state['param_groups']: - for k, v in self.optimizer_params.items(): + for k, v in optimizer_args.items(): if k in 
param_group and param_group[k] != v: + if 'lr_schedulers' in checkpoint and checkpoint['lr_schedulers'] and k == 'lr': + continue rank_zero_info(f'| Overriding optimizer parameter {k} from checkpoint: {param_group[k]} -> {v}') - param_group.update(self.optimizer_params) - if 'initial_lr' in param_group: - param_group['initial_lr'] = self.optimizer_params['lr'] + param_group[k] = v + if 'initial_lr' in param_group and 'lr' in optimizer_args and param_group['initial_lr'] != optimizer_args['lr']: + rank_zero_info(f'| Overriding optimizer parameter initial_lr from checkpoint: {param_group["initial_lr"]} -> {optimizer_args["lr"]}') + param_group['initial_lr'] = optimizer_args['lr'] - if 'lr_schedulers' in checkpoint and checkpoint['lr_schedulers']: - assert 'optimizer_states' in checkpoint and checkpoint['optimizer_states'] + if checkpoint.get('lr_schedulers', None): + assert checkpoint.get('optimizer_states', False) schedulers = checkpoint['lr_schedulers'] - assert len(schedulers) == 1 + assert len(schedulers) == 1 # only support one scheduler scheduler = schedulers[0] - for k, v in self.scheduler_params.items(): + for k, v in scheduler_args.items(): if k in scheduler and scheduler[k] != v: rank_zero_info(f'| Overriding scheduler parameter {k} from checkpoint: {scheduler[k]} -> {v}') - scheduler.update(self.scheduler_params) + scheduler[k] = v scheduler['base_lrs'] = [group['initial_lr'] for group in checkpoint['optimizer_states'][0]['param_groups']] - if '_last_lr' in scheduler: - scheduler.pop('_last_lr') + new_lrs = simulate_lr_scheduler(optimizer_args, scheduler_args, scheduler['last_epoch']) + scheduler['_last_lr'] = new_lrs + for param_group, new_lr in zip(checkpoint['optimizer_states'][0]['param_groups'], new_lrs): + if param_group['lr'] != new_lr: + rank_zero_info(f'| Overriding optimizer parameter lr from checkpoint: {param_group["lr"]} -> {new_lr}') + param_group['lr'] = new_lr diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index edf612ede..0451a7f84 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -70,9 +70,11 @@ schedule_type: 'linear' # train and eval num_sanity_val_steps: 1 -lr: 0.0004 -lr_decay_steps: 50000 -lr_decay_gamma: 0.5 +optimizer_args: + lr: 0.0004 +lr_scheduler_args: + step_size: 50000 + gamma: 0.5 max_batch_frames: 80000 max_batch_size: 48 val_with_vocoder: true diff --git a/configs/base.yaml b/configs/base.yaml index 58db5ce89..1265e42fb 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -51,11 +51,17 @@ use_spk_id: false ########### # optimization ########### -lr: 2.0 -warmup_updates: 2000 -optimizer_adam_beta1: 0.9 -optimizer_adam_beta2: 0.98 -weight_decay: 0 +optimizer_args: + optimizer_cls: torch.optim.AdamW + lr: 2.0 + beta1: 0.9 + beta2: 0.98 + weight_decay: 0 +lr_scheduler_args: + scheduler_cls: torch.optim.lr_scheduler.StepLR + warmup_steps: 2000 + step_size: 50000 + gamma: 0.5 clip_grad_norm: 1 dur_loss: mse # huber|mol @@ -91,4 +97,4 @@ pl_trainer_devices: 'auto' pl_trainer_precision: '32-true' pl_trainer_num_nodes: 1 pl_trainer_strategy: 'auto' -ddp_backend: 'nccl' # choose from 'gloo', 'nccl', 'nccl_no_p2p' +ddp_backend: 'nccl_no_p2p' # choose from 'gloo', 'nccl', 'nccl_no_p2p' diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index 903c62b3d..a392707bf 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1221,9 +1221,13 @@ " 'test_prefixes': test_prefixes,\n", " 'max_batch_frames': max_batch_frames,\n", " 'max_batch_size': 
max_batch_size,\n", - " 'lr': lr,\n", - " 'lr_decay_steps': lr_decay_steps,\n", - " 'lr_decay_gamma': lr_decay_gamma,\n", + " 'optimizer_args': {\n", + " 'lr': lr\n", + " },\n", + " 'lr_scheduler_args': {\n", + " 'step_size': lr_decay_steps,\n", + " 'gamma': lr_decay_gamma\n", + " },\n", " 'val_check_interval': val_check_interval,\n", " 'num_valid_plots': min(10, len(test_prefixes)),\n", " 'num_ckpt_keep': num_ckpt_keep,\n", @@ -1544,9 +1548,13 @@ " 'test_prefixes': test_prefixes,\n", " 'max_batch_frames': max_batch_frames,\n", " 'max_batch_size': max_batch_size,\n", - " 'lr': lr,\n", - " 'lr_decay_steps': lr_decay_steps,\n", - " 'lr_decay_gamma': lr_decay_gamma,\n", + " 'optimizer_args': {\n", + " 'lr': lr\n", + " },\n", + " 'lr_scheduler_args': {\n", + " 'step_size': lr_decay_steps,\n", + " 'gamma': lr_decay_gamma\n", + " },\n", " 'val_check_interval': val_check_interval,\n", " 'num_valid_plots': min(20, len(test_prefixes)),\n", " 'num_ckpt_keep': num_ckpt_keep,\n", diff --git a/utils/__init__.py b/utils/__init__.py index b98834735..1001a98e2 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,4 +1,3 @@ -import inspect import pathlib import re import time @@ -131,6 +130,8 @@ def unpack_dict_to_list(samples): def filter_kwargs(dict_to_filter, kwarg_obj): + import inspect + sig = inspect.signature(kwarg_obj) filter_keys = [param.name for param in sig.parameters.values() if param.kind == param.POSITIONAL_OR_KEYWORD] filtered_dict = {filter_key: dict_to_filter[filter_key] for filter_key in filter_keys if filter_key in dict_to_filter} @@ -236,3 +237,24 @@ def num_params(model, print_out=True, model_name="model"): if print_out: print(f'| {model_name} Trainable Parameters: %.3fM' % parameters) return parameters + + +def build_object_from_config(cls_str, *args, **kwargs): + import importlib + + pkg = ".".join(cls_str.split(".")[:-1]) + cls_name = cls_str.split(".")[-1] + cls_type = getattr(importlib.import_module(pkg), cls_name) + + return cls_type(*args, **filter_kwargs(kwargs, cls_type)) + + +def simulate_lr_scheduler(optimizer_args, scheduler_args, last_epoch=-1): + optimizer = build_object_from_config(optimizer_args['optimizer_cls'], [torch.nn.Parameter()], **optimizer_args) + optimizer.param_groups[0]['initial_lr'] = optimizer_args['lr'] + scheduler = build_object_from_config(scheduler_args['scheduler_cls'], optimizer, last_epoch=last_epoch, **scheduler_args) + + if hasattr(scheduler, '_get_closed_form_lr'): + return scheduler._get_closed_form_lr() + else: + return scheduler.get_lr() From e59f3a092f925993c05a3f8460e5ab176ba8e586 Mon Sep 17 00:00:00 2001 From: hrukalive Date: Mon, 17 Apr 2023 12:32:03 -0500 Subject: [PATCH 302/475] Revert ddp backend setting --- configs/base.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/base.yaml b/configs/base.yaml index 1265e42fb..e21dd1dce 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -97,4 +97,4 @@ pl_trainer_devices: 'auto' pl_trainer_precision: '32-true' pl_trainer_num_nodes: 1 pl_trainer_strategy: 'auto' -ddp_backend: 'nccl_no_p2p' # choose from 'gloo', 'nccl', 'nccl_no_p2p' +ddp_backend: 'nccl' # choose from 'gloo', 'nccl', 'nccl_no_p2p' From dcd7199bb8011892be2192a3e40d199e79646c57 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 18 Apr 2023 01:47:07 +0800 Subject: [PATCH 303/475] Switch back to `DiffusionNoiseLoss` --- training/variance_task.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 
ced5fd817..03c9fdf4b 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -8,10 +8,7 @@ import utils.infer_utils from basics.base_dataset import BaseDataset from basics.base_task import BaseTask -from modules.losses.curve_loss import CurveLoss2d -from modules.losses.diff_loss import ( - DiffusionNoiseLoss, DiffusionNoiseWithSmoothnessLoss, DiffusionNoiseWithSensitivityLoss -) +from modules.losses.diff_loss import DiffusionNoiseLoss from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams @@ -76,9 +73,8 @@ def build_losses(self): ) if hparams['predict_pitch']: pitch_hparams = hparams['pitch_prediction_args'] - self.pitch_loss = DiffusionNoiseWithSmoothnessLoss( + self.pitch_loss = DiffusionNoiseLoss( loss_type=hparams['diff_loss_type'], - lambda_tv=pitch_hparams['lambda_tv_loss'] ) # self.pitch_loss = CurveLoss2d( # vmin=pitch_hparams['pitch_delta_vmin'], From 60f30476f833459acf9dd7a00ed450689361282f Mon Sep 17 00:00:00 2001 From: hrukalive Date: Mon, 17 Apr 2023 14:54:00 -0500 Subject: [PATCH 304/475] Support potential multiple param groups --- basics/base_task.py | 8 ++++++-- utils/__init__.py | 9 ++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index 5d2c1d199..cec2f1c3a 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -394,7 +394,7 @@ def on_load_checkpoint(self, checkpoint): continue rank_zero_info(f'| Overriding optimizer parameter {k} from checkpoint: {param_group[k]} -> {v}') param_group[k] = v - if 'initial_lr' in param_group and 'lr' in optimizer_args and param_group['initial_lr'] != optimizer_args['lr']: + if 'initial_lr' in param_group and param_group['initial_lr'] != optimizer_args['lr']: rank_zero_info(f'| Overriding optimizer parameter initial_lr from checkpoint: {param_group["initial_lr"]} -> {optimizer_args["lr"]}') param_group['initial_lr'] = optimizer_args['lr'] @@ -408,7 +408,11 @@ def on_load_checkpoint(self, checkpoint): rank_zero_info(f'| Overriding scheduler parameter {k} from checkpoint: {scheduler[k]} -> {v}') scheduler[k] = v scheduler['base_lrs'] = [group['initial_lr'] for group in checkpoint['optimizer_states'][0]['param_groups']] - new_lrs = simulate_lr_scheduler(optimizer_args, scheduler_args, scheduler['last_epoch']) + new_lrs = simulate_lr_scheduler( + optimizer_args, scheduler_args, + last_epoch=scheduler['last_epoch'], + num_param_groups=len(checkpoint['optimizer_states'][0]['param_groups']) + ) scheduler['_last_lr'] = new_lrs for param_group, new_lr in zip(checkpoint['optimizer_states'][0]['param_groups'], new_lrs): if param_group['lr'] != new_lr: diff --git a/utils/__init__.py b/utils/__init__.py index 1001a98e2..9a7576202 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -249,9 +249,12 @@ def build_object_from_config(cls_str, *args, **kwargs): return cls_type(*args, **filter_kwargs(kwargs, cls_type)) -def simulate_lr_scheduler(optimizer_args, scheduler_args, last_epoch=-1): - optimizer = build_object_from_config(optimizer_args['optimizer_cls'], [torch.nn.Parameter()], **optimizer_args) - optimizer.param_groups[0]['initial_lr'] = optimizer_args['lr'] +def simulate_lr_scheduler(optimizer_args, scheduler_args, last_epoch=-1, num_param_groups=1): + optimizer = build_object_from_config( + optimizer_args['optimizer_cls'], + [{'params': torch.nn.Parameter(), 'initial_lr': optimizer_args['lr']} for _ in range(num_param_groups)], + **optimizer_args + ) scheduler = 
build_object_from_config(scheduler_args['scheduler_cls'], optimizer, last_epoch=last_epoch, **scheduler_args) if hasattr(scheduler, '_get_closed_form_lr'): From caf3a5c2d716cb02dfceadec94f4bf77bd79f339 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 18 Apr 2023 18:10:01 +0800 Subject: [PATCH 305/475] Deprecate rest notes --- preprocessing/variance_binarizer.py | 30 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 30b28642e..a053a0d39 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -103,36 +103,34 @@ def process_item(self, item_name, meta_data, binarization_args): 'mel2ph': mel2ph.cpu().numpy(), } - # Below: calculate frame-level MIDI pitch, which is a step function curve + # Below: calculate and interpolate frame-level MIDI pitch, which is a step function curve mel2dur = torch.gather(F.pad(ph_dur, [1, 0], value=1), 0, mel2ph) # frame-level phone duration note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) mel2note = get_mel2ph_torch( self.lr, note_dur, mel2ph.shape[0], self.timestep, device=self.device ) note_pitch = torch.FloatTensor( - [(librosa.note_to_midi(n) if n != 'rest' else 0) for n in meta_data['note_seq']] + [(librosa.note_to_midi(n, round_midi=False) if n != 'rest' else -1) for n in meta_data['note_seq']] ).to(self.device) frame_midi_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch - - # Below: calculate phoneme-level mean MIDI pitch, eliminating rest frames - rest = frame_midi_pitch == 0 - ph_dur_rest = mel2ph.new_zeros(t_txt + 1).scatter_add(0, mel2ph, rest.long())[1:] - mel2dur_rest = torch.gather(F.pad(ph_dur_rest, [1, 0], value=1), 0, mel2ph) # frame-level rest phone duration - ph_midi = mel2ph.new_zeros(t_txt + 1).float().scatter_add( - 0, mel2ph, frame_midi_pitch / ((mel2dur - mel2dur_rest) + (mel2dur == mel2dur_rest)) # avoid div by zero - )[1:] - - processed_input['midi'] = ph_midi.long().cpu().numpy() - - # Below: interpolate and smooth the pitch step curve as the base pitch curve + rest = (frame_midi_pitch < 0).cpu().numpy() frame_midi_pitch = frame_midi_pitch.cpu().numpy() - rest = rest.cpu().numpy() interp_func = interpolate.interp1d( np.where(~rest)[0], frame_midi_pitch[~rest], kind='nearest', fill_value='extrapolate' ) frame_midi_pitch[rest] = interp_func(np.where(rest)[0]) - smoothed_midi_pitch = self.smooth(torch.from_numpy(frame_midi_pitch).to(self.device)[None])[0] + frame_midi_pitch = torch.from_numpy(frame_midi_pitch).to(self.device) + + # Below: calculate phoneme-level mean MIDI pitch + ph_midi = frame_midi_pitch.new_zeros(t_txt + 1).scatter_add( + 0, mel2ph, frame_midi_pitch / mel2dur + )[1:] + + processed_input['midi'] = ph_midi.long().cpu().numpy() + + # Below: smoothen the pitch step curve as the base pitch curve + smoothed_midi_pitch = self.smooth(frame_midi_pitch[None])[0] processed_input['base_pitch'] = smoothed_midi_pitch.cpu().numpy() From 1f4e8ead5bb9767f521043faf13fc9accddaa9cf Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 18 Apr 2023 21:16:32 +0800 Subject: [PATCH 306/475] Finish `DiffSingerVarianceInfer` --- basics/base_svs_infer.py | 4 +- inference/ds_variance.py | 153 +++++++++++++++++++++++++++++++++++++++ utils/infer_utils.py | 4 +- 3 files changed, 158 insertions(+), 3 deletions(-) create mode 100644 inference/ds_variance.py diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 
5becc7416..5c6d157b9 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -30,10 +30,10 @@ def __init__(self, device=None): def build_model(self, ckpt_steps=None): raise NotImplementedError - def preprocess_input(self, inp): + def preprocess_input(self, param): raise NotImplementedError - def run_model(self, param, return_mel): + def run_model(self, sample): raise NotImplementedError def infer_once(self, param): diff --git a/inference/ds_variance.py b/inference/ds_variance.py new file mode 100644 index 000000000..329f617c2 --- /dev/null +++ b/inference/ds_variance.py @@ -0,0 +1,153 @@ +import librosa +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from scipy import interpolate + +from basics.base_svs_infer import BaseSVSInfer +from modules.fastspeech.tts_modules import LengthRegulator, mel2ph_to_dur +from modules.toplevel import DiffSingerVariance +from utils.hparams import hparams +from utils.phoneme_utils import build_phoneme_list +from utils import load_ckpt +from utils.text_encoder import TokenTextEncoder + + +class DiffSingerVarianceInfer(BaseSVSInfer): + def __init__(self, device=None, ckpt_steps=None): + super().__init__(device=device) + self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) + self.model = self.build_model(ckpt_steps=ckpt_steps) + self.lr = LengthRegulator() + smooth_kernel_size = round(hparams['midi_smooth_width'] / self.timestep) + self.smooth = nn.Conv1d( + in_channels=1, + out_channels=1, + kernel_size=smooth_kernel_size, + bias=False, + padding='same', + padding_mode='replicate' + ).eval().to(self.device) + smooth_kernel = torch.sin(torch.from_numpy( + np.linspace(0, 1, smooth_kernel_size).astype(np.float32) * np.pi + ).to(self.device)) + smooth_kernel /= smooth_kernel.sum() + self.smooth.weight.data = smooth_kernel[None, None] + + def build_model(self, ckpt_steps=None): + model = DiffSingerVariance( + vocab_size=len(self.ph_encoder) + ).eval().to(self.device) + load_ckpt(model, hparams['work_dir'], ckpt_steps=ckpt_steps, required_category='variance', + prefix_in_ckpt='model', strict=True, device=self.device) + return model + + @torch.no_grad() + def preprocess_input(self, param): + """ + :param param: one segment in the .ds file + :return: batch of the model inputs + """ + batch = {} + txt_tokens = torch.LongTensor([self.ph_encoder.encode(param['ph_seq'].split())]).to(self.device) # [B=1, T_ph] + T_ph = txt_tokens.shape[1] + batch['tokens'] = txt_tokens + ph_num = torch.from_numpy(np.array([param['ph_num'].split()], np.int64)).to(self.device) # [B=1, T_w] + ph2word = self.lr(ph_num) # => [B=1, T_ph] + T_w = ph2word.max() + batch['ph2word'] = ph2word + + note_seq = torch.FloatTensor( + [(librosa.note_to_midi(n, round_midi=False) if n != 'rest' else -1) for n in param['note_seq'].split()] + ).to(self.device)[None] # [B=1, T_n] + note_dur_sec = torch.from_numpy(np.array([param['note_dur'].split()], np.float32)).to(self.device) # [B=1, T_n] + note_acc = torch.round(torch.cumsum(note_dur_sec, dim=1) / self.timestep + 0.5).long() + note_dur = torch.diff(note_acc, dim=1, prepend=note_acc.new_zeros(1, 1)) + mel2note = self.lr(note_dur) # [B=1, T_t] + T_t = mel2note.shape[1] + is_slur = torch.BoolTensor([[int(s) for s in param['note_slur'].split()]]).to(self.device) # [B=1, T_n] + note2word = torch.cumsum(~is_slur, dim=1) # [B=1, T_n] + word_dur = note_dur.new_zeros(1, T_w + 1).scatter_add( + 1, note2word, note_dur + )[:, 1:] # => [B=1, T_w] + mel2word = self.lr(word_dur) # [B=1, T_t] + + if 
mel2word.shape[1] != T_t: # Align words with notes + mel2word = F.pad(mel2word, [0, T_t - mel2word.shape[1]], value=mel2word[0, -1]) + word_dur = mel2ph_to_dur(mel2word, T_w) + batch['word_dur'] = word_dur + + if param.get('ph_dur'): # Get mel2ph if ph_dur is given + ph_dur_sec = torch.from_numpy( + np.array([param['ph_dur'].split()], np.float32) + ).to(self.device) # [B=1, T_ph] + ph_acc = torch.round(torch.cumsum(ph_dur_sec, dim=1) / self.timestep + 0.5).long() + ph_dur = torch.diff(ph_acc, dim=1, prepend=ph_acc.new_zeros(1, 1)) + mel2ph = self.lr(ph_dur, txt_tokens == 0) + if mel2ph.shape[1] != T_t: # Align phones with notes + mel2ph = F.pad(mel2ph, [0, T_t - mel2ph.shape[1]], value=mel2ph[0, -1]) + ph_dur = mel2ph_to_dur(mel2ph, T_ph) + else: + ph_dur = None + mel2ph = None + batch['mel2ph'] = mel2ph + + # Calculate frame-level MIDI pitch, which is a step function curve + frame_midi_pitch = torch.gather( + F.pad(note_seq, [1, 0]), 1, mel2note + ) # => frame-level MIDI pitch, [B=1, T_t] + rest = (frame_midi_pitch < 0)[0].cpu().numpy() + frame_midi_pitch = frame_midi_pitch[0].cpu().numpy() + interp_func = interpolate.interp1d( + np.where(~rest)[0], frame_midi_pitch[~rest], + kind='nearest', fill_value='extrapolate' + ) + frame_midi_pitch[rest] = interp_func(np.where(rest)[0]) + frame_midi_pitch = torch.from_numpy(frame_midi_pitch[None]).to(self.device) + base_pitch = self.smooth(frame_midi_pitch) + batch['base_pitch'] = base_pitch + + if ph_dur is not None: + # Phone durations are available, calculate phoneme-level MIDI. + mel2pdur = torch.gather(F.pad(ph_dur, [1, 0], value=1), 1, mel2ph) # frame-level phone duration + ph_midi = frame_midi_pitch.new_zeros(1, T_ph + 1).scatter_add( + 1, mel2ph, frame_midi_pitch / mel2pdur + )[:, 1:] + else: + # Phone durations are not available, calculate word-level MIDI instead. 
+ mel2wdur = torch.gather(F.pad(word_dur, [1, 0], value=1), 1, mel2word) + w_midi = frame_midi_pitch.new_zeros(1, T_w + 1).scatter_add( + 1, mel2word, frame_midi_pitch / mel2wdur + )[:, 1:] + # Convert word-level MIDI to phoneme-level MIDI + ph_midi = torch.gather(F.pad(w_midi, [1, 0]), 1, ph2word) + ph_midi = ph_midi.round().long() + batch['midi'] = ph_midi + + return batch + + @torch.no_grad() + def run_model(self, sample): + txt_tokens = sample['tokens'] + base_pitch = sample['base_pitch'] + dur_pred, pitch_pred = self.model( + txt_tokens, midi=sample['midi'], ph2word=sample['ph2word'], + word_dur=sample['word_dur'], + mel2ph=sample['mel2ph'], base_pitch=base_pitch + ) + if pitch_pred is not None: + pitch_pred = base_pitch + pitch_pred + return dur_pred, pitch_pred + + def infer_once(self, param): + batch = self.preprocess_input(param) + dur_pred, pitch_pred = self.run_model(batch) + if dur_pred is not None: + dur_pred = dur_pred[0].cpu().numpy() + if pitch_pred is not None: + pitch_pred = pitch_pred[0].cpu().numpy() + f0_pred = librosa.midi_to_hz(pitch_pred) + else: + f0_pred = None + return dur_pred, f0_pred diff --git a/utils/infer_utils.py b/utils/infer_utils.py index 43cb3eae3..0f029cb8f 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -8,6 +8,8 @@ def merge_slurs(param): + if not param.get('is_slur_seq'): + return ph_seq = param['ph_seq'].split() note_seq = param['note_seq'].split() note_dur_seq = param['note_dur_seq'].split() @@ -28,7 +30,7 @@ def merge_slurs(param): param['note_seq'] = ' '.join(note_seq) param['note_dur_seq'] = ' '.join(note_dur_seq) param['is_slur_seq'] = ' '.join([str(s) for s in is_slur_seq]) - param['ph_dur'] = ' '.join([str(d) for d in ph_dur]) + param['ph_dur'] = ' '.join([str(round(d, 4)) for d in ph_dur]) def trans_f0_seq(feature_pit, transform): From 3ec315931fd04e8b1eac555549265dc013c50021 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 19 Apr 2023 00:22:16 +0800 Subject: [PATCH 307/475] Round `ph_midi` --- preprocessing/variance_binarizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index a053a0d39..f268af763 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -127,7 +127,7 @@ def process_item(self, item_name, meta_data, binarization_args): 0, mel2ph, frame_midi_pitch / mel2dur )[1:] - processed_input['midi'] = ph_midi.long().cpu().numpy() + processed_input['midi'] = ph_midi.round().long().cpu().numpy() # Below: smoothen the pitch step curve as the base pitch curve smoothed_midi_pitch = self.smooth(frame_midi_pitch[None])[0] From 86e49fe00b085b6e669a322688bd58701631d03e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 19 Apr 2023 00:25:00 +0800 Subject: [PATCH 308/475] Fix device mismatch with DDSP inference --- inference/ds_acoustic.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 3ea543ebe..f42497841 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -46,8 +46,7 @@ def build_vocoder(self): vocoder = VOCODERS[hparams['vocoder']]() else: vocoder = VOCODERS[hparams['vocoder'].split('.')[-1]]() - vocoder.model.eval() - vocoder.model.to(self.device) + vocoder.to_device(self.device) return vocoder def preprocess_input(self, param): From 67f2cc77a0e0fe3cb82605c652950bb2ee12ee7e Mon Sep 17 00:00:00 2001 From: hrukalive Date: Fri, 21 Apr 2023 12:38:39 -0500 Subject: [PATCH 309/475] Fix DDSP vocoder 
device problem --- inference/ds_acoustic.py | 2 -- modules/vocoders/ddsp.py | 5 +++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index f42497841..ce136a0c1 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -29,8 +29,6 @@ def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=N self.lr = LengthRegulator().to(self.device) if load_vocoder: self.vocoder = self.build_vocoder() - self.vocoder.model.eval() - self.vocoder.model.to(self.device) def build_model(self, ckpt_steps=None): model = DiffSingerAcoustic( diff --git a/modules/vocoders/ddsp.py b/modules/vocoders/ddsp.py index c985ad21b..4682b7e63 100644 --- a/modules/vocoders/ddsp.py +++ b/modules/vocoders/ddsp.py @@ -28,8 +28,9 @@ def load_model(model_path: pathlib.Path, device='cpu'): args = DotDict(args) # load model - print(' [Loading] ' + model_path) + print(' [Loading] ' + str(model_path)) model = torch.jit.load(model_path, map_location=torch.device(device)) + model.eval() return model, args @@ -123,7 +124,7 @@ def to_device(self, device): pass def get_device(self): - return 'cpu' + return self.device def spec2wav_torch(self, mel, f0): # mel: [B, T, bins] f0: [B, T] if self.args.data.sampling_rate != hparams['audio_sample_rate']: From a1e13c37f4a79d9ce5fd878c63ae259ed93e1b1e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 21:34:51 +0800 Subject: [PATCH 310/475] Add `RepetitiveDiffusion` and set default --- configs/variance.yaml | 10 +++++----- modules/diffusion/ddpm.py | 21 +++++++++++++++++++++ modules/toplevel.py | 19 ++++++++++++++++--- 3 files changed, 42 insertions(+), 8 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 38073f8f6..552f5c651 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -55,14 +55,14 @@ predict_pitch: true pitch_cascade: true lambda_pitch_loss: 1.0 pitch_prediction_args: - diff_predictor_mode: 1d - pitch_delta_vmin: -12.75 - pitch_delta_vmax: 12.75 - num_pitch_bins: 128 + diff_predictor_mode: repeat + pitch_delta_vmin: -12 # -12.75 + pitch_delta_vmax: 12 # 12.75 + num_pitch_bins: 64 deviation: 0.25 hidden_size: 512 residual_layers: 20 - residual_channels: 512 + residual_channels: 256 residual_layers: 10 residual_channels: 256 diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 98ff20058..c13c99437 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -288,6 +288,27 @@ def denorm_spec(self, x): return (x + 1) / 2 * (self.spec_max - self.spec_min) + self.spec_min +class RepetitiveDiffusion(GaussianDiffusion): + def __init__(self, vmin, vmax, repeat_bins, + timesteps=1000, k_step=1000, + denoiser_type=None, denoiser_args=None, + betas=None): + self.vmin = vmin + self.vmax = vmax + self.repeat_bins = repeat_bins + super().__init__( + repeat_bins, timesteps=timesteps, k_step=k_step, + denoiser_type=denoiser_type, denoiser_args=denoiser_args, + betas=betas, spec_min=[vmin], spec_max=[vmax] + ) + + def norm_spec(self, x): + return super().norm_spec(x.clamp(min=self.vmin, max=self.vmax).unsqueeze(-1).repeat([1, 1, self.repeat_bins])) + + def denorm_spec(self, x): + return super().denorm_spec(x).mean(dim=-1).clamp(min=self.vmin, max=self.vmax) + + class CurveDiffusion1d(GaussianDiffusion): def __init__(self, vmin, vmax, timesteps=1000, k_step=1000, denoiser_type=None, denoiser_args=None, betas=None): diff --git a/modules/toplevel.py b/modules/toplevel.py index d5d6ecfa8..3bbc2f2b0 100644 --- 
a/modules/toplevel.py +++ b/modules/toplevel.py @@ -5,7 +5,7 @@ from modules.commons.common_layers import ( XavierUniformInitLinear as Linear, ) -from modules.diffusion.ddpm import GaussianDiffusion, CurveDiffusion1d, CurveDiffusion2d +from modules.diffusion.ddpm import GaussianDiffusion, RepetitiveDiffusion, CurveDiffusion1d, CurveDiffusion2d from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from modules.fastspeech.tts_modules import LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance @@ -60,6 +60,19 @@ def __init__(self, vocab_size): pitch_hparams = hparams['pitch_prediction_args'] self.base_pitch_embed = Linear(1, hparams['hidden_size']) diff_predictor_mode = pitch_hparams['diff_predictor_mode'] + if diff_predictor_mode == 'repeat': + self.pitch_predictor = RepetitiveDiffusion( + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + repeat_bins=pitch_hparams['num_pitch_bins'], + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + pitch_hparams['residual_layers'], + pitch_hparams['residual_channels'] + ) + ) if diff_predictor_mode == '1d': self.pitch_predictor = CurveDiffusion1d( vmin=pitch_hparams['pitch_delta_vmin'], @@ -68,8 +81,8 @@ def __init__(self, vocab_size): k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], denoiser_args=( - hparams['residual_layers'], - hparams['residual_channels'] + pitch_hparams['residual_layers'], + pitch_hparams['residual_channels'] ) ) elif diff_predictor_mode == '2d': From cf9331f8f848c248940cb678f1a9bfd34945215a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 21:37:07 +0800 Subject: [PATCH 311/475] Add back missing `nn.init.constant_` --- modules/commons/common_layers.py | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/modules/commons/common_layers.py b/modules/commons/common_layers.py index 56a1e0c21..d72ec869c 100644 --- a/modules/commons/common_layers.py +++ b/modules/commons/common_layers.py @@ -10,14 +10,30 @@ class NormalInitEmbedding(torch.nn.Embedding): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + num_embeddings: int, + embedding_dim: int, + padding_idx: int | None = None, + *args, + **kwargs + ): + super().__init__(num_embeddings, embedding_dim, *args, padding_idx=padding_idx, **kwargs) nn.init.normal_(self.weight, mean=0, std=self.embedding_dim ** -0.5) + if padding_idx is not None: + nn.init.constant_(self.weight[padding_idx], 0) class XavierUniformInitLinear(torch.nn.Linear): - def __init__(self, *args, bias=True, **kwargs): - super().__init__(*args, **kwargs) + def __init__( + self, + in_features: int, + out_features: int, + *args, + bias: bool = True, + **kwargs + ): + super().__init__(in_features, out_features, *args, bias=bias, **kwargs) nn.init.xavier_uniform_(self.weight) if bias: nn.init.constant_(self.bias, 0.) 
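The `RepetitiveDiffusion` class introduced in PATCH 310 above reuses the spectrogram-shaped diffusion machinery for a plain 1-D curve: the normalized curve is tiled across `repeat_bins` channels on the way in, and the channels are averaged back into a curve on the way out. A minimal sketch of just that norm/denorm round trip, with illustrative constants (not repository code):

```python
import torch

vmin, vmax, repeat_bins = -12.0, 12.0, 64   # pitch_delta_vmin/vmax, num_pitch_bins

def norm_spec(curve: torch.Tensor) -> torch.Tensor:
    """[B, T] curve -> [B, T, N] pseudo-spectrogram in [-1, 1]."""
    curve = curve.clamp(min=vmin, max=vmax)
    normed = (curve - vmin) / (vmax - vmin) * 2 - 1
    return normed.unsqueeze(-1).repeat([1, 1, repeat_bins])

def denorm_spec(spec: torch.Tensor) -> torch.Tensor:
    """[B, T, N] denoised pseudo-spectrogram -> [B, T] curve."""
    curve = (spec + 1) / 2 * (vmax - vmin) + vmin
    return curve.mean(dim=-1).clamp(min=vmin, max=vmax)

delta_pitch = (torch.randn(1, 8) * 3).clamp(vmin, vmax)  # toy pitch-delta curve, in semitones
assert torch.allclose(denorm_spec(norm_spec(delta_pitch)), delta_pitch, atol=1e-5)
```

Averaging over the repeated bins after sampling also cancels some of the denoiser's independent per-bin noise, which is presumably part of why this parameterization replaces the 1d/2d curve diffusions as the default `diff_predictor_mode` here.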
From ac4b647ca07793d4b0356b10415f63af4d1e6471 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 21:48:09 +0800 Subject: [PATCH 312/475] Update scheduler and optimizer configs --- configs/variance.yaml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 552f5c651..35eedee4e 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -73,9 +73,11 @@ schedule_type: 'linear' # train and eval num_sanity_val_steps: 1 -lr: 0.0004 -lr_decay_steps: 5000 -lr_decay_gamma: 0.5 +optimizer_args: + lr: 0.0004 +lr_scheduler_args: + step_size: 50000 + gamma: 0.5 max_batch_frames: 80000 max_batch_size: 48 val_with_vocoder: true From 89d2d5a2b4afa0e1610bdd32d48693b4914b2e26 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 21:59:01 +0800 Subject: [PATCH 313/475] Reuse `FastSpeech2Encoder` --- modules/fastspeech/acoustic_encoder.py | 30 +------------------------- modules/fastspeech/tts_modules.py | 29 +++++++++++++++++-------- 2 files changed, 21 insertions(+), 38 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 96d0e6f3f..c616dc6dc 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -12,40 +12,12 @@ from utils.text_encoder import PAD_INDEX -class FastSpeech2AcousticEncoder(FastSpeech2Encoder): - def forward_embedding(self, txt_tokens, dur_embed): - # embed tokens and positions - x = self.embed_scale * self.embed_tokens(txt_tokens) - x = x + dur_embed - if hparams['use_pos_embed']: - if hparams['rel_pos']: - x = self.embed_positions(x) - else: - positions = self.embed_positions(txt_tokens) - x = x + positions - x = F.dropout(x, p=self.dropout, training=self.training) - return x - - def forward(self, txt_tokens, dur_embed): - """ - :param txt_tokens: [B, T] - :param dur_embed: [B, T, H] - :return: { - 'encoder_out': [T x B x H] - } - """ - encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach() - x = self.forward_embedding(txt_tokens, dur_embed) # [B, T, H] - x = super()._forward(x, encoder_padding_mask) - return x - - class FastSpeech2Acoustic(nn.Module): def __init__(self, vocab_size): super().__init__() self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) self.dur_embed = Linear(1, hparams['hidden_size']) - self.encoder = FastSpeech2AcousticEncoder( + self.encoder = FastSpeech2Encoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] ) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 2aef458bd..b0ce9af00 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -265,14 +265,24 @@ def __init__(self, embed_tokens, hidden_size, num_layers, ffn_kernel_size=9, dro hidden_size, self.padding_idx, init_size=DEFAULT_MAX_TARGET_POSITIONS, ) - def _forward(self, x, padding_mask=None, attn_mask=None, return_hiddens=False): - """ - :param x: [B, T, C] - :param padding_mask: [B, T] - :return: [B, T, C] or [L, B, T, C] - """ - padding_mask = x.abs().sum(-1).eq(0).detach() if padding_mask is None else padding_mask - nonpadding_mask_TB = 1 - padding_mask.transpose(0, 1).float()[:, :, None] # [T, B, 1] + def forward_embedding(self, txt_tokens, extra_embed=None): + # embed tokens and positions + x = self.embed_scale * self.embed_tokens(txt_tokens) + if extra_embed is not None: + x = x + extra_embed 
+ if hparams['use_pos_embed']: + if hparams['rel_pos']: + x = self.embed_positions(x) + else: + positions = self.embed_positions(txt_tokens) + x = x + positions + x = F.dropout(x, p=self.dropout, training=self.training) + return x + + def forward(self, txt_tokens, dur_embed, attn_mask=None, return_hiddens=False): + encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach() + x = self.forward_embedding(txt_tokens, dur_embed) # [B, T, H] + nonpadding_mask_TB = 1 - encoder_padding_mask.transpose(0, 1).float()[:, :, None] # [T, B, 1] if self.use_pos_embed: positions = self.pos_embed_alpha * self.embed_positions(x[..., 0]) x = x + positions @@ -281,7 +291,7 @@ def _forward(self, x, padding_mask=None, attn_mask=None, return_hiddens=False): x = x.transpose(0, 1) * nonpadding_mask_TB hiddens = [] for layer in self.layers: - x = layer(x, encoder_padding_mask=padding_mask, attn_mask=attn_mask) * nonpadding_mask_TB + x = layer(x, encoder_padding_mask=encoder_padding_mask, attn_mask=attn_mask) * nonpadding_mask_TB hiddens.append(x) if self.use_last_norm: x = self.layer_norm(x) * nonpadding_mask_TB @@ -290,4 +300,5 @@ def _forward(self, x, padding_mask=None, attn_mask=None, return_hiddens=False): x = x.transpose(1, 2) # [L, B, T, C] else: x = x.transpose(0, 1) # [B, T, C] + x = x return x From 676a084a22c25883a0f8186ba3ce46ed8305671f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 22:04:11 +0800 Subject: [PATCH 314/475] Remove midi embedding from pitch prediction graph --- modules/fastspeech/tts_modules.py | 1 - modules/fastspeech/variance_encoder.py | 37 ++++---------------------- 2 files changed, 5 insertions(+), 33 deletions(-) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index f1669f56a..a4c56b9b7 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -357,5 +357,4 @@ def forward(self, txt_tokens, dur_embed, attn_mask=None, return_hiddens=False): x = x.transpose(1, 2) # [L, B, T, C] else: x = x.transpose(0, 1) # [B, T, C] - x = x return x diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 2dc4bdcbe..159788290 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -11,34 +11,6 @@ from utils.text_encoder import PAD_INDEX -class FastSpeech2VarianceEncoder(FastSpeech2Encoder): - def forward_embedding(self, txt_tokens, midi_embed, onset_embed, word_dur_embed): - # embed tokens and positions - x = self.embed_scale * self.embed_tokens(txt_tokens) - x = x + midi_embed + onset_embed + word_dur_embed - if hparams['use_pos_embed']: - if hparams['rel_pos']: - x = self.embed_positions(x) - else: - positions = self.embed_positions(txt_tokens) - x = x + positions - x = F.dropout(x, p=self.dropout, training=self.training) - return x - - def forward(self, txt_tokens, midi_embed, onset_embed, word_dur_embed): - """ - :param txt_tokens: [B, T] - :param midi_embed: [B, T, H] - :param onset_embed: [B, T, H] - :param word_dur_embed: [B, T, H] - :return: [T x B x H] - """ - encoder_padding_mask = txt_tokens.eq(self.padding_idx).detach() - x = self.forward_embedding(txt_tokens, midi_embed, onset_embed, word_dur_embed) # [B, T, H] - x = super()._forward(x, encoder_padding_mask) - return x - - class FastSpeech2Variance(nn.Module): def __init__(self, vocab_size): super().__init__() @@ -50,7 +22,7 @@ def __init__(self, vocab_size): if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) - self.encoder 
= FastSpeech2VarianceEncoder( + self.encoder = FastSpeech2Encoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] ) @@ -80,7 +52,6 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T :return: (train) encoder_out, ph_dur_xs; (infer) encoder_out, ph_dur """ b = txt_tokens.shape[0] - midi_embed = self.midi_embed(midi) # => [B, T_ph, H] onset = torch.diff(ph2word, dim=1, prepend=ph2word.new_zeros(b, 1)) > 0 onset_embed = self.onset_embed(onset.long()) # [B, T_ph, H] if word_dur is None or not infer: @@ -89,11 +60,13 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T )[:, 1:] # [B, T_ph] => [B, T_w] word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word) # [B, T_w] => [B, T_ph] word_dur_embed = self.word_dur_embed(word_dur.float()[:, :, None]) - encoder_out = self.encoder(txt_tokens, midi_embed, onset_embed, word_dur_embed) + encoder_out = self.encoder(txt_tokens, onset_embed + word_dur_embed) if not hparams['predict_dur']: return encoder_out, None - ph_dur_pred = self.dur_predictor(encoder_out, x_masks=txt_tokens == PAD_INDEX, infer=infer) + midi_embed = self.midi_embed(midi) # => [B, T_ph, H] + dur_cond = encoder_out + midi_embed + ph_dur_pred = self.dur_predictor(dur_cond, x_masks=txt_tokens == PAD_INDEX, infer=infer) return encoder_out, ph_dur_pred From 6f6de7e8d500ed6941e9f96e99aa005ee388dd7f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 22:04:59 +0800 Subject: [PATCH 315/475] Remove useless code --- modules/fastspeech/tts_modules.py | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index b0ce9af00..054d585bf 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -300,5 +300,4 @@ def forward(self, txt_tokens, dur_embed, attn_mask=None, return_hiddens=False): x = x.transpose(1, 2) # [L, B, T, C] else: x = x.transpose(0, 1) # [B, T, C] - x = x return x From 7d9411e51d9df4bdd4677d944b2d5aa594551b3b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 22 Apr 2023 23:36:00 +0800 Subject: [PATCH 316/475] elif --- modules/toplevel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/toplevel.py b/modules/toplevel.py index 3bbc2f2b0..5c3217e99 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -73,7 +73,7 @@ def __init__(self, vocab_size): pitch_hparams['residual_channels'] ) ) - if diff_predictor_mode == '1d': + elif diff_predictor_mode == '1d': self.pitch_predictor = CurveDiffusion1d( vmin=pitch_hparams['pitch_delta_vmin'], vmax=pitch_hparams['pitch_delta_vmax'], From 57fc0880aeff4e05c29159c61abde86027a09646 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 24 Apr 2023 20:26:49 +0800 Subject: [PATCH 317/475] Fix vanishing gradient problem caused by `torch.clamp` --- modules/fastspeech/tts_modules.py | 4 ++-- modules/losses/dur_loss.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index a4c56b9b7..61df94d73 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -132,9 +132,9 @@ def forward(self, xs, x_masks=None, infer=True): xs = self.linear(xs.transpose(1, -1)) # [B, T, C] xs = xs * (1 - x_masks.float())[:, :, None] # (B, T, C) - dur_pred = torch.clamp(self.out2dur(xs), min=0.) 
# avoid negative value + dur_pred = self.out2dur(xs) if infer: - dur_pred = torch.round(dur_pred).long() + dur_pred = dur_pred.clamp(min=0.).round().long() # avoid negative value return dur_pred diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py index 5e2f4b056..5aec3c929 100644 --- a/modules/losses/dur_loss.py +++ b/modules/losses/dur_loss.py @@ -33,6 +33,8 @@ def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: # pdur_loss pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) + dur_pred = dur_pred.clamp(min=0.) # clip to avoid NaN loss + # wdur loss shape = dur_pred.shape[0], ph2word.max() + 1 wdur_pred = dur_pred.new_zeros(*shape).scatter_add( From 3beaf5d6299c9ba27b83b5e663e7e319bab2ba0c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 24 Apr 2023 20:33:26 +0800 Subject: [PATCH 318/475] Remove `padding_idx` --- modules/fastspeech/variance_encoder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 159788290..080ad2df5 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -15,7 +15,7 @@ class FastSpeech2Variance(nn.Module): def __init__(self, vocab_size): super().__init__() self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) - self.midi_embed = Embedding(128, hparams['hidden_size'], PAD_INDEX) + self.midi_embed = Embedding(128, hparams['hidden_size']) self.onset_embed = Embedding(2, hparams['hidden_size']) self.word_dur_embed = Linear(1, hparams['hidden_size']) From 88cb146a488b760c2026a93c085ae39b2587d27d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 28 Apr 2023 23:30:17 +0800 Subject: [PATCH 319/475] Add energy prediction, re-organize losses --- configs/variance.yaml | 10 +++ modules/fastspeech/tts_modules.py | 45 ++++++++++++ modules/losses/common_losses.py | 24 ------ modules/losses/curve_loss.py | 36 --------- modules/losses/diff_loss.py | 28 ------- modules/losses/dur_loss.py | 56 -------------- modules/losses/variance_loss.py | 110 ++++++++++++++++++++++++++++ modules/toplevel.py | 42 ++++++++--- preprocessing/variance_binarizer.py | 14 +++- training/variance_task.py | 45 +++++++++--- utils/binarizer_utils.py | 68 +++++++++++------ utils/plot.py | 5 +- 12 files changed, 288 insertions(+), 195 deletions(-) delete mode 100644 modules/losses/common_losses.py delete mode 100644 modules/losses/curve_loss.py delete mode 100644 modules/losses/dur_loss.py create mode 100644 modules/losses/variance_loss.py diff --git a/configs/variance.yaml b/configs/variance.yaml index 35eedee4e..e879210f7 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -15,6 +15,7 @@ test_prefixes: [ audio_sample_rate: 44100 hop_size: 512 # Hop size. +win_size: 2048 # FFT size. 
midi_smooth_width: 0.2 # in seconds binarization_args: @@ -64,6 +65,15 @@ pitch_prediction_args: residual_layers: 20 residual_channels: 256 +predict_energy: true +lambda_energy_loss: 1.0 +energy_prediction_args: + hidden_size: 512 + dropout: 0.1 + num_layers: 8 + kernel_size: 3 + loss_type: mse + residual_layers: 10 residual_channels: 256 dilation_cycle_length: 5 # * diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 61df94d73..6814b5408 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -138,6 +138,51 @@ def forward(self, xs, x_masks=None, infer=True): return dur_pred +class VariancePredictor(torch.nn.Module): + def __init__(self, in_dims, n_layers=5, n_chans=512, kernel_size=5, + dropout_rate=0.1, padding='SAME'): + """Initialize variance predictor module. + Args: + in_dims (int): Input dimension. + n_layers (int, optional): Number of convolutional layers. + n_chans (int, optional): Number of channels of convolutional layers. + kernel_size (int, optional): Kernel size of convolutional layers. + dropout_rate (float, optional): Dropout rate. + """ + super(VariancePredictor, self).__init__() + + self.conv = torch.nn.ModuleList() + self.kernel_size = kernel_size + self.padding = padding + for idx in range(n_layers): + in_chans = in_dims if idx == 0 else n_chans + self.conv += [torch.nn.Sequential( + torch.nn.ConstantPad1d(((kernel_size - 1) // 2, (kernel_size - 1) // 2) + if padding == 'SAME' + else (kernel_size - 1, 0), 0), + torch.nn.Conv1d(in_chans, n_chans, kernel_size, stride=1, padding=0), + torch.nn.ReLU(), + LayerNorm(n_chans, dim=1), + torch.nn.Dropout(dropout_rate) + )] + self.linear = torch.nn.Linear(n_chans, 1) + self.embed_positions = SinusoidalPositionalEmbedding(in_dims, 0, init_size=4096) + self.pos_embed_alpha = nn.Parameter(torch.Tensor([1])) + + def forward(self, xs): + """ + :param xs: [B, T, H] + :return: [B, T] + """ + positions = self.pos_embed_alpha * self.embed_positions(xs[..., 0]) + xs = xs + positions + xs = xs.transpose(1, -1) # (B, idim, Tmax) + for f in self.conv: + xs = f(xs) # (B, C, Tmax) + xs = self.linear(xs.transpose(1, -1)).squeeze(-1) # (B, Tmax) + return xs + + class PitchPredictor(torch.nn.Module): def __init__(self, vmin, vmax, num_bins, deviation, in_dims, n_layers=5, n_chans=384, kernel_size=5, diff --git a/modules/losses/common_losses.py b/modules/losses/common_losses.py deleted file mode 100644 index 5251965c2..000000000 --- a/modules/losses/common_losses.py +++ /dev/null @@ -1,24 +0,0 @@ -import torch.nn as nn - - -class TVLoss(nn.Module): - """ - Adapted from https://github.com/jxgu1016/Total_Variation_Loss.pytorch - """ - - def __init__(self, weight=1): - super(TVLoss, self).__init__() - self.weight = weight - - def forward(self, x): - """ - :param x: [B, C, H, W] - """ - b, c, h_x, w_x, *_ = x.shape - count_h = c * (h_x - 1) * w_x - count_w = c * h_x * (w_x - 1) - h_tv = (x[:, :, 1:, :] - x[:, :, :-1, :]).pow(2).sum() - w_tv = (x[:, :, :, 1:] - x[:, :, :, :-1]).pow(2).sum() - return self.weight * 2 * ( - (h_tv / count_h if count_h > 0 else 0) + (w_tv / count_w if count_w > 0 else 0) - ) / b diff --git a/modules/losses/curve_loss.py b/modules/losses/curve_loss.py deleted file mode 100644 index 0cfb5e1e1..000000000 --- a/modules/losses/curve_loss.py +++ /dev/null @@ -1,36 +0,0 @@ -import torch -import torch.nn as nn -from torch import Tensor - - -class CurveLoss2d(nn.Module): - """ - Loss module for parameter curve represented by gaussian-blurred 2-D probability bins. 
- """ - - def __init__(self, vmin, vmax, num_bins, deviation): - super().__init__() - self.vmin = vmin - self.vmax = vmax - self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins - self.sigma = deviation / self.interval - self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] - self.loss = nn.BCEWithLogitsLoss() - - def values_to_bins(self, values: Tensor) -> Tensor: - return (values - self.vmin) / self.interval - - def curve_to_probs(self, curve: Tensor) -> Tensor: - miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] - probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] - return probs - - def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: - """ - Calculate BCE with logits loss between predicted probs and gaussian-blurred bins representing gt curve. - :param y_pred: predicted probs [B, T, N] - :param c_gt: ground truth curve [B, T] - :param mask: (bool) mask of valid parts in ground truth curve [B, T] - """ - y_gt = self.curve_to_probs(c_gt) - return self.loss(y_pred, y_gt * mask[:, :, None]) diff --git a/modules/losses/diff_loss.py b/modules/losses/diff_loss.py index a669d2275..9fc073501 100644 --- a/modules/losses/diff_loss.py +++ b/modules/losses/diff_loss.py @@ -1,8 +1,6 @@ import torch.nn as nn from torch import Tensor -from modules.losses.common_losses import TVLoss - class DiffusionNoiseLoss(nn.Module): def __init__(self, loss_type): @@ -34,29 +32,3 @@ def forward(self, x_recon: Tensor, noise: Tensor, nonpadding: Tensor = None) -> """ x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) return self._forward(x_recon, noise).mean() - - -class DiffusionNoiseWithSmoothnessLoss(DiffusionNoiseLoss): - def __init__(self, loss_type, lambda_tv=0.5): - super().__init__(loss_type) - self.lambda_tv = lambda_tv - self.tv_loss = TVLoss() - - def forward(self, x_recon, noise, nonpadding=None): - x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) - return self._forward(x_recon, noise).mean() + self.lambda_tv * self.tv_loss(x_recon - noise) - - -class DiffusionNoiseWithSensitivityLoss(DiffusionNoiseLoss): - def __init__(self, loss_type, alpha=1): - super().__init__(loss_type) - self.alpha = alpha - - def forward(self, x_recon, noise, nonpadding=None, reference=None): - x_recon, noise = self._mask_nonpadding(x_recon, noise, nonpadding) - loss = self._forward(x_recon, noise) - if reference is not None: - difference = reference.diff(dim=1, prepend=reference[:, :1]).abs() - sensitivity = 1 / (1 + self.alpha * difference) - loss = loss * sensitivity.transpose(1, 2).unsqueeze(1) - return loss.mean() diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py deleted file mode 100644 index 5aec3c929..000000000 --- a/modules/losses/dur_loss.py +++ /dev/null @@ -1,56 +0,0 @@ -import torch -import torch.nn as nn -from torch import Tensor - - -class DurationLoss(nn.Module): - """ - Loss module as combination of phone duration loss, word duration loss and sentence duration loss. 
- """ - - def __init__(self, offset, loss_type, - lambda_pdur=0.6, lambda_wdur=0.3, lambda_sdur=0.1): - super().__init__() - self.loss_type = loss_type - if self.loss_type == 'mse': - self.loss = nn.MSELoss() - elif self.loss_type == 'huber': - self.loss = nn.HuberLoss() - else: - raise NotImplementedError() - self.offset = offset - - self.lambda_pdur = lambda_pdur - self.lambda_wdur = lambda_wdur - self.lambda_sdur = lambda_sdur - - def linear2log(self, any_dur): - return torch.log(any_dur + self.offset) - - def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: - dur_gt = dur_gt.to(dtype=dur_pred.dtype) - - # pdur_loss - pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) - - dur_pred = dur_pred.clamp(min=0.) # clip to avoid NaN loss - - # wdur loss - shape = dur_pred.shape[0], ph2word.max() + 1 - wdur_pred = dur_pred.new_zeros(*shape).scatter_add( - 1, ph2word, dur_pred - )[:, 1:] # [B, T_ph] => [B, T_w] - wdur_gt = dur_gt.new_zeros(*shape).scatter_add( - 1, ph2word, dur_gt - )[:, 1:] # [B, T_ph] => [B, T_w] - wdur_loss = self.lambda_wdur * self.loss(self.linear2log(wdur_pred), self.linear2log(wdur_gt)) - - # sdur loss - sdur_pred = dur_pred.sum(dim=1) - sdur_gt = dur_gt.sum(dim=1) - sdur_loss = self.lambda_sdur * self.loss(self.linear2log(sdur_pred), self.linear2log(sdur_gt)) - - # combine - dur_loss = pdur_loss + wdur_loss + sdur_loss - - return dur_loss diff --git a/modules/losses/variance_loss.py b/modules/losses/variance_loss.py new file mode 100644 index 000000000..a24558f16 --- /dev/null +++ b/modules/losses/variance_loss.py @@ -0,0 +1,110 @@ +import torch +import torch.nn as nn +from torch import Tensor + + +class DurationLoss(nn.Module): + """ + Loss module as combination of phone duration loss, word duration loss and sentence duration loss. + """ + + def __init__(self, offset, loss_type, + lambda_pdur=0.6, lambda_wdur=0.3, lambda_sdur=0.1): + super().__init__() + self.loss_type = loss_type + if self.loss_type == 'mse': + self.loss = nn.MSELoss() + elif self.loss_type == 'huber': + self.loss = nn.HuberLoss() + else: + raise NotImplementedError() + self.offset = offset + + self.lambda_pdur = lambda_pdur + self.lambda_wdur = lambda_wdur + self.lambda_sdur = lambda_sdur + + def linear2log(self, any_dur): + return torch.log(any_dur + self.offset) + + def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: + dur_gt = dur_gt.to(dtype=dur_pred.dtype) + + # pdur_loss + pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) + + dur_pred = dur_pred.clamp(min=0.) # clip to avoid NaN loss + + # wdur loss + shape = dur_pred.shape[0], ph2word.max() + 1 + wdur_pred = dur_pred.new_zeros(*shape).scatter_add( + 1, ph2word, dur_pred + )[:, 1:] # [B, T_ph] => [B, T_w] + wdur_gt = dur_gt.new_zeros(*shape).scatter_add( + 1, ph2word, dur_gt + )[:, 1:] # [B, T_ph] => [B, T_w] + wdur_loss = self.lambda_wdur * self.loss(self.linear2log(wdur_pred), self.linear2log(wdur_gt)) + + # sdur loss + sdur_pred = dur_pred.sum(dim=1) + sdur_gt = dur_gt.sum(dim=1) + sdur_loss = self.lambda_sdur * self.loss(self.linear2log(sdur_pred), self.linear2log(sdur_gt)) + + # combine + dur_loss = pdur_loss + wdur_loss + sdur_loss + + return dur_loss + + +class CurveLoss1d(nn.Module): + """ + Loss module for 1d parameter curve with non-padding masks. 
+ """ + def __init__(self, loss_type): + super().__init__() + self.loss_type = loss_type + if self.loss_type == 'mse': + self.loss = nn.MSELoss(reduction='none') + elif self.loss_type == 'huber': + self.loss = nn.HuberLoss(reduction='none') + else: + raise NotImplementedError() + + def forward(self, c_pred, c_gt, mask=None): + loss = self.loss(c_pred, c_gt) + if mask is not None: + loss *= mask + return loss.mean() + + +class CurveLoss2d(nn.Module): + """ + Loss module for parameter curve represented by gaussian-blurred 2-D probability bins. + """ + + def __init__(self, vmin, vmax, num_bins, deviation): + super().__init__() + self.vmin = vmin + self.vmax = vmax + self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins + self.sigma = deviation / self.interval + self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] + self.loss = nn.BCEWithLogitsLoss() + + def values_to_bins(self, values: Tensor) -> Tensor: + return (values - self.vmin) / self.interval + + def curve_to_probs(self, curve: Tensor) -> Tensor: + miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] + probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] + return probs + + def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: + """ + Calculate BCE with logits loss between predicted probs and gaussian-blurred bins representing gt curve. + :param y_pred: predicted probs [B, T, N] + :param c_gt: ground truth curve [B, T] + :param mask: (bool) mask of valid parts in ground truth curve [B, T] + """ + y_gt = self.curve_to_probs(c_gt) + return self.loss(y_pred, y_gt * mask[:, :, None]) diff --git a/modules/toplevel.py b/modules/toplevel.py index 5c3217e99..f9b0b43e3 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -7,7 +7,7 @@ ) from modules.diffusion.ddpm import GaussianDiffusion, RepetitiveDiffusion, CurveDiffusion1d, CurveDiffusion2d from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic -from modules.fastspeech.tts_modules import LengthRegulator +from modules.fastspeech.tts_modules import LengthRegulator, VariancePredictor from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams @@ -111,6 +111,18 @@ def __init__(self, vocab_size): # n_chans=pitch_hparams['hidden_size'] # ) + if hparams['predict_energy']: + self.pitch_embed = Linear(1, hparams['hidden_size']) + energy_hparams = hparams['energy_prediction_args'] + self.energy_predictor = VariancePredictor( + in_dims=hparams['hidden_size'], + n_chans=energy_hparams['hidden_size'], + n_layers=energy_hparams['num_layers'], + dropout_rate=energy_hparams['dropout'], + padding=hparams['ffn_padding'], + kernel_size=energy_hparams['kernel_size'] + ) + @property def category(self): return 'variance' @@ -122,8 +134,8 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, ph_dur=ph_dur, word_dur=word_dur, infer=infer ) - if not hparams['predict_pitch']: - return dur_pred_out, None + if not hparams['predict_pitch'] and not hparams['predict_energy']: + return dur_pred_out, None, None if mel2ph is None or hparams['dur_cascade']: # (extract mel2ph from dur_pred_out) @@ -132,12 +144,18 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, encoder_out = F.pad(encoder_out, [0, 0, 1, 0]) mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']]) condition = torch.gather(encoder_out, 1, mel2ph_) - pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) - - 
pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer) - return dur_pred_out, pitch_pred_out - # pitch_pred, pitch_probs = self.pitch_predictor(condition, base_pitch) - # if infer: - # return dur_pred_out, pitch_pred - # else: - # return dur_pred_out, pitch_probs + + if hparams['predict_pitch']: + pitch_cond = condition + self.pitch_embed(base_pitch[:, :, None]) + pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer) + else: + pitch_pred_out = None + + if hparams['predict_energy']: + pitch_embed = self.pitch_embed((base_pitch + delta_pitch)[:, :, None]) + energy_cond = condition + pitch_embed + energy_pred_out = self.energy_predictor(energy_cond) + else: + energy_pred_out = None + + return dur_pred_out, pitch_pred_out, energy_pred_out diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index f268af763..cd8847a79 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -11,7 +11,11 @@ from basics.base_binarizer import BaseBinarizer from modules.fastspeech.tts_modules import LengthRegulator -from utils.binarizer_utils import get_mel2ph_torch, get_pitch_parselmouth +from utils.binarizer_utils import ( + get_mel2ph_torch, + get_pitch_parselmouth, + get_energy_librosa +) from utils.hparams import hparams os.environ["OMP_NUM_THREADS"] = "1" @@ -24,7 +28,7 @@ 'mel2ph', # mel2ph format representing number of frames within each phone, int64[T_t,] 'base_pitch', # interpolated and smoothed frame-level MIDI pitch, float32[T_t,] 'delta_pitch', # delta_pitch = actual_pitch - base_pitch, in semitones, float32[T_t,] - 'uv', # flag of unvoiced frames where f0 == 0, bool[T_t,] + 'energy', # float32[T_t,] ] @@ -142,7 +146,11 @@ def process_item(self, item_name, meta_data, binarization_args): return None processed_input['delta_pitch'] = librosa.hz_to_midi(f0.astype(np.float32)) - processed_input['base_pitch'] - processed_input['uv'] = uv + + # Below: extract energy + if hparams['predict_energy']: + energy = get_energy_librosa(waveform, length, hparams) + processed_input['energy'] = energy.astype(np.float32) return processed_input diff --git a/training/variance_task.py b/training/variance_task.py index 03c9fdf4b..3db509ead 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,7 +9,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.diff_loss import DiffusionNoiseLoss -from modules.losses.dur_loss import DurationLoss +from modules.losses.variance_loss import DurationLoss, CurveLoss1d from modules.toplevel import DiffSingerVariance from utils.hparams import hparams from utils.plot import dur_to_figure, curve_to_figure @@ -28,7 +28,6 @@ def collater(self, samples): mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) base_pitch = utils.collate_nd([s['base_pitch'] for s in samples], 0) delta_pitch = utils.collate_nd([s['delta_pitch'] for s in samples], 0) - uv = utils.collate_nd([s['uv'] for s in samples], True) batch.update({ 'tokens': tokens, 'ph_dur': ph_dur, @@ -37,8 +36,10 @@ def collater(self, samples): 'mel2ph': mel2ph, 'base_pitch': base_pitch, 'delta_pitch': delta_pitch, - 'uv': uv }) + if hparams['predict_energy']: + energy = utils.collate_nd([s['energy'] for s in samples], 0) + batch['energy'] = energy if hparams['use_spk_id']: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids @@ -54,6 +55,8 @@ def __init__(self): self.lambda_dur_loss = hparams['lambda_dur_loss'] if 
hparams['predict_pitch']: self.lambda_pitch_loss = hparams['lambda_pitch_loss'] + if hparams['predict_energy']: + self.lambda_energy_loss = hparams['lambda_energy_loss'] def build_model(self): return DiffSingerVariance( @@ -82,6 +85,11 @@ def build_losses(self): # num_bins=pitch_hparams['num_pitch_bins'], # deviation=pitch_hparams['deviation'] # ) + if hparams['predict_energy']: + energy_hparams = hparams['energy_prediction_args'] + self.energy_loss = CurveLoss1d( + loss_type=energy_hparams['loss_type'] + ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] @@ -91,22 +99,28 @@ def run_model(self, sample, infer=False): mel2ph = sample['mel2ph'] # [B, T_t] base_pitch = sample['base_pitch'] # [B, T_t] delta_pitch = sample['delta_pitch'] # [B, T_t] + energy = sample.get('energy') # [B, T_t] output = self.model(txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, infer=infer) - dur_pred, pitch_pred = output + dur_pred, pitch_pred, energy_pred = output if infer: - return dur_pred, pitch_pred + return dur_pred, pitch_pred, energy_pred else: losses = {} if dur_pred is not None: losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) + nonpadding = (mel2ph > 0).float() if pitch_pred is not None: (pitch_x_recon, pitch_noise) = pitch_pred losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss( - pitch_x_recon, pitch_noise, (mel2ph > 0).float().unsqueeze(-1) + pitch_x_recon, pitch_noise, nonpadding=nonpadding.unsqueeze(-1) + ) + if energy_pred is not None: + losses['energy_loss'] = self.lambda_energy_loss * self.energy_loss( + energy_pred, energy, mask=nonpadding ) return losses @@ -119,7 +133,7 @@ def _validation_step(self, sample, batch_idx): if batch_idx < hparams['num_valid_plots'] \ and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: - dur_pred, pitch_pred = self.run_model(sample, infer=True) + dur_pred, pitch_pred, energy_pred = self.run_model(sample, infer=True) if dur_pred is not None: self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens']) if pitch_pred is not None: @@ -130,7 +144,16 @@ def _validation_step(self, sample, batch_idx): gt_curve=base_pitch + delta_pitch, pred_curve=base_pitch + pitch_pred, base_curve=base_pitch, - curve_name='pitch' + curve_name='pitch', + grid=1 + ) + if energy_pred is not None: + energy = sample['energy'] + self.plot_curve( + batch_idx, + gt_curve=energy, + pred_curve=energy_pred, + curve_name='energy' ) return outputs, sample['size'] @@ -145,10 +168,12 @@ def plot_dur(self, batch_idx, gt_dur, pred_dur, txt=None): txt = self.phone_encoder.decode(txt[0].cpu().numpy()).split() self.logger.experiment.add_figure(name, dur_to_figure(gt_dur, pred_dur, txt), self.global_step) - def plot_curve(self, batch_idx, gt_curve, pred_curve, base_curve=None, curve_name='curve'): + def plot_curve(self, batch_idx, gt_curve, pred_curve, base_curve=None, grid=None, curve_name='curve'): name = f'{curve_name}_{batch_idx}' gt_curve = gt_curve[0].cpu().numpy() pred_curve = pred_curve[0].cpu().numpy() if base_curve is not None: base_curve = base_curve[0].cpu().numpy() - self.logger.experiment.add_figure(name, curve_to_figure(gt_curve, pred_curve, base_curve), self.global_step) + self.logger.experiment.add_figure(name, curve_to_figure( + gt_curve, pred_curve, base_curve, grid=grid + ), self.global_step) diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index 0a80b1e34..70e2fe486 100644 --- 
a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -1,5 +1,6 @@ import warnings +import librosa import torch warnings.filterwarnings("ignore") @@ -9,6 +10,31 @@ import numpy as np +@torch.no_grad() +def get_mel2ph_torch(lr, durs, length, timestep, device='cpu'): + ph_acc = torch.round(torch.cumsum(durs.to(device), dim=0) / timestep + 0.5).long() + ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(device)) + mel2ph = lr(ph_dur[None])[0] + num_frames = mel2ph.shape[0] + if num_frames < length: + mel2ph = torch.cat((mel2ph, torch.full((length - num_frames,), fill_value=mel2ph[-1], device=device)), dim=0) + elif num_frames > length: + mel2ph = mel2ph[:length] + return mel2ph + + +def pad_frames(frames, hop_size, n_samples, n_expect): + n_frames = frames.shape[0] + lpad = (int(n_samples // hop_size) - n_frames + 1) // 2 + rpad = n_expect - n_frames - lpad + if rpad < 0: + frames = frames[:rpad] + rpad = 0 + if lpad > 0 or rpad > 0: + frames = np.pad(frames, [[lpad, rpad]], mode='constant') + return frames + + def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False): """ @@ -17,7 +43,7 @@ def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False): :param hparams: :param speed: Change the speed :param interp_uv: Interpolate unvoiced parts - :return: f0, f0_coarse, uv + :return: f0, uv """ hop_size = int(np.round(hparams['hop_size'] * speed)) @@ -28,32 +54,26 @@ def get_pitch_parselmouth(wav_data, length, hparams, speed=1, interp_uv=False): # noinspection PyArgumentList f0 = parselmouth.Sound(wav_data, sampling_frequency=hparams['audio_sample_rate']).to_pitch_ac( time_step=time_step, voicing_threshold=0.6, - pitch_floor=f0_min, pitch_ceiling=f0_max).selected_array['frequency'] - len_f0 = f0.shape[0] - lpad = (int(len(wav_data) // hop_size) - len_f0 + 1) // 2 - rpad = length - len_f0 - lpad - if lpad < 0: - f0 = f0[-lpad:] - lpad = 0 - if rpad < 0: - f0 = f0[:rpad] - rpad = 0 - if lpad > 0 or rpad > 0: - f0 = np.pad(f0, [[lpad, rpad]], mode='constant') + pitch_floor=f0_min, pitch_ceiling=f0_max + ).selected_array['frequency'] + f0 = pad_frames(f0, hop_size, wav_data.shape[0], length) uv = f0 == 0 if interp_uv: f0, uv = interp_f0(f0, uv) return f0, uv -@torch.no_grad() -def get_mel2ph_torch(lr, durs, length, timestep, device='cpu'): - ph_acc = torch.round(torch.cumsum(durs.to(device), dim=0) / timestep + 0.5).long() - ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(device)) - mel2ph = lr(ph_dur[None])[0] - num_frames = mel2ph.shape[0] - if num_frames < length: - mel2ph = torch.cat((mel2ph, torch.full((length - num_frames,), fill_value=mel2ph[-1], device=device)), dim=0) - elif num_frames > length: - mel2ph = mel2ph[:length] - return mel2ph +def get_energy_librosa(wav_data, length, hparams): + """ + + :param wav_data: [T] + :param length: Expected number of frames + :param hparams: + :return: energy + """ + hop_size = hparams['hop_size'] + win_size = hparams['win_size'] + + energy = librosa.feature.rms(y=wav_data, frame_length=win_size, hop_length=hop_size)[0] + energy = pad_frames(energy, hop_size, wav_data.shape[0], length) + return energy diff --git a/utils/plot.py b/utils/plot.py index 7e770f947..eec0a1897 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -56,7 +56,7 @@ def dur_to_figure(dur_gt, dur_pred, txt): return fig -def curve_to_figure(curve_gt, curve_pred=None, curve_base=None): +def curve_to_figure(curve_gt, curve_pred=None, curve_base=None, grid=None): if isinstance(curve_gt, torch.Tensor): 
curve_gt = curve_gt.cpu().numpy() if isinstance(curve_pred, torch.Tensor): @@ -69,7 +69,8 @@ def curve_to_figure(curve_gt, curve_pred=None, curve_base=None): if curve_pred is not None: plt.plot(curve_pred, color='r', label='pred') plt.plot(curve_gt, color='b', label='gt') - plt.gca().yaxis.set_major_locator(MultipleLocator(1)) + if grid is not None: + plt.gca().yaxis.set_major_locator(MultipleLocator(grid)) plt.grid(axis='y') plt.legend() plt.tight_layout() From 667ea2bee39531b46f11f12ab68edbc2601df61b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 00:16:32 +0800 Subject: [PATCH 320/475] Calculate energy loss in dB domain --- modules/losses/variance_loss.py | 18 ++++++++++++++++++ training/variance_task.py | 9 +++++++-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/modules/losses/variance_loss.py b/modules/losses/variance_loss.py index a24558f16..6de627de0 100644 --- a/modules/losses/variance_loss.py +++ b/modules/losses/variance_loss.py @@ -1,3 +1,4 @@ +import librosa import torch import torch.nn as nn from torch import Tensor @@ -60,6 +61,7 @@ class CurveLoss1d(nn.Module): """ Loss module for 1d parameter curve with non-padding masks. """ + def __init__(self, loss_type): super().__init__() self.loss_type = loss_type @@ -108,3 +110,19 @@ def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: """ y_gt = self.curve_to_probs(c_gt) return self.loss(y_pred, y_gt * mask[:, :, None]) + + +class EnergyLoss(CurveLoss1d): + """ + Loss module for energy prediction. Calculates in dB domain. + """ + + def __init__(self, db_min, db_max, loss_type): + super().__init__(loss_type=loss_type) + self.e_min = 10. ** (db_min / 20.) + self.e_max = 10. ** (db_max / 20.) + + def forward(self, e_pred, e_gt, mask=None): + db_pred = e_pred.clamp(min=self.e_min, max=self.e_max).log10() * 20. + db_gt = e_gt.clamp(min=self.e_min, max=self.e_max).log10() * 20. 
+ return super().forward(db_pred, db_gt, mask=mask) diff --git a/training/variance_task.py b/training/variance_task.py index 3db509ead..67cdcf425 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,7 +9,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.diff_loss import DiffusionNoiseLoss -from modules.losses.variance_loss import DurationLoss, CurveLoss1d +from modules.losses.variance_loss import DurationLoss, CurveLoss1d, EnergyLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams from utils.plot import dur_to_figure, curve_to_figure @@ -87,9 +87,14 @@ def build_losses(self): # ) if hparams['predict_energy']: energy_hparams = hparams['energy_prediction_args'] - self.energy_loss = CurveLoss1d( + self.energy_loss = EnergyLoss( + db_min=energy_hparams['db_vmin'], + db_max=energy_hparams['db_vmax'], loss_type=energy_hparams['loss_type'] ) + # self.energy_loss = CurveLoss1d( + # loss_type=energy_hparams['loss_type'] + # ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] From 74cb74f70c4b581c89c5c75ecd6be73368619015 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 00:18:22 +0800 Subject: [PATCH 321/475] Add config keys --- configs/variance.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/configs/variance.yaml b/configs/variance.yaml index e879210f7..9758cf18a 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -68,6 +68,8 @@ pitch_prediction_args: predict_energy: true lambda_energy_loss: 1.0 energy_prediction_args: + db_vmin: -72.0 + db_vmax: 0.0 hidden_size: 512 dropout: 0.1 num_layers: 8 From 48b3eddaa0ab507661d88d57f356fa45bceb82e9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 00:36:27 +0800 Subject: [PATCH 322/475] Revert dB domain energy loss --- configs/variance.yaml | 2 -- modules/losses/variance_loss.py | 17 ----------------- training/variance_task.py | 9 ++------- 3 files changed, 2 insertions(+), 26 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 9758cf18a..e879210f7 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -68,8 +68,6 @@ pitch_prediction_args: predict_energy: true lambda_energy_loss: 1.0 energy_prediction_args: - db_vmin: -72.0 - db_vmax: 0.0 hidden_size: 512 dropout: 0.1 num_layers: 8 diff --git a/modules/losses/variance_loss.py b/modules/losses/variance_loss.py index 6de627de0..deb3d958f 100644 --- a/modules/losses/variance_loss.py +++ b/modules/losses/variance_loss.py @@ -1,4 +1,3 @@ -import librosa import torch import torch.nn as nn from torch import Tensor @@ -110,19 +109,3 @@ def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: """ y_gt = self.curve_to_probs(c_gt) return self.loss(y_pred, y_gt * mask[:, :, None]) - - -class EnergyLoss(CurveLoss1d): - """ - Loss module for energy prediction. Calculates in dB domain. - """ - - def __init__(self, db_min, db_max, loss_type): - super().__init__(loss_type=loss_type) - self.e_min = 10. ** (db_min / 20.) - self.e_max = 10. ** (db_max / 20.) - - def forward(self, e_pred, e_gt, mask=None): - db_pred = e_pred.clamp(min=self.e_min, max=self.e_max).log10() * 20. - db_gt = e_gt.clamp(min=self.e_min, max=self.e_max).log10() * 20. 
- return super().forward(db_pred, db_gt, mask=mask) diff --git a/training/variance_task.py b/training/variance_task.py index 67cdcf425..3db509ead 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,7 +9,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.diff_loss import DiffusionNoiseLoss -from modules.losses.variance_loss import DurationLoss, CurveLoss1d, EnergyLoss +from modules.losses.variance_loss import DurationLoss, CurveLoss1d from modules.toplevel import DiffSingerVariance from utils.hparams import hparams from utils.plot import dur_to_figure, curve_to_figure @@ -87,14 +87,9 @@ def build_losses(self): # ) if hparams['predict_energy']: energy_hparams = hparams['energy_prediction_args'] - self.energy_loss = EnergyLoss( - db_min=energy_hparams['db_vmin'], - db_max=energy_hparams['db_vmax'], + self.energy_loss = CurveLoss1d( loss_type=energy_hparams['loss_type'] ) - # self.energy_loss = CurveLoss1d( - # loss_type=energy_hparams['loss_type'] - # ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] From 0c43e41b5b5642044b8807c49107ee19b2a765ca Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 00:55:22 +0800 Subject: [PATCH 323/475] Use diffusion to predict energy --- configs/variance.yaml | 5 +++++ modules/toplevel.py | 26 +++++++++++++++++++------- training/variance_task.py | 10 +++++++--- 3 files changed, 31 insertions(+), 10 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index e879210f7..0c6b25782 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -73,6 +73,11 @@ energy_prediction_args: num_layers: 8 kernel_size: 3 loss_type: mse + db_vmin: -72.0 + db_vmax: -20.0 + num_repeat_bins: 64 + residual_layers: 20 + residual_channels: 256 residual_layers: 10 residual_channels: 256 diff --git a/modules/toplevel.py b/modules/toplevel.py index f9b0b43e3..03065176a 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -114,14 +114,26 @@ def __init__(self, vocab_size): if hparams['predict_energy']: self.pitch_embed = Linear(1, hparams['hidden_size']) energy_hparams = hparams['energy_prediction_args'] - self.energy_predictor = VariancePredictor( - in_dims=hparams['hidden_size'], - n_chans=energy_hparams['hidden_size'], - n_layers=energy_hparams['num_layers'], - dropout_rate=energy_hparams['dropout'], - padding=hparams['ffn_padding'], - kernel_size=energy_hparams['kernel_size'] + self.energy_predictor = RepetitiveDiffusion( + vmin=10. ** (energy_hparams['db_vmin'] / 20.), + vmax=10. 
** (energy_hparams['db_vmax'] / 20.), + repeat_bins=energy_hparams['num_repeat_bins'], + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + energy_hparams['residual_layers'], + energy_hparams['residual_channels'] + ) ) + # self.energy_predictor = VariancePredictor( + # in_dims=hparams['hidden_size'], + # n_chans=energy_hparams['hidden_size'], + # n_layers=energy_hparams['num_layers'], + # dropout_rate=energy_hparams['dropout'], + # padding=hparams['ffn_padding'], + # kernel_size=energy_hparams['kernel_size'] + # ) @property def category(self): diff --git a/training/variance_task.py b/training/variance_task.py index 3db509ead..c0945f01c 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -87,9 +87,12 @@ def build_losses(self): # ) if hparams['predict_energy']: energy_hparams = hparams['energy_prediction_args'] - self.energy_loss = CurveLoss1d( - loss_type=energy_hparams['loss_type'] + self.energy_loss = DiffusionNoiseLoss( + loss_type=hparams['diff_loss_type'], ) + # self.energy_loss = CurveLoss1d( + # loss_type=energy_hparams['loss_type'] + # ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] @@ -119,8 +122,9 @@ def run_model(self, sample, infer=False): pitch_x_recon, pitch_noise, nonpadding=nonpadding.unsqueeze(-1) ) if energy_pred is not None: + (energy_x_recon, energy_noise) = energy_pred losses['energy_loss'] = self.lambda_energy_loss * self.energy_loss( - energy_pred, energy, mask=nonpadding + energy_x_recon, energy_noise, nonpadding=nonpadding.unsqueeze(-1) ) return losses From 688d9d07f959216b56a9b9ff6079f9a9df41e49c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 00:58:45 +0800 Subject: [PATCH 324/475] Fix arguments passing --- modules/toplevel.py | 4 ++-- training/variance_task.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/toplevel.py b/modules/toplevel.py index 03065176a..bf3b8db8d 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -140,7 +140,7 @@ def category(self): return 'variance' def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, - mel2ph=None, base_pitch=None, delta_pitch=None, infer=True): + mel2ph=None, base_pitch=None, delta_pitch=None, energy=None, infer=True): encoder_out, dur_pred_out = self.fs2( txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, word_dur=word_dur, infer=infer @@ -166,7 +166,7 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, if hparams['predict_energy']: pitch_embed = self.pitch_embed((base_pitch + delta_pitch)[:, :, None]) energy_cond = condition + pitch_embed - energy_pred_out = self.energy_predictor(energy_cond) + energy_pred_out = self.energy_predictor(energy_cond, energy, infer) else: energy_pred_out = None diff --git a/training/variance_task.py b/training/variance_task.py index c0945f01c..035e806cd 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -105,7 +105,7 @@ def run_model(self, sample, infer=False): energy = sample.get('energy') # [B, T_t] output = self.model(txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, - mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, + mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, energy=energy, infer=infer) dur_pred, pitch_pred, energy_pred = output From f9161135a332956cbad522712189fb7f4b7d8ffa Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 01:46:46 +0800 Subject: [PATCH 325/475] Fix wrong embed 
--- modules/toplevel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/toplevel.py b/modules/toplevel.py index bf3b8db8d..88420c342 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -158,7 +158,7 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, condition = torch.gather(encoder_out, 1, mel2ph_) if hparams['predict_pitch']: - pitch_cond = condition + self.pitch_embed(base_pitch[:, :, None]) + pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer) else: pitch_pred_out = None From 6d807ff1ef5db0c3c44bdde916a902bdbadfae8e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 12:58:47 +0800 Subject: [PATCH 326/475] Derive `PitchDiffusion` and `EnergyDiffusion` from `RepetitiveDiffusion` --- modules/diffusion/ddpm.py | 20 ++++++++++++++++++-- modules/toplevel.py | 9 ++++++--- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index c13c99437..f62b1480b 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -303,10 +303,26 @@ def __init__(self, vmin, vmax, repeat_bins, ) def norm_spec(self, x): - return super().norm_spec(x.clamp(min=self.vmin, max=self.vmax).unsqueeze(-1).repeat([1, 1, self.repeat_bins])) + return super().norm_spec(x.unsqueeze(-1).repeat([1, 1, self.repeat_bins])) def denorm_spec(self, x): - return super().denorm_spec(x).mean(dim=-1).clamp(min=self.vmin, max=self.vmax) + return super().denorm_spec(x).mean(dim=-1) + + +class PitchDiffusion(RepetitiveDiffusion): + def norm_spec(self, x): + return super().norm_spec(x.clamp(min=self.vmin, max=self.vmax)) + + def denorm_spec(self, x): + return super().denorm_spec(x).clamp(min=self.vmin, max=self.vmax) + + +class EnergyDiffusion(RepetitiveDiffusion): + def norm_spec(self, x): + return super().norm_spec(x.clamp(min=0., max=1.)) + + def denorm_spec(self, x): + return super().denorm_spec(x).clamp(min=0., max=1.) class CurveDiffusion1d(GaussianDiffusion): diff --git a/modules/toplevel.py b/modules/toplevel.py index 88420c342..422171e34 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -5,7 +5,10 @@ from modules.commons.common_layers import ( XavierUniformInitLinear as Linear, ) -from modules.diffusion.ddpm import GaussianDiffusion, RepetitiveDiffusion, CurveDiffusion1d, CurveDiffusion2d +from modules.diffusion.ddpm import ( + GaussianDiffusion, PitchDiffusion, EnergyDiffusion, + CurveDiffusion1d, CurveDiffusion2d +) from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from modules.fastspeech.tts_modules import LengthRegulator, VariancePredictor from modules.fastspeech.variance_encoder import FastSpeech2Variance @@ -61,7 +64,7 @@ def __init__(self, vocab_size): self.base_pitch_embed = Linear(1, hparams['hidden_size']) diff_predictor_mode = pitch_hparams['diff_predictor_mode'] if diff_predictor_mode == 'repeat': - self.pitch_predictor = RepetitiveDiffusion( + self.pitch_predictor = PitchDiffusion( vmin=pitch_hparams['pitch_delta_vmin'], vmax=pitch_hparams['pitch_delta_vmax'], repeat_bins=pitch_hparams['num_pitch_bins'], @@ -114,7 +117,7 @@ def __init__(self, vocab_size): if hparams['predict_energy']: self.pitch_embed = Linear(1, hparams['hidden_size']) energy_hparams = hparams['energy_prediction_args'] - self.energy_predictor = RepetitiveDiffusion( + self.energy_predictor = EnergyDiffusion( vmin=10. ** (energy_hparams['db_vmin'] / 20.), vmax=10. 
** (energy_hparams['db_vmax'] / 20.), repeat_bins=energy_hparams['num_repeat_bins'], From 3486795c543e24c5dc944891a647c5f2dc63b71b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 13:48:37 +0800 Subject: [PATCH 327/475] Support energy embed in acoustic model --- augmentation/spec_stretch.py | 4 ++++ configs/acoustic.yaml | 1 + modules/fastspeech/acoustic_encoder.py | 17 +++++++++++++--- modules/toplevel.py | 4 ++-- preprocessing/acoustic_binarizer.py | 28 ++++++++++++++++++++------ training/acoustic_task.py | 5 ++++- 6 files changed, 47 insertions(+), 12 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 1e59a2749..11fdf67fb 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -14,6 +14,7 @@ class SpectrogramStretchAugmentation(BaseAugmentation): """ This class contains methods for frequency-domain and time-domain stretching augmentation. """ + def __init__(self, data_dirs: list, augmentation_args: dict): super().__init__(data_dirs, augmentation_args) self.device = 'cuda' if torch.cuda.is_available() else 'cpu' @@ -21,6 +22,9 @@ def __init__(self, data_dirs: list, augmentation_args: dict): @require_same_keys def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) -> dict: + if 'energy' in item: + raise NotImplementedError('Energy has not been supported in augmentation.') + aug_item = deepcopy(item) if hparams['vocoder'] in VOCODERS: wav, mel = VOCODERS[hparams['vocoder']].wav2spec( diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 936ed07da..30d004fda 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -51,6 +51,7 @@ save_f0: true use_spk_id: false f0_embed_type: continuous +use_energy_embed: false use_key_shift_embed: false use_speed_embed: false diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index c616dc6dc..fe986c2d4 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -30,6 +30,9 @@ def __init__(self, vocab_size): else: raise ValueError('f0_embed_type must be \'discrete\' or \'continuous\'.') + if hparams.get('use_energy_embed', False): + self.energy_embed = Linear(1, hparams['hidden_size']) + if hparams.get('use_key_shift_embed', False): self.key_shift_embed = Linear(1, hparams['hidden_size']) @@ -39,7 +42,7 @@ def __init__(self, vocab_size): if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) - def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, spk_embed_id=None, **kwargs): + def forward(self, txt_tokens, mel2ph, f0, energy=None, key_shift=None, speed=None, spk_embed_id=None, **kwargs): dur = mel2ph_to_dur(mel2ph, txt_tokens.shape[1]).float() dur_embed = self.dur_embed(dur[:, :, None]) encoder_out = self.encoder(txt_tokens, dur_embed) @@ -48,10 +51,14 @@ def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, spk_embed_ mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]]) condition = torch.gather(encoder_out, 1, mel2ph_) return self.forward_variance_embedding( - condition, f0=f0, key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, **kwargs + condition, f0=f0, energy=energy, key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, **kwargs ) - def forward_variance_embedding(self, condition, f0, key_shift=None, speed=None, spk_embed_id=None, **kwargs): + def forward_variance_embedding( + self, condition, f0, energy=None, + key_shift=None, speed=None, 
spk_embed_id=None, + **kwargs + ): if self.f0_embed_type == 'discrete': pitch = f0_to_coarse(f0) pitch_embed = self.pitch_embed(pitch) @@ -60,6 +67,10 @@ def forward_variance_embedding(self, condition, f0, key_shift=None, speed=None, pitch_embed = self.pitch_embed(f0_mel[:, :, None]) condition += pitch_embed + if hparams.get('use_energy_embed', False): + energy_embed = self.energy_embed(energy[:, :, None]) + condition += energy_embed + if hparams.get('use_key_shift_embed', False): key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) condition += key_shift_embed diff --git a/modules/toplevel.py b/modules/toplevel.py index 422171e34..e44d8abbb 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -38,9 +38,9 @@ def __init__(self, vocab_size, out_dims): def category(self): return 'acoustic' - def forward(self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, + def forward(self, txt_tokens, mel2ph, f0, energy=None, key_shift=None, speed=None, spk_embed_id=None, gt_mel=None, infer=True, **kwargs): - condition = self.fs2(txt_tokens, mel2ph, f0, key_shift=key_shift, speed=speed, + condition = self.fs2(txt_tokens, mel2ph, f0, energy=energy, key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, **kwargs) if infer: mel = self.diffusion(condition, infer=True) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index a05b3337c..e5dc138b6 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -20,12 +20,23 @@ from basics.base_binarizer import BaseBinarizer, BinarizationError from modules.fastspeech.tts_modules import LengthRegulator from modules.vocoders.registry import VOCODERS -from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch +from utils.binarizer_utils import ( + get_mel2ph_torch, get_pitch_parselmouth, get_energy_librosa +) from utils.hparams import hparams from utils.phoneme_utils import build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" -ACOUSTIC_ITEM_ATTRIBUTES = ['spk_id', 'mel', 'tokens', 'mel2ph', 'f0', 'key_shift', 'speed'] +ACOUSTIC_ITEM_ATTRIBUTES = [ + 'spk_id', + 'mel', + 'tokens', + 'mel2ph', + 'f0', + 'energy', + 'key_shift', + 'speed' +] class AcousticBinarizer(BaseBinarizer): @@ -149,6 +160,11 @@ def process_item(self, item_name, meta_data, binarization_args): 'ph_dur': np.array(meta_data['ph_dur']).astype(np.float32), } + # get ground truth dur + processed_input['mel2ph'] = get_mel2ph_torch( + self.lr, torch.from_numpy(processed_input['ph_dur']), length, self.timestep, device=self.device + ).cpu().numpy() + # get ground truth f0 gt_f0, uv = get_pitch_parselmouth( wav, length, hparams, interp_uv=hparams['interp_uv'] @@ -158,10 +174,10 @@ def process_item(self, item_name, meta_data, binarization_args): return None processed_input['f0'] = gt_f0.astype(np.float32) - # get ground truth dur - processed_input['mel2ph'] = get_mel2ph_torch( - self.lr, torch.from_numpy(processed_input['ph_dur']), length, self.timestep, device=self.device - ).cpu().numpy() + if hparams.get('use_energy_embed', False): + # get ground truth energy + energy = get_energy_librosa(wav, length, hparams) + processed_input['energy'] = energy if hparams.get('use_key_shift_embed', False): processed_input['key_shift'] = 0. 
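The acoustic binarizer above now reuses `get_energy_librosa` (added in patch 319) for its ground-truth energy curve. Below is a minimal, self-contained sketch of what that helper computes, assuming 44.1 kHz audio with the `hop_size: 512` / `win_size: 2048` settings from the config and the same center-padding scheme as `pad_frames`; the function name and input path here are hypothetical illustrations, not the repository API:

```python
import librosa
import numpy as np

def rms_energy(wav: np.ndarray, n_frames: int, hop_size: int = 512, win_size: int = 2048) -> np.ndarray:
    # One RMS value per analysis frame; librosa centers frames on the signal.
    energy = librosa.feature.rms(y=wav, frame_length=win_size, hop_length=hop_size)[0]
    # Pad/trim so the curve lines up with the expected number of mel frames,
    # mirroring `pad_frames` from utils/binarizer_utils.py.
    lpad = (len(wav) // hop_size - energy.shape[0] + 1) // 2
    rpad = n_frames - energy.shape[0] - lpad
    if rpad < 0:
        energy, rpad = energy[:rpad], 0
    if lpad > 0 or rpad > 0:
        energy = np.pad(energy, [lpad, rpad])
    return energy.astype(np.float32)

wav, _ = librosa.load('example.wav', sr=44100)  # hypothetical input file
n_frames = len(wav) // 512 + 1
print(rms_energy(wav, n_frames).shape)  # -> (n_frames,)
```

Because RMS is linear in amplitude, the curve stays within [0, 1] for normalized audio, which matches the range that `EnergyDiffusion.norm_spec` clamps to in patch 326.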
diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 47b40ffd8..a35a4928c 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -40,6 +40,8 @@ def collater(self, samples): 'mel': mel, 'f0': f0, }) + if hparams.get('use_energy_embed', False): + batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) if hparams.get('use_key_shift_embed', False): batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] if hparams.get('use_speed_embed', False): @@ -77,6 +79,7 @@ def run_model(self, sample, infer=False): target = sample['mel'] # [B, T_s, M] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] + energy = sample.get('energy') key_shift = sample.get('key_shift') speed = sample.get('speed') @@ -84,7 +87,7 @@ def run_model(self, sample, infer=False): spk_embed_id = sample['spk_ids'] else: spk_embed_id = None - output = self.model(txt_tokens, mel2ph=mel2ph, f0=f0, + output = self.model(txt_tokens, mel2ph=mel2ph, f0=f0, energy=energy, key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, gt_mel=target, infer=infer) From 68d02001cf7e0011ddd06eed5483c6ef1cb1ae28 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 29 Apr 2023 19:06:23 +0800 Subject: [PATCH 328/475] Clean up unused imports --- configs/variance.yaml | 2 ++ modules/fastspeech/variance_encoder.py | 1 - modules/toplevel.py | 2 +- training/variance_task.py | 15 +++------------ 4 files changed, 6 insertions(+), 14 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 0c6b25782..93fbe4b46 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -64,6 +64,7 @@ pitch_prediction_args: hidden_size: 512 residual_layers: 20 residual_channels: 256 + diff_loss_type: l2 predict_energy: true lambda_energy_loss: 1.0 @@ -78,6 +79,7 @@ energy_prediction_args: num_repeat_bins: 64 residual_layers: 20 residual_channels: 256 + diff_loss_type: l2 residual_layers: 10 residual_channels: 256 diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 080ad2df5..109f151b1 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -28,7 +28,6 @@ def __init__(self, vocab_size): ) dur_hparams = hparams['dur_prediction_args'] - self.wdur_log_offset = dur_hparams['log_offset'] if hparams['predict_dur']: self.dur_predictor = DurationPredictor( in_dims=hparams['hidden_size'], diff --git a/modules/toplevel.py b/modules/toplevel.py index e44d8abbb..0cc9efed3 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -10,7 +10,7 @@ CurveDiffusion1d, CurveDiffusion2d ) from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic -from modules.fastspeech.tts_modules import LengthRegulator, VariancePredictor +from modules.fastspeech.tts_modules import LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams diff --git a/training/variance_task.py b/training/variance_task.py index 035e806cd..b4dc174c6 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,7 +9,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.diff_loss import DiffusionNoiseLoss -from modules.losses.variance_loss import DurationLoss, CurveLoss1d +from modules.losses.variance_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams from utils.plot import dur_to_figure, curve_to_figure @@ -77,22 +77,13 @@ def 
From 68d02001cf7e0011ddd06eed5483c6ef1cb1ae28 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 29 Apr 2023 19:06:23 +0800
Subject: [PATCH 328/475] Clean up unused imports

---
 configs/variance.yaml                  |  2 ++
 modules/fastspeech/variance_encoder.py |  1 -
 modules/toplevel.py                    |  2 +-
 training/variance_task.py              | 15 +++------------
 4 files changed, 6 insertions(+), 14 deletions(-)

diff --git a/configs/variance.yaml b/configs/variance.yaml
index 0c6b25782..93fbe4b46 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -64,6 +64,7 @@ pitch_prediction_args:
   hidden_size: 512
   residual_layers: 20
   residual_channels: 256
+  diff_loss_type: l2

 predict_energy: true
 lambda_energy_loss: 1.0
@@ -78,6 +79,7 @@ energy_prediction_args:
   num_repeat_bins: 64
   residual_layers: 20
   residual_channels: 256
+  diff_loss_type: l2

 residual_layers: 10
 residual_channels: 256
diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py
index 080ad2df5..109f151b1 100644
--- a/modules/fastspeech/variance_encoder.py
+++ b/modules/fastspeech/variance_encoder.py
@@ -28,7 +28,6 @@ def __init__(self, vocab_size):
         )

         dur_hparams = hparams['dur_prediction_args']
-        self.wdur_log_offset = dur_hparams['log_offset']
         if hparams['predict_dur']:
             self.dur_predictor = DurationPredictor(
                 in_dims=hparams['hidden_size'],
diff --git a/modules/toplevel.py b/modules/toplevel.py
index e44d8abbb..0cc9efed3 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -10,7 +10,7 @@
     CurveDiffusion1d, CurveDiffusion2d
 )
 from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic
-from modules.fastspeech.tts_modules import LengthRegulator, VariancePredictor
+from modules.fastspeech.tts_modules import LengthRegulator
 from modules.fastspeech.variance_encoder import FastSpeech2Variance
 from utils.hparams import hparams

diff --git a/training/variance_task.py b/training/variance_task.py
index 035e806cd..b4dc174c6 100644
--- a/training/variance_task.py
+++ b/training/variance_task.py
@@ -9,7 +9,7 @@
 from basics.base_dataset import BaseDataset
 from basics.base_task import BaseTask
 from modules.losses.diff_loss import DiffusionNoiseLoss
-from modules.losses.variance_loss import DurationLoss, CurveLoss1d
+from modules.losses.variance_loss import DurationLoss
 from modules.toplevel import DiffSingerVariance
 from utils.hparams import hparams
 from utils.plot import dur_to_figure, curve_to_figure
@@ -77,22 +77,13 @@ def build_losses(self):
         if hparams['predict_pitch']:
             pitch_hparams = hparams['pitch_prediction_args']
             self.pitch_loss = DiffusionNoiseLoss(
-                loss_type=hparams['diff_loss_type'],
+                loss_type=pitch_hparams['diff_loss_type'],
             )
-            # self.pitch_loss = CurveLoss2d(
-            #     vmin=pitch_hparams['pitch_delta_vmin'],
-            #     vmax=pitch_hparams['pitch_delta_vmax'],
-            #     num_bins=pitch_hparams['num_pitch_bins'],
-            #     deviation=pitch_hparams['deviation']
-            # )
         if hparams['predict_energy']:
             energy_hparams = hparams['energy_prediction_args']
             self.energy_loss = DiffusionNoiseLoss(
-                loss_type=hparams['diff_loss_type'],
+                loss_type=energy_hparams['diff_loss_type'],
             )
-            # self.energy_loss = CurveLoss1d(
-            #     loss_type=energy_hparams['loss_type']
-            # )

From 0e251c22d98eb8be66656e0c9b826bf22f9dae62 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 29 Apr 2023 21:00:22 +0800
Subject: [PATCH 329/475] Support energy prediction and inference

---
 inference/ds_acoustic.py               | 16 +++++++++++++---
 inference/ds_variance.py               | 25 ++++++++++++++++++++-----
 modules/fastspeech/variance_encoder.py |  2 +-
 modules/toplevel.py                    |  6 +++++-
 4 files changed, 39 insertions(+), 10 deletions(-)

diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py
index ce136a0c1..20ffe0dee 100644
--- a/inference/ds_acoustic.py
+++ b/inference/ds_acoustic.py
@@ -106,7 +106,7 @@ def preprocess_input(self, param):
                 else:
                     # this speaker has a constant proportion
                     assert values >= 0., f'Speaker mix checks failed.\n' \
-                        f'Proportion of speaker \'{name}\' is negative.'
+                                         f'Proportion of speaker \'{name}\' is negative.'
                     cur_spk_mix_value = torch.full(
                         (1, length), fill_value=values, dtype=torch.float32, device=self.device
@@ -142,6 +142,14 @@ def preprocess_input(self, param):
                 align_length=length
             )).to(self.device)[None]

+        if hparams.get('use_energy_embed', False):
+            batch['energy'] = torch.from_numpy(resample_align_curve(
+                np.array(param['energy'].split(), np.float32),
+                original_timestep=float(param['energy_timestep']),
+                target_timestep=self.timestep,
+                align_length=length
+            )).to(self.device)[None]
+
         if hparams.get('use_key_shift_embed', False):
             shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range']
             gender = param.get('gender', 0.)
@@ -188,14 +196,16 @@ def preprocess_input(self, param):
     def run_model(self, sample, return_mel=False):
         txt_tokens = sample['tokens']
         if hparams['use_spk_id']:
+            spk_mix_id = sample['spk_mix_id']
+            spk_mix_value = sample['spk_mix_value']
             # perform mixing on spk embed
             spk_mix_embed = torch.sum(
-                self.model.fs2.spk_embed(sample['spk_mix_id']) * sample['spk_mix_value'].unsqueeze(3),  # => [B, T, N, H]
+                self.model.fs2.spk_embed(spk_mix_id) * spk_mix_value.unsqueeze(3),  # => [B, T, N, H]
                 dim=2, keepdim=False
             )  # => [B, T, H]
         else:
             spk_mix_embed = None
-        mel_pred = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'],
+        mel_pred = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], energy=sample.get('energy'),
                               key_shift=sample.get('key_shift'), speed=sample.get('speed'),
                               spk_mix_embed=spk_mix_embed, infer=True)
         return mel_pred
diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index 329f617c2..24481b36e 100644
--- a/inference/ds_variance.py
+++ b/inference/ds_variance.py
@@ -8,9 +8,11 @@
 from basics.base_svs_infer import BaseSVSInfer
 from modules.fastspeech.tts_modules import LengthRegulator, mel2ph_to_dur
 from modules.toplevel import DiffSingerVariance
+from utils import load_ckpt
 from utils.hparams import hparams
+from utils.infer_utils import resample_align_curve
 from utils.phoneme_utils import build_phoneme_list
-from utils import load_ckpt
+from utils.pitch_utils import interp_f0
 from utils.text_encoder import TokenTextEncoder

@@ -125,24 +127,35 @@ def preprocess_input(self, param):
             ph_midi = ph_midi.round().long()
         batch['midi'] = ph_midi

+        if param.get('f0_seq'):
+            f0 = resample_align_curve(
+                np.array(param['f0_seq'].split(), np.float32),
+                original_timestep=float(hparams['f0_timestep']),
+                target_timestep=self.timestep,
+                align_length=T_t
+            )
+            batch['delta_pitch'] = torch.from_numpy(
+                librosa.hz_to_midi(interp_f0(f0)).astype(np.float32)
+            ).to(self.device)[None]
+
         return batch

     @torch.no_grad()
     def run_model(self, sample):
         txt_tokens = sample['tokens']
         base_pitch = sample['base_pitch']
-        dur_pred, pitch_pred = self.model(
+        dur_pred, pitch_pred, energy_pred = self.model(
             txt_tokens, midi=sample['midi'], ph2word=sample['ph2word'],
             word_dur=sample['word_dur'],
             mel2ph=sample['mel2ph'], base_pitch=base_pitch
         )
         if pitch_pred is not None:
             pitch_pred = base_pitch + pitch_pred
-        return dur_pred, pitch_pred
+        return dur_pred, pitch_pred, energy_pred

     def infer_once(self, param):
         batch = self.preprocess_input(param)
-        dur_pred, pitch_pred = self.run_model(batch)
+        dur_pred, pitch_pred, energy_pred = self.run_model(batch)
         if dur_pred is not None:
             dur_pred = dur_pred[0].cpu().numpy()
         if pitch_pred is not None:
@@ -150,4 +163,6 @@ def infer_once(self, param):
             f0_pred = librosa.midi_to_hz(pitch_pred)
         else:
             f0_pred = None
-        return dur_pred, f0_pred
+        if energy_pred is not None:
+            energy_pred = energy_pred[0].cpu().numpy()
+        return dur_pred, f0_pred, energy_pred
diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py
index 109f151b1..1ee331ac1 100644
--- a/modules/fastspeech/variance_encoder.py
+++ b/modules/fastspeech/variance_encoder.py
@@ -48,7 +48,7 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T
         :param ph_dur: (train) [B, T_ph]
         :param word_dur: (infer) [B, T_w]
         :param infer: whether inference
-        :return: (train) encoder_out, ph_dur_xs; (infer) encoder_out, ph_dur
+        :return: encoder_out, ph_dur_pred
         """
         b = txt_tokens.shape[0]
         onset = torch.diff(ph2word, dim=1, prepend=ph2word.new_zeros(b, 1)) > 0
diff --git a/modules/toplevel.py b/modules/toplevel.py
index 0cc9efed3..f6ac74cb9 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -167,7 +167,11 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None,
             pitch_pred_out = None

         if hparams['predict_energy']:
-            pitch_embed = self.pitch_embed((base_pitch + delta_pitch)[:, :, None])
+            if delta_pitch is None:
+                pitch = base_pitch + pitch_pred_out
+            else:
+                pitch = base_pitch + delta_pitch
+            pitch_embed = self.pitch_embed(pitch[:, :, None])
             energy_cond = condition + pitch_embed
             energy_pred_out = self.energy_predictor(energy_cond, energy, infer)
         else:

From 8f8517c7f5980595d6b026f33ef5249e514f7b5f Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 29 Apr 2023 22:33:47 +0800
Subject: [PATCH 330/475] Fix wrong delta pitch and NoneType error

---
 inference/ds_variance.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index 24481b36e..6256c670f 100644
--- a/inference/ds_variance.py
+++ b/inference/ds_variance.py
@@ -130,13 +130,13 @@ def preprocess_input(self, param):
         if param.get('f0_seq'):
             f0 = resample_align_curve(
                 np.array(param['f0_seq'].split(), np.float32),
-                original_timestep=float(hparams['f0_timestep']),
+                original_timestep=float(param['f0_timestep']),
                 target_timestep=self.timestep,
                 align_length=T_t
             )
             batch['delta_pitch'] = torch.from_numpy(
-                librosa.hz_to_midi(interp_f0(f0)).astype(np.float32)
-            ).to(self.device)[None]
+                librosa.hz_to_midi(interp_f0(f0)[0]).astype(np.float32)
+            ).to(self.device)[None] - base_pitch

         return batch

@@ -147,7 +147,7 @@ def run_model(self, sample):
         dur_pred, pitch_pred, energy_pred = self.model(
             txt_tokens, midi=sample['midi'], ph2word=sample['ph2word'],
             word_dur=sample['word_dur'],
-            mel2ph=sample['mel2ph'], base_pitch=base_pitch
+            mel2ph=sample['mel2ph'], base_pitch=base_pitch, delta_pitch=sample.get('delta_pitch')
         )
         if pitch_pred is not None:
             pitch_pred = base_pitch + pitch_pred

From 9d376a501438e8347a847ff0d3a61f03c2f4578a Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 29 Apr 2023 22:34:06 +0800
Subject: [PATCH 331/475] Add length print

---
 inference/ds_variance.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index 6256c670f..685e295d5 100644
--- a/inference/ds_variance.py
+++ b/inference/ds_variance.py
@@ -57,7 +57,7 @@ def preprocess_input(self, param):
         batch['tokens'] = txt_tokens
         ph_num = torch.from_numpy(np.array([param['ph_num'].split()], np.int64)).to(self.device)  # [B=1, T_w]
         ph2word = self.lr(ph_num)  # => [B=1, T_ph]
-        T_w = ph2word.max()
+        T_w = int(ph2word.max())
         batch['ph2word'] = ph2word

         note_seq = torch.FloatTensor(
@@ -75,6 +75,9 @@ def preprocess_input(self, param):
         )[:, 1:]  # => [B=1, T_w]
         mel2word = self.lr(word_dur)  # [B=1, T_t]

+        print(f'Length: {T_w} word(s), {note_seq.shape[1]} note(s), {T_ph} token(s), '
+              f'{T_t} frame(s), {T_t * self.timestep:.2f} second(s)')
+
         if mel2word.shape[1] != T_t:  # Align words with notes
             mel2word = F.pad(mel2word, [0, T_t - mel2word.shape[1]], value=mel2word[0, -1])
             word_dur = mel2ph_to_dur(mel2word, T_w)
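Note: the speaker-mix logic introduced in PATCH 329's `run_model` is a per-frame convex combination of speaker embeddings. A small shape-checked sketch (the dimensions and the softmax normalization are illustrative, not taken from the repo):

    import torch

    B, T, N, H = 1, 100, 3, 256                 # batch, frames, mixed speakers, hidden size
    spk_embed = torch.randn(B, T, N, H)         # stands in for fs2.spk_embed(spk_mix_id)
    spk_mix_value = torch.softmax(torch.rand(B, T, N), dim=2)  # proportions sum to 1 per frame
    spk_mix_embed = torch.sum(spk_embed * spk_mix_value.unsqueeze(3), dim=2)  # => [B, T, H]
    assert spk_mix_embed.shape == (B, T, H)
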
From 6721eca22d49c42baeefa6cb5de5be2aada73f11 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 30 Apr 2023 14:53:51 +0800
Subject: [PATCH 332/475] Support breathiness extraction, prediction,
 embedding and training

---
 augmentation/spec_stretch.py           |  3 ++
 configs/acoustic.yaml                  |  1 +
 configs/variance.yaml                  | 19 ++++-
 modules/fastspeech/acoustic_encoder.py | 19 +++++--
 modules/toplevel.py                    | 70 ++++++++++++++++++++------
 preprocessing/acoustic_binarizer.py    | 11 +++-
 preprocessing/variance_binarizer.py    |  9 +++-
 requirements.txt                       |  2 +-
 training/acoustic_task.py              | 14 ++++--
 training/variance_task.py              | 37 +++++++++++---
 utils/binarizer_utils.py               | 36 ++++++++++++-
 11 files changed, 186 insertions(+), 35 deletions(-)

diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py
index 11fdf67fb..d5a8e8163 100644
--- a/augmentation/spec_stretch.py
+++ b/augmentation/spec_stretch.py
@@ -25,6 +25,9 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None)
         if 'energy' in item:
             raise NotImplementedError('Energy has not been supported in augmentation.')

+        if 'breathiness' in item:
+            raise NotImplementedError('Breathiness has not been supported in augmentation.')
+
         aug_item = deepcopy(item)
         if hparams['vocoder'] in VOCODERS:
             wav, mel = VOCODERS[hparams['vocoder']].wav2spec(
diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml
index 30d004fda..661f06629 100644
--- a/configs/acoustic.yaml
+++ b/configs/acoustic.yaml
@@ -52,6 +52,7 @@ save_f0: true
 use_spk_id: false
 f0_embed_type: continuous
 use_energy_embed: false
+use_breathiness_embed: false
 use_key_shift_embed: false
 use_speed_embed: false
diff --git a/configs/variance.yaml b/configs/variance.yaml
index 93fbe4b46..85f2247fb 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -66,12 +66,27 @@ pitch_prediction_args:
   residual_channels: 256
   diff_loss_type: l2

-predict_energy: true
+predict_energy: false
 lambda_energy_loss: 1.0
 energy_prediction_args:
   hidden_size: 512
   dropout: 0.1
-  num_layers: 8
+  num_layers: 5
   kernel_size: 3
   loss_type: mse
   db_vmin: -72.0
   db_vmax: -20.0
   num_repeat_bins: 64
   residual_layers: 20
   residual_channels: 256
   diff_loss_type: l2
+
+predict_breathiness: false
+lambda_breathiness_loss: 1.0
+breathiness_prediction_args:
+  hidden_size: 512
+  dropout: 0.1
+  num_layers: 5
+  kernel_size: 3
+  loss_type: mse
+  db_vmin: -72.0
+  db_vmax: -20.0
+  num_repeat_bins: 64
+  residual_layers: 20
+  residual_channels: 256
+  diff_loss_type: l2
diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py
index fe986c2d4..cfcfc25db 100644
--- a/modules/fastspeech/acoustic_encoder.py
+++ b/modules/fastspeech/acoustic_encoder.py
@@ -33,6 +33,9 @@ def __init__(self, vocab_size):
         if hparams.get('use_energy_embed', False):
             self.energy_embed = Linear(1, hparams['hidden_size'])

+        if hparams.get('use_breathiness_embed', False):
+            self.breathiness_embed = Linear(1, hparams['hidden_size'])
+
         if hparams.get('use_key_shift_embed', False):
             self.key_shift_embed = Linear(1, hparams['hidden_size'])

@@ -42,7 +45,11 @@ def __init__(self, vocab_size):
         if hparams['use_spk_id']:
             self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size'])

-    def forward(self, txt_tokens, mel2ph, f0, energy=None, key_shift=None, speed=None, spk_embed_id=None, **kwargs):
+    def forward(
+            self, txt_tokens, mel2ph, f0, energy=None, breathiness=None,
+            key_shift=None, speed=None, spk_embed_id=None,
+            **kwargs
+    ):
         dur = mel2ph_to_dur(mel2ph, txt_tokens.shape[1]).float()
         dur_embed = self.dur_embed(dur[:, :, None])
         encoder_out = self.encoder(txt_tokens, dur_embed)
@@ -51,11 +58,13 @@ def forward(self, txt_tokens, mel2ph, f0, energy=None, key_shift=None, speed=Non
         mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]])
         condition = torch.gather(encoder_out, 1, mel2ph_)
         return self.forward_variance_embedding(
-            condition, f0=f0, energy=energy, key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, **kwargs
+            condition, f0=f0, energy=energy, breathiness=breathiness,
+            key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id,
+            **kwargs
         )

     def forward_variance_embedding(
-            self, condition, f0, energy=None,
+            self, condition, f0, energy=None, breathiness=None,
             key_shift=None, speed=None,
             spk_embed_id=None,
             **kwargs
     ):
@@ -71,6 +80,10 @@ def forward_variance_embedding(
             energy_embed = self.energy_embed(energy[:, :, None])
             condition += energy_embed

+        if hparams.get('use_breathiness_embed', False):
+            breathiness_embed = self.breathiness_embed(breathiness[:, :, None])
+            condition += breathiness_embed
+
         if hparams.get('use_key_shift_embed', False):
             key_shift_embed = self.key_shift_embed(key_shift[:, :, None])
             condition += key_shift_embed
diff --git a/modules/toplevel.py b/modules/toplevel.py
index f6ac74cb9..0120328e0 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -38,10 +38,16 @@ def __init__(self, vocab_size, out_dims):
     def category(self):
         return 'acoustic'

-    def forward(self, txt_tokens, mel2ph, f0, energy=None, key_shift=None, speed=None,
-                spk_embed_id=None, gt_mel=None, infer=True, **kwargs):
-        condition = self.fs2(txt_tokens, mel2ph, f0, energy=energy, key_shift=key_shift, speed=speed,
-                             spk_embed_id=spk_embed_id, **kwargs)
+    def forward(
+            self, txt_tokens, mel2ph, f0, energy=None, breathiness=None,
+            key_shift=None, speed=None,
+            spk_embed_id=None, gt_mel=None, infer=True, **kwargs
+    ):
+        condition = self.fs2(
+            txt_tokens, mel2ph, f0, energy=energy, breathiness=breathiness,
+            key_shift=key_shift, speed=speed,
+            spk_embed_id=spk_embed_id, **kwargs
+        )
         if infer:
             mel = self.diffusion(condition, infer=True)
             mel *= ((mel2ph > 0).float()[:, :, None])
@@ -54,12 +60,16 @@ def forward(self, txt_tokens, mel2ph, f0, energy=None, key_shift=None, speed=Non
 class DiffSingerVariance(CategorizedModule):
     def __init__(self, vocab_size):
         super().__init__()
+        self.predict_dur = hparams['predict_dur']
+        self.predict_pitch = hparams['predict_pitch']
+        self.predict_energy = hparams['predict_energy']
+        self.predict_breathiness = hparams['predict_breathiness']
         self.fs2 = FastSpeech2Variance(
             vocab_size=vocab_size
         )
         self.lr = LengthRegulator()

-        if hparams['predict_pitch']:
+        if self.predict_pitch:
             pitch_hparams = hparams['pitch_prediction_args']
             self.base_pitch_embed = Linear(1, hparams['hidden_size'])
             diff_predictor_mode = pitch_hparams['diff_predictor_mode']
@@ -114,8 +124,10 @@ def __init__(self, vocab_size):
             #     n_chans=pitch_hparams['hidden_size']
             # )

-        if hparams['predict_energy']:
+        if self.predict_energy or self.predict_breathiness:
             self.pitch_embed = Linear(1, hparams['hidden_size'])
+
+        if self.predict_energy:
             energy_hparams = hparams['energy_prediction_args']
             self.energy_predictor = EnergyDiffusion(
                 vmin=10. ** (energy_hparams['db_vmin'] / 20.),
@@ -138,6 +150,29 @@ def __init__(self, vocab_size):
             #     kernel_size=energy_hparams['kernel_size']
             # )

+        if self.predict_breathiness:
+            breathiness_hparams = hparams['breathiness_prediction_args']
+            # self.breathiness_predictor = EnergyDiffusion(
+            #     vmin=10. ** (breathiness_hparams['db_vmin'] / 20.),
+            #     vmax=10. ** (breathiness_hparams['db_vmax'] / 20.),
+            #     repeat_bins=breathiness_hparams['num_repeat_bins'],
+            #     timesteps=hparams['timesteps'],
+            #     k_step=hparams['K_step'],
+            #     denoiser_type=hparams['diff_decoder_type'],
+            #     denoiser_args=(
+            #         breathiness_hparams['residual_layers'],
+            #         breathiness_hparams['residual_channels']
+            #     )
+            # )
+            self.breathiness_predictor = VariancePredictor(
+                in_dims=hparams['hidden_size'],
+                n_chans=breathiness_hparams['hidden_size'],
+                n_layers=breathiness_hparams['num_layers'],
+                dropout_rate=breathiness_hparams['dropout'],
+                padding=hparams['ffn_padding'],
+                kernel_size=breathiness_hparams['kernel_size']
+            )
+
     @property
     def category(self):
         return 'variance'
@@ -149,8 +184,8 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None,
             ph_dur=ph_dur, word_dur=word_dur, infer=infer
         )

-        if not hparams['predict_pitch'] and not hparams['predict_energy']:
-            return dur_pred_out, None, None
+        if not self.predict_pitch and not self.predict_energy and not self.predict_breathiness:
+            return dur_pred_out, None, None, None

         if mel2ph is None or hparams['dur_cascade']:
             # (extract mel2ph from dur_pred_out)
@@ -160,21 +195,28 @@ def forward(
         mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']])
         condition = torch.gather(encoder_out, 1, mel2ph_)

-        if hparams['predict_pitch']:
+        if self.predict_pitch:
             pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None])
             pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer)
         else:
             pitch_pred_out = None

-        if hparams['predict_energy']:
+        if self.predict_energy or self.predict_breathiness:
             if delta_pitch is None:
                 pitch = base_pitch + pitch_pred_out
             else:
                 pitch = base_pitch + delta_pitch
             pitch_embed = self.pitch_embed(pitch[:, :, None])
-            energy_cond = condition + pitch_embed
-            energy_pred_out = self.energy_predictor(energy_cond, energy, infer)
+            condition += pitch_embed
+
+        if self.predict_energy:
+            energy_pred_out = self.energy_predictor(condition, energy, infer)
         else:
             energy_pred_out = None

-        return dur_pred_out, pitch_pred_out, energy_pred_out
+        if self.predict_breathiness:
+            breathiness_pred_out = self.breathiness_predictor(condition)
+        else:
+            breathiness_pred_out = None
+
+        return dur_pred_out, pitch_pred_out, energy_pred_out, breathiness_pred_out
diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py
index e5dc138b6..f8c804cd5 100644
--- a/preprocessing/acoustic_binarizer.py
+++ b/preprocessing/acoustic_binarizer.py
@@ -21,7 +21,10 @@
 from modules.fastspeech.tts_modules import LengthRegulator
 from modules.vocoders.registry import VOCODERS
 from utils.binarizer_utils import (
-    get_mel2ph_torch, get_pitch_parselmouth, get_energy_librosa
+    get_mel2ph_torch,
+    get_pitch_parselmouth,
+    get_energy_librosa,
+    get_breathiness_pyworld
 )
 from utils.hparams import hparams
 from utils.phoneme_utils import build_phoneme_list
@@ -34,6 +37,7 @@
     'mel2ph',
     'f0',
     'energy',
+    'breathiness',
     'key_shift',
     'speed'
 ]
@@ -179,6 +183,11 @@ def process_item(self, item_name, meta_data, binarization_args):
             energy = get_energy_librosa(wav, length, hparams)
             processed_input['energy'] = energy

+        if hparams.get('use_breathiness_embed', False):
+            # get ground truth breathiness
+            breathiness = get_breathiness_pyworld(wav, gt_f0 * uv, length, hparams)
+            processed_input['breathiness'] = breathiness
+
         if hparams.get('use_key_shift_embed', False):
             processed_input['key_shift'] = 0.
diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py
index cd8847a79..38a56255e 100644
--- a/preprocessing/variance_binarizer.py
+++ b/preprocessing/variance_binarizer.py
@@ -14,7 +14,8 @@
 from utils.binarizer_utils import (
     get_mel2ph_torch,
     get_pitch_parselmouth,
-    get_energy_librosa
+    get_energy_librosa,
+    get_breathiness_pyworld
 )
 from utils.hparams import hparams

@@ -29,6 +30,7 @@
     'base_pitch',  # interpolated and smoothed frame-level MIDI pitch, float32[T_t,]
     'delta_pitch',  # delta_pitch = actual_pitch - base_pitch, in semitones, float32[T_t,]
     'energy',  # float32[T_t,]
+    'breathiness',  # float32[T_t,]
 ]

@@ -152,6 +154,11 @@ def process_item(self, item_name, meta_data, binarization_args):
             energy = get_energy_librosa(waveform, length, hparams)
             processed_input['energy'] = energy.astype(np.float32)

+        # Below: extract breathiness
+        if hparams['predict_breathiness']:
+            breathiness = get_breathiness_pyworld(waveform, f0 * uv, length, hparams)
+            processed_input['breathiness'] = breathiness.astype(np.float32)
+
         return processed_input

     def arrange_data_augmentation(self, data_iterator):
diff --git a/requirements.txt b/requirements.txt
index 2a301b25e..b39920922 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,7 @@
 click
 h5py
 librosa<0.10.0
-lightning>=2.0.0
+lightning==2.0.2
 matplotlib
 MonkeyType==23.3.0
 numpy==1.23.5
diff --git a/training/acoustic_task.py b/training/acoustic_task.py
index a35a4928c..d685e99f5 100644
--- a/training/acoustic_task.py
+++ b/training/acoustic_task.py
@@ -42,6 +42,8 @@ def collater(self, samples):
         })
         if hparams.get('use_energy_embed', False):
             batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0)
+        if hparams.get('use_breathiness_embed', False):
+            batch['breathiness'] = utils.collate_nd([s['breathiness'] for s in samples], 0.0)
         if hparams.get('use_key_shift_embed', False):
             batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None]
         if hparams.get('use_speed_embed', False):
@@ -80,6 +82,7 @@ def run_model(self, sample, infer=False):
         mel2ph = sample['mel2ph']  # [B, T_s]
         f0 = sample['f0']
         energy = sample.get('energy')
+        breathiness = sample.get('breathiness')
         key_shift = sample.get('key_shift')
         speed = sample.get('speed')

@@ -87,10 +90,13 @@ def run_model(self, sample, infer=False):
             spk_embed_id = sample['spk_ids']
         else:
             spk_embed_id = None
-        output = self.model(txt_tokens, mel2ph=mel2ph, f0=f0, energy=energy,
-                            key_shift=key_shift, speed=speed,
-                            spk_embed_id=spk_embed_id,
-                            gt_mel=target, infer=infer)
+        output = self.model(
+            txt_tokens, mel2ph=mel2ph,
+            f0=f0, energy=energy, breathiness=breathiness,
+            key_shift=key_shift, speed=speed,
+            spk_embed_id=spk_embed_id,
+            gt_mel=target, infer=infer
+        )

         if infer:
             mel_pred = output
diff --git a/training/variance_task.py b/training/variance_task.py
index b4dc174c6..b1ecba013 100644
--- a/training/variance_task.py
+++ b/training/variance_task.py
@@ -9,7 +9,7 @@
 from basics.base_dataset import BaseDataset
 from basics.base_task import BaseTask
 from modules.losses.diff_loss import DiffusionNoiseLoss
-from modules.losses.variance_loss import DurationLoss
+from modules.losses.variance_loss import DurationLoss, CurveLoss1d
 from modules.toplevel import DiffSingerVariance
 from utils.hparams import hparams
 from utils.plot import dur_to_figure, curve_to_figure
@@ -40,6 +40,9 @@ def collater(self, samples):
         if hparams['predict_energy']:
             energy = utils.collate_nd([s['energy'] for s in samples], 0)
             batch['energy'] = energy
+        if hparams['predict_breathiness']:
+            breathiness = utils.collate_nd([s['breathiness'] for s in samples], 0)
+            batch['breathiness'] = breathiness
         if hparams['use_spk_id']:
             spk_ids = torch.LongTensor([s['spk_id'] for s in samples])
             batch['spk_ids'] = spk_ids
@@ -57,6 +60,8 @@ def __init__(self):
             self.lambda_pitch_loss = hparams['lambda_pitch_loss']
         if hparams['predict_energy']:
             self.lambda_energy_loss = hparams['lambda_energy_loss']
+        if hparams['predict_breathiness']:
+            self.lambda_breathiness_loss = hparams['lambda_breathiness_loss']

     def build_model(self):
         return DiffSingerVariance(
@@ -75,14 +80,17 @@ def build_losses(self):
             lambda_sdur=dur_hparams['lambda_sdur_loss']
         )
         if hparams['predict_pitch']:
-            pitch_hparams = hparams['pitch_prediction_args']
             self.pitch_loss = DiffusionNoiseLoss(
-                loss_type=pitch_hparams['diff_loss_type'],
+                loss_type=hparams['diff_loss_type'],
             )
         if hparams['predict_energy']:
-            energy_hparams = hparams['energy_prediction_args']
             self.energy_loss = DiffusionNoiseLoss(
-                loss_type=energy_hparams['diff_loss_type'],
+                loss_type=hparams['diff_loss_type'],
+            )
+        if hparams['predict_breathiness']:
+            breathiness_hparams = hparams['breathiness_prediction_args']
+            self.breathiness_loss = CurveLoss1d(
+                loss_type=breathiness_hparams['loss_type'],
             )

     def run_model(self, sample, infer=False):
@@ -94,14 +102,15 @@ def run_model(self, sample, infer=False):
         base_pitch = sample['base_pitch']  # [B, T_t]
         delta_pitch = sample['delta_pitch']  # [B, T_t]
         energy = sample.get('energy')  # [B, T_t]
+        breathiness = sample.get('breathiness')  # [B, T_t]

         output = self.model(txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur,
                             mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch,
                             energy=energy, infer=infer)

-        dur_pred, pitch_pred, energy_pred = output
+        dur_pred, pitch_pred, energy_pred, breathiness_pred = output
         if infer:
-            return dur_pred, pitch_pred, energy_pred
+            return dur_pred, pitch_pred, energy_pred, breathiness_pred
         else:
             losses = {}
             if dur_pred is not None:
@@ -117,6 +126,10 @@ def run_model(self, sample, infer=False):
                 losses['energy_loss'] = self.lambda_energy_loss * self.energy_loss(
                     energy_x_recon, energy_noise, nonpadding=nonpadding.unsqueeze(-1)
                 )
+            if breathiness_pred is not None:
+                losses['breathiness_loss'] = self.lambda_breathiness_loss * self.breathiness_loss(
+                    breathiness_pred, breathiness, mask=nonpadding
+                )
             return losses

     def _validation_step(self, sample, batch_idx):
@@ -128,7 +141,7 @@ def _validation_step(self, sample, batch_idx):

         if batch_idx < hparams['num_valid_plots'] \
                 and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0:
-            dur_pred, pitch_pred, energy_pred = self.run_model(sample, infer=True)
+            dur_pred, pitch_pred, energy_pred, breathiness_pred = self.run_model(sample, infer=True)
             if dur_pred is not None:
                 self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens'])
             if pitch_pred is not None:
@@ -150,6 +163,14 @@ def _validation_step(self, sample, batch_idx):
                     pred_curve=energy_pred,
                     curve_name='energy'
                 )
+            if breathiness_pred is not None:
+                breathiness = sample['breathiness']
+                self.plot_curve(
+                    batch_idx,
+                    gt_curve=breathiness,
+                    pred_curve=breathiness_pred,
+                    curve_name='breathiness'
+                )

         return outputs, sample['size']
diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py
index 70e2fe486..ac4e88a8c 100644
--- a/utils/binarizer_utils.py
+++ b/utils/binarizer_utils.py
@@ -2,12 +2,13 @@

 import librosa
 import torch
+import numpy as np
+import pyworld as pw

 warnings.filterwarnings("ignore")

 import parselmouth
 from utils.pitch_utils import interp_f0
-import numpy as np


 @torch.no_grad()
@@ -77,3 +78,36 @@ def get_energy_librosa(wav_data, length, hparams):
     energy = librosa.feature.rms(y=wav_data, frame_length=win_size, hop_length=hop_size)[0]
     energy = pad_frames(energy, hop_size, wav_data.shape[0], length)
     return energy
+
+
+def get_breathiness_pyworld(wav_data, f0, length, hparams):
+    """
+
+    :param wav_data: [T]
+    :param f0: reference f0
+    :param length: Expected number of frames
+    :param hparams:
+    :return: breathiness
+    """
+    sample_rate = hparams['audio_sample_rate']
+    hop_size = hparams['hop_size']
+    fft_size = hparams['fft_size']
+
+    x = wav_data.astype(np.double)
+    wav_frames = (x.shape[0] + hop_size - 1) // hop_size
+    f0_frames = f0.shape[0]
+    if f0_frames < wav_frames:
+        f0 = np.pad(f0, [[0, wav_frames - f0_frames]], mode='constant')
+    elif f0_frames > wav_frames:
+        f0 = f0[:wav_frames]
+
+    time_step = hop_size / sample_rate
+    t = np.arange(0, wav_frames) * time_step
+    sp = pw.cheaptrick(x, f0, t, sample_rate, fft_size=fft_size)  # extract smoothed spectrogram
+    ap = pw.d4c(x, f0, t, sample_rate, fft_size=fft_size)  # extract aperiodicity
+    y = pw.synthesize(
+        f0, sp * ap * ap, np.ones_like(ap), sample_rate,
+        frame_period=time_step * 1000
+    )  # synthesize the aperiodic part using the parameters
+    breathiness = get_energy_librosa(y, length, hparams)
+    return breathiness

From 23fb6e77cd1ceda4cc8a270ce968f2761db8016d Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 30 Apr 2023 14:56:35 +0800
Subject: [PATCH 333/475] Update requirements.txt

---
 requirements.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index b39920922..f32835f00 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,7 @@
 click
 h5py
 librosa<0.10.0
-lightning==2.0.2
+lightning>=2.0.0
 matplotlib
 MonkeyType==23.3.0
 numpy==1.23.5
@@ -13,6 +13,7 @@
 onnx==1.13.1
 onnxsim==0.4.17
 praat-parselmouth==0.4.3
 protobuf==3.20.3
+pyworld==0.3.3
 PyYAML
 resampy
 scipy
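Note: `get_breathiness_pyworld`, added in PATCH 332 above (with `pyworld` pinned in PATCH 333), measures breathiness as the RMS energy of the aperiodic component of the voice. Compressed to its essence, and assuming a float64 mono signal `x` with a frame-aligned `f0` array that is zero on unvoiced frames (WORLD treats zero f0 as unvoiced):

    import numpy as np
    import pyworld as pw

    def aperiodic_part(x: np.ndarray, f0: np.ndarray, sr: int, hop: int, fft: int) -> np.ndarray:
        t = np.arange(len(f0)) * hop / sr                # frame times in seconds
        sp = pw.cheaptrick(x, f0, t, sr, fft_size=fft)   # smoothed spectral envelope
        ap = pw.d4c(x, f0, t, sr, fft_size=fft)          # band aperiodicity in [0, 1]
        # Scale the envelope by ap^2 and mark every frame fully aperiodic, so that
        # synthesis reproduces only the noise ("breath") component of the voice.
        return pw.synthesize(f0, sp * ap * ap, np.ones_like(ap), sr,
                             frame_period=1000 * hop / sr)

The frame-wise RMS of the returned waveform, computed exactly like the energy curve, is the breathiness curve.
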
midi_smooth_width: 0.2 # in seconds From 10d0013571d2ba49982d69311390c8bdd10f726e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 30 Apr 2023 18:59:39 +0800 Subject: [PATCH 335/475] Fix wrong uv mask --- preprocessing/acoustic_binarizer.py | 2 +- preprocessing/variance_binarizer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index f8c804cd5..6baf5d92e 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -185,7 +185,7 @@ def process_item(self, item_name, meta_data, binarization_args): if hparams.get('use_breathiness_embed', False): # get ground truth energy - breathiness = get_breathiness_pyworld(wav, gt_f0 * uv, length, hparams) + breathiness = get_breathiness_pyworld(wav, gt_f0 * ~uv, length, hparams) processed_input['breathiness'] = breathiness if hparams.get('use_key_shift_embed', False): diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 38a56255e..2300d2ebe 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -156,7 +156,7 @@ def process_item(self, item_name, meta_data, binarization_args): # Below: extract breathiness if hparams['predict_breathiness']: - breathiness = get_breathiness_pyworld(waveform, f0 * uv, length, hparams) + breathiness = get_breathiness_pyworld(waveform, f0 * ~uv, length, hparams) processed_input['breathiness'] = breathiness.astype(np.float32) return processed_input From ab7b129ae20e267d92da5adc97d7c47707747b96 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 30 Apr 2023 19:58:02 +0800 Subject: [PATCH 336/475] Add norm and denorm for variance predictor and loss --- modules/fastspeech/tts_modules.py | 8 +++++++- modules/losses/variance_loss.py | 9 +++++++-- modules/toplevel.py | 2 ++ training/variance_task.py | 2 ++ 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 6814b5408..d8e7c2fa3 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -139,7 +139,8 @@ def forward(self, xs, x_masks=None, infer=True): class VariancePredictor(torch.nn.Module): - def __init__(self, in_dims, n_layers=5, n_chans=512, kernel_size=5, + def __init__(self, vmin, vmax, in_dims, + n_layers=5, n_chans=512, kernel_size=5, dropout_rate=0.1, padding='SAME'): """Initialize variance predictor module. Args: @@ -151,6 +152,8 @@ def __init__(self, in_dims, n_layers=5, n_chans=512, kernel_size=5, """ super(VariancePredictor, self).__init__() + self.vmin = vmin + self.vmax = vmax self.conv = torch.nn.ModuleList() self.kernel_size = kernel_size self.padding = padding @@ -169,6 +172,9 @@ def __init__(self, in_dims, n_layers=5, n_chans=512, kernel_size=5, self.embed_positions = SinusoidalPositionalEmbedding(in_dims, 0, init_size=4096) self.pos_embed_alpha = nn.Parameter(torch.Tensor([1])) + def out2value(self, xs): + return (xs + 1) / 2 * (self.vmax - self.vmin) + self.vmin + def forward(self, xs): """ :param xs: [B, T, H] diff --git a/modules/losses/variance_loss.py b/modules/losses/variance_loss.py index deb3d958f..78d7d3f29 100644 --- a/modules/losses/variance_loss.py +++ b/modules/losses/variance_loss.py @@ -61,8 +61,10 @@ class CurveLoss1d(nn.Module): Loss module for 1d parameter curve with non-padding masks. 
""" - def __init__(self, loss_type): + def __init__(self, vmin, vmax, loss_type): super().__init__() + self.vmin = vmin + self.vmax = vmax self.loss_type = loss_type if self.loss_type == 'mse': self.loss = nn.MSELoss(reduction='none') @@ -71,8 +73,11 @@ def __init__(self, loss_type): else: raise NotImplementedError() + def norm_target(self, c_gt): + return (c_gt - self.vmin) / (self.vmax - self.vmin) * 2 - 1 + def forward(self, c_pred, c_gt, mask=None): - loss = self.loss(c_pred, c_gt) + loss = self.loss(c_pred, self.norm_target(c_gt)) if mask is not None: loss *= mask return loss.mean() diff --git a/modules/toplevel.py b/modules/toplevel.py index 0120328e0..45f56db6d 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -165,6 +165,8 @@ def __init__(self, vocab_size): # ) # ) self.breathiness_predictor = VariancePredictor( + vmin=10. ** (breathiness_hparams['db_vmin'] / 20.), + vmax=10. ** (breathiness_hparams['db_vmax'] / 20.), in_dims=hparams['hidden_size'], n_chans=breathiness_hparams['hidden_size'], n_layers=breathiness_hparams['num_layers'], diff --git a/training/variance_task.py b/training/variance_task.py index b1ecba013..98c4b34a1 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -90,6 +90,8 @@ def build_losses(self): if hparams['predict_breathiness']: breathiness_hparams = hparams['breathiness_prediction_args'] self.breathiness_loss = CurveLoss1d( + vmin=10. ** (breathiness_hparams['db_vmin'] / 20.), + vmax=10. ** (breathiness_hparams['db_vmax'] / 20.), loss_type=breathiness_hparams['loss_type'], ) From 5e746ded9eb941b4ba6a623c32647c7f09190ebd Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 30 Apr 2023 20:01:07 +0800 Subject: [PATCH 337/475] Add missing call to `self.out2value()` --- modules/fastspeech/tts_modules.py | 5 ++++- modules/toplevel.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index d8e7c2fa3..014ade5dd 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -175,9 +175,10 @@ def __init__(self, vmin, vmax, in_dims, def out2value(self, xs): return (xs + 1) / 2 * (self.vmax - self.vmin) + self.vmin - def forward(self, xs): + def forward(self, xs, infer=True): """ :param xs: [B, T, H] + :param infer: whether inference :return: [B, T] """ positions = self.pos_embed_alpha * self.embed_positions(xs[..., 0]) @@ -186,6 +187,8 @@ def forward(self, xs): for f in self.conv: xs = f(xs) # (B, C, Tmax) xs = self.linear(xs.transpose(1, -1)).squeeze(-1) # (B, Tmax) + if infer: + xs = self.out2value(xs) return xs diff --git a/modules/toplevel.py b/modules/toplevel.py index 45f56db6d..9bcb7276f 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -217,7 +217,7 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, energy_pred_out = None if self.predict_breathiness: - breathiness_pred_out = self.breathiness_predictor(condition) + breathiness_pred_out = self.breathiness_predictor(condition, infer) else: breathiness_pred_out = None From 3de58035d96af97c1271ef09e1e7f9f667be5dd2 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 00:40:30 +0800 Subject: [PATCH 338/475] Support multiple features in `GaussianDiffusion` and `WaveNet` --- configs/variance.yaml | 3 -- deployment/modules/diffusion.py | 4 +- modules/diffusion/ddpm.py | 75 ++++++++++++++++++++++++++------- modules/diffusion/wavenet.py | 12 +++--- modules/toplevel.py | 46 +++++++++++--------- 5 files changed, 92 insertions(+), 48 
From 3de58035d96af97c1271ef09e1e7f9f667be5dd2 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 1 May 2023 00:40:30 +0800
Subject: [PATCH 338/475] Support multiple features in `GaussianDiffusion` and
 `WaveNet`

---
 configs/variance.yaml           |  3 --
 deployment/modules/diffusion.py |  4 +-
 modules/diffusion/ddpm.py       | 75 ++++++++++++++++++++++++-------
 modules/diffusion/wavenet.py    | 12 +++---
 modules/toplevel.py             | 46 +++++++++++---------
 5 files changed, 92 insertions(+), 48 deletions(-)

diff --git a/configs/variance.yaml b/configs/variance.yaml
index cf0799d8e..987512dd0 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -65,7 +65,6 @@ pitch_prediction_args:
   hidden_size: 512
   residual_layers: 20
   residual_channels: 256
-  diff_loss_type: l2

 predict_energy: false
 lambda_energy_loss: 1.0
@@ -80,7 +79,6 @@ energy_prediction_args:
   num_repeat_bins: 64
   residual_layers: 20
   residual_channels: 256
-  diff_loss_type: l2

 predict_breathiness: false
 lambda_breathiness_loss: 1.0
@@ -95,7 +93,6 @@ breathiness_prediction_args:
   num_repeat_bins: 64
   residual_layers: 20
   residual_channels: 256
-  diff_loss_type: l2

 residual_layers: 10
 residual_channels: 256
diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index e6499f0c3..b0f536e94 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -70,7 +70,7 @@ def forward(self, condition, speedup):
         n_frames = condition.shape[2]
         step_range = torch.arange(0, self.k_step, speedup, dtype=torch.long, device=device).flip(0)[:, None]
-        x = torch.randn((1, 1, self.out_dims, n_frames), device=device)
+        x = torch.randn((1, self.num_feats, self.out_dims, n_frames), device=device)

         if speedup > 1:
             plms_noise_stage: int = 0
@@ -93,6 +93,6 @@ def forward(self, condition, speedup):
             for t in step_range:
                 x = self.p_sample(x, t, cond=condition)

-        x = x.squeeze(1).permute(0, 2, 1)  # [B, T, M]
+        x = x.transpose(2, 3).squeeze(1)  # [B, T, M] or [B, F, T, M]
         x = self.denorm_spec(x)
         return x
diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index f62b1480b..c87a3fd93 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -66,12 +66,13 @@ def cosine_beta_schedule(timesteps, s=0.008):


 class GaussianDiffusion(nn.Module):
-    def __init__(self, out_dims, timesteps=1000, k_step=1000,
+    def __init__(self, out_dims, num_feats=1, timesteps=1000, k_step=1000,
                  denoiser_type=None, denoiser_args=None, betas=None,
                  spec_min=None, spec_max=None):
         super().__init__()
-        self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type]((out_dims, *denoiser_args))
+        self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type]((out_dims, num_feats, *denoiser_args))
         self.out_dims = out_dims
+        self.num_feats = num_feats

         if exists(betas):
             betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas
@@ -82,10 +83,7 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000,
         alphas_cumprod = np.cumprod(alphas, axis=0)
         alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])

-        timesteps, = betas.shape
-        self.num_timesteps = int(timesteps)
         self.k_step = k_step
-
         self.noise_list = deque(maxlen=4)

         to_torch = partial(torch.tensor, dtype=torch.float32)
@@ -112,8 +110,10 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000,
         self.register_buffer('posterior_mean_coef2', to_torch(
             (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod)))

-        self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :out_dims])
-        self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :out_dims])
+        # spec: [B, T, M] or [B, F, T, M]
+        # spec_min and spec_max: [1, 1, M] or [1, 1, F, M] => transpose(-3, -2) => [1, 1, M] or [1, F, 1, M]
+        self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :out_dims].transpose(-3, -2))
+        self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :out_dims].transpose(-3, -2))

     def q_mean_variance(self, x_start, t):
         mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start
@@ -213,13 +213,15 @@ def forward(self, condition, gt_spec=None, infer=True):
         b, device = condition.shape[0], condition.device

         if not infer:
-            spec = self.norm_spec(gt_spec)
+            # gt_spec: [B, T, M] or [B, F, T, M]
+            spec = self.norm_spec(gt_spec).transpose(1, 2)  # [B, M, T] or [B, F, M, T]
+            if self.num_feats == 1:
+                spec = spec[:, None, :, :]  # [B, F=1, M, T]
             t = torch.randint(0, self.k_step, (b,), device=device).long()
-            norm_spec = spec.transpose(1, 2)[:, None, :, :]  # [B, 1, M, T]
-            return self.p_losses(norm_spec, t, cond=cond)
+            return self.p_losses(spec, t, cond=cond)
         else:
             t = self.k_step
-            shape = (cond.shape[0], 1, self.out_dims, cond.shape[2])
+            shape = (cond.shape[0], self.num_feats, self.out_dims, cond.shape[2])
             x = torch.randn(shape, device=device)
             if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1:
                 algorithm = hparams.get('diff_accelerator', 'dpm-solver')
@@ -278,7 +280,7 @@ def wrapped(x, t, **kwargs):
             else:
                 for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=not hparams['infer']):
                     x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond)
-        x = x.squeeze(1).transpose(1, 2)  # [B, T, M]
+        x = x.transpose(2, 3).squeeze(1)  # [B, F, M, T] => [B, T, M] or [B, F, T, M]
         return self.denorm_spec(x)

     def norm_spec(self, x):
@@ -289,27 +291,56 @@ def denorm_spec(self, x):


 class RepetitiveDiffusion(GaussianDiffusion):
-    def __init__(self, vmin, vmax, repeat_bins,
+    def __init__(self, vmin: float | list, vmax: float | list, repeat_bins: int,
                  timesteps=1000, k_step=1000,
                  denoiser_type=None, denoiser_args=None,
                  betas=None):
+        assert (isinstance(vmin, float) and isinstance(vmax, float)) or len(vmin) == len(vmax)
+        num_feats = 1 if isinstance(vmin, float) else len(vmin)
         self.vmin = vmin
         self.vmax = vmax
         self.repeat_bins = repeat_bins
         super().__init__(
-            repeat_bins, timesteps=timesteps, k_step=k_step,
+            out_dims=repeat_bins, num_feats=num_feats,
+            timesteps=timesteps, k_step=k_step,
             denoiser_type=denoiser_type, denoiser_args=denoiser_args,
-            betas=betas, spec_min=[vmin], spec_max=[vmax]
+            betas=betas, spec_min=[[v] for v in vmin], spec_max=[[v] for v in vmax]
         )

     def norm_spec(self, x):
-        return super().norm_spec(x.unsqueeze(-1).repeat([1, 1, self.repeat_bins]))
+        """
+
+        :param x: [B, T] or [B, F, T]
+        :return [B, T, R] or [B, F, T, R]
+        """
+        if self.num_feats == 1:
+            repeats = [1, 1, self.repeat_bins]
+        else:
+            repeats = [1, 1, 1, self.repeat_bins]
+        return super().norm_spec(x.unsqueeze(-1).repeat(repeats))

     def denorm_spec(self, x):
+        """
+
+        :param x: [B, T, R] or [B, F, T, R]
+        :return [B, T] or [B, F, T]
+        """
         return super().denorm_spec(x).mean(dim=-1)


 class PitchDiffusion(RepetitiveDiffusion):
+    def __init__(self, vmin: float, vmax: float, repeat_bins,
+                 timesteps=1000, k_step=1000,
+                 denoiser_type=None, denoiser_args=None,
+                 betas=None):
+        assert isinstance(vmin, float) and isinstance(vmax, float)
+        super().__init__(
+            vmin=vmin, vmax=vmax, repeat_bins=repeat_bins,
+            timesteps=timesteps, k_step=k_step,
+            denoiser_type=denoiser_type, denoiser_args=denoiser_args,
+            betas=betas
+        )
+
     def norm_spec(self, x):
         return super().norm_spec(x.clamp(min=self.vmin, max=self.vmax))

@@ -318,6 +349,18 @@ def denorm_spec(self, x):


 class EnergyDiffusion(RepetitiveDiffusion):
+    def __init__(self, vmin: float, vmax: float, repeat_bins,
+                 timesteps=1000, k_step=1000,
+                 denoiser_type=None, denoiser_args=None,
+                 betas=None):
+        assert isinstance(vmin, float) and isinstance(vmax, float)
+        super().__init__(
+            vmin=vmin, vmax=vmax, repeat_bins=repeat_bins,
+            timesteps=timesteps, k_step=k_step,
+            denoiser_type=denoiser_type, denoiser_args=denoiser_args,
+            betas=betas
+        )
+
     def norm_spec(self, x):
         return super().norm_spec(x.clamp(min=0., max=1.))
diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py
index 6c3160bc5..461cfb5f4 100644
--- a/modules/diffusion/wavenet.py
+++ b/modules/diffusion/wavenet.py
@@ -4,7 +4,6 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-from torch.nn import Mish

 from utils.hparams import hparams

@@ -64,13 +63,14 @@ def forward(self, x, conditioner, diffusion_step):


 class WaveNet(nn.Module):
-    def __init__(self, in_dims, n_layers, n_chans):
+    def __init__(self, in_dims, n_feats, n_layers, n_chans):
         super().__init__()
+        self.n_feats = n_feats
         self.input_projection = Conv1d(in_dims, n_chans, 1)
         self.diffusion_embedding = SinusoidalPosEmb(n_chans)
         self.mlp = nn.Sequential(
             nn.Linear(n_chans, n_chans * 4),
-            Mish(),
+            nn.Mish(),
             nn.Linear(n_chans * 4, n_chans)
         )
         self.residual_layers = nn.ModuleList([
@@ -87,12 +87,12 @@ def __init__(self, in_dims, n_layers, n_chans):

     def forward(self, spec, diffusion_step, cond):
         """
-        :param spec: [B, 1, M, T]
+        :param spec: [B, F, M, T]
         :param diffusion_step: [B, 1]
         :param cond: [B, M, T]
         :return:
         """
-        x = spec.squeeze(1)
+        x = spec.flatten(start_dim=1, end_dim=2)  # [B, F x M, T]
         x = self.input_projection(x)  # [B, residual_channel, T]

         x = F.relu(x)
@@ -107,4 +107,4 @@ def forward(self, spec, diffusion_step, cond):
         x = self.skip_projection(x)
         x = F.relu(x)
         x = self.output_projection(x)  # [B, mel_bins, T]
-        return x[:, None, :, :]
+        return x.unflatten(dim=1, sizes=(self.n_feats, -1))
diff --git a/modules/toplevel.py b/modules/toplevel.py
index 9bcb7276f..6047b717c 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -23,6 +23,7 @@ def __init__(self, vocab_size, out_dims):
         )
         self.diffusion = GaussianDiffusion(
             out_dims=out_dims,
+            num_feats=1,
             timesteps=hparams['timesteps'],
             k_step=hparams['K_step'],
             denoiser_type=hparams['diff_decoder_type'],
@@ -152,35 +153,38 @@ def __init__(self, vocab_size):

         if self.predict_breathiness:
             breathiness_hparams = hparams['breathiness_prediction_args']
-            # self.breathiness_predictor = EnergyDiffusion(
-            #     vmin=10. ** (breathiness_hparams['db_vmin'] / 20.),
-            #     vmax=10. ** (breathiness_hparams['db_vmax'] / 20.),
-            #     repeat_bins=breathiness_hparams['num_repeat_bins'],
-            #     timesteps=hparams['timesteps'],
-            #     k_step=hparams['K_step'],
-            #     denoiser_type=hparams['diff_decoder_type'],
-            #     denoiser_args=(
-            #         breathiness_hparams['residual_layers'],
-            #         breathiness_hparams['residual_channels']
-            #     )
-            # )
-            self.breathiness_predictor = VariancePredictor(
+            self.breathiness_predictor = EnergyDiffusion(
                 vmin=10. ** (breathiness_hparams['db_vmin'] / 20.),
                 vmax=10. ** (breathiness_hparams['db_vmax'] / 20.),
-                in_dims=hparams['hidden_size'],
-                n_chans=breathiness_hparams['hidden_size'],
-                n_layers=breathiness_hparams['num_layers'],
-                dropout_rate=breathiness_hparams['dropout'],
-                padding=hparams['ffn_padding'],
-                kernel_size=breathiness_hparams['kernel_size']
+                repeat_bins=breathiness_hparams['num_repeat_bins'],
+                timesteps=hparams['timesteps'],
+                k_step=hparams['K_step'],
+                denoiser_type=hparams['diff_decoder_type'],
+                denoiser_args=(
+                    breathiness_hparams['residual_layers'],
+                    breathiness_hparams['residual_channels']
+                )
             )
+            # self.breathiness_predictor = VariancePredictor(
+            #     vmin=10. ** (breathiness_hparams['db_vmin'] / 20.),
+            #     vmax=10. ** (breathiness_hparams['db_vmax'] / 20.),
+            #     in_dims=hparams['hidden_size'],
+            #     n_chans=breathiness_hparams['hidden_size'],
+            #     n_layers=breathiness_hparams['num_layers'],
+            #     dropout_rate=breathiness_hparams['dropout'],
+            #     padding=hparams['ffn_padding'],
+            #     kernel_size=breathiness_hparams['kernel_size']
+            # )

     @property
     def category(self):
         return 'variance'

-    def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None,
-                mel2ph=None, base_pitch=None, delta_pitch=None, energy=None, infer=True):
+    def forward(
+            self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None,
+            base_pitch=None, delta_pitch=None, energy=None, breathiness=None,
+            infer=True
+    ):
         encoder_out, dur_pred_out = self.fs2(
             txt_tokens, midi=midi, ph2word=ph2word,
             ph_dur=ph_dur, word_dur=word_dur, infer=infer

From 8c18b661454ad334d15a206e19c10c9ff0100e5e Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 1 May 2023 01:29:59 +0800
Subject: [PATCH 339/475] Fix breathiness dtype mismatch

---
 preprocessing/acoustic_binarizer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py
index 6baf5d92e..8ff8a7f59 100644
--- a/preprocessing/acoustic_binarizer.py
+++ b/preprocessing/acoustic_binarizer.py
@@ -181,12 +181,12 @@ def process_item(self, item_name, meta_data, binarization_args):
         if hparams.get('use_energy_embed', False):
             # get ground truth energy
             energy = get_energy_librosa(wav, length, hparams)
-            processed_input['energy'] = energy
+            processed_input['energy'] = energy.astype(np.float32)

         if hparams.get('use_breathiness_embed', False):
             # get ground truth breathiness
             breathiness = get_breathiness_pyworld(wav, gt_f0 * ~uv, length, hparams)
-            processed_input['breathiness'] = breathiness
+            processed_input['breathiness'] = breathiness.astype(np.float32)
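Note: the dtype fix above matters because `torch.from_numpy` preserves the numpy dtype: pyworld works in double precision, so without the cast the curves would be collated into float64 tensors and rejected by the model's float32 layers. A minimal illustration:

    import numpy as np
    import torch

    curve64 = np.zeros(10)                   # float64, as produced by double-precision DSP
    assert torch.from_numpy(curve64).dtype == torch.float64
    curve32 = curve64.astype(np.float32)     # the cast added in this patch
    assert torch.from_numpy(curve32).dtype == torch.float32
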
From 438fc257e7b0bdd267ce97a44667d39a61a98cae Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 1 May 2023 02:57:43 +0800
Subject: [PATCH 340/475] Implement `MultiVarianceDiffusion` for joint
 prediction

---
 configs/variance.yaml     |  65 +++++---------
 modules/diffusion/ddpm.py |  76 ++++++++++++++--
 modules/toplevel.py       | 184 +++++++++++++++-----------------------
 training/variance_task.py |  77 +++++++---------
 4 files changed, 195 insertions(+), 207 deletions(-)

diff --git a/configs/variance.yaml b/configs/variance.yaml
index 987512dd0..e56caa1bc 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -30,20 +30,17 @@ dictionary: dictionaries/opencpop-extension.txt

 use_spk_id: false

-K_step: 1000
-timesteps: 1000
-max_beta: 0.02
 rel_pos: true
-diff_accelerator: dpm-solver
-pndm_speedup: 10
-
 hidden_size: 256

 predict_dur: true
+predict_pitch: true
+predict_energy: false
+predict_breathiness: true
+
 dur_cascade: false
-lambda_dur_loss: 1.0
 dur_prediction_args:
-  hidden_size: 384
+  hidden_size: 512
   dropout: 0.1
   num_layers: 5
   kernel_size: 3
@@ -53,53 +50,37 @@ dur_prediction_args:
   lambda_wdur_loss: 1.0
   lambda_sdur_loss: 3.0

-predict_pitch: true
-pitch_cascade: true
-lambda_pitch_loss: 1.0
 pitch_prediction_args:
-  diff_predictor_mode: repeat
-  pitch_delta_vmin: -12  # -12.75
-  pitch_delta_vmax: 12  # 12.75
+  pitch_delta_vmin: -12.0  # -12.75
+  pitch_delta_vmax: 12.0  # 12.75
   num_pitch_bins: 64
   deviation: 0.25
-  hidden_size: 512
   residual_layers: 20
   residual_channels: 256

-predict_energy: false
-lambda_energy_loss: 1.0
-energy_prediction_args:
-  hidden_size: 512
-  dropout: 0.1
-  num_layers: 5
-  kernel_size: 3
-  loss_type: mse
-  db_vmin: -72.0
-  db_vmax: -20.0
-  num_repeat_bins: 64
-  residual_layers: 20
-  residual_channels: 256
+energy_db_min: -72.0
+energy_db_max: -12.0

-predict_breathiness: false
-lambda_breathiness_loss: 1.0
-breathiness_prediction_args:
-  hidden_size: 512
-  dropout: 0.1
-  num_layers: 5
-  kernel_size: 3
-  loss_type: mse
-  db_vmin: -72.0
-  db_vmax: -20.0
-  num_repeat_bins: 64
+breathiness_db_min: -72.0
+breathiness_db_max: -20.0
+
+variances_prediction_args:
+  repeat_bins: 64
   residual_layers: 20
   residual_channels: 256

-residual_layers: 10
-residual_channels: 256
+lambda_dur_loss: 1.0
+lambda_pitch_loss: 1.0
+lambda_variances_loss: 1.0
+
 dilation_cycle_length: 5  # *
+schedule_type: 'linear'
+K_step: 1000
+timesteps: 1000
+max_beta: 0.02
 diff_decoder_type: 'wavenet'
 diff_loss_type: l2
-schedule_type: 'linear'
+diff_accelerator: dpm-solver
+pndm_speedup: 10

 # train and eval
 num_sanity_val_steps: 1
diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index c87a3fd93..604272024 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -1,6 +1,7 @@
 from collections import deque
 from functools import partial
 from inspect import isfunction
+from typing import List, Tuple

 import librosa.sequence
 import numpy as np
@@ -112,8 +113,13 @@ def __init__(self, out_dims, num_feats=1, timesteps=1000, k_step=1000,

         # spec: [B, T, M] or [B, F, T, M]
         # spec_min and spec_max: [1, 1, M] or [1, 1, F, M] => transpose(-3, -2) => [1, 1, M] or [1, F, 1, M]
-        self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :out_dims].transpose(-3, -2))
-        self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :out_dims].transpose(-3, -2))
+        spec_min = torch.FloatTensor(spec_min)[None, None, :out_dims].transpose(-3, -2)
+        spec_max = torch.FloatTensor(spec_max)[None, None, :out_dims].transpose(-3, -2)
+        if self.num_feats == 1:
+            spec_min = spec_min.squeeze(1)
+            spec_max = spec_max.squeeze(1)
+        self.register_buffer('spec_min', spec_min)
+        self.register_buffer('spec_max', spec_max)

     def q_mean_variance(self, x_start, t):
         mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start
@@ -220,7 +220,7 @@ def forward(self, condition, gt_spec=None, infer=True):
         if not infer:
             # gt_spec: [B, T, M] or [B, F, T, M]
-            spec = self.norm_spec(gt_spec).transpose(1, 2)  # [B, M, T] or [B, F, M, T]
+            spec = self.norm_spec(gt_spec).transpose(-2, -1)  # [B, M, T] or [B, F, M, T]
             if self.num_feats == 1:
                 spec = spec[:, None, :, :]  # [B, F=1, M, T]
             t = torch.randint(0, self.k_step, (b,), device=device).long()
@@ -297,8 +303,6 @@ def __init__(self, vmin: float | list, vmax: float | list, repeat_bins: int,
                  betas=None):
         assert (isinstance(vmin, float) and isinstance(vmax, float)) or len(vmin) == len(vmax)
         num_feats = 1 if isinstance(vmin, float) else len(vmin)
-        self.vmin = vmin
-        self.vmax = vmax
         self.repeat_bins = repeat_bins
         super().__init__(
             out_dims=repeat_bins, num_feats=num_feats,
@@ -333,7 +337,8 @@ def __init__(self, vmin: float, vmax: float, repeat_bins,
                  timesteps=1000, k_step=1000,
                  denoiser_type=None, denoiser_args=None,
                  betas=None):
-        assert isinstance(vmin, float) and isinstance(vmax, float)
+        self.vmin = vmin
+        self.vmax = vmax
         super().__init__(
             vmin=vmin, vmax=vmax, repeat_bins=repeat_bins,
             timesteps=timesteps, k_step=k_step,
@@ -348,14 +353,67 @@ def denorm_spec(self, x):
         return super().denorm_spec(x).clamp(min=self.vmin, max=self.vmax)


+class MultiVarianceDiffusion(RepetitiveDiffusion):
+    def __init__(
+            self, ranges: List[Tuple[float, float]],
+            clamps: List[Tuple[float | None, float | None] | None],
+            repeat_bins, timesteps=1000, k_step=1000,
+            denoiser_type=None, denoiser_args=None,
+            betas=None
+    ):
+        assert len(ranges) == len(clamps)
+        self.clamps = clamps
+        super().__init__(
+            vmin=[r[0] for r in ranges], vmax=[r[1] for r in ranges], repeat_bins=repeat_bins,
+            timesteps=timesteps, k_step=k_step,
+            denoiser_type=denoiser_type, denoiser_args=denoiser_args,
+            betas=betas
+        )
+
+    def clamp_spec(self, xs: list | tuple):
+        clamped = []
+        for x, c in zip(xs, self.clamps):
+            if c is None:
+                clamped.append(x)
+                continue
+            clamped.append(x.clamp(min=c[0], max=c[1]))
+        return clamped
+
+    def norm_spec(self, xs: list | tuple):
+        """
+
+        :param xs: sequence of [B, T]
+        :return: [B, F, T] => super().norm_spec(xs) => [B, F, T, R]
+        """
+        assert len(xs) == self.num_feats
+        clamped = self.clamp_spec(xs)
+        xs = torch.stack(clamped, dim=1)  # [B, F, T]
+        if self.num_feats == 1:
+            xs = xs.squeeze(1)  # [B, T]
+        return super().norm_spec(xs)
+
+    def denorm_spec(self, xs):
+        """
+
+        :param xs: [B, T, R] or [B, F, T, R] => super().denorm_spec(xs) => [B, T] or [B, F, T]
+        :return: sequence of [B, T]
+        """
+        xs = super().denorm_spec(xs)
+        if self.num_feats == 1:
+            xs = [xs]
+        else:
+            xs = xs.unbind(dim=1)
+        assert len(xs) == self.num_feats
+        return self.clamp_spec(xs)
+
+
 class EnergyDiffusion(RepetitiveDiffusion):
-    def __init__(self, vmin: float, vmax: float, repeat_bins,
+    def __init__(self, v_range: Tuple[float, float], repeat_bins,
                  timesteps=1000, k_step=1000,
                  denoiser_type=None, denoiser_args=None,
                  betas=None):
-        assert isinstance(vmin, float) and isinstance(vmax, float)
         super().__init__(
-            vmin=vmin, vmax=vmax, repeat_bins=repeat_bins,
+            vmin=v_range[0], vmax=v_range[1], repeat_bins=repeat_bins,
             timesteps=timesteps, k_step=k_step,
             denoiser_type=denoiser_type, denoiser_args=denoiser_args,
             betas=betas
diff --git a/modules/toplevel.py b/modules/toplevel.py
index 6047b717c..846c6c634 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -6,11 +6,10 @@
     XavierUniformInitLinear as Linear,
 )
 from modules.diffusion.ddpm import (
-    GaussianDiffusion, PitchDiffusion, EnergyDiffusion,
-    CurveDiffusion1d, CurveDiffusion2d
+    GaussianDiffusion, PitchDiffusion, MultiVarianceDiffusion
 )
 from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic
-from modules.fastspeech.tts_modules import LengthRegulator, VariancePredictor
+from modules.fastspeech.tts_modules import LengthRegulator
 from modules.fastspeech.variance_encoder import FastSpeech2Variance
 from utils.hparams import hparams

@@ -63,8 +62,16 @@ def __init__(self, vocab_size):
         super().__init__()
         self.predict_dur = hparams['predict_dur']
         self.predict_pitch = hparams['predict_pitch']
-        self.predict_energy = hparams['predict_energy']
-        self.predict_breathiness = hparams['predict_breathiness']
+
+        predict_energy = hparams['predict_energy']
+        predict_breathiness = hparams['predict_breathiness']
+        self.variance_prediction_list = []
+        if predict_energy:
+            self.variance_prediction_list.append('energy')
+        if predict_breathiness:
+            self.variance_prediction_list.append('breathiness')
+        self.predict_variances = len(self.variance_prediction_list) > 0
+
         self.fs2 = FastSpeech2Variance(
             vocab_size=vocab_size
         )
@@ -73,125 +80,77 @@ def __init__(self, vocab_size):
         if self.predict_pitch:
             pitch_hparams = hparams['pitch_prediction_args']
             self.base_pitch_embed = Linear(1, hparams['hidden_size'])
-            diff_predictor_mode = pitch_hparams['diff_predictor_mode']
-            if diff_predictor_mode == 'repeat':
-                self.pitch_predictor = PitchDiffusion(
-                    vmin=pitch_hparams['pitch_delta_vmin'],
-                    vmax=pitch_hparams['pitch_delta_vmax'],
-                    repeat_bins=pitch_hparams['num_pitch_bins'],
-                    timesteps=hparams['timesteps'],
-                    k_step=hparams['K_step'],
-                    denoiser_type=hparams['diff_decoder_type'],
-                    denoiser_args=(
-                        pitch_hparams['residual_layers'],
-                        pitch_hparams['residual_channels']
-                    )
-                )
-            elif diff_predictor_mode == '1d':
-                self.pitch_predictor = CurveDiffusion1d(
-                    vmin=pitch_hparams['pitch_delta_vmin'],
-                    vmax=pitch_hparams['pitch_delta_vmax'],
-                    timesteps=hparams['timesteps'],
-                    k_step=hparams['K_step'],
-                    denoiser_type=hparams['diff_decoder_type'],
-                    denoiser_args=(
-                        pitch_hparams['residual_layers'],
-                        pitch_hparams['residual_channels']
-                    )
-                )
-            elif diff_predictor_mode == '2d':
-                self.pitch_predictor = CurveDiffusion2d(
-                    vmin=pitch_hparams['pitch_delta_vmin'],
-                    vmax=pitch_hparams['pitch_delta_vmax'],
-                    num_bins=pitch_hparams['num_pitch_bins'],
-                    deviation=pitch_hparams['deviation'],
-                    timesteps=hparams['timesteps'],
-                    k_step=hparams['K_step'],
-                    denoiser_type=hparams['diff_decoder_type'],
-                    denoiser_args=(
-                        pitch_hparams['residual_layers'],
-                        pitch_hparams['residual_channels']
-                    )
-                )
-            else:
-                raise NotImplementedError()
-            # from modules.fastspeech.tts_modules import PitchPredictor
-            # self.pitch_predictor = PitchPredictor(
-            #     vmin=pitch_hparams['pitch_delta_vmin'],
-            #     vmax=pitch_hparams['pitch_delta_vmax'],
-            #     num_bins=pitch_hparams['num_pitch_bins'],
-            #     deviation=pitch_hparams['deviation'],
-            #     in_dims=hparams['hidden_size'],
-            #     n_chans=pitch_hparams['hidden_size']
-            # )
-
-        if self.predict_energy or self.predict_breathiness:
-            self.pitch_embed = Linear(1, hparams['hidden_size'])
-
-        if self.predict_energy:
-            energy_hparams = hparams['energy_prediction_args']
-            self.energy_predictor = EnergyDiffusion(
-                vmin=10. ** (energy_hparams['db_vmin'] / 20.),
-                vmax=10. ** (energy_hparams['db_vmax'] / 20.),
-                repeat_bins=energy_hparams['num_repeat_bins'],
+            self.pitch_predictor = PitchDiffusion(
+                vmin=pitch_hparams['pitch_delta_vmin'],
+                vmax=pitch_hparams['pitch_delta_vmax'],
+                repeat_bins=pitch_hparams['num_pitch_bins'],
                 timesteps=hparams['timesteps'],
                 k_step=hparams['K_step'],
                 denoiser_type=hparams['diff_decoder_type'],
                 denoiser_args=(
-                    energy_hparams['residual_layers'],
-                    energy_hparams['residual_channels']
+                    pitch_hparams['residual_layers'],
+                    pitch_hparams['residual_channels']
                 )
             )
-            # self.energy_predictor = VariancePredictor(
-            #     in_dims=hparams['hidden_size'],
-            #     n_chans=energy_hparams['hidden_size'],
-            #     n_layers=energy_hparams['num_layers'],
-            #     dropout_rate=energy_hparams['dropout'],
-            #     padding=hparams['ffn_padding'],
-            #     kernel_size=energy_hparams['kernel_size']
-            # )
-
-        if self.predict_breathiness:
-            breathiness_hparams = hparams['breathiness_prediction_args']
-            self.breathiness_predictor = EnergyDiffusion(
-                vmin=10. ** (breathiness_hparams['db_vmin'] / 20.),
-                vmax=10. ** (breathiness_hparams['db_vmax'] / 20.),
-                repeat_bins=breathiness_hparams['num_repeat_bins'],
+
+        if self.predict_variances:
+            self.pitch_embed = Linear(1, hparams['hidden_size'])
+
+            ranges = []
+            clamps = []
+
+            if predict_energy:
+                ranges.append((
+                    10. ** (hparams['energy_db_min'] / 20.),
+                    10. ** (hparams['energy_db_max'] / 20.)
+                ))
+                clamps.append((0., 1.))
+
+            if predict_breathiness:
+                ranges.append((
+                    10. ** (hparams['breathiness_db_min'] / 20.),
+                    10. ** (hparams['breathiness_db_max'] / 20.)
+                ))
+                clamps.append((0., 1.))
+
+            variances_hparams = hparams['variances_prediction_args']
+            self.variance_predictor = MultiVarianceDiffusion(
+                ranges=ranges,
+                clamps=clamps,
+                repeat_bins=variances_hparams['repeat_bins'],
                 timesteps=hparams['timesteps'],
                 k_step=hparams['K_step'],
                 denoiser_type=hparams['diff_decoder_type'],
                 denoiser_args=(
-                    breathiness_hparams['residual_layers'],
-                    breathiness_hparams['residual_channels']
+                    variances_hparams['residual_layers'],
+                    variances_hparams['residual_channels']
                 )
             )
-            # self.breathiness_predictor = VariancePredictor(
-            #     vmin=10. ** (breathiness_hparams['db_vmin'] / 20.),
-            #     vmax=10. ** (breathiness_hparams['db_vmax'] / 20.),
-            #     in_dims=hparams['hidden_size'],
-            #     n_chans=breathiness_hparams['hidden_size'],
-            #     n_layers=breathiness_hparams['num_layers'],
-            #     dropout_rate=breathiness_hparams['dropout'],
-            #     padding=hparams['ffn_padding'],
-            #     kernel_size=breathiness_hparams['kernel_size']
-            # )

     @property
     def category(self):
         return 'variance'

+    def collect_variance_inputs(self, **kwargs):
+        return [kwargs.get(name) for name in self.variance_prediction_list]
+
+    def collect_variance_outputs(self, variances: list | tuple) -> dict:
+        return {
+            name: pred
+            for name, pred in zip(self.variance_prediction_list, variances)
+        }
+
     def forward(
             self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None,
-            base_pitch=None, delta_pitch=None, energy=None, breathiness=None,
-            infer=True
+            base_pitch=None, delta_pitch=None, infer=True, **kwargs
     ):
         encoder_out, dur_pred_out = self.fs2(
             txt_tokens, midi=midi, ph2word=ph2word,
             ph_dur=ph_dur, word_dur=word_dur, infer=infer
         )

-        if not self.predict_pitch and not self.predict_energy and not self.predict_breathiness:
-            return dur_pred_out, None, None, None
+        if not self.predict_pitch and not self.predict_variances:
+            return dur_pred_out, None, ({} if infer else None)

         if mel2ph is None or hparams['dur_cascade']:
             # (extract mel2ph from dur_pred_out)
@@ -207,22 +166,21 @@ def forward(
         else:
             pitch_pred_out = None

-        if self.predict_energy or self.predict_breathiness:
-            if delta_pitch is None:
-                pitch = base_pitch + pitch_pred_out
-            else:
-                pitch = base_pitch + delta_pitch
-            pitch_embed = self.pitch_embed(pitch[:, :, None])
-            condition += pitch_embed
+        if not self.predict_variances:
+            return dur_pred_out, pitch_pred_out, ({} if infer else None)

-        if self.predict_energy:
-            energy_pred_out = self.energy_predictor(condition, energy, infer)
+        if delta_pitch is None:
+            pitch = base_pitch + pitch_pred_out
         else:
-            energy_pred_out = None
+            pitch = base_pitch + delta_pitch
+        pitch_embed = self.pitch_embed(pitch[:, :, None])
+        condition += pitch_embed

-        if self.predict_breathiness:
-            breathiness_pred_out = self.breathiness_predictor(condition, infer)
+        variance_inputs = self.collect_variance_inputs(**kwargs)
+        variance_outputs = self.variance_predictor(condition, variance_inputs, infer)
+        if infer:
+            variances_pred_out = self.collect_variance_outputs(variance_outputs)
         else:
-            breathiness_pred_out = None
+            variances_pred_out = variance_outputs

-        return dur_pred_out, pitch_pred_out, energy_pred_out, breathiness_pred_out
+        return dur_pred_out, pitch_pred_out, variances_pred_out
diff --git a/training/variance_task.py b/training/variance_task.py
index 98c4b34a1..ca52f863d 100644
--- a/training/variance_task.py
+++ b/training/variance_task.py
@@ -9,7 +9,7 @@
 from basics.base_dataset import BaseDataset
 from basics.base_task import BaseTask
 from modules.losses.diff_loss import DiffusionNoiseLoss
-from modules.losses.variance_loss import DurationLoss, CurveLoss1d
+from modules.losses.variance_loss import DurationLoss
 from modules.toplevel import DiffSingerVariance
 from utils.hparams import hparams
 from utils.plot import dur_to_figure, curve_to_figure
@@ -58,10 +58,16 @@ def __init__(self):
         self.lambda_dur_loss = hparams['lambda_dur_loss']
         if hparams['predict_pitch']:
             self.lambda_pitch_loss = hparams['lambda_pitch_loss']
+
+        predict_energy = 
hparams['predict_energy'] + predict_breathiness = hparams['predict_breathiness'] + self.variance_prediction_list = [] + if predict_energy: + self.variance_prediction_list.append('energy') + if predict_breathiness: + self.variance_prediction_list.append('breathiness') + self.predict_variances = len(self.variance_prediction_list) > 0 + self.lambda_variances_loss = hparams['lambda_variances_loss'] def build_model(self): return DiffSingerVariance( @@ -83,17 +89,10 @@ def build_losses(self): self.pitch_loss = DiffusionNoiseLoss( loss_type=hparams['diff_loss_type'], ) - if hparams['predict_energy']: - self.energy_loss = DiffusionNoiseLoss( + if self.predict_variances: + self.variances_loss = DiffusionNoiseLoss( loss_type=hparams['diff_loss_type'], ) - if hparams['predict_breathiness']: - breathiness_hparams = hparams['breathiness_prediction_args'] - self.breathiness_loss = CurveLoss1d( - vmin=10. ** (breathiness_hparams['db_vmin'] / 20.), - vmax=10. ** (breathiness_hparams['db_vmax'] / 20.), - loss_type=breathiness_hparams['loss_type'], - ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] @@ -106,31 +105,30 @@ def run_model(self, sample, infer=False): energy = sample.get('energy') # [B, T_t] breathiness = sample.get('breathiness') # [B, T_t] - output = self.model(txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, - mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, energy=energy, - infer=infer) + output = self.model( + txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, mel2ph=mel2ph, + base_pitch=base_pitch, delta_pitch=delta_pitch, + energy=energy, breathiness=breathiness, + infer=infer + ) - dur_pred, pitch_pred, energy_pred, breathiness_pred = output + dur_pred, pitch_pred, variances_pred = output if infer: - return dur_pred, pitch_pred, energy_pred, breathiness_pred + return dur_pred, pitch_pred, variances_pred # Tensor, Tensor, Dict[Tensor] else: losses = {} if dur_pred is not None: losses['dur_loss'] = self.lambda_dur_loss * self.dur_loss(dur_pred, ph_dur, ph2word=ph2word) - nonpadding = (mel2ph > 0).float() + nonpadding = (mel2ph > 0).unsqueeze(-1).float() if pitch_pred is not None: (pitch_x_recon, pitch_noise) = pitch_pred losses['pitch_loss'] = self.lambda_pitch_loss * self.pitch_loss( - pitch_x_recon, pitch_noise, nonpadding=nonpadding.unsqueeze(-1) - ) - if energy_pred is not None: - (energy_x_recon, energy_noise) = energy_pred - losses['energy_loss'] = self.lambda_energy_loss * self.energy_loss( - energy_x_recon, energy_noise, nonpadding=nonpadding.unsqueeze(-1) + pitch_x_recon, pitch_noise, nonpadding=nonpadding ) - if breathiness_pred is not None: - losses['breathiness_loss'] = self.lambda_breathiness_loss * self.breathiness_loss( - breathiness_pred, breathiness, mask=nonpadding + if variances_pred is not None: + (variance_x_recon, variance_noise) = variances_pred + losses['variances_loss'] = self.lambda_variances_loss * self.variances_loss( + variance_x_recon, variance_noise, nonpadding=nonpadding ) return losses @@ -143,7 +141,7 @@ def _validation_step(self, sample, batch_idx): if batch_idx < hparams['num_valid_plots'] \ and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: - dur_pred, pitch_pred, energy_pred, breathiness_pred = self.run_model(sample, infer=True) + dur_pred, pitch_pred, variances_pred = self.run_model(sample, infer=True) if dur_pred is not None: self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens']) if pitch_pred is not None: @@ -157,21 +155,14 @@ def 
_validation_step(self, sample, batch_idx): curve_name='pitch', grid=1 ) - if energy_pred is not None: - energy = sample['energy'] - self.plot_curve( - batch_idx, - gt_curve=energy, - pred_curve=energy_pred, - curve_name='energy' - ) - if breathiness_pred is not None: - breathiness = sample['breathiness'] + for name in self.variance_prediction_list: + variance = sample[name] + variance_pred = variances_pred[name] self.plot_curve( batch_idx, - gt_curve=breathiness, - pred_curve=breathiness_pred, - curve_name='breathiness' + gt_curve=variance, + pred_curve=variance_pred, + curve_name=name ) return outputs, sample['size'] From a07814088ddab055b7043ed0fb20e1d1bd3170ec Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 02:58:54 +0800 Subject: [PATCH 341/475] Fix type error --- modules/diffusion/ddpm.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 604272024..171820ad9 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -115,9 +115,6 @@ def __init__(self, out_dims, num_feats=1, timesteps=1000, k_step=1000, # spec_min and spec_max: [1, 1, M] or [1, 1, F, M] => transpose(-3, -2) => [1, 1, M] or [1, F, 1, M] spec_min = torch.FloatTensor(spec_min)[None, None, :out_dims].transpose(-3, -2) spec_max = torch.FloatTensor(spec_max)[None, None, :out_dims].transpose(-3, -2) - if self.num_feats == 1: - spec_min = spec_min.squeeze(1) - spec_max = spec_max.squeeze(1) self.register_buffer('spec_min', spec_min) self.register_buffer('spec_max', spec_max) @@ -302,13 +299,15 @@ def __init__(self, vmin: float | list, vmax: float | list, repeat_bins: int, denoiser_type=None, denoiser_args=None, betas=None): assert (isinstance(vmin, float) and isinstance(vmin, float)) or len(vmin) == len(vmax) - num_feats = 1 if isinstance(vmin, int) else len(vmin) + num_feats = 1 if isinstance(vmin, float) else len(vmin) + spec_min = [vmin] if num_feats == 1 else [[v] for v in vmin] + spec_max = [vmax] if num_feats == 1 else [[v] for v in vmax] self.repeat_bins = repeat_bins super().__init__( out_dims=repeat_bins, num_feats=num_feats, timesteps=timesteps, k_step=k_step, denoiser_type=denoiser_type, denoiser_args=denoiser_args, - betas=betas, spec_min=[[v] for v in vmin], spec_max=[[v] for v in vmax] + betas=betas, spec_min=spec_min, spec_max=spec_max ) def norm_spec(self, x): From d513aca6da59d3979f62530caae6d01b154a4901 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 03:03:57 +0800 Subject: [PATCH 342/475] Fix `spec_min` and `spec_max` dim error --- modules/diffusion/ddpm.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 171820ad9..3d01ee33b 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -362,8 +362,14 @@ def __init__( ): assert len(ranges) == len(clamps) self.clamps = clamps + vmin = [r[0] for r in ranges] + vmax = [r[1] for r in ranges] + if len(vmin) == 1: + vmin = vmin[0] + if len(vmin) == 1: + vmax = vmax[0] super().__init__( - vmin=[r[0] for r in ranges], vmax=[r[1] for r in ranges], repeat_bins=repeat_bins, + vmin=vmin, vmax=vmax, repeat_bins=repeat_bins, timesteps=timesteps, k_step=k_step, denoiser_type=denoiser_type, denoiser_args=denoiser_args, betas=betas From f6f5101d6fe44a09b14c4811d849fc02eb49961f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 03:06:57 +0800 Subject: [PATCH 343/475] Fix TypeError --- modules/diffusion/ddpm.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 3d01ee33b..63fc7d692 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -366,7 +366,7 @@ def __init__( vmax = [r[1] for r in ranges] if len(vmin) == 1: vmin = vmin[0] - if len(vmin) == 1: + if len(vmax) == 1: vmax = vmax[0] super().__init__( vmin=vmin, vmax=vmax, repeat_bins=repeat_bins, From 012e3edd3406d4df89dcdf12f1512f2a3ab48d36 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 03:12:21 +0800 Subject: [PATCH 344/475] Update names and configs --- configs/variance.yaml | 4 ++-- training/variance_task.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index e56caa1bc..bee6c42a1 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -36,7 +36,7 @@ hidden_size: 256 predict_dur: true predict_pitch: true predict_energy: false -predict_breathiness: true +predict_breathiness: false dur_cascade: false dur_prediction_args: @@ -70,7 +70,7 @@ variances_prediction_args: lambda_dur_loss: 1.0 lambda_pitch_loss: 1.0 -lambda_variances_loss: 1.0 +lambda_var_loss: 1.0 dilation_cycle_length: 5 # * schedule_type: 'linear' diff --git a/training/variance_task.py b/training/variance_task.py index ca52f863d..13e24e16d 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -67,7 +67,7 @@ def __init__(self): if predict_breathiness: self.variance_prediction_list.append('breathiness') self.predict_variances = len(self.variance_prediction_list) > 0 - self.lambda_variances_loss = hparams['lambda_variances_loss'] + self.lambda_var_loss = hparams['lambda_var_loss'] def build_model(self): return DiffSingerVariance( @@ -90,7 +90,7 @@ def build_losses(self): loss_type=hparams['diff_loss_type'], ) if self.predict_variances: - self.variances_loss = DiffusionNoiseLoss( + self.var_loss = DiffusionNoiseLoss( loss_type=hparams['diff_loss_type'], ) @@ -127,7 +127,7 @@ def run_model(self, sample, infer=False): ) if variances_pred is not None: (variance_x_recon, variance_noise) = variances_pred - losses['variances_loss'] = self.lambda_variances_loss * self.variances_loss( + losses['var_loss'] = self.lambda_var_loss * self.var_loss( variance_x_recon, variance_noise, nonpadding=nonpadding ) return losses From c454ef51feedf5259c9092025476aaf42818d478 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 03:15:17 +0800 Subject: [PATCH 345/475] Remove `EnergyDiffusion` --- modules/diffusion/ddpm.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 63fc7d692..ae6dea692 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -412,25 +412,6 @@ def denorm_spec(self, xs): return self.clamp_spec(xs) -class EnergyDiffusion(RepetitiveDiffusion): - def __init__(self, v_range: Tuple[float, float], repeat_bins, - timesteps=1000, k_step=1000, - denoiser_type=None, denoiser_args=None, - betas=None): - super().__init__( - vmin=v_range[0], vmax=v_range[1], repeat_bins=repeat_bins, - timesteps=timesteps, k_step=k_step, - denoiser_type=denoiser_type, denoiser_args=denoiser_args, - betas=betas - ) - - def norm_spec(self, x): - return super().norm_spec(x.clamp(min=0., max=1.)) - - def denorm_spec(self, x): - return super().denorm_spec(x).clamp(min=0., max=1.) 
- - class CurveDiffusion1d(GaussianDiffusion): def __init__(self, vmin, vmax, timesteps=1000, k_step=1000, denoiser_type=None, denoiser_args=None, betas=None): From e4c25db8714d4932b0fbc236fc74f34fbdd0ddd2 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 12:07:01 +0800 Subject: [PATCH 346/475] Fix projection dim mismatch --- modules/diffusion/wavenet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py index 461cfb5f4..8da96a5c1 100644 --- a/modules/diffusion/wavenet.py +++ b/modules/diffusion/wavenet.py @@ -66,7 +66,7 @@ class WaveNet(nn.Module): def __init__(self, in_dims, n_feats, n_layers, n_chans): super().__init__() self.n_feats = n_feats - self.input_projection = Conv1d(in_dims, n_chans, 1) + self.input_projection = Conv1d(in_dims * n_feats, n_chans, 1) self.diffusion_embedding = SinusoidalPosEmb(n_chans) self.mlp = nn.Sequential( nn.Linear(n_chans, n_chans * 4), @@ -82,7 +82,7 @@ def __init__(self, in_dims, n_feats, n_layers, n_chans): for i in range(n_layers) ]) self.skip_projection = Conv1d(n_chans, n_chans, 1) - self.output_projection = Conv1d(n_chans, in_dims, 1) + self.output_projection = Conv1d(n_chans, in_dims * n_feats, 1) nn.init.zeros_(self.output_projection.weight) def forward(self, spec, diffusion_step, cond): From 9ae6f909b97c4eafd2cd062a10e56f6fa02c9c8d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 19:22:18 +0800 Subject: [PATCH 347/475] Fix regression in ONNX export --- deployment/modules/diffusion.py | 5 ++++- modules/diffusion/wavenet.py | 21 ++++++++++++++++----- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index b0f536e94..08fe607fb 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -93,6 +93,9 @@ def forward(self, condition, speedup): for t in step_range: x = self.p_sample(x, t, cond=condition) - x = x.transpose(2, 3).squeeze(1) # [B, T, M] or [B, F, T, M] + if self.num_feats == 1: + x = x.squeeze(1).permute(0, 2, 1) # [B, 1, M, T] => [B, T, M] + else: + x = x.permute(0, 1, 3, 2) # [B, F, M, T] => [B, F, T, M] x = self.denorm_spec(x) return x diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py index 8da96a5c1..1b2077787 100644 --- a/modules/diffusion/wavenet.py +++ b/modules/diffusion/wavenet.py @@ -65,6 +65,7 @@ def forward(self, x, conditioner, diffusion_step): class WaveNet(nn.Module): def __init__(self, in_dims, n_feats, n_layers, n_chans): super().__init__() + self.in_dims = in_dims self.n_feats = n_feats self.input_projection = Conv1d(in_dims * n_feats, n_chans, 1) self.diffusion_embedding = SinusoidalPosEmb(n_chans) @@ -89,11 +90,14 @@ def forward(self, spec, diffusion_step, cond): """ :param spec: [B, F, M, T] :param diffusion_step: [B, 1] - :param cond: [B, M, T] + :param cond: [B, H, T] :return: """ - x = spec.flatten(start_dim=1, end_dim=2) # [B, F x M, T] - x = self.input_projection(x) # [B, residual_channel, T] + if self.n_feats == 1: + x = spec.squeeze(1) # [B, M, T] + else: + x = spec.flatten(start_dim=1, end_dim=2) # [B, F x M, T] + x = self.input_projection(x) # [B, C, T] x = F.relu(x) diffusion_step = self.diffusion_embedding(diffusion_step) @@ -106,5 +110,12 @@ def forward(self, spec, diffusion_step, cond): x = torch.sum(torch.stack(skip), dim=0) / sqrt(len(self.residual_layers)) x = self.skip_projection(x) x = F.relu(x) - x = self.output_projection(x) # [B, mel_bins, T] - return 
x.unflatten(dim=1, sizes=(self.n_feats, -1)) + x = self.output_projection(x) # [B, M, T] + if self.n_feats == 1: + x = x[:, None, :, :] + else: + # This is the temporary solution since PyTorch 1.13 + # does not support exporting aten::unflatten to ONNX + # x = x.unflatten(dim=1, sizes=(self.n_feats, self.in_dims)) + x = x.reshape(x.shape[0], self.n_feats, self.in_dims, x.shape[3]) + return x From f8360f357a8cbf7f18a3f36d2962f9c0738427a9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 1 May 2023 19:33:18 +0800 Subject: [PATCH 348/475] Bump `onnxsim` version to 0.4.27 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2a301b25e..8336e3670 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ matplotlib MonkeyType==23.3.0 numpy==1.23.5 onnx==1.13.1 -onnxsim==0.4.17 +onnxsim==0.4.27 praat-parselmouth==0.4.3 protobuf==3.20.3 PyYAML From 02b235a3d32f55445032f0b012114a21380bf3fa Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 00:29:02 +0800 Subject: [PATCH 349/475] Support variance adaptor in `DiffSingerAcoustic` --- configs/acoustic.yaml | 14 +++ inference/ds_acoustic.py | 8 +- modules/diffusion/ddpm.py | 8 +- modules/fastspeech/acoustic_encoder.py | 86 ++++++++++------ modules/fastspeech/param_adaptor.py | 58 +++++++++++ modules/toplevel.py | 137 +++++++++++++------------ preprocessing/acoustic_binarizer.py | 8 +- training/acoustic_task.py | 48 +++++++-- 8 files changed, 254 insertions(+), 113 deletions(-) create mode 100644 modules/fastspeech/param_adaptor.py diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 661f06629..0bb5a4953 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -56,6 +56,20 @@ use_breathiness_embed: false use_key_shift_embed: false use_speed_embed: false +predict_energy: false +predict_breathiness: false + +energy_db_min: -72.0 +energy_db_max: -12.0 +breathiness_db_min: -72.0 +breathiness_db_max: -20.0 + +variances_prediction_args: + repeat_bins: 64 + residual_layers: 20 + residual_channels: 256 +lambda_var_loss: 1.0 + K_step: 1000 timesteps: 1000 max_beta: 0.02 diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 20ffe0dee..6adbc0ed1 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -205,9 +205,11 @@ def run_model(self, sample, return_mel=False): ) # => [B, T, H] else: spk_mix_embed = None - mel_pred = self.model(txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], energy=sample.get('energy'), - key_shift=sample.get('key_shift'), speed=sample.get('speed'), - spk_mix_embed=spk_mix_embed, infer=True) + mel_pred, _ = self.model( + txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], energy=sample.get('energy'), + key_shift=sample.get('key_shift'), speed=sample.get('speed'), + spk_mix_embed=spk_mix_embed, infer=True + ) # var_pred ignored for now return mel_pred @torch.no_grad() diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index ae6dea692..6b1faf35a 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -17,12 +17,8 @@ } -def exists(x): - return x is not None - - def default(val, d): - if exists(val): + if val is not None: return val return d() if isfunction(d) else d @@ -75,7 +71,7 @@ def __init__(self, out_dims, num_feats=1, timesteps=1000, k_step=1000, self.out_dims = out_dims self.num_feats = num_feats - if exists(betas): + if betas is not None: betas = betas.detach().cpu().numpy() if isinstance(betas, torch.Tensor) else betas else: betas = 
beta_schedule[hparams['schedule_type']](timesteps) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index cfcfc25db..9472499a3 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -30,24 +30,33 @@ def __init__(self, vocab_size): else: raise ValueError('f0_embed_type must be \'discrete\' or \'continuous\'.') - if hparams.get('use_energy_embed', False): + self.use_energy_embed = hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False) + if self.use_energy_embed: + # energy is embedded but not predicted self.energy_embed = Linear(1, hparams['hidden_size']) - if hparams.get('use_breathiness_embed', False): + self.use_breathiness_embed = ( + hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False) + ) + if self.use_breathiness_embed: + # breathiness is embedded but not predicted self.breathiness_embed = Linear(1, hparams['hidden_size']) - if hparams.get('use_key_shift_embed', False): + self.use_key_shift_embed = hparams.get('use_key_shift_embed', False) + if self.use_key_shift_embed: self.key_shift_embed = Linear(1, hparams['hidden_size']) - if hparams.get('use_speed_embed', False): + self.use_speed_embed = hparams.get('use_speed_embed', False) + if self.use_speed_embed: self.speed_embed = Linear(1, hparams['hidden_size']) - if hparams['use_spk_id']: + self.use_spk_id = hparams['use_spk_id'] + if self.use_spk_id: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) def forward( self, txt_tokens, mel2ph, f0, energy=None, breathiness=None, - key_shift=None, speed=None, spk_embed_id=None, + key_shift=None, speed=None, spk_embed_id=None, infer=True, **kwargs ): dur = mel2ph_to_dur(mel2ph, txt_tokens.shape[1]).float() @@ -57,47 +66,66 @@ def forward( encoder_out = F.pad(encoder_out, [0, 0, 1, 0]) mel2ph_ = mel2ph[..., None].repeat([1, 1, encoder_out.shape[-1]]) condition = torch.gather(encoder_out, 1, mel2ph_) - return self.forward_variance_embedding( - condition, f0=f0, energy=energy, breathiness=breathiness, - key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, - **kwargs - ) - def forward_variance_embedding( - self, condition, f0, energy=None, breathiness=None, - key_shift=None, speed=None, spk_embed_id=None, - **kwargs - ): + if self.use_spk_id: + spk_mix_embed = kwargs.get('spk_mix_embed') + if spk_mix_embed is not None: + spk_embed = spk_mix_embed + else: + spk_embed = self.spk_embed(spk_embed_id)[:, None, :] + condition += spk_embed + if self.f0_embed_type == 'discrete': pitch = f0_to_coarse(f0) pitch_embed = self.pitch_embed(pitch) else: f0_mel = (1 + f0 / 700).log() pitch_embed = self.pitch_embed(f0_mel[:, :, None]) - condition += pitch_embed - if hparams.get('use_energy_embed', False): + adaptor_cond = condition + pitch_embed + mel_cond = self.forward_variance_embedding( + adaptor_cond, energy=energy, breathiness=breathiness, + key_shift=key_shift, speed=speed + ) + + # During training, the data augmentation parameters (GEN and VEL) + # are seen to the variance adaptor; but during inference, + # we will always send the DEFAULT parameters (GEN = 0 and VEL = 1) + # to the variance adaptor so that the prediction outputs will NOT + # be influenced by these parameters, which is more reasonable for + # most users of singing voice synthesis systems. 
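+        # Concretely, the defaults are GEN = 0 (a key_shift of zero, i.e. no
+        # transposition, assuming key_shift is measured in semitones) and
+        # VEL = 1 (a speed of 1.0, i.e. the original stretch factor), as
+        # applied right below.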
+ if self.use_key_shift_embed: + if infer: + key_shift = torch.zeros_like(key_shift) + key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) + adaptor_cond += key_shift_embed + + if self.use_speed_embed: + if infer: + speed = torch.ones_like(speed) + speed_embed = self.speed_embed(speed[:, :, None]) + adaptor_cond += speed_embed + + return adaptor_cond, mel_cond + + def forward_variance_embedding( + self, condition, energy=None, breathiness=None, + key_shift=None, speed=None + ): + if self.use_energy_embed: energy_embed = self.energy_embed(energy[:, :, None]) condition += energy_embed - if hparams.get('use_breathiness_embed', False): + if self.use_breathiness_embed: breathiness_embed = self.breathiness_embed(breathiness[:, :, None]) condition += breathiness_embed - if hparams.get('use_key_shift_embed', False): + if self.use_key_shift_embed: key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) condition += key_shift_embed - if hparams.get('use_speed_embed', False): + if self.use_speed_embed: speed_embed = self.speed_embed(speed[:, :, None]) condition += speed_embed - if hparams['use_spk_id']: - spk_mix_embed = kwargs.get('spk_mix_embed') - if spk_mix_embed is not None: - spk_embed = spk_mix_embed - else: - spk_embed = self.spk_embed(spk_embed_id)[:, None, :] - condition += spk_embed - return condition diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py new file mode 100644 index 000000000..bc3333a1a --- /dev/null +++ b/modules/fastspeech/param_adaptor.py @@ -0,0 +1,58 @@ +import torch + +from modules.diffusion.ddpm import MultiVarianceDiffusion +from utils.hparams import hparams + + +class ParameterAdaptorModule(torch.nn.Module): + def __init__(self): + super().__init__() + self.predict_energy = hparams.get('predict_energy', False) + self.predict_breathiness = hparams.get('predict_breathiness', False) + self.variance_prediction_list = [] + if self.predict_energy: + self.variance_prediction_list.append('energy') + if self.predict_breathiness: + self.variance_prediction_list.append('breathiness') + self.predict_variances = len(self.variance_prediction_list) > 0 + + def build_adaptor(self): + ranges = [] + clamps = [] + + if self.predict_energy: + ranges.append(( + 10. ** (hparams['energy_db_min'] / 20.), + 10. ** (hparams['energy_db_max'] / 20.) + )) + clamps.append((0., 1.)) + + if self.predict_breathiness: + ranges.append(( + 10. ** (hparams['breathiness_db_min'] / 20.), + 10. ** (hparams['breathiness_db_max'] / 20.) 
+ )) + clamps.append((0., 1.)) + + variances_hparams = hparams['variances_prediction_args'] + return MultiVarianceDiffusion( + ranges=ranges, + clamps=clamps, + repeat_bins=variances_hparams['repeat_bins'], + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + variances_hparams['residual_layers'], + variances_hparams['residual_channels'] + ) + ) + + def collect_variance_inputs(self, **kwargs) -> list: + return [kwargs.get(name) for name in self.variance_prediction_list] + + def collect_variance_outputs(self, variances: list | tuple) -> dict: + return { + name: pred + for name, pred in zip(self.variance_prediction_list, variances) + } diff --git a/modules/toplevel.py b/modules/toplevel.py index 846c6c634..f7d3e0653 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -1,4 +1,5 @@ import torch +import torch.nn as nn import torch.nn.functional as F from basics.base_module import CategorizedModule @@ -6,20 +7,33 @@ XavierUniformInitLinear as Linear, ) from modules.diffusion.ddpm import ( - GaussianDiffusion, PitchDiffusion, MultiVarianceDiffusion + GaussianDiffusion, PitchDiffusion ) from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic +from modules.fastspeech.param_adaptor import ParameterAdaptorModule from modules.fastspeech.tts_modules import LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams -class DiffSingerAcoustic(CategorizedModule): +class DiffSingerAcoustic(ParameterAdaptorModule, CategorizedModule): + @property + def category(self): + return 'acoustic' + def __init__(self, vocab_size, out_dims): super().__init__() self.fs2 = FastSpeech2Acoustic( vocab_size=vocab_size ) + + if self.predict_variances: + self.variance_adaptor = self.build_adaptor() + self.variance_embeds = nn.ModuleDict({ + name: Linear(1, hparams['hidden_size']) + for name in self.variance_prediction_list + }) + self.diffusion = GaussianDiffusion( out_dims=out_dims, num_feats=1, @@ -34,33 +48,70 @@ def __init__(self, vocab_size, out_dims): spec_max=hparams['spec_max'] ) - @property - def category(self): - return 'acoustic' - def forward( - self, txt_tokens, mel2ph, f0, energy=None, breathiness=None, - key_shift=None, speed=None, + self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, spk_embed_id=None, gt_mel=None, infer=True, **kwargs ): - condition = self.fs2( - txt_tokens, mel2ph, f0, energy=energy, breathiness=breathiness, - key_shift=key_shift, speed=speed, - spk_embed_id=spk_embed_id, **kwargs + adaptor_cond, mel_cond = self.fs2( + txt_tokens, mel2ph, f0, key_shift=key_shift, speed=speed, + spk_embed_id=spk_embed_id, infer=infer, **kwargs ) + + variance_inputs = self.collect_variance_inputs(**kwargs) if infer: - mel = self.diffusion(condition, infer=True) - mel *= ((mel2ph > 0).float()[:, :, None]) - return mel + if not self.predict_variances: + variance_pred_out = {} + elif not all([v is not None for v in variance_inputs]): + variance_outputs = self.variance_adaptor(adaptor_cond, variance_inputs, infer) + variance_choices = [ + v_in if v_in is not None else v_pred + for v_in, v_pred in zip(variance_inputs, variance_outputs) + ] + variance_embeds = torch.stack([ + self.variance_embeds[v_name](v_choice[:, :, None]) # [B, T] => [B, T, H] + for v_name, v_choice in zip(self.variance_prediction_list, variance_choices) + ], dim=-1).sum(-1) + mel_cond += variance_embeds + variance_pred_out = self.collect_variance_outputs(variance_choices) + else: + 
variance_pred_out = { + name: kwargs[name] + for name in self.variance_prediction_list + } + + mel_pred_out = self.diffusion(mel_cond, infer=True) + mel_pred_out *= ((mel2ph > 0).float()[:, :, None]) + else: - loss = self.diffusion(condition, gt_spec=gt_mel, infer=False) - return loss + if self.predict_variances: + variance_pred_out = self.variance_adaptor(adaptor_cond, variance_inputs, infer) + + variance_embeds = torch.stack([ + self.variance_embeds[v_name](v_choice[:, :, None]) # [B, T] => [B, T, H] + for v_name, v_choice in zip(self.variance_prediction_list, variance_inputs) + ], dim=-1).sum(-1) + mel_cond += variance_embeds + else: + variance_pred_out = None + + mel_pred_out = self.diffusion(mel_cond, gt_spec=gt_mel, infer=False) + + return mel_pred_out, variance_pred_out -class DiffSingerVariance(CategorizedModule): +class DiffSingerVariance(ParameterAdaptorModule, CategorizedModule): + @property + def category(self): + return 'variance' + def __init__(self, vocab_size): super().__init__() self.predict_dur = hparams['predict_dur'] + self.fs2 = FastSpeech2Variance( + vocab_size=vocab_size + ) + self.lr = LengthRegulator() + self.predict_pitch = hparams['predict_pitch'] predict_energy = hparams['predict_energy'] @@ -72,11 +123,6 @@ def __init__(self, vocab_size): self.variance_prediction_list.append('breathiness') self.predict_variances = len(self.variance_prediction_list) > 0 - self.fs2 = FastSpeech2Variance( - vocab_size=vocab_size - ) - self.lr = LengthRegulator() - if self.predict_pitch: pitch_hparams = hparams['pitch_prediction_args'] self.base_pitch_embed = Linear(1, hparams['hidden_size']) @@ -95,50 +141,7 @@ def __init__(self, vocab_size): if self.predict_variances: self.pitch_embed = Linear(1, hparams['hidden_size']) - - ranges = [] - clamps = [] - - if predict_energy: - ranges.append(( - 10. ** (hparams['energy_db_min'] / 20.), - 10. ** (hparams['energy_db_max'] / 20.) - )) - clamps.append((0., 1.)) - - if predict_breathiness: - ranges.append(( - 10. ** (hparams['breathiness_db_min'] / 20.), - 10. ** (hparams['breathiness_db_max'] / 20.) 
- )) - clamps.append((0., 1.)) - - variances_hparams = hparams['variances_prediction_args'] - self.variance_predictor = MultiVarianceDiffusion( - ranges=ranges, - clamps=clamps, - repeat_bins=variances_hparams['repeat_bins'], - timesteps=hparams['timesteps'], - k_step=hparams['K_step'], - denoiser_type=hparams['diff_decoder_type'], - denoiser_args=( - variances_hparams['residual_layers'], - variances_hparams['residual_channels'] - ) - ) - - @property - def category(self): - return 'variance' - - def collect_variance_inputs(self, **kwargs): - return [kwargs.get(name) for name in self.variance_prediction_list] - - def collect_variance_outputs(self, variances: list | tuple) -> dict: - return { - name: pred - for name, pred in zip(self.variance_prediction_list, variances) - } + self.variance_predictor = self.build_adaptor() def forward( self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None, diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 8ff8a7f59..b8b032cff 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -47,6 +47,10 @@ class AcousticBinarizer(BaseBinarizer): def __init__(self): super().__init__(data_attrs=ACOUSTIC_ITEM_ATTRIBUTES) self.lr = LengthRegulator() + self.need_energy = hparams.get('use_energy_embed', False) or hparams.get('predict_energy', False) + self.need_breathiness = ( + hparams.get('use_breathiness_embed', False) or hparams.get('predict_breathiness', False) + ) def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): meta_info = { @@ -178,12 +182,12 @@ def process_item(self, item_name, meta_data, binarization_args): return None processed_input['f0'] = gt_f0.astype(np.float32) - if hparams.get('use_energy_embed', False): + if self.need_energy: # get ground truth energy energy = get_energy_librosa(wav, length, hparams) processed_input['energy'] = energy.astype(np.float32) - if hparams.get('use_breathiness_embed', False): + if self.need_breathiness: # get ground truth energy breathiness = get_breathiness_pyworld(wav, gt_f0 * ~uv, length, hparams) processed_input['breathiness'] = breathiness.astype(np.float32) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index d685e99f5..07c046f21 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -21,7 +21,7 @@ from modules.vocoders.registry import get_vocoder_cls from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams -from utils.plot import spec_to_figure +from utils.plot import spec_to_figure, curve_to_figure matplotlib.use('Agg') @@ -66,6 +66,16 @@ def __init__(self): self.stats = {} self.logged_gt_wav = set() + predict_energy = hparams['predict_energy'] + predict_breathiness = hparams['predict_breathiness'] + self.variance_prediction_list = [] + if predict_energy: + self.variance_prediction_list.append('energy') + if predict_breathiness: + self.variance_prediction_list.append('breathiness') + self.predict_variances = len(self.variance_prediction_list) > 0 + self.lambda_var_loss = hparams['lambda_var_loss'] + def build_model(self): return DiffSingerAcoustic( vocab_size=len(self.phone_encoder), @@ -75,6 +85,10 @@ def build_model(self): # noinspection PyAttributeOutsideInit def build_losses(self): self.mel_loss = DiffusionNoiseLoss(loss_type=hparams['diff_loss_type']) + if self.predict_variances: + self.var_loss = DiffusionNoiseLoss( + loss_type=hparams['diff_loss_type'], + ) def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, 
T_t] @@ -99,14 +113,19 @@ def run_model(self, sample, infer=False): ) if infer: - mel_pred = output - return mel_pred + mel_pred, var_pred = output + return mel_pred, var_pred else: - x_recon, noise = output - mel_loss = self.mel_loss(x_recon, noise) + (x_recon, x_noise), var_pred_out = output + mel_loss = self.mel_loss(x_recon, x_noise) losses = { 'mel_loss': mel_loss } + + if self.predict_variances: + (v_recon, v_noise) = var_pred_out + losses['var_loss'] = self.lambda_var_loss * self.var_loss(v_recon, v_noise) + return losses def on_train_start(self): @@ -126,11 +145,22 @@ def _validation_step(self, sample, batch_idx): if batch_idx < hparams['num_valid_plots'] \ and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: - mel_pred = self.run_model(sample, infer=True) + mel_pred, var_pred = self.run_model(sample, infer=True) + if self.use_vocoder: self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') + for name in self.variance_prediction_list: + variance = sample[name] + variance_pred = var_pred[name] + self.plot_curve( + batch_idx, + gt_curve=variance, + pred_curve=variance_pred, + curve_name=name + ) + return outputs, sample['size'] ############ @@ -156,6 +186,12 @@ def plot_mel(self, batch_idx, spec, spec_out, name=None): spec_cat = torch.cat([(spec_out - spec).abs() + vmin, spec, spec_out], -1) self.logger.experiment.add_figure(name, spec_to_figure(spec_cat[0], vmin, vmax), self.global_step) + def plot_curve(self, batch_idx, gt_curve, pred_curve, curve_name='curve'): + name = f'{curve_name}_{batch_idx}' + gt_curve = gt_curve[0].cpu().numpy() + pred_curve = pred_curve[0].cpu().numpy() + self.logger.experiment.add_figure(name, curve_to_figure(gt_curve, pred_curve), self.global_step) + ############ # infer ############ From 6693cdc7b3bb0b04ff8f566d977ad3830c53d9b1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 12:45:59 +0800 Subject: [PATCH 350/475] Fix energy and breathiness is None --- configs/variance.yaml | 2 +- training/acoustic_task.py | 20 +++++++++++++++----- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index bee6c42a1..d0516daf9 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -92,7 +92,7 @@ lr_scheduler_args: max_batch_frames: 80000 max_batch_size: 48 val_with_vocoder: true -val_check_interval: 500 +val_check_interval: 2000 num_valid_plots: 10 max_updates: 320000 num_ckpt_keep: 5 diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 07c046f21..c214fd292 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -27,6 +27,16 @@ class AcousticDataset(BaseDataset): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.use_energy_embed = hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False) + self.use_breathiness_embed = ( + hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False) + ) + self.use_key_shift_embed = hparams.get('use_key_shift_embed', False) + self.use_speed_embed = hparams.get('use_speed_embed', False) + self.use_spk_id = hparams['use_spk_id'] + def collater(self, samples): batch = super().collater(samples) @@ -40,15 +50,15 @@ def collater(self, samples): 'mel': mel, 'f0': f0, }) - if hparams.get('use_energy_embed', False): + if self.use_energy_embed: batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) - if 
hparams.get('use_breathiness_embed', False): + if self.use_breathiness_embed: batch['breathiness'] = utils.collate_nd([s['breathiness'] for s in samples], 0.0) - if hparams.get('use_key_shift_embed', False): + if self.use_key_shift_embed: batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] - if hparams.get('use_speed_embed', False): + if self.use_speed_embed: batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] - if hparams['use_spk_id']: + if self.use_spk_id: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids return batch From 49a7fed2f2ed72f873b93f32491171cc27bebc4a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 12:48:54 +0800 Subject: [PATCH 351/475] Fix energy and breathiness is None --- training/acoustic_task.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index c214fd292..d89fa294b 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -29,13 +29,13 @@ class AcousticDataset(BaseDataset): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.use_energy_embed = hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False) - self.use_breathiness_embed = ( - hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False) + self.need_energy = hparams.get('use_energy_embed', False) or hparams.get('predict_energy', False) + self.need_breathiness = ( + hparams.get('use_breathiness_embed', False) or hparams.get('predict_breathiness', False) ) - self.use_key_shift_embed = hparams.get('use_key_shift_embed', False) - self.use_speed_embed = hparams.get('use_speed_embed', False) - self.use_spk_id = hparams['use_spk_id'] + self.need_key_shift = hparams.get('use_key_shift_embed', False) + self.need_speed = hparams.get('use_speed_embed', False) + self.need_spk_id = hparams['use_spk_id'] def collater(self, samples): batch = super().collater(samples) @@ -50,15 +50,15 @@ def collater(self, samples): 'mel': mel, 'f0': f0, }) - if self.use_energy_embed: + if self.need_energy: batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) - if self.use_breathiness_embed: + if self.need_breathiness: batch['breathiness'] = utils.collate_nd([s['breathiness'] for s in samples], 0.0) - if self.use_key_shift_embed: + if self.need_key_shift: batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] - if self.use_speed_embed: + if self.need_speed: batch['speed'] = torch.FloatTensor([s['speed'] for s in samples])[:, None] - if self.use_spk_id: + if self.need_spk_id: spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) batch['spk_ids'] = spk_ids return batch From d1dcab2bdca1d24dff62afe142d950e2a5181cc5 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 12:50:15 +0800 Subject: [PATCH 352/475] Fix tuple index out of range --- modules/diffusion/wavenet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py index 1b2077787..5f44ced5e 100644 --- a/modules/diffusion/wavenet.py +++ b/modules/diffusion/wavenet.py @@ -117,5 +117,5 @@ def forward(self, spec, diffusion_step, cond): # This is the temporary solution since PyTorch 1.13 # does not support exporting aten::unflatten to ONNX # x = x.unflatten(dim=1, sizes=(self.n_feats, self.in_dims)) - x = x.reshape(x.shape[0], self.n_feats, self.in_dims, 
x.shape[3]) + x = x.reshape(x.shape[0], self.n_feats, self.in_dims, x.shape[2]) return x From 2abe9529d353e622a3b2aac6b201d4ceb3721c38 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 12:53:50 +0800 Subject: [PATCH 353/475] Avoid inplace operation --- modules/fastspeech/acoustic_encoder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 9472499a3..ac96a9eb9 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -98,13 +98,13 @@ def forward( if infer: key_shift = torch.zeros_like(key_shift) key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) - adaptor_cond += key_shift_embed + adaptor_cond = adaptor_cond + key_shift_embed if self.use_speed_embed: if infer: speed = torch.ones_like(speed) speed_embed = self.speed_embed(speed[:, :, None]) - adaptor_cond += speed_embed + adaptor_cond = adaptor_cond + speed_embed return adaptor_cond, mel_cond From 25db18a5dc601d1617d2954c3a92eea23cf15c8a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 12:56:13 +0800 Subject: [PATCH 354/475] Avoid inplace operation --- modules/fastspeech/acoustic_encoder.py | 8 ++++---- modules/toplevel.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index ac96a9eb9..04b5518fc 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -114,18 +114,18 @@ def forward_variance_embedding( ): if self.use_energy_embed: energy_embed = self.energy_embed(energy[:, :, None]) - condition += energy_embed + condition = condition + energy_embed if self.use_breathiness_embed: breathiness_embed = self.breathiness_embed(breathiness[:, :, None]) - condition += breathiness_embed + condition = condition + breathiness_embed if self.use_key_shift_embed: key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) - condition += key_shift_embed + condition = condition + key_shift_embed if self.use_speed_embed: speed_embed = self.speed_embed(speed[:, :, None]) - condition += speed_embed + condition = condition + speed_embed return condition diff --git a/modules/toplevel.py b/modules/toplevel.py index f7d3e0653..4984feb6f 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -90,7 +90,7 @@ def forward( self.variance_embeds[v_name](v_choice[:, :, None]) # [B, T] => [B, T, H] for v_name, v_choice in zip(self.variance_prediction_list, variance_inputs) ], dim=-1).sum(-1) - mel_cond += variance_embeds + mel_cond = mel_cond + variance_embeds else: variance_pred_out = None From 69dec6748be97f94bc2571ce7b180b5c34669949 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 13:03:06 +0800 Subject: [PATCH 355/475] Pass None during validation if parameters need to be predicted --- training/acoustic_task.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index d89fa294b..3565d2a36 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -76,12 +76,12 @@ def __init__(self): self.stats = {} self.logged_gt_wav = set() - predict_energy = hparams['predict_energy'] - predict_breathiness = hparams['predict_breathiness'] + self.predict_energy = hparams['predict_energy'] + self.predict_breathiness = hparams['predict_breathiness'] self.variance_prediction_list = [] - if predict_energy: + if self.predict_energy: 
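+            # 'energy' here is the frame-level curve extracted with
+            # get_energy_librosa at binarization time.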
self.variance_prediction_list.append('energy') - if predict_breathiness: + if self.predict_breathiness: self.variance_prediction_list.append('breathiness') self.predict_variances = len(self.variance_prediction_list) > 0 self.lambda_var_loss = hparams['lambda_var_loss'] @@ -105,11 +105,12 @@ def run_model(self, sample, infer=False): target = sample['mel'] # [B, T_s, M] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] - energy = sample.get('energy') - breathiness = sample.get('breathiness') key_shift = sample.get('key_shift') speed = sample.get('speed') + energy = None if self.predict_energy else sample.get('energy') + breathiness = None if self.predict_breathiness else sample.get('breathiness') + if hparams['use_spk_id']: spk_embed_id = sample['spk_ids'] else: From 3a5990d1553c300339a21f46ca25b215681ce12e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 13:06:50 +0800 Subject: [PATCH 356/475] Fix energy and breathiness are NoneType --- training/acoustic_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 3565d2a36..1870d2112 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -108,8 +108,8 @@ def run_model(self, sample, infer=False): key_shift = sample.get('key_shift') speed = sample.get('speed') - energy = None if self.predict_energy else sample.get('energy') - breathiness = None if self.predict_breathiness else sample.get('breathiness') + energy = None if self.predict_energy and infer else sample.get('energy') + breathiness = None if self.predict_breathiness and infer else sample.get('breathiness') if hparams['use_spk_id']: spk_embed_id = sample['spk_ids'] From cd36159f6f0e5aca0b8b464f486f978b1336fee0 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 23:09:53 +0800 Subject: [PATCH 357/475] Fix type matching and key error --- modules/diffusion/ddpm.py | 6 +++--- modules/toplevel.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 6b1faf35a..2626af531 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -290,12 +290,12 @@ def denorm_spec(self, x): class RepetitiveDiffusion(GaussianDiffusion): - def __init__(self, vmin: float | list, vmax: float | list, repeat_bins: int, + def __init__(self, vmin: float | int | list, vmax: float | int | list, repeat_bins: int, timesteps=1000, k_step=1000, denoiser_type=None, denoiser_args=None, betas=None): - assert (isinstance(vmin, float) and isinstance(vmin, float)) or len(vmin) == len(vmax) - num_feats = 1 if isinstance(vmin, float) else len(vmin) + assert (isinstance(vmin, float | int) and isinstance(vmin, float | int)) or len(vmin) == len(vmax) + num_feats = 1 if isinstance(vmin, float | int) else len(vmin) spec_min = [vmin] if num_feats == 1 else [[v] for v in vmin] spec_max = [vmax] if num_feats == 1 else [[v] for v in vmax] self.repeat_bins = repeat_bins diff --git a/modules/toplevel.py b/modules/toplevel.py index 4984feb6f..4ce3e4b51 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -114,8 +114,8 @@ def __init__(self, vocab_size): self.predict_pitch = hparams['predict_pitch'] - predict_energy = hparams['predict_energy'] - predict_breathiness = hparams['predict_breathiness'] + predict_energy = hparams.get('predict_energy', False) + predict_breathiness = hparams.get('predict_breathiness', False) self.variance_prediction_list = [] if predict_energy: self.variance_prediction_list.append('energy') From 
62555303641edb18e69da825c8710bb9297d051d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 2 May 2023 23:10:39 +0800 Subject: [PATCH 358/475] Adapt acoustic and variance model inference to new arch --- inference/ds_acoustic.py | 38 +++++++++++++++++++++++++++++++++++--- inference/ds_variance.py | 14 ++++++++------ 2 files changed, 43 insertions(+), 9 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 6adbc0ed1..f9da6521f 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -19,6 +19,20 @@ class DiffSingerAcousticInfer(BaseSVSInfer): def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=None): super().__init__(device=device) if load_model: + self.variance_needed_list = [] + + self.use_energy_embed = hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False) + self.predict_energy = hparams.get('predict_energy', False) + if self.use_energy_embed or self.predict_energy: + self.variance_needed_list.append('energy') + + self.use_breathiness_embed = ( + hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False) + ) + self.predict_breathiness = hparams.get('predict_breathiness', False) + if self.use_breathiness_embed or self.predict_breathiness: + self.variance_needed_list.append('breathiness') + self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) if hparams['use_spk_id']: with open(pathlib.Path(hparams['work_dir']) / 'spk_map.json', 'r', encoding='utf8') as f: @@ -142,13 +156,27 @@ def preprocess_input(self, param): align_length=length )).to(self.device)[None] - if hparams.get('use_energy_embed', False): + if self.use_energy_embed or (self.predict_energy and param.get('energy')): batch['energy'] = torch.from_numpy(resample_align_curve( np.array(param['energy'].split(), np.float32), original_timestep=float(param['energy_timestep']), target_timestep=self.timestep, align_length=length )).to(self.device)[None] + print('Using manual energy curve') + else: + print('Using predicted energy curve') + + if self.use_breathiness_embed or (self.predict_breathiness and param.get('breathiness')): + batch['breathiness'] = torch.from_numpy(resample_align_curve( + np.array(param['breathiness'].split(), np.float32), + original_timestep=float(param['breathiness_timestep']), + target_timestep=self.timestep, + align_length=length + )).to(self.device)[None] + print('Using manual breathiness curve') + else: + print('Using predicted breathiness curve') if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] @@ -195,6 +223,10 @@ def preprocess_input(self, param): @torch.no_grad() def run_model(self, sample, return_mel=False): txt_tokens = sample['tokens'] + variance_kwargs = { + v_name: sample.get(v_name) + for v_name in self.variance_needed_list + } if hparams['use_spk_id']: spk_mix_id = sample['spk_mix_id'] spk_mix_value = sample['spk_mix_value'] @@ -206,9 +238,9 @@ def run_model(self, sample, return_mel=False): else: spk_mix_embed = None mel_pred, _ = self.model( - txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], energy=sample.get('energy'), + txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], key_shift=sample.get('key_shift'), speed=sample.get('speed'), - spk_mix_embed=spk_mix_embed, infer=True + spk_mix_embed=spk_mix_embed, infer=True, **variance_kwargs ) # var_pred ignored for now return mel_pred diff --git a/inference/ds_variance.py b/inference/ds_variance.py index 
685e295d5..d1e8c3c0d 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -147,18 +147,18 @@ def preprocess_input(self, param): def run_model(self, sample): txt_tokens = sample['tokens'] base_pitch = sample['base_pitch'] - dur_pred, pitch_pred, energy_pred = self.model( + dur_pred, pitch_pred, variance_pred = self.model( txt_tokens, midi=sample['midi'], ph2word=sample['ph2word'], word_dur=sample['word_dur'], mel2ph=sample['mel2ph'], base_pitch=base_pitch, delta_pitch=sample.get('delta_pitch') ) if pitch_pred is not None: pitch_pred = base_pitch + pitch_pred - return dur_pred, pitch_pred, energy_pred + return dur_pred, pitch_pred, variance_pred def infer_once(self, param): batch = self.preprocess_input(param) - dur_pred, pitch_pred, energy_pred = self.run_model(batch) + dur_pred, pitch_pred, variance_pred = self.run_model(batch) if dur_pred is not None: dur_pred = dur_pred[0].cpu().numpy() if pitch_pred is not None: @@ -166,6 +166,8 @@ def infer_once(self, param): f0_pred = librosa.midi_to_hz(pitch_pred) else: f0_pred = None - if energy_pred is not None: - energy_pred = energy_pred[0].cpu().numpy() - return dur_pred, f0_pred, energy_pred + variance_pred = { + k: v[0].cpu().numpy() + for k, v in variance_pred.items() + } + return dur_pred, f0_pred, variance_pred From 807a966e21ffbe7c23727c70de57bf502b1d9dca Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 3 May 2023 00:45:32 +0800 Subject: [PATCH 359/475] Make inference and training easier to generalize to other variances --- inference/ds_acoustic.py | 65 +++++++++++++++++---------------------- training/acoustic_task.py | 45 +++++++++++++-------------- 2 files changed, 50 insertions(+), 60 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index f9da6521f..0e268fece 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -14,24 +14,27 @@ from utils.phoneme_utils import build_phoneme_list from utils.text_encoder import TokenTextEncoder +VARIANCE_CHECKLIST = ['energy', 'breathiness'] + class DiffSingerAcousticInfer(BaseSVSInfer): def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=None): super().__init__(device=device) if load_model: - self.variance_needed_list = [] + self.variance_checklist = [] + + self.variances_to_embed = set() + self.variances_to_predict = set() - self.use_energy_embed = hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False) - self.predict_energy = hparams.get('predict_energy', False) - if self.use_energy_embed or self.predict_energy: - self.variance_needed_list.append('energy') + if hparams.get('predict_energy', False): + self.variances_to_predict.add('energy') + elif hparams.get('use_energy_embed', False): + self.variances_to_embed.add('energy') - self.use_breathiness_embed = ( - hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False) - ) - self.predict_breathiness = hparams.get('predict_breathiness', False) - if self.use_breathiness_embed or self.predict_breathiness: - self.variance_needed_list.append('breathiness') + if hparams.get('predict_breathiness', False): + self.variances_to_predict.add('breathiness') + elif hparams.get('use_breathiness_embed', False): + self.variances_to_embed.add('breathiness') self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) if hparams['use_spk_id']: @@ -156,27 +159,17 @@ def preprocess_input(self, param): align_length=length )).to(self.device)[None] - if self.use_energy_embed or (self.predict_energy and 
param.get('energy')): - batch['energy'] = torch.from_numpy(resample_align_curve( - np.array(param['energy'].split(), np.float32), - original_timestep=float(param['energy_timestep']), - target_timestep=self.timestep, - align_length=length - )).to(self.device)[None] - print('Using manual energy curve') - else: - print('Using predicted energy curve') - - if self.use_breathiness_embed or (self.predict_breathiness and param.get('breathiness')): - batch['breathiness'] = torch.from_numpy(resample_align_curve( - np.array(param['breathiness'].split(), np.float32), - original_timestep=float(param['breathiness_timestep']), - target_timestep=self.timestep, - align_length=length - )).to(self.device)[None] - print('Using manual breathiness curve') - else: - print('Using predicted breathiness curve') + for v_name in VARIANCE_CHECKLIST: + if v_name in self.variances_to_embed or (v_name in self.variances_to_predict and param.get(v_name)): + batch[v_name] = torch.from_numpy(resample_align_curve( + np.array(param[v_name].split(), np.float32), + original_timestep=float(param[f'{v_name}_timestep']), + target_timestep=self.timestep, + align_length=length + )).to(self.device)[None] + print(f'Using manual {v_name} curve') + else: + print(f'Using predicted {v_name} curve') if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] @@ -223,9 +216,9 @@ def preprocess_input(self, param): @torch.no_grad() def run_model(self, sample, return_mel=False): txt_tokens = sample['tokens'] - variance_kwargs = { + variances = { v_name: sample.get(v_name) - for v_name in self.variance_needed_list + for v_name in self.variances_to_embed | self.variances_to_predict } if hparams['use_spk_id']: spk_mix_id = sample['spk_mix_id'] @@ -238,9 +231,9 @@ def run_model(self, sample, return_mel=False): else: spk_mix_embed = None mel_pred, _ = self.model( - txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], + txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'], **variances, key_shift=sample.get('key_shift'), speed=sample.get('speed'), - spk_mix_embed=spk_mix_embed, infer=True, **variance_kwargs + spk_mix_embed=spk_mix_embed, infer=True ) # var_pred ignored for now return mel_pred diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 1870d2112..5fd35e7c0 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -29,10 +29,12 @@ class AcousticDataset(BaseDataset): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.need_energy = hparams.get('use_energy_embed', False) or hparams.get('predict_energy', False) - self.need_breathiness = ( - hparams.get('use_breathiness_embed', False) or hparams.get('predict_breathiness', False) - ) + self.required_variances = {} # key: variance name, value: padding value + if hparams.get('use_energy_embed', False) or hparams.get('predict_energy', False): + self.required_variances['energy'] = 0.0 + if hparams.get('use_breathiness_embed', False) or hparams.get('predict_breathiness', False): + self.required_variances['breathiness'] = 0.0 + self.need_key_shift = hparams.get('use_key_shift_embed', False) self.need_speed = hparams.get('use_speed_embed', False) self.need_spk_id = hparams['use_spk_id'] @@ -50,10 +52,8 @@ def collater(self, samples): 'mel': mel, 'f0': f0, }) - if self.need_energy: - batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0.0) - if self.need_breathiness: - batch['breathiness'] = utils.collate_nd([s['breathiness'] for s in samples], 0.0) + for 
v_name, v_pad in self.required_variances.items(): + batch[v_name] = utils.collate_nd([s[v_name] for s in samples], v_pad) if self.need_key_shift: batch['key_shift'] = torch.FloatTensor([s['key_shift'] for s in samples])[:, None] if self.need_speed: @@ -76,14 +76,12 @@ def __init__(self): self.stats = {} self.logged_gt_wav = set() - self.predict_energy = hparams['predict_energy'] - self.predict_breathiness = hparams['predict_breathiness'] - self.variance_prediction_list = [] - if self.predict_energy: - self.variance_prediction_list.append('energy') - if self.predict_breathiness: - self.variance_prediction_list.append('breathiness') - self.predict_variances = len(self.variance_prediction_list) > 0 + self.variances_to_predict = set() + if hparams['predict_energy']: + self.variances_to_predict.add('energy') + if hparams['predict_breathiness']: + self.variances_to_predict.add('breathiness') + self.predict_variances = len(self.variances_to_predict) > 0 self.lambda_var_loss = hparams['lambda_var_loss'] def build_model(self): @@ -105,21 +103,20 @@ def run_model(self, sample, infer=False): target = sample['mel'] # [B, T_s, M] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] + variances = { + v_name: None if infer else sample[v_name] + for v_name in self.variances_to_predict + } key_shift = sample.get('key_shift') speed = sample.get('speed') - energy = None if self.predict_energy and infer else sample.get('energy') - breathiness = None if self.predict_breathiness and infer else sample.get('breathiness') - if hparams['use_spk_id']: spk_embed_id = sample['spk_ids'] else: spk_embed_id = None output = self.model( - txt_tokens, mel2ph=mel2ph, - f0=f0, energy=energy, breathiness=breathiness, - key_shift=key_shift, speed=speed, - spk_embed_id=spk_embed_id, + txt_tokens, mel2ph=mel2ph, f0=f0, **variances, + key_shift=key_shift, speed=speed, spk_embed_id=spk_embed_id, gt_mel=target, infer=infer ) @@ -162,7 +159,7 @@ def _validation_step(self, sample, batch_idx): self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') - for name in self.variance_prediction_list: + for name in self.variances_to_predict: variance = sample[name] variance_pred = var_pred[name] self.plot_curve( From 50a52830dd3327682e1b4006149ee3442a5f114a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 3 May 2023 13:49:17 +0800 Subject: [PATCH 360/475] Extract inference loop --- modules/diffusion/ddpm.py | 126 ++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 61 deletions(-) diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 2626af531..aa65e48be 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -204,6 +204,70 @@ def p_losses(self, x_start, t, cond, noise=None): return x_recon, noise + def inference(self, cond, b=1, device=None): + t = self.k_step + shape = (b, self.num_feats, self.out_dims, cond.shape[2]) + x = torch.randn(shape, device=device) + if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1: + algorithm = hparams.get('diff_accelerator', 'dpm-solver') + if algorithm == 'dpm-solver': + from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver + # 1. Define the noise schedule. + noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) + + # 2. Convert your discrete-time `model` to the continuous-time + # noise prediction model. Here is an example for a diffusion model + # `model` with the noise prediction type ("noise") . 
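+                # A note on the wrapper defined next: DPM-Solver drives the sampling
+                # loop internally, so the denoiser is wrapped here purely to tick the
+                # progress bar; `steps` appears to count model evaluations in this
+                # solver, so one bar update per call keeps the tqdm total consistent.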
+ def my_wrapper(fn): + def wrapped(x, t, **kwargs): + ret = fn(x, t, **kwargs) + self.bar.update(1) + return ret + + return wrapped + + model_fn = model_wrapper( + my_wrapper(self.denoise_fn), + noise_schedule, + model_type="noise", # or "x_start" or "v" or "score" + model_kwargs={"cond": cond} + ) + + # 3. Define dpm-solver and sample by singlestep DPM-Solver. + # (We recommend singlestep DPM-Solver for unconditional sampling) + # You can adjust the `steps` to balance the computation + # costs and the sample quality. + dpm_solver = DPM_Solver(model_fn, noise_schedule) + + steps = t // hparams["pndm_speedup"] + self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer']) + x = dpm_solver.sample( + x, + steps=steps, + order=3, + skip_type="time_uniform", + method="singlestep", + ) + self.bar.close() + elif algorithm == 'pndm': + self.noise_list = deque(maxlen=4) + iteration_interval = hparams['pndm_speedup'] + for i in tqdm( + reversed(range(0, t, iteration_interval)), desc='sample time step', + total=t // iteration_interval, disable=not hparams['infer'] + ): + x = self.p_sample_plms( + x, torch.full((b,), i, device=device, dtype=torch.long), + iteration_interval, cond=cond + ) + else: + raise NotImplementedError(algorithm) + else: + for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=not hparams['infer']): + x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) + x = x.transpose(2, 3).squeeze(1) # [B, F, M, T] => [B, T, M] or [B, F, T, M] + return x + def forward(self, condition, gt_spec=None, infer=True): """ conditioning diffusion, use fastspeech2 encoder output as the condition @@ -219,67 +283,7 @@ def forward(self, condition, gt_spec=None, infer=True): t = torch.randint(0, self.k_step, (b,), device=device).long() return self.p_losses(spec, t, cond=cond) else: - t = self.k_step - shape = (cond.shape[0], self.num_feats, self.out_dims, cond.shape[2]) - x = torch.randn(shape, device=device) - if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1: - algorithm = hparams.get('diff_accelerator', 'dpm-solver') - if algorithm == 'dpm-solver': - from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver - # 1. Define the noise schedule. - noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) - - # 2. Convert your discrete-time `model` to the continuous-time - # noise prediction model. Here is an example for a diffusion model - # `model` with the noise prediction type ("noise") . - def my_wrapper(fn): - def wrapped(x, t, **kwargs): - ret = fn(x, t, **kwargs) - self.bar.update(1) - return ret - - return wrapped - - model_fn = model_wrapper( - my_wrapper(self.denoise_fn), - noise_schedule, - model_type="noise", # or "x_start" or "v" or "score" - model_kwargs={"cond": cond} - ) - - # 3. Define dpm-solver and sample by singlestep DPM-Solver. - # (We recommend singlestep DPM-Solver for unconditional sampling) - # You can adjust the `steps` to balance the computation - # costs and the sample quality. 
- dpm_solver = DPM_Solver(model_fn, noise_schedule) - - steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer']) - x = dpm_solver.sample( - x, - steps=steps, - order=3, - skip_type="time_uniform", - method="singlestep", - ) - self.bar.close() - elif algorithm == 'pndm': - self.noise_list = deque(maxlen=4) - iteration_interval = hparams['pndm_speedup'] - for i in tqdm( - reversed(range(0, t, iteration_interval)), desc='sample time step', - total=t // iteration_interval, disable=not hparams['infer'] - ): - x = self.p_sample_plms( - x, torch.full((b,), i, device=device, dtype=torch.long), - iteration_interval, cond=cond - ) - else: - raise NotImplementedError(algorithm) - else: - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=not hparams['infer']): - x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) - x = x.transpose(2, 3).squeeze(1) # [B, F, M, T] => [B, T, M] or [B, F, T, M] + x = self.inference(cond, b=b, device=device) return self.denorm_spec(x) def norm_spec(self, x): From cf791b9a499db071bb49b1355c65f104c0dd4443 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 4 May 2023 00:13:32 +0800 Subject: [PATCH 361/475] Support rhythm alignment and fix some logical mistakes --- configs/variance.yaml | 1 - inference/ds_acoustic.py | 2 +- inference/ds_variance.py | 13 ++++++++--- modules/fastspeech/tts_modules.py | 30 +++++++++++++++++++++++++- modules/toplevel.py | 36 +++++++++++++++++-------------- training/variance_task.py | 2 +- 6 files changed, 61 insertions(+), 23 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index d0516daf9..8c24f667c 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -38,7 +38,6 @@ predict_pitch: true predict_energy: false predict_breathiness: false -dur_cascade: false dur_prediction_args: hidden_size: 512 dropout: 0.1 diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 0e268fece..54452ce44 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -168,7 +168,7 @@ def preprocess_input(self, param): align_length=length )).to(self.device)[None] print(f'Using manual {v_name} curve') - else: + elif v_name in self.variances_to_predict: print(f'Using predicted {v_name} curve') if hparams.get('use_key_shift_embed', False): diff --git a/inference/ds_variance.py b/inference/ds_variance.py index d1e8c3c0d..948c59fef 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -6,7 +6,10 @@ from scipy import interpolate from basics.base_svs_infer import BaseSVSInfer -from modules.fastspeech.tts_modules import LengthRegulator, mel2ph_to_dur +from modules.fastspeech.tts_modules import ( + LengthRegulator, RhythmRegulator, + mel2ph_to_dur +) from modules.toplevel import DiffSingerVariance from utils import load_ckpt from utils.hparams import hparams @@ -22,6 +25,7 @@ def __init__(self, device=None, ckpt_steps=None): self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) self.model = self.build_model(ckpt_steps=ckpt_steps) self.lr = LengthRegulator() + self.rr = RhythmRegulator() smooth_kernel_size = round(hparams['midi_smooth_width'] / self.timestep) self.smooth = nn.Conv1d( in_channels=1, @@ -146,12 +150,15 @@ def preprocess_input(self, param): @torch.no_grad() def run_model(self, sample): txt_tokens = sample['tokens'] + word_dur = sample['word_dur'] + ph2word = sample['ph2word'] base_pitch = sample['base_pitch'] dur_pred, pitch_pred, variance_pred = 
self.model( - txt_tokens, midi=sample['midi'], ph2word=sample['ph2word'], - word_dur=sample['word_dur'], + txt_tokens, midi=sample['midi'], ph2word=ph2word, word_dur=word_dur, mel2ph=sample['mel2ph'], base_pitch=base_pitch, delta_pitch=sample.get('delta_pitch') ) + if dur_pred is not None: + dur_pred = self.rr(dur_pred, ph2word, word_dur) if pitch_pred is not None: pitch_pred = base_pitch + pitch_pred return dur_pred, pitch_pred, variance_pred diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 014ade5dd..b85312d3c 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -134,7 +134,7 @@ def forward(self, xs, x_masks=None, infer=True): dur_pred = self.out2dur(xs) if infer: - dur_pred = dur_pred.clamp(min=0.).round().long() # avoid negative value + dur_pred = dur_pred.clamp(min=0.) # avoid negative value return dur_pred @@ -259,6 +259,34 @@ def forward(self, xs, base): return self.out2pitch(xs) + base, xs +class RhythmRegulator(torch.nn.Module): + def __init__(self, eps=1e-5): + super().__init__() + self.eps = eps + + def forward(self, ph_dur, ph2word, word_dur): + """ + Example (no batch dim version): + 1. ph_dur = [4,2,3,2] + 2. word_dur = [3,4,2], ph2word = [1,2,2,3] + 3. word_dur_in = [4,5,2] + 4. alpha_w = [0.75,0.8,1], alpha_ph = [0.75,0.8,0.8,1] + 5. ph_dur_out = [3,1.6,2.4,2] + :param ph_dur: [B, T_ph] + :param ph2word: [B, T_ph] + :param word_dur: [B, T_w] + """ + ph_dur = ph_dur.float() * (ph2word > 0) + word_dur = word_dur.float() + word_dur_in = ph_dur.new_zeros(ph_dur.shape[0], ph2word.max() + 1).scatter_add( + 1, ph2word, ph_dur + )[:, 1:] # [B, T_ph] => [B, T_w] + alpha_w = word_dur / word_dur_in.clamp(min=self.eps) # avoid dividing by zero + alpha_ph = torch.gather(F.pad(alpha_w, [1, 0]), 1, ph2word) # [B, T_w] => [B, T_ph] + ph_dur_out = ph_dur * alpha_ph + return ph_dur_out.round().long() + + class LengthRegulator(torch.nn.Module): # noinspection PyMethodMayBeStatic def forward(self, dur, dur_padding=None, alpha=1.0): diff --git a/modules/toplevel.py b/modules/toplevel.py index 4ce3e4b51..1af945af5 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -11,7 +11,7 @@ ) from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic from modules.fastspeech.param_adaptor import ParameterAdaptorModule -from modules.fastspeech.tts_modules import LengthRegulator +from modules.fastspeech.tts_modules import RhythmRegulator, LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams @@ -61,23 +61,25 @@ def forward( if infer: if not self.predict_variances: variance_pred_out = {} - elif not all([v is not None for v in variance_inputs]): - variance_outputs = self.variance_adaptor(adaptor_cond, variance_inputs, infer) - variance_choices = [ - v_in if v_in is not None else v_pred - for v_in, v_pred in zip(variance_inputs, variance_outputs) - ] + else: + if not all([v is not None for v in variance_inputs]): + variance_outputs = self.variance_adaptor(adaptor_cond, variance_inputs, infer) + variance_choices = [ + v_in if v_in is not None else v_pred + for v_in, v_pred in zip(variance_inputs, variance_outputs) + ] + variance_pred_out = self.collect_variance_outputs(variance_choices) + else: + variance_choices = variance_inputs + variance_pred_out = { + name: kwargs[name] + for name in self.variance_prediction_list + } variance_embeds = torch.stack([ self.variance_embeds[v_name](v_choice[:, :, None]) # [B, T] => [B, T, H] for v_name, v_choice in 
zip(self.variance_prediction_list, variance_choices) ], dim=-1).sum(-1) mel_cond += variance_embeds - variance_pred_out = self.collect_variance_outputs(variance_choices) - else: - variance_pred_out = { - name: kwargs[name] - for name in self.variance_prediction_list - } mel_pred_out = self.diffusion(mel_cond, infer=True) mel_pred_out *= ((mel2ph > 0).float()[:, :, None]) @@ -110,6 +112,7 @@ def __init__(self, vocab_size): self.fs2 = FastSpeech2Variance( vocab_size=vocab_size ) + self.rr = RhythmRegulator() self.lr = LengthRegulator() self.predict_pitch = hparams['predict_pitch'] @@ -155,9 +158,10 @@ def forward( if not self.predict_pitch and not self.predict_variances: return dur_pred_out, None, None, ({} if infer else None) - if mel2ph is None or hparams['dur_cascade']: - # (extract mel2ph from dur_pred_out) - raise NotImplementedError() + if mel2ph is None and word_dur is not None: # inference from file + dur_pred_align = self.rr(dur_pred_out, ph2word, word_dur) + mel2ph = self.lr(dur_pred_align) + mel2ph = F.pad(mel2ph, [0, base_pitch.shape[1] - mel2ph.shape[1]]) encoder_out = F.pad(encoder_out, [0, 0, 1, 0]) mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']]) diff --git a/training/variance_task.py b/training/variance_task.py index 13e24e16d..8b7468fec 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -114,7 +114,7 @@ def run_model(self, sample, infer=False): dur_pred, pitch_pred, variances_pred = output if infer: - return dur_pred, pitch_pred, variances_pred # Tensor, Tensor, Dict[Tensor] + return dur_pred.round().long(), pitch_pred, variances_pred # Tensor, Tensor, Dict[Tensor] else: losses = {} if dur_pred is not None: From 483fbcb9c2abdfd27f76950785ee8a4ff19c0f0d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 4 May 2023 01:08:46 +0800 Subject: [PATCH 362/475] Support augmentation with energy and breathiness --- augmentation/spec_stretch.py | 39 ++++++++++++++++++++++++++++++------ 1 file changed, 33 insertions(+), 6 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index d5a8e8163..17eb05c44 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -8,6 +8,9 @@ from modules.vocoders.registry import VOCODERS from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from utils.hparams import hparams +from utils.infer_utils import resample_align_curve + +VARIANCE_CHECKLIST = ['energy', 'breathiness'] class SpectrogramStretchAugmentation(BaseAugmentation): @@ -22,12 +25,6 @@ def __init__(self, data_dirs: list, augmentation_args: dict): @require_same_keys def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) -> dict: - if 'energy' in item: - raise NotImplementedError('Energy has not been supported in augmentation.') - - if 'breathiness' in item: - raise NotImplementedError('Breathiness has not been supported in augmentation.') - aug_item = deepcopy(item) if hparams['vocoder'] in VOCODERS: wav, mel = VOCODERS[hparams['vocoder']].wav2spec( @@ -48,11 +45,41 @@ def process_item(self, item: dict, key_shift=0., speed=1., replace_spk_id=None) aug_item['mel2ph'] = get_mel2ph_torch( self.lr, torch.from_numpy(aug_item['ph_dur']), aug_item['length'], self.timestep, device=self.device ).cpu().numpy() + f0, _ = get_pitch_parselmouth( wav, aug_item['length'], hparams, speed=speed, interp_uv=hparams['interp_uv'] ) aug_item['f0'] = f0.astype(np.float32) + # NOTE: variance curves are directly resampled according to speed, + # despite how frequency-domain features 
change after the augmentation. + # For acoustic models, this can bring more (but not much) difficulty + # in learning how variance curves affect the mel spectrograms, since + # they must realize how the augmentation causes the mismatch. + # + # This is a simple way to combine augmentation and variances. However, + # dealing with variance curves like this will decrease the accuracy of + # variance controls. In most situations, not being ~100% accurate + # will not ruin the user experience. For example, it does not matter + # if the energy does not exactly equal the RMS; it is just fine + # as long as higher energy can bring higher loudness and strength. + # The neural network itself cannot be 100% accurate, though. + # + # There are yet other choices to simulate variance curves: + # 1. Re-extract the features from resampled waveforms; + # 2. Re-extract the features from re-constructed waveforms using + # the transformed mel spectrograms through the vocoder. + # But there are actually no perfect ways to make them all accurate + # and stable. + for v_name in VARIANCE_CHECKLIST: + if v_name in item: + aug_item[v_name] = resample_align_curve( + aug_item[v_name], + original_timestep=self.timestep, + target_timestep=self.timestep * aug_item['speed'], + align_length=aug_item['length'] + ) + if key_shift != 0. or hparams.get('use_key_shift_embed', False): if replace_spk_id is None: aug_item['key_shift'] = key_shift From 36e31043ad0dbdd7cf1ebd1fb655d6dea70329a1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 4 May 2023 01:28:44 +0800 Subject: [PATCH 363/475] Change acoustic model variance embeddings to `torch.nn.ModuleDict` --- modules/fastspeech/acoustic_encoder.py | 41 ++++++++++++-------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 04b5518fc..1d0dba6c8 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -30,17 +30,19 @@ def __init__(self, vocab_size): else: raise ValueError('f0_embed_type must be \'discrete\' or \'continuous\'.') - self.use_energy_embed = hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False) - if self.use_energy_embed: + self.variance_embed_list = [] + if hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False): # energy is embedded but not predicted - self.energy_embed = Linear(1, hparams['hidden_size']) - - self.use_breathiness_embed = ( - hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False) - ) - if self.use_breathiness_embed: + self.variance_embed_list.append('energy') + if hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False): # breathiness is embedded but not predicted - self.breathiness_embed = Linear(1, hparams['hidden_size']) + self.variance_embed_list.append('breathiness') + self.embed_variances = len(self.variance_embed_list) > 0 + if self.embed_variances: + self.variance_embeds = nn.ModuleDict({ + v_name: Linear(1, hparams['hidden_size']) + for v_name in self.variance_embed_list + }) self.use_key_shift_embed = hparams.get('use_key_shift_embed', False) if self.use_key_shift_embed: self.key_shift_embed = Linear(1, hparams['hidden_size']) @@ -84,8 +86,7 @@ def forward( adaptor_cond = condition + pitch_embed mel_cond = self.forward_variance_embedding( - adaptor_cond, energy=energy, breathiness=breathiness, - key_shift=key_shift, speed=speed + adaptor_cond, key_shift=key_shift, speed=speed, **kwargs ) # During training, the data 
augmentation parameters (GEN and VEL) @@ -108,17 +109,13 @@ def forward( return adaptor_cond, mel_cond - def forward_variance_embedding( - self, condition, energy=None, breathiness=None, - key_shift=None, speed=None - ): - if self.use_energy_embed: - energy_embed = self.energy_embed(energy[:, :, None]) - condition = condition + energy_embed - - if self.use_breathiness_embed: - breathiness_embed = self.breathiness_embed(breathiness[:, :, None]) - condition = condition + breathiness_embed + def forward_variance_embedding(self, condition, key_shift=None, speed=None, **variances): + if self.embed_variances: + variance_embeds = torch.stack([ + self.variance_embeds[v_name](variances[v_name][:, :, None]) # [B, T] => [B, T, H] + for v_name in self.variance_embed_list + ], dim=-1).sum(-1) + condition = condition + variance_embeds if self.use_key_shift_embed: key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) From 77e4d2fca55b62207575c0e73279f3cca69bc9ce Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 4 May 2023 14:08:18 +0800 Subject: [PATCH 364/475] Unify variance embeddings and variance predictions --- augmentation/spec_stretch.py | 3 +- inference/ds_acoustic.py | 3 +- modules/fastspeech/acoustic_encoder.py | 27 +------- modules/fastspeech/param_adaptor.py | 2 + modules/toplevel.py | 86 +++++++++++++++----------- 5 files changed, 58 insertions(+), 63 deletions(-) diff --git a/augmentation/spec_stretch.py b/augmentation/spec_stretch.py index 17eb05c44..c56247675 100644 --- a/augmentation/spec_stretch.py +++ b/augmentation/spec_stretch.py @@ -4,14 +4,13 @@ import torch from basics.base_augmentation import BaseAugmentation, require_same_keys +from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST from modules.fastspeech.tts_modules import LengthRegulator from modules.vocoders.registry import VOCODERS from utils.binarizer_utils import get_pitch_parselmouth, get_mel2ph_torch from utils.hparams import hparams from utils.infer_utils import resample_align_curve -VARIANCE_CHECKLIST = ['energy', 'breathiness'] - class SpectrogramStretchAugmentation(BaseAugmentation): """ diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 54452ce44..a28fb8c79 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -6,6 +6,7 @@ from basics.base_svs_infer import BaseSVSInfer from modules.fastspeech.tts_modules import LengthRegulator +from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST from modules.toplevel import DiffSingerAcoustic from modules.vocoders.registry import VOCODERS from utils import load_ckpt @@ -14,8 +15,6 @@ from utils.phoneme_utils import build_phoneme_list from utils.text_encoder import TokenTextEncoder -VARIANCE_CHECKLIST = ['energy', 'breathiness'] - class DiffSingerAcousticInfer(BaseSVSInfer): def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=None): diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index 1d0dba6c8..b9218f4af 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -30,20 +30,6 @@ def __init__(self, vocab_size): else: raise ValueError('f0_embed_type must be \'discrete\' or \'continuous\'.') - self.variance_embed_list = [] - if hparams.get('use_energy_embed', False) and not hparams.get('predict_energy', False): - # energy is embedded but not predicted - self.variance_embed_list.append('energy') - if hparams.get('use_breathiness_embed', False) and not hparams.get('predict_breathiness', False): - # breathiness 
is embedded but not predicted - self.variance_embed_list.append('breathiness') - self.embed_variances = len(self.variance_embed_list) > 0 - if self.embed_variances: - self.variance_embeds = nn.ModuleDict({ - v_name: Linear(1, hparams['hidden_size']) - for v_name in self.variance_embed_list - }) - self.use_key_shift_embed = hparams.get('use_key_shift_embed', False) if self.use_key_shift_embed: self.key_shift_embed = Linear(1, hparams['hidden_size']) @@ -86,14 +72,14 @@ def forward( adaptor_cond = condition + pitch_embed mel_cond = self.forward_variance_embedding( - adaptor_cond, key_shift=key_shift, speed=speed, **kwargs + adaptor_cond, key_shift=key_shift, speed=speed ) # During training, the data augmentation parameters (GEN and VEL) # are seen to the variance adaptor; but during inference, # we will always send the DEFAULT parameters (GEN = 0 and VEL = 1) # to the variance adaptor so that the prediction outputs will NOT - # be influenced by these parameters, which is more reasonable for + # be influenced by these parameters, which is more reasonable to # most users of singing voice synthesis systems. if self.use_key_shift_embed: if infer: @@ -109,14 +95,7 @@ def forward( return adaptor_cond, mel_cond - def forward_variance_embedding(self, condition, key_shift=None, speed=None, **variances): - if self.embed_variances: - variance_embeds = torch.stack([ - self.variance_embeds[v_name](variances[v_name][:, :, None]) # [B, T] => [B, T, H] - for v_name in self.variance_embed_list - ], dim=-1).sum(-1) - condition = condition + variance_embeds - + def forward_variance_embedding(self, condition, key_shift=None, speed=None): if self.use_key_shift_embed: key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) condition = condition + key_shift_embed diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py index bc3333a1a..fcdc61835 100644 --- a/modules/fastspeech/param_adaptor.py +++ b/modules/fastspeech/param_adaptor.py @@ -3,6 +3,8 @@ from modules.diffusion.ddpm import MultiVarianceDiffusion from utils.hparams import hparams +VARIANCE_CHECKLIST = ['energy', 'breathiness'] + class ParameterAdaptorModule(torch.nn.Module): def __init__(self): diff --git a/modules/toplevel.py b/modules/toplevel.py index 1af945af5..4ca61eb35 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -10,7 +10,7 @@ GaussianDiffusion, PitchDiffusion ) from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic -from modules.fastspeech.param_adaptor import ParameterAdaptorModule +from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST, ParameterAdaptorModule from modules.fastspeech.tts_modules import RhythmRegulator, LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams @@ -29,9 +29,22 @@ def __init__(self, vocab_size, out_dims): if self.predict_variances: self.variance_adaptor = self.build_adaptor() + variances_to_embed = set() + if hparams.get('use_energy_embed', False) and not self.predict_energy: + # energy is embedded but not predicted + variances_to_embed.add('energy') + if hparams.get('use_breathiness_embed', False) and not self.predict_breathiness: + # breathiness is embedded but not predicted + variances_to_embed.add('breathiness') + self.embed_variances = len(variances_to_embed) > 0 + self.variance_aware_list = [ + v_name for v_name in VARIANCE_CHECKLIST + if v_name in self.variance_prediction_list or v_name in self.variances_to_embed + ] + if self.embed_variances or self.predict_variances: 
self.variance_embeds = nn.ModuleDict({ - name: Linear(1, hparams['hidden_size']) - for name in self.variance_prediction_list + v_name: Linear(1, hparams['hidden_size']) + for v_name in self.variance_aware_list }) self.diffusion = GaussianDiffusion( @@ -57,45 +70,48 @@ def forward( spk_embed_id=spk_embed_id, infer=infer, **kwargs ) - variance_inputs = self.collect_variance_inputs(**kwargs) + variance_embed_inputs = { + v_name: kwargs.get(v_name) for v_name in self.variance_aware_list + } # all possible variance inputs + if infer: - if not self.predict_variances: - variance_pred_out = {} + if self.predict_variances: + # get variance predictor inputs + variance_preset_inputs = self.collect_variance_inputs(**variance_embed_inputs) + if not all([v is not None for v in variance_preset_inputs]): # need to predict some variances + variance_pred_outputs = self.collect_variance_outputs( + self.variance_adaptor(adaptor_cond, infer=True) + ) # dict of predictions + variance_embed_inputs = { + v_name: ( + variance_embed_inputs[v_name] if variance_embed_inputs[v_name] is not None + else variance_pred_outputs[v_name] + ) + for v_name in self.variance_aware_list + } # merge presets and predictions, should contain no NoneType + variance_pred_out = self.collect_variance_outputs(variance_embed_inputs) # collect from embed inputs else: - if not all([v is not None for v in variance_inputs]): - variance_outputs = self.variance_adaptor(adaptor_cond, variance_inputs, infer) - variance_choices = [ - v_in if v_in is not None else v_pred - for v_in, v_pred in zip(variance_inputs, variance_outputs) - ] - variance_pred_out = self.collect_variance_outputs(variance_choices) - else: - variance_choices = variance_inputs - variance_pred_out = { - name: kwargs[name] - for name in self.variance_prediction_list - } - variance_embeds = torch.stack([ - self.variance_embeds[v_name](v_choice[:, :, None]) # [B, T] => [B, T, H] - for v_name, v_choice in zip(self.variance_prediction_list, variance_choices) - ], dim=-1).sum(-1) - mel_cond += variance_embeds - - mel_pred_out = self.diffusion(mel_cond, infer=True) - mel_pred_out *= ((mel2ph > 0).float()[:, :, None]) - + variance_pred_out = {} else: if self.predict_variances: - variance_pred_out = self.variance_adaptor(adaptor_cond, variance_inputs, infer) - - variance_embeds = torch.stack([ - self.variance_embeds[v_name](v_choice[:, :, None]) # [B, T] => [B, T, H] - for v_name, v_choice in zip(self.variance_prediction_list, variance_inputs) - ], dim=-1).sum(-1) - mel_cond = mel_cond + variance_embeds + # use gt variances to train the predictor + variance_inputs = self.collect_variance_inputs(**variance_embed_inputs) + variance_pred_out = self.variance_adaptor(adaptor_cond, variance_inputs, infer=False) else: variance_pred_out = None + if self.predict_variances or self.embed_variances: + # embed variances into mel condition + variance_embeds = torch.stack([ + self.variance_embeds[v_name](variance_embed_inputs[v_name][:, :, None]) # [B, T] => [B, T, H] + for v_name in self.variance_aware_list + ], dim=-1).sum(-1) + mel_cond += variance_embeds + + if infer: + mel_pred_out = self.diffusion(mel_cond, infer=True) + mel_pred_out *= ((mel2ph > 0).float()[:, :, None]) + else: mel_pred_out = self.diffusion(mel_cond, gt_spec=gt_mel, infer=False) return mel_pred_out, variance_pred_out From adbc65cf20a9509768c61a69805982ea59697adb Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 4 May 2023 22:20:29 +0800 Subject: [PATCH 365/475] Use a more proper midi smooth width (the former is too large) --- 
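A note on this change: `midi_smooth_width` is consumed by the variance inference code introduced in patch 361 above, where it is divided by the frame timestep to size a `Conv1d` smoothing kernel for the note-wise base pitch. The sketch below illustrates that relationship; it assumes the `hop_size: 512` and `audio_sample_rate: 44100` values from `configs/variance.yaml`, and a Hann-window kernel normalized to sum to 1 — the actual kernel weights are not shown in this series, so that part is an assumption.

    import torch
    from torch import nn

    timestep = 512 / 44100                             # hop_size / audio_sample_rate ≈ 11.6 ms per frame
    midi_smooth_width = 0.06                           # seconds, the value set by this patch

    kernel_size = round(midi_smooth_width / timestep)  # ≈ 5 frames (the former 0.2 s gave ≈ 17)
    smooth = nn.Conv1d(1, 1, kernel_size, padding='same', bias=False)
    with torch.no_grad():
        # Assumed kernel: a Hann window normalized to sum to 1,
        # so smoothing preserves the overall level of the curve.
        window = torch.hann_window(kernel_size)
        smooth.weight.copy_((window / window.sum())[None, None])

    base_pitch = torch.randn(1, 1, 200)                # [B, 1, T] step-wise note pitch (toy data)
    smoothed = smooth(base_pitch)                      # note edges are softened over ~60 ms

At the old 0.2 s width the kernel spanned about 17 frames, roughly 3.4 times wider than now, which is what the subject line calls "too large".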
configs/variance.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 8c24f667c..c48cc46da 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -17,7 +17,7 @@ audio_sample_rate: 44100 hop_size: 512 # Hop size. fft_size: 2048 # FFT size. win_size: 2048 # FFT size. -midi_smooth_width: 0.2 # in seconds +midi_smooth_width: 0.06 # in seconds binarization_args: shuffle: true From cec7760005c3535fb0a9e1b73cc5e2349634b41d Mon Sep 17 00:00:00 2001 From: ms903x1 <108673569+ms903x1@users.noreply.github.com> Date: Tue, 9 May 2023 23:36:17 +0800 Subject: [PATCH 366/475] Freeze transcriptions generation format to CSV and deprecate meta.json (#85) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update acoustic_binarizer.py add "os.path.exists(path)" to determine whether to read "transcriptions.csv" or "transcriptions.txt". * Update acoustic_binarizer.py Delete Meta.json related code. Use the pathlib API as a replacement for os.path. * Update notebook: remove the code that generates the txt version of the transcriptions file, and remove the meta.json related code. * fix: one newline too many was deleted; restore it. --- preparation/acoustic_preparation.ipynb | 47 ++++++++++++-------------- preprocessing/acoustic_binarizer.py | 15 +------- 2 files changed, 23 insertions(+), 39 deletions(-) diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb index a392707bf..986a667c9 100644 --- a/preparation/acoustic_preparation.ipynb +++ b/preparation/acoustic_preparation.ipynb @@ -1,6 +1,7 @@ { "cells": [ { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -46,6 +47,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -67,7 +69,6 @@ "source": [ "import csv\n", "import glob\n", - "import json\n", "import os\n", "import shutil\n", "import sys\n", @@ -96,6 +97,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -153,6 +155,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -226,6 +229,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -270,6 +274,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -301,6 +306,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -310,6 +316,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -340,6 +347,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -376,6 +384,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -473,6 +482,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -518,6 +528,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -587,6 +598,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -787,6 +799,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -866,6 +879,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -877,9 +891,7 @@ "\n", "Please provide a unique name for your dataset, usually the name of the singer/speaker (whether real or virtual). For example, `opencpop` will be a good name for the dataset. You can also add tags to represent dataset version, model capacity or improvements. 
For example, `v2` represents the version, `large` represents the capacity, and `fix_br` means you fixed breaths since your trained last model.\n", "\n", - "Please edit the following cell before you run it. Remember only using letters, numbers and underlines (`_`).\n", - "\n", - "Formatting of the data labels: `csv` is a more comprehensive format that is newly introduced to this pipeline. If you want to generate old label format where attributes are seperated by `|`, please change it to `grid` in the following cell.\n" + "Please edit the following cell before you run it. Remember only using letters, numbers and underlines (`_`).\n" ] }, { @@ -899,9 +911,6 @@ "dataset_name = '???' # Required\n", "dataset_tags = '' # Optional\n", "\n", - "# Label format (will only use 'csv' in the future)\n", - "label_format = 'csv'\n", - "\n", "########################################\n", "\n", "import csv\n", @@ -918,8 +927,6 @@ " full_name += f'_{dataset_tags}'\n", "assert not os.path.exists(f'../data/{full_name}'), f'The name \\'{full_name}\\' already exists in your \\'data\\' folder!'\n", "\n", - "assert label_format in ['csv', 'grid'], 'Label format must be \\'csv\\' or \\'grid\\'.'\n", - "\n", "print('Dataset name:', dataset_name)\n", "if dataset_tags != '':\n", " print('Tags:', dataset_tags)\n", @@ -956,23 +963,9 @@ " ph_seq = ' '.join(ph_seq)\n", " ph_dur = ' '.join([str(round(d, 6)) for d in ph_dur])\n", " soundfile.write(os.path.join(formatted_path, f'{name}.wav'), y, samplerate)\n", - " if label_format == 'grid':\n", - " transcriptions.append(f'{name}|啊|{ph_seq}|rest|0|{ph_dur}|0')\n", - " else:\n", - " transcriptions.append({'name': name, 'ph_seq': ph_seq, 'ph_dur': ph_dur})\n", + " transcriptions.append({'name': name, 'ph_seq': ph_seq, 'ph_dur': ph_dur})\n", "\n", - "with open(f'../data/{full_name}/raw/meta.json', 'w', encoding='utf8') as f:\n", - " meta = {\n", - " 'category': 'acoustic',\n", - " 'format': label_format\n", - " }\n", - " json.dump(meta, f, indent=4)\n", - "\n", - "if label_format == 'grid':\n", - " with open(f'../data/{full_name}/raw/transcriptions.txt', 'w', encoding='utf8') as f:\n", - " print('\\n'.join(transcriptions), file=f)\n", - "else:\n", - " with open(f'../data/{full_name}/raw/transcriptions.csv', 'w', encoding='utf8', newline='') as f:\n", + "with open(f'../data/{full_name}/raw/transcriptions.csv', 'w', encoding='utf8', newline='') as f:\n", " writer = csv.DictWriter(f, fieldnames=['name', 'ph_seq', 'ph_dur'])\n", " writer.writeheader()\n", " writer.writerows(transcriptions)\n", @@ -981,6 +974,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -1021,6 +1015,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "tags": [] @@ -1302,6 +1297,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -1398,6 +1394,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index a05b3337c..3e32b033c 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -7,7 +7,6 @@ ph_dur: phoneme durations """ import csv -import json import os import pathlib import random @@ -34,20 +33,8 @@ def __init__(self): self.lr = LengthRegulator() def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): - meta_info = { - 'category': 'acoustic', - 'format': 'grid' - } - meta_file = raw_data_dir / 'meta.json' - if meta_file.exists(): - 
meta_info.update(json.load(open(meta_file, 'r', encoding='utf8'))) - category = meta_info['category'] - assert category == 'acoustic', \ - f'Dataset in \'{raw_data_dir}\' is of category \'{category}\', ' \ - f'but a dataset of category \'acoustic\' is required.' - meta_data_dict = {} - if meta_info['format'] == 'csv': + if (raw_data_dir / 'transcriptions.csv').exists(): for utterance_label in csv.DictReader( open(raw_data_dir / 'transcriptions.csv', 'r', encoding='utf-8') ): From d76ae79bd73a3b35c4989089ce0bfc98b86f855b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 10 May 2023 22:02:24 +0800 Subject: [PATCH 367/475] Some adjustments to ONNX modules --- deployment/modules/toplevel.py | 39 ++++++++++++++++++++++------------ 1 file changed, 26 insertions(+), 13 deletions(-) diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 23444d928..197409fcc 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -2,19 +2,17 @@ from torch import Tensor, nn -from basics.base_module import CategorizedModule from deployment.modules.diffusion import GaussianDiffusionONNX from deployment.modules.fastspeech2 import FastSpeech2AcousticONNX +from modules.toplevel import DiffSingerAcoustic, DiffSingerVariance from utils.hparams import hparams -class DiffSingerAcousticONNX(CategorizedModule): - @property - def category(self): - return 'acoustic' - +class DiffSingerAcousticONNX(DiffSingerAcoustic): def __init__(self, vocab_size, out_dims): - super().__init__() + super().__init__(vocab_size, out_dims) + del self.fs2 + del self.diffusion self.fs2 = FastSpeech2AcousticONNX( vocab_size=vocab_size ) @@ -50,18 +48,33 @@ def forward_diffusion(self, condition: Tensor, speedup: int) -> Tensor: def view_as_fs2(self) -> nn.Module: model = copy.deepcopy(self) - model.diffusion = None + try: + del model.variance_embeds + del model.variance_adaptor + except AttributeError: + pass + del model.diffusion model.forward = model.forward_fs2 return model + def view_as_adaptor(self) -> nn.Module: + model = copy.deepcopy(self) + del model.fs2 + del model.diffusion + raise NotImplementedError() + + def view_as_diffusion(self) -> nn.Module: model = copy.deepcopy(self) - model.fs2 = None + del model.fs2 + try: + del model.variance_embeds + del model.variance_adaptor + except AttributeError: + pass model.forward = model.forward_diffusion return model -class DiffSingerVarianceOnnx(CategorizedModule): - @property - def category(self): - return 'variance' +class DiffSingerVarianceOnnx(DiffSingerVariance): + pass From 72d55e0c1868688001c854c3360edb7d54df12f4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 13 May 2023 20:51:54 +0800 Subject: [PATCH 368/475] Clean up unused codes and configs --- basics/base_task.py | 6 +- configs/acoustic.yaml | 1 - configs/base.yaml | 28 +++----- training/acoustic_task.py | 142 -------------------------------------- utils/hparams.py | 6 +- 5 files changed, 13 insertions(+), 170 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index cec2f1c3a..d7292a65f 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -20,7 +20,6 @@ from lightning.pytorch.utilities.rank_zero import rank_zero_debug, rank_zero_info, rank_zero_only from basics.base_module import CategorizedModule -from utils import filter_kwargs from utils.hparams import hparams from utils.training_utils import ( DsModelCheckpoint, DsTQDMProgressBar, @@ -62,9 +61,6 @@ class BaseTask(pl.LightningModule): def __init__(self, *args, **kwargs): # dataset configs 
super().__init__(*args, **kwargs) - self.loaded_optimizer_states_dict = {} - self.example_input_array = None - self.dataset_cls = None self.max_batch_frames = hparams['max_batch_frames'] self.max_batch_size = hparams['max_batch_size'] @@ -331,7 +327,7 @@ def start(cls): log_every_n_steps=hparams['log_interval'], max_steps=hparams['max_updates'], use_distributed_sampler=False, - num_sanity_val_steps=hparams['num_sanity_val_steps'] if not hparams['validate'] else 10000, + num_sanity_val_steps=hparams['num_sanity_val_steps'], accumulate_grad_batches=hparams['accumulate_grad_batches'] ) if not hparams['infer']: # train diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 0451a7f84..950626d53 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -48,7 +48,6 @@ keep_bins: 128 mel_vmin: -6. #-6. mel_vmax: 1.5 interp_uv: true -save_f0: true use_spk_id: false f0_embed_type: continuous diff --git a/configs/base.yaml b/configs/base.yaml index e21dd1dce..7c7075160 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -3,7 +3,6 @@ task_cls: '' work_dir: '' # experiment directory. infer: false # infer seed: 1234 -debug: false save_codes: - configs - modules @@ -21,13 +20,13 @@ binarization_args: shuffle: false num_workers: 0 -audio_num_mel_bins: 80 -audio_sample_rate: 22050 -hop_size: 256 # For 22050Hz, 275 ~= 12.5 ms (0.0125 * sample_rate) -win_size: 1024 # For 22050Hz, 1100 ~= 50 ms (If None, win_size: fft_size) (0.05 * sample_rate) -fmin: 80 # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) -fmax: 7600 # To be increased/reduced depending on data. -fft_size: 1024 # Extra window size is filled with 0 paddings to match this parameter +audio_num_mel_bins: 128 +audio_sample_rate: 44100 +hop_size: 512 # For 22050Hz, 275 ~= 12.5 ms (0.0125 * sample_rate) +win_size: 2048 # For 22050Hz, 1100 ~= 50 ms (If None, win_size: fft_size) (0.05 * sample_rate) +fmin: 40 # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) +fmax: 16000 # To be increased/reduced depending on data. 
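+# (At the values above, one hop is 512 / 44100 ≈ 11.6 ms of audio, i.e. about 86
+# frames per second, and each analysis window spans 2048 / 44100 ≈ 46.4 ms; note
+# that the inline comments on hop_size/win_size still refer to the old 22050 Hz defaults.)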
+fft_size: 2048 # Extra window size is filled with 0 paddings to match this parameter num_spk: 1 mel_vmin: -6 mel_vmax: 1.5 @@ -53,7 +52,7 @@ use_spk_id: false ########### optimizer_args: optimizer_cls: torch.optim.AdamW - lr: 2.0 + lr: 0.0004 beta1: 0.9 beta2: 0.98 weight_decay: 0 @@ -63,15 +62,14 @@ lr_scheduler_args: step_size: 50000 gamma: 0.5 clip_grad_norm: 1 -dur_loss: mse # huber|mol ########### # train and eval ########### -num_ckpt_keep: 3 +num_ckpt_keep: 5 accumulate_grad_batches: 1 log_interval: 100 -num_sanity_val_steps: 5 # steps of validation at the beginning +num_sanity_val_steps: 1 # steps of validation at the beginning val_check_interval: 2000 max_updates: 120000 max_batch_frames: 32000 @@ -82,11 +80,7 @@ train_set_name: 'train' valid_set_name: 'valid' vocoder: '' vocoder_ckpt: '' -out_wav_norm: false -save_gt: false -save_f0: false -gen_dir_name: '' -num_valid_plots: 5 +num_valid_plots: 10 ########### # pytorch lightning diff --git a/training/acoustic_task.py b/training/acoustic_task.py index a6e577406..218f5c153 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -1,25 +1,17 @@ -import os -from multiprocessing.pool import Pool - import matplotlib -import matplotlib.pyplot as plt -import numpy as np import torch import torch.distributions import torch.optim import torch.utils.data -from tqdm import tqdm import utils import utils.infer_utils from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from basics.base_vocoder import BaseVocoder -from modules.fastspeech.tts_modules import mel2ph_to_dur from modules.losses.diff_loss import DiffusionNoiseLoss from modules.toplevel import DiffSingerAcoustic from modules.vocoders.registry import get_vocoder_cls -from utils.binarizer_utils import get_pitch_parselmouth from utils.hparams import hparams from utils.plot import spec_to_figure @@ -57,9 +49,6 @@ def __init__(self): self.use_vocoder = hparams['infer'] or hparams['val_with_vocoder'] if self.use_vocoder: self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() - self.saving_result_pool = None - self.saving_results_futures = None - self.stats = {} self.logged_gt_wav = set() def build_model(self): @@ -146,134 +135,3 @@ def plot_mel(self, batch_idx, spec, spec_out, name=None): vmax = hparams['mel_vmax'] spec_cat = torch.cat([(spec_out - spec).abs() + vmin, spec, spec_out], -1) self.logger.experiment.add_figure(name, spec_to_figure(spec_cat[0], vmin, vmax), self.global_step) - - ############ - # infer - ############ - def on_test_start(self): - self.saving_result_pool = Pool(8) - self.saving_results_futures = [] - self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() - - def test_step(self, sample, batch_idx): - mel_pred = self.run_model(sample, infer=True) - sample['outputs'] = mel_pred - return self.after_infer(sample) - - def on_test_end(self): - self.saving_result_pool.close() - [f.get() for f in tqdm(self.saving_results_futures)] - self.saving_result_pool.join() - return {} - - def after_infer(self, predictions): - if self.saving_result_pool is None: - self.saving_result_pool = Pool(min(int(os.getenv('N_PROC', os.cpu_count())), 16)) - self.saving_results_futures = [] - predictions = utils.unpack_dict_to_list(predictions) - t = tqdm(predictions) - for num_predictions, prediction in enumerate(t): - for k, v in prediction.items(): - if type(v) is torch.Tensor: - prediction[k] = v.cpu().numpy() - - item_name = prediction.get('item_name') - text = prediction.get('text').replace(':', '%3A')[:80] - - # remove paddings - mel_gt = 
prediction['mel'] - mel_gt_mask = np.abs(mel_gt).sum(-1) > 0 - mel_gt = mel_gt[mel_gt_mask] - mel2ph_gt = prediction.get('mel2ph') - mel2ph_gt = mel2ph_gt[mel_gt_mask] if mel2ph_gt is not None else None - mel_pred = prediction['outputs'] - mel_pred_mask = np.abs(mel_pred).sum(-1) > 0 - mel_pred = mel_pred[mel_pred_mask] - mel_gt = np.clip(mel_gt, hparams['mel_vmin'], hparams['mel_vmax']) - mel_pred = np.clip(mel_pred, hparams['mel_vmin'], hparams['mel_vmax']) - - mel2ph_pred = prediction.get('mel2ph_pred') - if mel2ph_pred is not None: - if len(mel2ph_pred) > len(mel_pred_mask): - mel2ph_pred = mel2ph_pred[:len(mel_pred_mask)] - mel2ph_pred = mel2ph_pred[mel_pred_mask] - - f0_gt = prediction.get('f0') - f0_pred = prediction.get('f0_pred') - if f0_pred is not None: - f0_gt = f0_gt[mel_gt_mask] - if len(f0_pred) > len(mel_pred_mask): - f0_pred = f0_pred[:len(mel_pred_mask)] - f0_pred = f0_pred[mel_pred_mask] - - str_phs = None - if self.phone_encoder is not None and 'tokens' in prediction: - str_phs = self.phone_encoder.decode(prediction['tokens'], strip_padding=True) - gen_dir = os.path.join(hparams['work_dir'], - f'generated_{self.global_step}_{hparams["gen_dir_name"]}') - wav_pred = self.vocoder.spec2wav(mel_pred, f0=f0_pred) - os.makedirs(gen_dir, exist_ok=True) - os.makedirs(f'{gen_dir}/wavs', exist_ok=True) - os.makedirs(f'{gen_dir}/plot', exist_ok=True) - os.makedirs(os.path.join(hparams['work_dir'], 'P_mels_npy'), exist_ok=True) - os.makedirs(os.path.join(hparams['work_dir'], 'G_mels_npy'), exist_ok=True) - self.saving_results_futures.append( - self.saving_result_pool.apply_async(self.save_result, args=[ - wav_pred, mel_pred, 'P', item_name, text, gen_dir, str_phs, mel2ph_pred, f0_gt, f0_pred])) - - if mel_gt is not None and hparams['save_gt']: - wav_gt = self.vocoder.spec2wav(mel_gt, f0=f0_gt) - self.saving_results_futures.append( - self.saving_result_pool.apply_async(self.save_result, args=[ - wav_gt, mel_gt, 'G', item_name, text, gen_dir, str_phs, mel2ph_gt, f0_gt, f0_pred])) - if hparams['save_f0']: - import matplotlib.pyplot as plt - # f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams) - f0_pred_ = f0_pred - f0_gt_, _ = get_pitch_parselmouth(wav_gt, len(mel_gt), hparams) - fig = plt.figure() - plt.plot(f0_pred_, label=r'$f0_P$') - plt.plot(f0_gt_, label=r'$f0_G$') - plt.legend() - plt.tight_layout() - plt.savefig(f'{gen_dir}/plot/[F0][{item_name}]{text}.png', format='png') - plt.close(fig) - - t.set_description( - f'Pred_shape: {mel_pred.shape}, gt_shape: {mel_gt.shape}') - - return {} - - @staticmethod - def save_result(wav_out, mel, prefix, item_name, text, gen_dir, str_phs=None, mel2ph=None, gt_f0=None, - pred_f0=None): - item_name = item_name.replace('/', '-') - base_fn = f'[{item_name}][{prefix}]' - - if text is not None: - base_fn += text - base_fn += ('-' + hparams['exp_name']) - np.save(os.path.join(hparams['work_dir'], f'{prefix}_mels_npy', item_name), mel) - utils.infer_utils.save_wav(wav_out, f'{gen_dir}/wavs/{base_fn}.wav', hparams['audio_sample_rate'], - norm=hparams['out_wav_norm']) - fig = plt.figure(figsize=(14, 10)) - spec_vmin = hparams['mel_vmin'] - spec_vmax = hparams['mel_vmax'] - heatmap = plt.pcolor(mel.T, vmin=spec_vmin, vmax=spec_vmax) - fig.colorbar(heatmap) - f0, _ = get_pitch_parselmouth(wav_out, len(mel), hparams) - f0 = (f0 - 100) / (800 - 100) * 80 * (f0 > 0) - plt.plot(f0, c='white', linewidth=1, alpha=0.6) - if mel2ph is not None and str_phs is not None: - decoded_txt = str_phs.split(' ') - dur = mel2ph_to_dur(torch.LongTensor(mel2ph)[None, :], 
len(decoded_txt))[0].numpy() - dur = [0] + list(np.cumsum(dur)) - for i in range(len(dur) - 1): - shift = (i % 20) + 1 - plt.text(dur[i], shift, decoded_txt[i]) - plt.hlines(shift, dur[i], dur[i + 1], colors='b' if decoded_txt[i] != '|' else 'black') - plt.vlines(dur[i], 0, 5, colors='b' if decoded_txt[i] != '|' else 'black', - alpha=1, linewidth=1) - plt.tight_layout() - plt.savefig(f'{gen_dir}/plot/{base_fn}.png', format='png', dpi=1000) - plt.close(fig) diff --git a/utils/hparams.py b/utils/hparams.py index 36cce5168..e339a54f9 100644 --- a/utils/hparams.py +++ b/utils/hparams.py @@ -43,9 +43,7 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob parser.add_argument('--hparams', type=str, default='', help='location of the data corpus') parser.add_argument('--infer', action='store_true', help='infer') - parser.add_argument('--validate', action='store_true', help='validate') parser.add_argument('--reset', action='store_true', help='reset hparams') - parser.add_argument('--debug', action='store_true', help='debug') args, unknown = parser.parse_known_args() tmp_args_hparams = args.hparams.split(',') if args.hparams.strip() != '' else [] @@ -53,7 +51,7 @@ def set_hparams(config='', exp_name='', hparams_str='', print_hparams=True, glob args.hparams = ','.join(tmp_args_hparams) else: args = Args(config=config, exp_name=exp_name, hparams=hparams_str, - infer=False, validate=False, reset=False, debug=False) + infer=False, reset=False) args_work_dir = '' if args.exp_name != '': @@ -124,8 +122,6 @@ def dump_hparams(): dump_hparams() hparams_['infer'] = args.infer - hparams_['debug'] = args.debug - hparams_['validate'] = args.validate if global_hparams: hparams.clear() hparams.update(hparams_) From a89d3ee6f6bc29c26e80c8ac3c84f96a04199789 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 13 May 2023 21:00:39 +0800 Subject: [PATCH 369/475] Add configuration references documentation (#86) * Add ConfigurationSchemas.md (#77) * Update ConfigurationSchemas.md Explain configuration parameters in detail * Update ConfigurationSchemas.md * Update ConfigurationSchemas.md * Update ConfigurationSchemas.md Add DDP configuration details * Update ConfigurationSchemas.md Update Neural Network configs ( 3 items remain TBD) * Update ConfigurationSchemas.md Add others * Refactor ConfigurationSchemas.md * Finish ConfigurationSchemas.md * Finish and format ConfigurationSchemas.md * Reduce long explanations and fix mistakes * Fix math expressions * Fix math expressions * used by -> visibility * Adjust descriptions --------- Co-authored-by: Weiyang --- docs/ConfigurationSchemas.md | 2307 +++++++++++++++++++++++++++++++++- 1 file changed, 2251 insertions(+), 56 deletions(-) diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index 270987377..6ed6930d4 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -3,133 +3,2328 @@ [![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446) [![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger) [![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases) - | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger) +| [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger) ## Configuration Schemas This document explains the meaning and usages of all editable keys in a 
configuration file.

Each configuration key (including nested keys) is described with a brief explanation and several attributes listed as follows:

| Attribute | Explanation |
|:---------------:|:---------------|
| visibility | Represents what kind(s) of models and tasks this configuration belongs to. |
| scope | The scope of effects of the configuration, indicating what it can influence within the whole pipeline. Possible values are:<br>
**nn** - This configuration is related to how the neural networks are formed and initialized. Modifying it will result in failure when loading or resuming from checkpoints.
**preprocessing** - This configuration controls how raw data pieces or inference inputs are converted to inputs of neural networks. Binarizers should be re-run if this configuration is modified.
**training** - This configuration describes the training procedures. Most training configurations can affect training performance, memory consumption, device utilization and loss calculation. Modifying training-only configurations will not cause severe inconsistency or errors in most situations.
**inference** - This configuration describes the calculation logic through the model graph. Changing it can lead to inconsistent or wrong outputs of inference or validation.
**others** - Other configurations not discussed above. Will have different effects according to the descriptions. | +| customizability | The level of customizability of the configuration. Possible values are:
**required** - This configuration **must** be set or modified according to the actual situation or condition; otherwise, errors may be raised.<br>
**recommended** - It is recommended to adjust this configuration according to the dataset, requirements, environment and hardware. Most functionality-related and feature-related configurations are at this level, and all configurations at this level are widely tested with different values. However, leaving it unchanged will not cause problems.<br>
**normal** - There is no need to modify it as the default value is carefully tuned and widely validated. However, one can still use another value if there are some special requirements or situations.
**not recommended** - No values other than the default have been tested for this configuration. Modifying it will not cause errors, but may have unpredictable or significant impacts on the pipelines.<br>
**preserved** - This configuration **must not** be modified. It appears in the configuration file only for future scalability, and currently changing it will result in errors. |
| type | Value type of the configuration. Follows the syntax of Python type hints. |
| constraints | Value constraints of the configuration. |
| default | Default value of the configuration. Uses YAML value syntax. |

### accumulate_grad_batches

Number of training steps over which gradients are accumulated before each `optimizer.step()` call. 1 means no gradient accumulation.

#### visibility

all

#### scope

training

#### customizability

recommended

#### type

int

#### default

1

### audio_num_mel_bins

Number of mel channels for feature extraction, diffusion sampling and waveform reconstruction.

#### visibility

acoustic

#### scope

nn, preprocessing, inference

#### customizability

preserved

#### type

int

#### default

128

### audio_sample_rate

Sampling rate of waveforms.

#### visibility

all

#### scope

preprocessing

#### customizability

preserved

#### type

int

#### default

44100

### augmentation_args

Arguments for data augmentation.

#### type

dict

### augmentation_args.fixed_pitch_shifting

Arguments for fixed pitch shifting augmentation.

#### type

dict

### augmentation_args.fixed_pitch_shifting.scale

Scale ratio of each target in fixed pitch shifting augmentation.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

recommended

#### type

float

#### default

0.75

### augmentation_args.fixed_pitch_shifting.targets

Targets (in semitones) of fixed pitch shifting augmentation.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

not recommended

#### type

tuple

#### default

[-5.0, 5.0]

### augmentation_args.random_pitch_shifting

Arguments for random pitch shifting augmentation.

#### type

dict

### augmentation_args.random_pitch_shifting.range

Range of the random pitch shifting (in semitones).

#### visibility

acoustic

#### scope

preprocessing

#### customizability

not recommended

#### type

tuple

#### default

[-5.0, 5.0]

### augmentation_args.random_pitch_shifting.scale

Scale ratio of the random pitch shifting augmentation.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

recommended

#### type

float

#### default

1.0

### augmentation_args.random_time_stretching.domain

The domain in which random time stretching factors are uniformly distributed.

- If 'linear', the stretching ratio $x$ will be uniformly distributed in $[V_{min}, V_{max}]$.
- If 'log', $\ln{x}$ will be uniformly distributed in $[\ln{V_{min}}, \ln{V_{max}}]$.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

not recommended

#### type

str

#### default

log

#### constraints

Choose from 'log', 'linear'.
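
To make the difference between the two `domain` settings concrete, here is a minimal sampling sketch. It is an illustration only (the helper `sample_stretch_factor` is hypothetical and not part of the codebase), assuming factors are drawn uniformly as described above:

```python
import math
import random


def sample_stretch_factor(v_min: float, v_max: float, domain: str = 'log') -> float:
    """Hypothetical helper: draw one time stretching factor from [v_min, v_max]."""
    if domain == 'linear':
        # x ~ U[v_min, v_max]
        return random.uniform(v_min, v_max)
    if domain == 'log':
        # ln(x) ~ U[ln(v_min), ln(v_max)]
        return math.exp(random.uniform(math.log(v_min), math.log(v_max)))
    raise ValueError(f'Unknown domain: {domain}')
```

With the default range [0.5, 2], the 'log' domain draws factors below 1 and above 1 with equal probability, while the 'linear' domain puts more of its mass on factors above 1.
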
### augmentation_args.random_time_stretching.range

Range of random time stretching factors.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

not recommended

#### type

tuple

#### default

[0.5, 2]

### augmentation_args.random_time_stretching.scale

Scale ratio of random time stretching augmentation.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

recommended

#### type

float

#### default

0.75

### base_config

Path(s) of other config files that the current config is based on and will override.

#### scope

others

#### type

Union[str, list]

### binarization_args

Arguments for binarizers.

#### type

dict

### binarization_args.num_workers

Number of worker subprocesses when running binarizers. More workers can speed up the preprocessing but will consume more memory. 0 means the main process does everything.

#### visibility

all

#### scope

preprocessing

#### customizability

recommended

#### type

int

#### default

1

### binarization_args.shuffle

Whether the binarized dataset will be shuffled or not.

#### visibility

all

#### scope

preprocessing

#### customizability

normal

#### type

bool

#### default

true

### binarizer_cls

Binarizer class name.

#### visibility

all

#### scope

preprocessing

#### customizability

preserved

#### type

str

### binary_data_dir

Path to the binarized dataset.

#### visibility

all

#### scope

preprocessing, training

#### customizability

required

#### type

str

### clip_grad_norm

The value at which to clip gradients. Equivalent to `gradient_clip_val` in `lightning.pytorch.Trainer`.

#### visibility

all

#### scope

training

#### customizability

not recommended

#### type

float

#### default

1

### dataloader_prefetch_factor

Number of batches loaded in advance by each `torch.utils.data.DataLoader` worker.

#### visibility

all

#### scope

training

#### customizability

normal

#### type

int

#### default

2

### ddp_backend

The distributed training backend.

#### visibility

all

#### scope

training

#### customizability

normal

#### type

str

#### default

nccl

#### constraints

Choose from 'gloo', 'nccl', 'nccl_no_p2p'. Windows platforms may use 'gloo'; Linux platforms may use 'nccl'; if Linux ddp gets stuck, use 'nccl_no_p2p'.

### dictionary

Path to the word-phoneme mapping dictionary file. Training data must fully cover phonemes in the dictionary.

#### visibility

acoustic

#### scope

preprocessing

#### customizability

normal

#### type

str

### diff_decoder_type

Denoiser type of the DDPM.

#### visibility

acoustic

#### scope

nn

#### customizability

preserved

#### type

str

#### default

wavenet

### diff_loss_type

Loss type of the DDPM.

#### visibility

acoustic

#### scope

training

#### customizability

not recommended

#### type

str

#### default

l2

#### constraints

Choose from 'l1', 'l2'.

### dilation_cycle_length

Length $k$ of the cycle $2^0, 2^1, \dots, 2^{k-1}$ of convolution dilation factors through WaveNet residual blocks.
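
For intuition, WaveNet-style denoisers usually assign residual layer $i$ the dilation $2^{i \bmod k}$. A sketch under that assumption (the exact scheme in this codebase may differ):

```python
# Dilation factors for the default 20 residual layers with dilation_cycle_length k = 4,
# assuming the common WaveNet-style scheme dilation_i = 2 ** (i % k).
k = 4
num_layers = 20
dilations = [2 ** (i % k) for i in range(num_layers)]
print(dilations)  # [1, 2, 4, 8, 1, 2, 4, 8, ...]
```
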
+ +#### visibility + +acoustic + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +int + +#### default + +4 + +### dropout + +Dropout rate in some FastSpeech2 modules. + +#### visibility + +all + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +float + +#### default + +0.1 + +### ds_workers + +Number of workers of `torch.utils.data.DataLoader`. + +#### visibility + +all + +#### scope + +training + +#### customizability + +normal + +#### type + +int + +#### default + +4 + +### enc_ffn_kernel_size + +Size of TransformerFFNLayer convolution kernel size in FastSpeech2 encoder. + +#### visibility + +all + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +int + +#### default + +9 + +### enc_layers + +Number of FastSpeech2 encoder layers. + +#### visibility + +all + +#### scope + +nn + +#### customizability + +normal + +#### type + +int + +#### default + +4 + +### f0_embed_type + +Map f0 to embedding using: + +- `torch.nn.Linear` if 'continuous' +- `torch.nn.Embedding` if 'discrete' + +#### visibility + +acoustic + +#### scope + +nn + +#### customizability + +normal + +#### type + +str + +#### default + +continuous + +#### constraints + +Choose from 'continuous', 'discrete'. + +### ffn_act + +Activation function of TransformerFFNLayer in FastSpeech2 encoder: + +- `torch.nn.ReLU` if 'relu' +- `torch.nn.GELU` if 'gelu' +- `torch.nn.SiLU` if 'swish' + +#### visibility + +all + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +str + +#### default + +gelu + +#### constraints + +Choose from 'relu', 'gelu', 'swish'. + +### ffn_padding + +Padding mode of TransformerFFNLayer convolution in FastSpeech2 encoder. + +#### visibility + +all + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +str + +#### default + +SAME + +### fft_size + +Fast Fourier Transforms parameter for mel extraction. + +#### visibility + +all + +#### scope + +preprocessing + +#### customizability + +preserved + +#### type + +int + +#### default + +2048 + +### fmax + +Maximum frequency of mel extraction. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +preserved + +#### type + +int + +#### default + +16000 + +### fmin + +Minimum frequency of mel extraction. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +preserved + +#### type + +int + +#### default + +40 + +### hidden_size + +Dimension of hidden layers of FastSpeech2, token and variance embeddings, and diffusion condition. + +#### visibility + +acoustic + +#### scope + +nn + +#### customizability + +normal + +#### type + +int + +#### default + +256 + +### hop_size + +Hop size or step length (in number of waveform samples) of mel and feature extraction. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +preserved + +#### type + +int + +#### default + +512 + +### interp_uv + +Whether to apply linear interpolation to unvoiced parts in f0. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +preserved + +#### type + +boolean + +#### default + +true + +### K_step + +Total number of diffusion steps. + +#### visibility + +all + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +int + +#### default + +1000 + +### log_interval + +Controls how often to log within training steps. Equivalent to `log_every_n_steps` in `lightning.pytorch.Trainer`. 
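
Several keys in this reference are stated to be direct equivalents of `lightning.pytorch.Trainer` arguments (`clip_grad_norm`, `log_interval`, `max_updates`, among others). A hedged sketch of how such a mapping could look, using the defaults documented here:

```python
import lightning.pytorch as pl

# Assumed illustration of the documented equivalences; the actual trainer
# construction in the codebase may differ.
trainer = pl.Trainer(
    gradient_clip_val=1,    # clip_grad_norm
    log_every_n_steps=100,  # log_interval
    max_steps=320000,       # max_updates
)
```
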
+ +#### visibility + +all + +#### scope + +training + +#### customizability + +normal + +#### type + +int + +#### default + +100 + +### lr_scheduler_args.gamma + +Learning rate decay ratio of `torch.optim.lr_scheduler.StepLR`. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +float + +#### default + +0.5 + +### lr_scheduler_args + +Arguments of learning rate scheduler. Keys will be used as keyword arguments when initializing the scheduler object. + +#### type + +dict + +### lr_scheduler_args.scheduler_cls + +Learning rate scheduler class name. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +str + +#### default + +torch.optim.lr_scheduler.StepLR + +### lr_scheduler_args.step_size + +Learning rate decays every this number of training steps. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +int + +#### default + +50000 + +### lr_scheduler_args.warmup_steps + +Number of warmup steps of the learning rate scheduler. + +#### visibility + +all + +#### scope + +training + +#### customizability + +normal + +#### type + +int + +#### default + +2000 + +### max_batch_frames + +Maximum number of data frames in each training batch. Used to dynamically control the batch size. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +int + +#### default + +80000 + +### max_batch_size + +The maximum training batch size. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +int + +#### default + +48 + +### max_beta + +Max beta of the DDPM noise schedule. + +#### visibility + +all + +#### scope + +nn, inference + +#### customizability + +normal + +#### type + +float + +#### default + +0.02 + +### max_updates + +Stop training after this number of steps. Equivalent to `max_steps` in `lightning.pytorch.Trainer`. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +int + +#### default + +320000 + +### max_val_batch_frames + +Maximum number of data frames in each validation batch. + +#### visibility + +all + +#### scope + +training + +#### customizability + +preserved + +#### type + +int + +#### default + +60000 + +### max_val_batch_size + +The maximum validation batch size. + +#### visibility + +all + +#### scope + +training + +#### customizability + +preserved + +#### type + +int + +#### default + +1 + +### mel_vmax + +Maximum mel spectrogram heatmap value for TensorBoard plotting. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +1.5 + +### mel_vmin + +Minimum mel spectrogram heatmap value for TensorBoard plotting. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +-6.0 + +### num_ckpt_keep + +Number of newest checkpoints kept during training. + +#### visibility + +all + +#### scope + +training + +#### customizability + +normal + +#### type + +int + +#### default + +5 + +### num_heads + +The number of attention heads of `torch.nn.MultiheadAttention` in FastSpeech2 encoder. + +#### visibility + +all + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +int + +#### default + +2 + +### num_sanity_val_steps + +Number of sanity validation steps at the beginning. 
+ +#### visibility + +all + +#### scope + +training + +#### customizability + +preserved + +#### type + +int + +#### default + +1 + +### num_spk + +Maximum number of speakers in multi-speaker models. + +#### visibility + +acoustic + +#### scope + +nn + +#### customizability + +required + +#### type + +int + +#### default + +1 + +### num_valid_plots + +Number of validation plots in each validation. Plots will be chosen from the start of the validation set. + +#### visibility + +acoustic + +#### scope + +training + +#### customizability + +recommended + +#### type + +int + +#### default + +10 + +### optimizer_args + +Arguments of optimizer. Keys will be used as keyword arguments when initializing the optimizer object. + +#### type + +dict + +### optimizer_args.beta1 + +Parameter of the `torch.optim.AdamW` optimizer. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +0.9 + +### optimizer_args.beta2 + +Parameter of the `torch.optim.AdamW` optimizer. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +0.98 + +### optimizer_args.lr + +Initial learning rate of the optimizer. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +float + +#### default + +0.0004 + +### optimizer_args.optimizer_cls + +Optimizer class name + +#### visibility + +all + +#### scope + +training + +#### customizability + +preserved + +#### type + +str + +#### default + +torch.optim.AdamW + +### optimizer_args.weight_decay + +Weight decay ratio of optimizer. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +0 + +### permanent_ckpt_interval + +The interval (in number of training steps) of permanent checkpoints. Permanent checkpoints will not be removed even if they are not the newest ones. + +#### visibility + +all + +#### scope + +training + +#### type + +int + +#### default + +40000 + +### permanent_ckpt_start + +Checkpoints will be marked as permanent every [permanent_ckpt_interval](#permanent_ckpt_interval) training steps after this number of training steps. + +#### visibility + +all + +#### scope + +training + +#### type + +int + +#### default + +120000 + +### pl_trainer_accelerator + +Type of Lightning trainer hardware accelerator. + +#### visibility + +all + +#### scope + +training + +#### customization + +not recommended + +#### type + +str + +#### default + +auto + +#### constraints + +See [Accelerator — PyTorch Lightning 2.X.X documentation](https://lightning.ai/docs/pytorch/stable/extensions/accelerator.html?highlight=accelerator) for available values. + +### pl_trainer_devices + +To determine on which device(s) model should be trained. + +'auto' will utilize all visible devices defined with the `CUDA_VISIBLE_DEVICES` environment variable, or utilize all available devices if that variable is not set. Otherwise, it behaves like `CUDA_VISIBLE_DEVICES` which can filter out visible devices. + +#### visibility + +all + +#### scope + +training + +#### customization + +not recommended + +#### type + +str + +#### default + +auto + +### pl_trainer_precision + +The computation precision of training. + +#### visibility + +all + +#### scope + +training + +#### customization + +normal + +#### type + +str + +#### default + +32-true + +#### constraints + +Choose from '32-true', 'bf16-mixed', '16-mixed', 'bf16', '16'. 
See more possible values at [Trainer — PyTorch Lightning 2.X.X documentation](https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api). + +### pl_trainer_num_nodes + +Number of nodes in the training cluster of Lightning trainer. + +#### visibility + +all + +#### scope + +training + +#### customization + +preserved + +#### type + +int + +#### default + +1 + +### pl_trainer_strategy + +Strategies of the Lightning trainer behavior. + +#### visibility + +all + +#### scope + +training + +#### customization + +preserved + +#### type + +str + +#### default + +auto + +### pndm_speedup + +Diffusion sampling speed-up ratio. 1 means no speeding up. + +#### visibility + +all + +#### type + +int + +#### default + +10 + +#### constraints + +Must be a factor of [K_step](#K_step). + +### raw_data_dir + +Path(s) to the raw dataset including wave files, transcriptions, etc. + +#### visibility + +all + +#### scope + +preprocessing + +#### customizability + +required + +#### type + +str, List[str] + +### rel_pos + +Whether to use relative positional encoding in FastSpeech2 module. + +#### visibility all -##### type +#### scope -str, List[str] +nn -### Neural networks +#### customizability -#### hidden_size +not recommended -Dimension of hidden layers of FastSpeech2, token and variance embeddings, and diffusion condition. +#### type + +boolean + +#### default + +true + +### residual_channels -##### used by +Number of dilated convolution channels in residual blocks in WaveNet. -acoustic model +#### visibility -##### type +acoustic + +#### scope + +nn + +#### customizability + +normal + +#### type int -##### default +#### default -_256_ +512 -#### residual_channels +### residual_layers -TBD +Number of residual blocks in WaveNet. -#### residual_layers +#### visibility -TBD +acoustic -#### diff_decoder_type +#### scope -Denoiser type of the DDPM. +nn + +#### customizability + +normal + +#### type + +int + +#### default + +20 + +### sampler_frame_count_grid + +The batch sampler applies an algorithm called _sorting by similar length_ when collecting batches. Data samples are first grouped by their approximate lengths before they get shuffled within each group. Assume this value is set to $L_{grid}$, the approximate length of a data sample with length $L_{real}$ can be calculated through the following expression: + +$$ +L_{approx} = \lfloor\frac{L_{real}}{L_{grid}}\rfloor\cdot L_{grid} +$$ + +Training performance on some datasets may be very sensitive to this value. Change it to 1 (completely sorted by length without shuffling) to get the best performance in theory. + +#### visibility + +all + +#### scope + +training + +#### customizability + +normal -##### used by +#### type -acoustic model +int + +#### default + +6 + +### save_codes + +Files in these folders will be backed up every time a training starts. + +#### visibility + +all + +#### scope + +training + +#### customizability + +normal + +#### type + +list + +#### default + +[configs, modules, training, utils] -##### type +### schedule_type + +The diffusion schedule type. + +#### visibility + +acoustic + +#### scope + +nn + +#### customizability + +not recommended + +#### type str -##### default +#### default -_wavenet_ +linear -##### Constraints +#### constraints -choose from [ _wavenet_ ] +Choose from 'linear', 'cosine'. -#### diff_loss_type +### seed -Loss type of the DDPM. +The global random seed used to shuffle data, initializing model weights, etc. 
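
A minimal sketch of what a global random seed typically controls during preprocessing and training (an assumed illustration, not the repository's actual seeding code):

```python
import random

import numpy as np
import torch


def seed_everything(seed: int = 1234) -> None:
    """Assumed illustration: seed the common RNG sources for reproducibility."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
```
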
+ +#### visibility + +all + +#### scope + +preprocessing, training + +#### customizability + +normal + +#### type + +int + +#### default + +1234 + +### sort_by_len + +Whether to apply the _sorting by similar length_ algorithm described in [sampler_frame_count_grid](#sampler_frame_count_grid). Turning off this option may slow down training because sorting by length can better utilize the computing resources. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +bool + +#### default + +true + +### speakers + +The names of speakers in a multi-speaker model. Speaker names are mapped to speaker indexes and stored into spk_map.json when preprocessing. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +required + +#### type + +list + +### spec_min + +Minimum mel spectrogram value used for normalization to [-1, 1]. Different mel bins can have different minimum values. + +#### visibility + +all + +#### scope + +inference + +#### customizability + +not recommended + +#### type + +List[float] + +#### default + +[-5.0] + +### spec_max + +Maximum mel spectrogram value used for normalization to [-1, 1]. Different mel bins can have different maximum values. + +#### visibility + +all + +#### scope + +inference + +#### customizability + +not recommended + +#### type + +List[float] + +#### default + +[0.0] + +### task_cls + +Task trainer class name. + +#### visibility + +all + +#### scope + +training + +#### customizability + +preserved + +#### type -##### used by +str + +### test_prefixes + +List of data item names or name prefixes for the validation set. For each string `s` in the list: + +- If `s` equals to an actual item name, add that item to validation set. +- If `s` does not equal to any item names, add all items whose names start with `s` to validation set. + +For multi-speaker datasets, "spk_id:name_prefix" can be used to apply the rules above within data from a specific speaker, where spk_id represents the speaker index. + +#### visibility + +all + +#### scope + +preprocessing + +#### customizability + +required + +#### type + +list + +### timesteps + +Equivalent to [K_step](#K_step). -acoustic model +### train_set_name -##### type +Name of the training set used in binary filenames, TensorBoard keys, etc. + +#### visibility + +all + +#### scope + +preprocessing, training + +#### customizability + +preserved + +#### type str -##### default +#### default + +train + +### use_key_shift_embed + +Whether to embed key shifting values introduced by random pitch shifting augmentation. + +#### visibility + +acoustic + +#### scope + +nn, preprocessing, inference + +#### customizability + +recommended + +#### type + +boolean + +#### default + +false + +#### constraints + +Must be true if random pitch shifting is enabled. + +### use_pos_embed + +Whether to use SinusoidalPositionalEmbedding in FastSpeech2 encoder. + +#### visibility + +acoustic + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +boolean + +#### default + +true + +### use_speed_embed + +Whether to embed speed values introduced by random time stretching augmentation. + +#### visibility + +acoustic + +#### type + +boolean + +#### default + +false + +#### constraints + +Must be true if random time stretching is enabled. + +### use_spk_id + +Whether embed the speaker id from a multi-speaker dataset. 
+ +#### visibility -_l2_ +acoustic -##### Constraints +#### scope -choose from [ _l1_, _l2_ ] +nn, preprocessing, inference -### Dataset information and preprocessing +#### customizability -#### raw_data_dir +recommended -Path(s) to the raw data including wave files, transcriptions, etc. +#### type -##### used by +bool + +#### default + +false + +### val_check_interval + +Interval (in number of training steps) between validation checks. + +#### visibility all -##### type +#### scope -str, List[str] +training + +#### customizability + +recommended + +#### type + +int + +#### default + +2000 + +### val_with_vocoder + +Whether to load and use the vocoder to generate audio during validation. Validation audio will not be available if this option is disabled. + +#### visibility -### Training, validation and inference +acoustic -#### task_cls +#### scope -TBD +training -#### lr +#### customizability -Initial learning rate of the scheduler. +normal -##### used by +#### type + +bool + +#### default + +true + +### valid_set_name + +Name of the validation set used in binary filenames, TensorBoard keys, etc. + +#### visibility all -##### type +#### scope -float +preprocessing, training + +#### customizability + +preserved + +#### type + +str + +#### default + +valid + +### vocoder + +The vocoder class name. -##### default +#### visibility -_0.0004_ +acoustic -#### max_batch_frames +#### scope -TBD +preprocessing, training, inference + +#### customizability + +normal + +#### type + +str + +#### default + +NsfHifiGAN + +### vocoder_ckpt + +Path of the vocoder model. + +#### visibility + +acoustic + +#### scope + +preprocessing, training, inference + +#### customizability + +normal + +#### type + +str + +#### default + +checkpoints/nsf_hifigan/model + +### win_size + +Window size for mel or feature extraction. + +#### visibility + +all + +#### scope + +preprocessing + +#### customizability + +preserved + +#### type + +int -#### max_batch_size +#### default -TBD +2048 From 99c7c700df1fe962c8f2b85c3beab8f131d60b25 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 13 May 2023 21:04:32 +0800 Subject: [PATCH 370/475] Clean up unused codes and configs --- configs/acoustic.yaml | 1 - configs/base.yaml | 2 -- modules/diffusion/ddpm.py | 4 ++-- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 950626d53..927ff13d2 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -44,7 +44,6 @@ binarizer_cls: preprocessing.acoustic_binarizer.AcousticBinarizer dictionary: dictionaries/opencpop-extension.txt spec_min: [-5] spec_max: [0] -keep_bins: 128 mel_vmin: -6. #-6. mel_vmax: 1.5 interp_uv: true diff --git a/configs/base.yaml b/configs/base.yaml index 7c7075160..58aa46298 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -1,7 +1,5 @@ # task task_cls: '' -work_dir: '' # experiment directory. -infer: false # infer seed: 1234 save_codes: - configs diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 0a0146b03..662d87278 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -111,8 +111,8 @@ def __init__(self, out_dims, timesteps=1000, k_step=1000, self.register_buffer('posterior_mean_coef2', to_torch( (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. 
- alphas_cumprod))) - self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['keep_bins']]) - self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['keep_bins']]) + self.register_buffer('spec_min', torch.FloatTensor(spec_min)[None, None, :hparams['audio_num_mel_bins']]) + self.register_buffer('spec_max', torch.FloatTensor(spec_max)[None, None, :hparams['audio_num_mel_bins']]) def q_mean_variance(self, x_start, t): mean = extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start From 34fe5399ffcc1c2341dbbc210708b76d5eeaae5b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 13 May 2023 21:17:36 +0800 Subject: [PATCH 371/475] Augmentations can now be disabled with `enabled` config keys --- configs/acoustic.yaml | 25 ++++---- docs/ConfigurationSchemas.md | 96 +++++++++++++++++++++-------- preprocessing/acoustic_binarizer.py | 8 +-- 3 files changed, 88 insertions(+), 41 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 927ff13d2..b201fe279 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -26,17 +26,20 @@ fmax: 16000 binarization_args: shuffle: true num_workers: 0 -#augmentation_args: -# random_pitch_shifting: -# range: [-5., 5.] -# scale: 1.0 -# fixed_pitch_shifting: -# targets: [-5., 5.] -# scale: 0.75 -# random_time_stretching: -# range: [0.5, 2.] -# domain: log # or linear -# scale: 2.0 +augmentation_args: + random_pitch_shifting: + enabled: false + range: [-5., 5.] + scale: 1.0 + fixed_pitch_shifting: + enabled: false + targets: [-5., 5.] + scale: 0.75 + random_time_stretching: + enabled: false + range: [0.5, 2.] + domain: log # or linear + scale: 1.0 raw_data_dir: 'data/opencpop/raw' binary_data_dir: 'data/opencpop/binary' diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index 6ed6930d4..a528887d2 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -109,9 +109,9 @@ Arguments for fixed pitch shifting augmentation. dict -### augmentation_args.fixed_pitch_shifting.scale +### augmentation_args.fixed_pitch_shifting.enabled -Scale ratio of each target in fixed pitch shifting augmentation. +Whether to apply fixed pitch shifting augmentation. #### visibility @@ -127,15 +127,19 @@ recommended #### type -tuple +bool #### default -0.75 +false -### augmentation_args.fixed_pitch_shifting.targets +#### constraints -Targets (in semitones) of fixed pitch shifting augmentation. +Must be false if [augmentation_args.random_pitch_shifting.enabled](#augmentation_args.random_pitch_shifting.enabled) is set to true. + +### augmentation_args.fixed_pitch_shifting.scale + +Scale ratio of each target in fixed pitch shifting augmentation. #### visibility @@ -147,7 +151,7 @@ preprocessing #### customizability -not recommended +recommended #### type @@ -155,11 +159,15 @@ tuple #### default -[-5.0, 5.0] +0.75 -### augmentation_args.random_pitch_shifting +### augmentation_args.fixed_pitch_shifting.targets -all +Targets (in semitones) of fixed pitch shifting augmentation. + +#### visibility + +acoustic #### scope @@ -167,23 +175,31 @@ preprocessing #### customizability -recommended +not recommended #### type -float +tuple #### default -0.75 +[-5.0, 5.0] -### augmentation_args.fixed_pitch_shifting.targets +### augmentation_args.random_pitch_shifting + +Arguments for random pitch shifting augmentation. + +#### type -Targets of fixed pitch shifting augmentation, each in semitones. 
+dict + +### augmentation_args.random_pitch_shifting.enabled + +Whether to apply random pitch shifting augmentation. #### visibility -all +acoustic #### scope @@ -191,23 +207,19 @@ preprocessing #### customizability -not recommended +recommended #### type -list +bool #### default -[-5, 5] - -### augmentation_args.random_pitch_shifting +false -Arguments for random pitch shifting augmentation. +#### constraints -#### type - -dict +Must be false if [augmentation_args.fixed_pitch_shifting.enabled](#augmentation_args.fixed_pitch_shifting.enabled) is set to true. ### augmentation_args.random_pitch_shifting.range @@ -257,6 +269,14 @@ float 1.0 +### augmentation_args.random_time_stretching + +Arguments for random time stretching augmentation. + +#### type + +dict + ### augmentation_args.random_time_stretching.domain The domain where random time stretching factors are uniformly distributed in. @@ -288,6 +308,30 @@ log Choose from 'log', 'linear'. +### augmentation_args.random_time_stretching.enabled + +Whether to apply random time stretching augmentation. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +recommended + +#### type + +bool + +#### default + +false + ### augmentation_args.random_time_stretching.range Range of random time stretching factors. @@ -334,7 +378,7 @@ float #### default -0.75 +1.0 ### base_config diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 3e32b033c..4be400e42 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -163,7 +163,7 @@ def arrange_data_augmentation(self, data_iterator): aug_list = [] all_item_names = [item_name for item_name, _ in data_iterator] total_scale = 0 - if self.augmentation_args.get('random_pitch_shifting') is not None: + if self.augmentation_args.get('random_pitch_shifting', {}).get('enabled', True): from augmentation.spec_stretch import SpectrogramStretchAugmentation aug_args = self.augmentation_args['random_pitch_shifting'] key_shift_min, key_shift_max = aug_args['range'] @@ -195,12 +195,12 @@ def arrange_data_augmentation(self, data_iterator): total_scale += scale - if self.augmentation_args.get('fixed_pitch_shifting') is not None: + if self.augmentation_args.get('fixed_pitch_shifting', {}).get('enabled', True): from augmentation.spec_stretch import SpectrogramStretchAugmentation aug_args = self.augmentation_args['fixed_pitch_shifting'] targets = aug_args['targets'] scale = aug_args['scale'] - assert self.augmentation_args.get('random_pitch_shifting') is None, \ + assert not self.augmentation_args.get('random_pitch_shifting', {}).get('enabled'), \ 'Fixed pitch shifting augmentation is not compatible with random pitch shifting.' assert len(targets) == len(set(targets)), \ 'Fixed pitch shifting augmentation requires having no duplicate targets.' 
@@ -227,7 +227,7 @@ def arrange_data_augmentation(self, data_iterator): total_scale += scale * len(targets) - if self.augmentation_args.get('random_time_stretching') is not None: + if self.augmentation_args.get('random_time_stretching', {}).get('enabled', True): from augmentation.spec_stretch import SpectrogramStretchAugmentation aug_args = self.augmentation_args['random_time_stretching'] speed_min, speed_max = aug_args['range'] From 2b080d04ed443f696094451c54377a608789d20e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 13 May 2023 21:22:37 +0800 Subject: [PATCH 372/475] Adjust descriptions --- docs/ConfigurationSchemas.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index a528887d2..077eb858c 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -1061,7 +1061,7 @@ float ### lr_scheduler_args -Arguments of learning rate scheduler. Keys will be used as keyword arguments when initializing the scheduler object. +Arguments of learning rate scheduler. Keys will be used as keyword arguments of the `__init__()` method of [lr_scheduler_args.scheduler_cls](#lr_scheduler_args.scheduler_cls). #### type @@ -1453,7 +1453,7 @@ int ### optimizer_args -Arguments of optimizer. Keys will be used as keyword arguments when initializing the optimizer object. +Arguments of optimizer. Keys will be used as keyword arguments of the `__init__()` method of [optimizer_args.optimizer_cls](#optimizer_args.optimizer_cls). #### type From 4b0d95ad650791dd6dc11435bd8fd511a7bd4406 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 13 May 2023 21:30:00 +0800 Subject: [PATCH 373/475] Use direct indexing on augmentation args --- preprocessing/acoustic_binarizer.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 4be400e42..99f9812d6 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -163,7 +163,7 @@ def arrange_data_augmentation(self, data_iterator): aug_list = [] all_item_names = [item_name for item_name, _ in data_iterator] total_scale = 0 - if self.augmentation_args.get('random_pitch_shifting', {}).get('enabled', True): + if self.augmentation_args['random_pitch_shifting']['enabled']: from augmentation.spec_stretch import SpectrogramStretchAugmentation aug_args = self.augmentation_args['random_pitch_shifting'] key_shift_min, key_shift_max = aug_args['range'] @@ -195,12 +195,12 @@ def arrange_data_augmentation(self, data_iterator): total_scale += scale - if self.augmentation_args.get('fixed_pitch_shifting', {}).get('enabled', True): + if self.augmentation_args['fixed_pitch_shifting']['enabled']: from augmentation.spec_stretch import SpectrogramStretchAugmentation aug_args = self.augmentation_args['fixed_pitch_shifting'] targets = aug_args['targets'] scale = aug_args['scale'] - assert not self.augmentation_args.get('random_pitch_shifting', {}).get('enabled'), \ + assert not self.augmentation_args['random_pitch_shifting']['enabled'], \ 'Fixed pitch shifting augmentation is not compatible with random pitch shifting.' assert len(targets) == len(set(targets)), \ 'Fixed pitch shifting augmentation requires having no duplicate targets.' 
@@ -227,7 +227,7 @@ def arrange_data_augmentation(self, data_iterator): total_scale += scale * len(targets) - if self.augmentation_args.get('random_time_stretching', {}).get('enabled', True): + if self.augmentation_args['random_time_stretching']['enabled']: from augmentation.spec_stretch import SpectrogramStretchAugmentation aug_args = self.augmentation_args['random_time_stretching'] speed_min, speed_max = aug_args['range'] From 65b892efc74f02ef85c2cf60b65c7ee8d1c1cde7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 14 May 2023 22:28:15 +0800 Subject: [PATCH 374/475] Remove unused modules and losses --- modules/diffusion/ddpm.py | 63 ----------------- modules/losses/dur_loss.py | 56 +++++++++++++++ modules/losses/ssim.py | 77 --------------------- modules/losses/variance_loss.py | 116 -------------------------------- training/variance_task.py | 2 +- 5 files changed, 57 insertions(+), 257 deletions(-) create mode 100644 modules/losses/dur_loss.py delete mode 100644 modules/losses/ssim.py delete mode 100644 modules/losses/variance_loss.py diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index aa65e48be..b04229a28 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -3,7 +3,6 @@ from inspect import isfunction from typing import List, Tuple -import librosa.sequence import numpy as np import torch from torch import nn @@ -410,65 +409,3 @@ def denorm_spec(self, xs): xs = xs.unbind(dim=1) assert len(xs) == self.num_feats return self.clamp_spec(xs) - - -class CurveDiffusion1d(GaussianDiffusion): - def __init__(self, vmin, vmax, timesteps=1000, k_step=1000, - denoiser_type=None, denoiser_args=None, betas=None): - self.vmin = vmin - self.vmax = vmax - super().__init__( - 1, timesteps=timesteps, k_step=k_step, - denoiser_type=denoiser_type, denoiser_args=denoiser_args, - betas=betas, spec_min=[vmin], spec_max=[vmax] - ) - - def norm_spec(self, x): - return super().norm_spec(x.unsqueeze(-1).clamp(min=self.vmin, max=self.vmax)) - - def denorm_spec(self, x): - return super().denorm_spec(x).clamp(min=self.vmin, max=self.vmax).squeeze(-1) - - -class CurveDiffusion2d(GaussianDiffusion): - def __init__(self, vmin, vmax, num_bins, deviation, timesteps=1000, k_step=1000, - denoiser_type=None, denoiser_args=None, betas=None): - super().__init__( - num_bins, timesteps=timesteps, k_step=k_step, - denoiser_type=denoiser_type, denoiser_args=denoiser_args, - betas=betas, spec_min=[0.], spec_max=[1.] 
- ) - self.vmin = vmin - self.vmax = vmax - self.num_bins = num_bins - self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins - self.sigma = deviation / self.interval - self.width = int(3 * self.sigma) - self.register_buffer('x', torch.arange(self.num_bins).float().reshape(1, 1, -1)) # [1, 1, N] - xx, yy = np.meshgrid(range(num_bins), range(num_bins)) - transition = np.maximum(self.width * 2 - abs(xx - yy), 0) - self.transition = transition / transition.sum(axis=1, keepdims=True) - - def values_to_bins(self, values): - return (values - self.vmin) / self.interval - - def bins_to_values(self, bins): - return bins * self.interval + self.vmin - - def norm_spec(self, curve): - miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] - probs = ((self.x - miu) / self.sigma).pow(2).div(-2).exp() # gaussian blur, [B, T, N] - return super().norm_spec(probs) - - def denorm_spec(self, probs): - probs = super().denorm_spec(probs) # [B, T, N] - sequences = probs.softmax(dim=2).transpose(1, 2).cpu().numpy() - peaks = torch.from_numpy( - librosa.sequence.viterbi(sequences, self.transition).astype(np.int64) - ).to(probs.device).unsqueeze(-1) # [B, T, 1] - probs *= probs > 0 - start = torch.max(torch.tensor(0, device=probs.device), peaks - self.width) - end = torch.min(torch.tensor(self.num_bins - 1, device=probs.device), peaks + self.width) - probs[(self.x < start) | (self.x > end)] = 0. - bins = torch.sum(self.x * probs, dim=2) / torch.sum(probs, dim=2) # [B, T] - return self.bins_to_values(bins).clamp(min=self.vmin, max=self.vmax) diff --git a/modules/losses/dur_loss.py b/modules/losses/dur_loss.py new file mode 100644 index 000000000..5aec3c929 --- /dev/null +++ b/modules/losses/dur_loss.py @@ -0,0 +1,56 @@ +import torch +import torch.nn as nn +from torch import Tensor + + +class DurationLoss(nn.Module): + """ + Loss module as combination of phone duration loss, word duration loss and sentence duration loss. + """ + + def __init__(self, offset, loss_type, + lambda_pdur=0.6, lambda_wdur=0.3, lambda_sdur=0.1): + super().__init__() + self.loss_type = loss_type + if self.loss_type == 'mse': + self.loss = nn.MSELoss() + elif self.loss_type == 'huber': + self.loss = nn.HuberLoss() + else: + raise NotImplementedError() + self.offset = offset + + self.lambda_pdur = lambda_pdur + self.lambda_wdur = lambda_wdur + self.lambda_sdur = lambda_sdur + + def linear2log(self, any_dur): + return torch.log(any_dur + self.offset) + + def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: + dur_gt = dur_gt.to(dtype=dur_pred.dtype) + + # pdur_loss + pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) + + dur_pred = dur_pred.clamp(min=0.) 
# clip to avoid NaN loss + + # wdur loss + shape = dur_pred.shape[0], ph2word.max() + 1 + wdur_pred = dur_pred.new_zeros(*shape).scatter_add( + 1, ph2word, dur_pred + )[:, 1:] # [B, T_ph] => [B, T_w] + wdur_gt = dur_gt.new_zeros(*shape).scatter_add( + 1, ph2word, dur_gt + )[:, 1:] # [B, T_ph] => [B, T_w] + wdur_loss = self.lambda_wdur * self.loss(self.linear2log(wdur_pred), self.linear2log(wdur_gt)) + + # sdur loss + sdur_pred = dur_pred.sum(dim=1) + sdur_gt = dur_gt.sum(dim=1) + sdur_loss = self.lambda_sdur * self.loss(self.linear2log(sdur_pred), self.linear2log(sdur_gt)) + + # combine + dur_loss = pdur_loss + wdur_loss + sdur_loss + + return dur_loss diff --git a/modules/losses/ssim.py b/modules/losses/ssim.py deleted file mode 100644 index e0a9a8164..000000000 --- a/modules/losses/ssim.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Adapted from https://github.com/Po-Hsun-Su/pytorch-ssim -""" - -from math import exp - -import torch -import torch.nn.functional as F -from torch.autograd import Variable - - -def gaussian(window_size, sigma): - gauss = torch.Tensor([exp(-(x - window_size // 2) ** 2 / float(2 * sigma ** 2)) for x in range(window_size)]) - return gauss / gauss.sum() - - -def create_window(window_size, channel): - _1D_window = gaussian(window_size, 1.5).unsqueeze(1) - _2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0) - window = Variable(_2D_window.expand(channel, 1, window_size, window_size).contiguous()) - return window - - -def _ssim(img1, img2, window, window_size, channel, size_average=True): - mu1 = F.conv2d(img1, window, padding=window_size // 2, groups=channel) - mu2 = F.conv2d(img2, window, padding=window_size // 2, groups=channel) - - mu1_sq = mu1.pow(2) - mu2_sq = mu2.pow(2) - mu1_mu2 = mu1 * mu2 - - sigma1_sq = F.conv2d(img1 * img1, window, padding=window_size // 2, groups=channel) - mu1_sq - sigma2_sq = F.conv2d(img2 * img2, window, padding=window_size // 2, groups=channel) - mu2_sq - sigma12 = F.conv2d(img1 * img2, window, padding=window_size // 2, groups=channel) - mu1_mu2 - - C1 = 0.01 ** 2 - C2 = 0.03 ** 2 - - ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2)) - - if size_average: - return ssim_map.mean() - else: - return ssim_map.mean(1) - - -class SSIMLoss(torch.nn.Module): - def __init__(self, window_size=11, size_average=True): - super(SSIMLoss, self).__init__() - self.window_size = window_size - self.size_average = size_average - self.channel = 1 - self.window = create_window(window_size, self.channel) - - def forward(self, img1, img2): - (_, channel, _, _) = img1.size() - - if channel == self.channel and self.window.data.type() == img1.data.type(): - window = self.window - else: - window = create_window(self.window_size, channel).to(img1) - - self.window = window - self.channel = channel - - return _ssim(img1, img2, window, self.window_size, channel, self.size_average) - - -window = None - - -def ssim(img1, img2, window_size=11, size_average=True): - (_, channel, _, _) = img1.size() - global window - if window is None: - window = create_window(window_size, channel).to(img1) - return _ssim(img1, img2, window, window_size, channel, size_average) diff --git a/modules/losses/variance_loss.py b/modules/losses/variance_loss.py deleted file mode 100644 index 78d7d3f29..000000000 --- a/modules/losses/variance_loss.py +++ /dev/null @@ -1,116 +0,0 @@ -import torch -import torch.nn as nn -from torch import Tensor - - -class DurationLoss(nn.Module): - """ - Loss module as combination of 
phone duration loss, word duration loss and sentence duration loss. - """ - - def __init__(self, offset, loss_type, - lambda_pdur=0.6, lambda_wdur=0.3, lambda_sdur=0.1): - super().__init__() - self.loss_type = loss_type - if self.loss_type == 'mse': - self.loss = nn.MSELoss() - elif self.loss_type == 'huber': - self.loss = nn.HuberLoss() - else: - raise NotImplementedError() - self.offset = offset - - self.lambda_pdur = lambda_pdur - self.lambda_wdur = lambda_wdur - self.lambda_sdur = lambda_sdur - - def linear2log(self, any_dur): - return torch.log(any_dur + self.offset) - - def forward(self, dur_pred: Tensor, dur_gt: Tensor, ph2word: Tensor) -> Tensor: - dur_gt = dur_gt.to(dtype=dur_pred.dtype) - - # pdur_loss - pdur_loss = self.lambda_pdur * self.loss(self.linear2log(dur_pred), self.linear2log(dur_gt)) - - dur_pred = dur_pred.clamp(min=0.) # clip to avoid NaN loss - - # wdur loss - shape = dur_pred.shape[0], ph2word.max() + 1 - wdur_pred = dur_pred.new_zeros(*shape).scatter_add( - 1, ph2word, dur_pred - )[:, 1:] # [B, T_ph] => [B, T_w] - wdur_gt = dur_gt.new_zeros(*shape).scatter_add( - 1, ph2word, dur_gt - )[:, 1:] # [B, T_ph] => [B, T_w] - wdur_loss = self.lambda_wdur * self.loss(self.linear2log(wdur_pred), self.linear2log(wdur_gt)) - - # sdur loss - sdur_pred = dur_pred.sum(dim=1) - sdur_gt = dur_gt.sum(dim=1) - sdur_loss = self.lambda_sdur * self.loss(self.linear2log(sdur_pred), self.linear2log(sdur_gt)) - - # combine - dur_loss = pdur_loss + wdur_loss + sdur_loss - - return dur_loss - - -class CurveLoss1d(nn.Module): - """ - Loss module for 1d parameter curve with non-padding masks. - """ - - def __init__(self, vmin, vmax, loss_type): - super().__init__() - self.vmin = vmin - self.vmax = vmax - self.loss_type = loss_type - if self.loss_type == 'mse': - self.loss = nn.MSELoss(reduction='none') - elif self.loss_type == 'huber': - self.loss = nn.HuberLoss(reduction='none') - else: - raise NotImplementedError() - - def norm_target(self, c_gt): - return (c_gt - self.vmin) / (self.vmax - self.vmin) * 2 - 1 - - def forward(self, c_pred, c_gt, mask=None): - loss = self.loss(c_pred, self.norm_target(c_gt)) - if mask is not None: - loss *= mask - return loss.mean() - - -class CurveLoss2d(nn.Module): - """ - Loss module for parameter curve represented by gaussian-blurred 2-D probability bins. - """ - - def __init__(self, vmin, vmax, num_bins, deviation): - super().__init__() - self.vmin = vmin - self.vmax = vmax - self.interval = (vmax - vmin) / (num_bins - 1) # align with centers of bins - self.sigma = deviation / self.interval - self.register_buffer('x', torch.arange(num_bins).float().reshape(1, 1, -1)) # [1, 1, N] - self.loss = nn.BCEWithLogitsLoss() - - def values_to_bins(self, values: Tensor) -> Tensor: - return (values - self.vmin) / self.interval - - def curve_to_probs(self, curve: Tensor) -> Tensor: - miu = self.values_to_bins(curve)[:, :, None] # [B, T, 1] - probs = (((self.x - miu) / self.sigma) ** 2 / -2).exp() # gaussian blur, [B, T, N] - return probs - - def forward(self, y_pred: Tensor, c_gt: Tensor, mask: Tensor = None) -> Tensor: - """ - Calculate BCE with logits loss between predicted probs and gaussian-blurred bins representing gt curve. 
- :param y_pred: predicted probs [B, T, N] - :param c_gt: ground truth curve [B, T] - :param mask: (bool) mask of valid parts in ground truth curve [B, T] - """ - y_gt = self.curve_to_probs(c_gt) - return self.loss(y_pred, y_gt * mask[:, :, None]) diff --git a/training/variance_task.py b/training/variance_task.py index 8b7468fec..29b9901f3 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -9,7 +9,7 @@ from basics.base_dataset import BaseDataset from basics.base_task import BaseTask from modules.losses.diff_loss import DiffusionNoiseLoss -from modules.losses.variance_loss import DurationLoss +from modules.losses.dur_loss import DurationLoss from modules.toplevel import DiffSingerVariance from utils.hparams import hparams from utils.plot import dur_to_figure, curve_to_figure From fe53b22bd0dddeeb482a4c1e4b7973d1300e0e54 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 14 May 2023 22:38:44 +0800 Subject: [PATCH 375/475] Directly use `ph_dur_embed` if predict_dur is false --- modules/fastspeech/variance_encoder.py | 53 ++++++++++++++++---------- 1 file changed, 32 insertions(+), 21 deletions(-) diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 1ee331ac1..52ef40fb5 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -14,10 +14,15 @@ class FastSpeech2Variance(nn.Module): def __init__(self, vocab_size): super().__init__() + self.predict_dur = hparams['predict_dur'] + self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX) - self.midi_embed = Embedding(128, hparams['hidden_size']) - self.onset_embed = Embedding(2, hparams['hidden_size']) - self.word_dur_embed = Linear(1, hparams['hidden_size']) + + if self.predict_dur: + self.onset_embed = Embedding(2, hparams['hidden_size']) + self.word_dur_embed = Linear(1, hparams['hidden_size']) + else: + self.ph_dur_embed = Linear(1, hparams['hidden_size']) if hparams['use_spk_id']: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) @@ -28,7 +33,8 @@ def __init__(self, vocab_size): ) dur_hparams = hparams['dur_prediction_args'] - if hparams['predict_dur']: + if self.predict_dur: + self.midi_embed = Embedding(128, hparams['hidden_size']) self.dur_predictor = DurationPredictor( in_dims=hparams['hidden_size'], n_chans=dur_hparams['hidden_size'], @@ -45,27 +51,32 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T :param txt_tokens: (train, infer) [B, T_ph] :param midi: (train, infer) [B, T_ph] :param ph2word: (train, infer) [B, T_ph] - :param ph_dur: (train) [B, T_ph] + :param ph_dur: (train, [infer]) [B, T_ph] :param word_dur: (infer) [B, T_w] :param infer: whether inference :return: encoder_out, ph_dur_pred """ - b = txt_tokens.shape[0] - onset = torch.diff(ph2word, dim=1, prepend=ph2word.new_zeros(b, 1)) > 0 - onset_embed = self.onset_embed(onset.long()) # [B, T_ph, H] - if word_dur is None or not infer: - word_dur = ph_dur.new_zeros(b, ph2word.max() + 1).scatter_add( - 1, ph2word, ph_dur - )[:, 1:] # [B, T_ph] => [B, T_w] - word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word) # [B, T_w] => [B, T_ph] - word_dur_embed = self.word_dur_embed(word_dur.float()[:, :, None]) - encoder_out = self.encoder(txt_tokens, onset_embed + word_dur_embed) + if self.predict_dur: + b = txt_tokens.shape[0] + onset = torch.diff(ph2word, dim=1, prepend=ph2word.new_zeros(b, 1)) > 0 + onset_embed = self.onset_embed(onset.long()) # [B, T_ph, H] - if not hparams['predict_dur']: 
- return encoder_out, None + if word_dur is None or not infer: + word_dur = ph_dur.new_zeros(b, ph2word.max() + 1).scatter_add( + 1, ph2word, ph_dur + )[:, 1:] # [B, T_ph] => [B, T_w] + word_dur = torch.gather(F.pad(word_dur, [1, 0], value=0), 1, ph2word) # [B, T_w] => [B, T_ph] + word_dur_embed = self.word_dur_embed(word_dur.float()[:, :, None]) - midi_embed = self.midi_embed(midi) # => [B, T_ph, H] - dur_cond = encoder_out + midi_embed - ph_dur_pred = self.dur_predictor(dur_cond, x_masks=txt_tokens == PAD_INDEX, infer=infer) + encoder_out = self.encoder(txt_tokens, onset_embed + word_dur_embed) - return encoder_out, ph_dur_pred + midi_embed = self.midi_embed(midi) # => [B, T_ph, H] + dur_cond = encoder_out + midi_embed + ph_dur_pred = self.dur_predictor(dur_cond, x_masks=txt_tokens == PAD_INDEX, infer=infer) + + return encoder_out, ph_dur_pred + else: + ph_dur_embed = self.ph_dur_embed(ph_dur.float()[:, :, None]) + encoder_out = self.encoder(txt_tokens, ph_dur_embed) + + return encoder_out, None From 79bea4a63e1510c80ab5dbad4950997f9235029f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 15 May 2023 19:10:40 +0800 Subject: [PATCH 376/475] Fix illegal call to NoneType when predict_dur is false --- training/variance_task.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index 29b9901f3..802c91dba 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -114,7 +114,9 @@ def run_model(self, sample, infer=False): dur_pred, pitch_pred, variances_pred = output if infer: - return dur_pred.round().long(), pitch_pred, variances_pred # Tensor, Tensor, Dict[Tensor] + if dur_pred is not None: + dur_pred = dur_pred.round().long() + return dur_pred, pitch_pred, variances_pred # Tensor, Tensor, Dict[Tensor] else: losses = {} if dur_pred is not None: From 336ded4be9cbc3dbee1f8a66660c585b70c3eae1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 15 May 2023 21:51:01 +0800 Subject: [PATCH 377/475] Fix AttributeError --- modules/toplevel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/toplevel.py b/modules/toplevel.py index 4ca61eb35..bfc1e8692 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -39,7 +39,7 @@ def __init__(self, vocab_size, out_dims): self.embed_variances = len(variances_to_embed) > 0 self.variance_aware_list = [ v_name for v_name in VARIANCE_CHECKLIST - if v_name in self.variance_prediction_list or v_name in self.variances_to_embed + if v_name in self.variance_prediction_list or v_name in variances_to_embed ] if self.embed_variances or self.predict_variances: self.variance_embeds = nn.ModuleDict({ From 96f0f6f5a13218a3e476317920f8014e5751ea05 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 16 May 2023 18:43:28 +0800 Subject: [PATCH 378/475] Swap order of gt and pred curve --- utils/plot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/plot.py b/utils/plot.py index eec0a1897..2f7e7f836 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -66,9 +66,9 @@ def curve_to_figure(curve_gt, curve_pred=None, curve_base=None, grid=None): fig = plt.figure() if curve_base is not None: plt.plot(curve_base, color='g', label='base') + plt.plot(curve_gt, color='b', label='gt') if curve_pred is not None: plt.plot(curve_pred, color='r', label='pred') - plt.plot(curve_gt, color='b', label='gt') if grid is not None: plt.gca().yaxis.set_major_locator(MultipleLocator(grid)) plt.grid(axis='y') From d984c16e28241d9ca38d2d61380d1f8ee3733b33 Mon Sep 17 
00:00:00 2001 From: yqzhishen Date: Wed, 17 May 2023 14:02:26 +0800 Subject: [PATCH 379/475] Update type hint comments --- training/variance_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/training/variance_task.py b/training/variance_task.py index 802c91dba..5716628be 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -116,7 +116,7 @@ def run_model(self, sample, infer=False): if infer: if dur_pred is not None: dur_pred = dur_pred.round().long() - return dur_pred, pitch_pred, variances_pred # Tensor, Tensor, Dict[Tensor] + return dur_pred, pitch_pred, variances_pred # Tensor, Tensor, Dict[str, Tensor] else: losses = {} if dur_pred is not None: From 5ddd90d2696a6c68005a93f3097206ae03e84aa6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 17 May 2023 18:58:35 +0800 Subject: [PATCH 380/475] Support local retaking on pitch and variance parameters (#87) * Add retake mask and embedding * Fix device mismatch * Add retake mask and embedding * Fix device mismatch * Fix unexpected mutation on base_pitch and delta_pitch * Fix KeyError on `ph_dur`; add retake inference * Add missing `+1` * Fix illegal visit when `delta_pitch` is None * Support retaking variance parameters * Fix dimension mismatch * Add missing bitwise not * Remove useless code * Replace `torch.unbind` with `torch.split` --- inference/ds_variance.py | 13 ++++++--- modules/fastspeech/tts_modules.py | 7 ++--- modules/toplevel.py | 44 +++++++++++++++++++++++-------- training/variance_task.py | 27 +++++++++++++++---- 4 files changed, 69 insertions(+), 22 deletions(-) diff --git a/inference/ds_variance.py b/inference/ds_variance.py index 948c59fef..1ec0a1714 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -100,6 +100,7 @@ def preprocess_input(self, param): else: ph_dur = None mel2ph = None + batch['ph_dur'] = ph_dur batch['mel2ph'] = mel2ph # Calculate frame-level MIDI pitch, which is a step function curve @@ -150,12 +151,18 @@ def preprocess_input(self, param): @torch.no_grad() def run_model(self, sample): txt_tokens = sample['tokens'] - word_dur = sample['word_dur'] + midi = sample['midi'] ph2word = sample['ph2word'] + word_dur = sample['word_dur'] + ph_dur = sample['ph_dur'] + mel2ph = sample['mel2ph'] base_pitch = sample['base_pitch'] + delta_pitch = sample.get('delta_pitch') + dur_pred, pitch_pred, variance_pred = self.model( - txt_tokens, midi=sample['midi'], ph2word=ph2word, word_dur=word_dur, - mel2ph=sample['mel2ph'], base_pitch=base_pitch, delta_pitch=sample.get('delta_pitch') + txt_tokens, midi=midi, ph2word=ph2word, word_dur=word_dur, ph_dur=ph_dur, + mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, + retake=None, infer=True ) if dur_pred is not None: dur_pred = self.rr(dur_pred, ph2word, word_dur) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index b85312d3c..61bac0310 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -289,7 +289,7 @@ def forward(self, ph_dur, ph2word, word_dur): class LengthRegulator(torch.nn.Module): # noinspection PyMethodMayBeStatic - def forward(self, dur, dur_padding=None, alpha=1.0): + def forward(self, dur, dur_padding=None, alpha=None): """ Example (no batch dim version): 1. 
dur = [2,2,3] @@ -308,8 +308,9 @@ def forward(self, dur, dur_padding=None, alpha=1.0): :return: mel2ph (B, T_speech) """ - assert alpha > 0 - dur = torch.round(dur.float() * alpha).long() + assert alpha is None or alpha > 0 + if alpha is not None: + dur = torch.round(dur.float() * alpha).long() if dur_padding is not None: dur = dur * (1 - dur_padding.long()) token_idx = torch.arange(1, dur.shape[1] + 1)[None, :, None].to(dur.device) diff --git a/modules/toplevel.py b/modules/toplevel.py index bfc1e8692..30fc971da 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -5,6 +5,7 @@ from basics.base_module import CategorizedModule from modules.commons.common_layers import ( XavierUniformInitLinear as Linear, + NormalInitEmbedding as Embedding ) from modules.diffusion.ddpm import ( GaussianDiffusion, PitchDiffusion @@ -133,14 +134,8 @@ def __init__(self, vocab_size): self.predict_pitch = hparams['predict_pitch'] - predict_energy = hparams.get('predict_energy', False) - predict_breathiness = hparams.get('predict_breathiness', False) - self.variance_prediction_list = [] - if predict_energy: - self.variance_prediction_list.append('energy') - if predict_breathiness: - self.variance_prediction_list.append('breathiness') - self.predict_variances = len(self.variance_prediction_list) > 0 + if self.predict_pitch or self.predict_variances: + self.retake_embed = Embedding(2, hparams['hidden_size']) if self.predict_pitch: pitch_hparams = hparams['pitch_prediction_args'] @@ -160,11 +155,15 @@ def __init__(self, vocab_size): if self.predict_variances: self.pitch_embed = Linear(1, hparams['hidden_size']) + self.variance_embeds = nn.ModuleDict({ + v_name: Linear(1, hparams['hidden_size']) + for v_name in self.variance_prediction_list + }) self.variance_predictor = self.build_adaptor() def forward( self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None, - base_pitch=None, delta_pitch=None, infer=True, **kwargs + base_pitch=None, delta_pitch=None, retake=None, infer=True, **kwargs ): encoder_out, dur_pred_out = self.fs2( txt_tokens, midi=midi, ph2word=ph2word, @@ -183,7 +182,17 @@ def forward( mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']]) condition = torch.gather(encoder_out, 1, mel2ph_) + if self.predict_pitch or self.predict_variances: + if retake is None: + retake_embed = self.retake_embed(torch.ones_like(mel2ph)) + else: + retake_embed = self.retake_embed(retake.long()) + condition += retake_embed + if self.predict_pitch: + if retake is not None: + base_pitch = base_pitch + delta_pitch * ~retake + delta_pitch = delta_pitch * retake pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer) else: @@ -196,11 +205,24 @@ def forward( pitch = base_pitch + pitch_pred_out else: pitch = base_pitch + delta_pitch - pitch_embed = self.pitch_embed(pitch[:, :, None]) - condition += pitch_embed + condition += self.pitch_embed(pitch[:, :, None]) variance_inputs = self.collect_variance_inputs(**kwargs) + + if retake is None: + variance_embeds = [ + self.variance_embeds[v_name](torch.zeros_like(pitch)[:, :, None]) + for v_name in self.variance_prediction_list + ] + else: + variance_embeds = [ + self.variance_embeds[v_name]((v_input * ~retake)[:, :, None]) + for v_name, v_input in zip(self.variance_prediction_list, variance_inputs) + ] + condition += torch.stack(variance_embeds, dim=-1).sum(-1) + variance_outputs = self.variance_predictor(condition, variance_inputs, infer) + if infer: 
             variances_pred_out = self.collect_variance_outputs(variance_outputs)
         else:
diff --git a/training/variance_task.py b/training/variance_task.py
index 5716628be..a97477d14 100644
--- a/training/variance_task.py
+++ b/training/variance_task.py
@@ -54,9 +54,13 @@ class VarianceTask(BaseTask):
     def __init__(self):
         super().__init__()
         self.dataset_cls = VarianceDataset
-        if hparams['predict_dur']:
+
+        self.predict_dur = hparams['predict_dur']
+        if self.predict_dur:
             self.lambda_dur_loss = hparams['lambda_dur_loss']
-        if hparams['predict_pitch']:
+
+        self.predict_pitch = hparams['predict_pitch']
+        if self.predict_pitch:
             self.lambda_pitch_loss = hparams['lambda_pitch_loss']
 
         predict_energy = hparams['predict_energy']
@@ -76,7 +80,7 @@ def build_model(self):
 
     # noinspection PyAttributeOutsideInit
     def build_losses(self):
-        if hparams['predict_dur']:
+        if self.predict_dur:
             dur_hparams = hparams['dur_prediction_args']
             self.dur_loss = DurationLoss(
                 offset=dur_hparams['log_offset'],
@@ -85,7 +89,7 @@ def build_losses(self):
                 lambda_wdur=dur_hparams['lambda_wdur_loss'],
                 lambda_sdur=dur_hparams['lambda_sdur_loss']
             )
-        if hparams['predict_pitch']:
+        if self.predict_pitch:
             self.pitch_loss = DiffusionNoiseLoss(
                 loss_type=hparams['diff_loss_type'],
             )
@@ -105,11 +109,24 @@ def run_model(self, sample, infer=False):
         energy = sample.get('energy')  # [B, T_t]
         breathiness = sample.get('breathiness')  # [B, T_t]
 
+        if (self.predict_pitch or self.predict_variances) and not infer:
+            # randomly select continuous retaking regions
+            b = sample['size']
+            t = mel2ph.shape[1]
+            device = mel2ph.device
+            start, end = torch.sort(
+                torch.randint(low=0, high=t + 1, size=(b, 2), device=device), dim=1
+            )[0].split(1, dim=1)
+            idx = torch.arange(0, t, dtype=torch.long, device=device)[None]
+            retake = (idx >= start) & (idx < end)
+        else:
+            retake = None
+
         output = self.model(
             txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur,
             mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch,
             energy=energy, breathiness=breathiness,
-            infer=infer
+            retake=retake, infer=infer
         )
         dur_pred, pitch_pred, variances_pred = output
 
From 99f10793aaa7af18228e201194027c69496845bc Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Wed, 17 May 2023 19:27:33 +0800
Subject: [PATCH 381/475] Change default `num_pad_tokens` to 1

---
 configs/acoustic.yaml        |  1 +
 docs/ConfigurationSchemas.md | 26 ++++++++++++++++++++++++++
 2 files changed, 27 insertions(+)

diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml
index b201fe279..6a2273a1a 100644
--- a/configs/acoustic.yaml
+++ b/configs/acoustic.yaml
@@ -45,6 +45,7 @@ raw_data_dir: 'data/opencpop/raw'
 binary_data_dir: 'data/opencpop/binary'
 binarizer_cls: preprocessing.acoustic_binarizer.AcousticBinarizer
 dictionary: dictionaries/opencpop-extension.txt
+num_pad_tokens: 1
 spec_min: [-5]
 spec_max: [0]
 mel_vmin: -6. #-6.
diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md
index 077eb858c..eaced22e0 100644
--- a/docs/ConfigurationSchemas.md
+++ b/docs/ConfigurationSchemas.md
@@ -1379,6 +1379,32 @@ int
 
 2
 
+### num_pad_tokens
+
+Number of padding phoneme indexes before all real tokens.
+
+Due to some historical reasons, old checkpoints may have 3 padding tokens called \<pad\>, \<EOS\> and \<UNK\>. After refactoring, all padding tokens are called \<PAD\>, and only the first one (token == 0) will be used.
+
+#### visibility
+
+acoustic
+
+#### scope
+
+nn, preprocess
+
+#### customizability
+
+not recommended
+
+#### type
+
+int
+
+#### default
+
+1
+
 ### num_sanity_val_steps
 
 Number of sanity validation steps at the beginning.
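The local retaking feature introduced in PATCH 380 above works in two steps: during training, a random contiguous span of frames is marked as "retaken" for each sample, and outside that span the ground-truth pitch delta is folded back into the condition, so the model only has to re-generate the masked region. The following is a minimal, self-contained PyTorch sketch of that sampling and gating logic; the tensor names and the mask arithmetic mirror the diffs above, while the sizes b = 2 and t = 10 are illustrative stand-ins:

    import torch

    b, t = 2, 10  # batch size and frame count (illustrative values)

    # Draw two random frame indices per item and sort them, giving a
    # contiguous half-open span [start, end) of frames to retake.
    start, end = torch.sort(
        torch.randint(low=0, high=t + 1, size=(b, 2)), dim=1
    )[0].split(1, dim=1)
    idx = torch.arange(0, t, dtype=torch.long)[None]  # [1, T]
    retake = (idx >= start) & (idx < end)             # [B, T] boolean mask

    # Gating as in DiffSingerVariance.forward: outside the retaken span the
    # known delta_pitch is folded into base_pitch; inside it, the model must
    # predict the delta from scratch.
    base_pitch = torch.zeros(b, t)
    delta_pitch = torch.randn(b, t)
    base_pitch = base_pitch + delta_pitch * ~retake
    delta_pitch = delta_pitch * retake
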
From 6f44a321df8d60dd4236b15830d196459fc289b8 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Wed, 17 May 2023 19:31:41 +0800
Subject: [PATCH 382/475] preserved -> reserved

---
 docs/ConfigurationSchemas.md | 54 ++++++++++++++++++------------------
 1 file changed, 27 insertions(+), 27 deletions(-)

diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md
index eaced22e0..fd7f6c048 100644
--- a/docs/ConfigurationSchemas.md
+++ b/docs/ConfigurationSchemas.md
@@ -12,14 +12,14 @@ This document explains the meaning and usages of all editable keys in a configur
 
 Each configuration key (including nested keys) are described with a brief explanation and several attributes listed as follows:
 
-| Attribute | Explanation |
-|:---------------:|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| visibility | Represents what kind(s) of models and tasks this configuration belongs to. |
-| scope | The scope of effects of the configuration, indicating what it can influence within the whole pipeline. Possible values are:<br>**nn** - This configuration is related to how the neural networks are formed and initialized. Modifying it will result in failure when loading or resuming from checkpoints.<br>**preprocessing** - This configuration controls how raw data pieces or inference inputs are converted to inputs of neural networks. Binarizers should be re-run if this configuration is modified.<br>**training** - This configuration describes the training procedures. Most training configurations can affect training performance, memory consumption, device utilization and loss calculation. Modifying training-only configurations will not cause severe inconsistency or errors in most situations.<br>**inference** - This configuration describes the calculation logic through the model graph. Changing it can lead to inconsistent or wrong outputs of inference or validation.<br>**others** - Other configurations not discussed above. Will have different effects according to the descriptions. |
-| customizability | The level of customizability of the configuration. Possible values are:<br>**required** - This configuration **must** be set or modified according to the actual situation or condition, otherwise errors can be raised.<br>**recommended** - It is recommended to adjust this configuration according to the dataset, requirements, environment and hardware. Most functionality-related and feature-related configurations are at this level, and all configurations in this level are widely tested with different values. However, leaving it unchanged will not cause problems.<br>**normal** - There is no need to modify it as the default value is carefully tuned and widely validated. However, one can still use another value if there are some special requirements or situations.<br>**not recommended** - No other values except the default one of this configuration are tested. Modifying it will not cause errors, but may cause unpredictable or significant impacts to the pipelines.<br>**preserved** - This configuration **must not** be modified. It appears in the configuration file only for future scalability, and currently changing it will result in errors. |
-| type | Value type of the configuration. Follows the syntax of Python type hints. |
-| constraints | Value constraints of the configuration. |
-| default | Default value of the configuration. Uses YAML value syntax. |
+| Attribute | Explanation |
+|:---------------:|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| visibility | Represents what kind(s) of models and tasks this configuration belongs to. |
+| scope | The scope of effects of the configuration, indicating what it can influence within the whole pipeline. Possible values are:<br>**nn** - This configuration is related to how the neural networks are formed and initialized. Modifying it will result in failure when loading or resuming from checkpoints.<br>**preprocessing** - This configuration controls how raw data pieces or inference inputs are converted to inputs of neural networks. Binarizers should be re-run if this configuration is modified.<br>**training** - This configuration describes the training procedures. Most training configurations can affect training performance, memory consumption, device utilization and loss calculation. Modifying training-only configurations will not cause severe inconsistency or errors in most situations.<br>**inference** - This configuration describes the calculation logic through the model graph. Changing it can lead to inconsistent or wrong outputs of inference or validation.<br>**others** - Other configurations not discussed above. Will have different effects according to the descriptions. |
+| customizability | The level of customizability of the configuration. Possible values are:<br>**required** - This configuration **must** be set or modified according to the actual situation or condition, otherwise errors can be raised.<br>**recommended** - It is recommended to adjust this configuration according to the dataset, requirements, environment and hardware. Most functionality-related and feature-related configurations are at this level, and all configurations in this level are widely tested with different values. However, leaving it unchanged will not cause problems.<br>**normal** - There is no need to modify it as the default value is carefully tuned and widely validated. However, one can still use another value if there are some special requirements or situations.<br>**not recommended** - No other values except the default one of this configuration are tested. Modifying it will not cause errors, but may cause unpredictable or significant impacts to the pipelines.<br>**reserved** - This configuration **must not** be modified. It appears in the configuration file only for future scalability, and currently changing it will result in errors. |
+| type | Value type of the configuration. Follows the syntax of Python type hints. |
+| constraints | Value constraints of the configuration. |
+| default | Default value of the configuration. Uses YAML value syntax. |
 
 ### accumulate_grad_batches
 
@@ -59,7 +59,7 @@ nn, preprocessing, inference
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -83,7 +83,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -462,7 +462,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -598,7 +598,7 @@ nn
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -857,7 +857,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -881,7 +881,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -905,7 +905,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -953,7 +953,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -977,7 +977,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -1249,7 +1249,7 @@ training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -1273,7 +1273,7 @@ training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -1419,7 +1419,7 @@ training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -1571,7 +1571,7 @@ training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -1741,7 +1741,7 @@ training
 
 #### customization
 
-preserved
+reserved
 
 #### type
@@ -1765,7 +1765,7 @@ training
 
 #### customization
 
-preserved
+reserved
 
 #### type
@@ -2099,7 +2099,7 @@ training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -2148,7 +2148,7 @@ preprocessing, training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -2316,7 +2316,7 @@ preprocessing, training
 
 #### customizability
 
-preserved
+reserved
 
 #### type
@@ -2388,7 +2388,7 @@ preprocessing
 
 #### customizability
 
-preserved
+reserved
 
 #### type
From 9ec2966d77e9bcf516cb42d0ff0db3544e95883a Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Wed, 17 May 2023 19:39:14 +0800
Subject: [PATCH 383/475] Set `num_pad_tokens` to 1

---
 configs/variance.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/configs/variance.yaml b/configs/variance.yaml
index c48cc46da..9861c5a6c 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -27,6 +27,7 @@ raw_data_dir: 'data/opencpop_variance/raw'
 binary_data_dir: 'data/opencpop_variance/binary'
 binarizer_cls: preprocessing.variance_binarizer.VarianceBinarizer
 dictionary: dictionaries/opencpop-extension.txt
+num_pad_tokens: 1
 
 use_spk_id: false
From 70d572db97f931b1a4672f8498be7993387b0dd8 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 18 May 2023 01:00:28 +0800
Subject: [PATCH 384/475] Make `note_slur` an optional key when `ph_dur` is given

---
 inference/ds_variance.py | 31 +++++++++++++++++++------------
 1 file changed, 19 insertions(+), 12 deletions(-)

diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index 1ec0a1714..f4291389e 100644
--- a/inference/ds_variance.py
+++ b/inference/ds_variance.py
@@ -72,22 +72,12 @@ def preprocess_input(self, param):
         note_dur = torch.diff(note_acc, dim=1, prepend=note_acc.new_zeros(1, 1))
         mel2note = self.lr(note_dur)  # [B=1, T_t]
         T_t = mel2note.shape[1]
-        is_slur = torch.BoolTensor([[int(s) for s in param['note_slur'].split()]]).to(self.device)  # [B=1, T_n]
-        note2word = torch.cumsum(~is_slur, dim=1)  # [B=1, T_n]
-        word_dur = note_dur.new_zeros(1, T_w + 1).scatter_add(
-            1, note2word, note_dur
-        )[:, 1:]  # => [B=1, T_w]
-        mel2word = self.lr(word_dur)  # [B=1, T_t]
         print(f'Length: {T_w} word(s), {note_seq.shape[1]} note(s), {T_ph} token(s), '
               f'{T_t} frame(s), {T_t * self.timestep:.2f} second(s)')
-        if mel2word.shape[1] != T_t:  # Align words with notes
-            mel2word = F.pad(mel2word, [0, T_t - mel2word.shape[1]], value=mel2word[0, -1])
-            word_dur = mel2ph_to_dur(mel2word, T_w)
-        batch['word_dur'] = word_dur
-
-        if param.get('ph_dur'):  # Get mel2ph if ph_dur is given
+        if param.get('ph_dur'):
+            # Get mel2ph if ph_dur is given
             ph_dur_sec = torch.from_numpy(
                 np.array([param['ph_dur'].split()], np.float32)
             ).to(self.device)  # [B=1, T_ph]
@@ -97,12 +87,29 @@ def preprocess_input(self, param):
             if mel2ph.shape[1] != T_t:  # Align phones with notes
                 mel2ph = F.pad(mel2ph, [0, T_t - mel2ph.shape[1]], value=mel2ph[0, -1])
             ph_dur = mel2ph_to_dur(mel2ph, T_ph)
+            # Get word_dur from ph_dur and ph_num
+            word_dur = note_dur.new_zeros(1, T_w + 1).scatter_add(
+                1, ph2word, ph_dur
+            )[:, 1:]  # => [B=1, T_w]
         else:
             ph_dur = None
             mel2ph = None
+            # Get word_dur from note_dur and note_slur
+            is_slur = torch.BoolTensor([[int(s) for s in param['note_slur'].split()]]).to(self.device)  # [B=1, T_n]
+            note2word = torch.cumsum(~is_slur, dim=1)  # [B=1, T_n]
+            word_dur = note_dur.new_zeros(1, T_w + 1).scatter_add(
+                1, note2word, note_dur
+            )[:, 1:]  # => [B=1, T_w]
+
         batch['ph_dur'] = ph_dur
         batch['mel2ph'] = mel2ph
 
+        mel2word = self.lr(word_dur)  # [B=1, T_t]
+        if mel2word.shape[1] != T_t:  # Align words with notes
+            mel2word = F.pad(mel2word, [0, T_t - mel2word.shape[1]], value=mel2word[0, -1])
+            word_dur = mel2ph_to_dur(mel2word, T_w)
+        batch['word_dur'] = word_dur
+
         # Calculate frame-level MIDI pitch, which is a step function curve
         frame_midi_pitch = torch.gather(
             F.pad(note_seq, [1, 0]), 1, mel2note
From c29e073d02a484212ee50572a248482b637083e4 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 18 May 2023 18:22:57 +0800
Subject: [PATCH 385/475] Fix label format to CSV and add migrating scripts

---
 basics/base_module.py               |   2 +-
 preprocessing/acoustic_binarizer.py |  19 ++---
 scripts/migrate.py                  | 113 ++++++++++++++++++----------
 3 files changed, 82 insertions(+), 52 deletions(-)

diff --git a/basics/base_module.py b/basics/base_module.py
index 3f2c2c113..6256191da 100644
--- a/basics/base_module.py
+++ b/basics/base_module.py
@@ -11,7 +11,7 @@ def check_category(self, category):
             raise RuntimeError('Category is not specified in this checkpoint.\n'
                                'If this is a checkpoint in the old format, please consider '
                                'migrating it to the new format via the following command:\n'
-                               'python scripts/migrate.py <input> <output>')
+                               'python scripts/migrate.py ckpt <input> <output>')
         elif category != self.category:
             raise RuntimeError('Category mismatches!\n'
                                f'This checkpoint is of the category \'{category}\', '
diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py
index 99f9812d6..7246273a3 100644
--- a/preprocessing/acoustic_binarizer.py
+++ b/preprocessing/acoustic_binarizer.py
@@ -49,19 +49,12 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id):
                 f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.'
             meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict
         else:
-            utterance_labels = open(raw_data_dir / 'transcriptions.txt', 'r', encoding='utf-8').readlines()
-            for utterance_label in utterance_labels:
-                song_info = utterance_label.split('|')
-                item_name = song_info[0]
-                temp_dict = {
-                    'wav_fn': f'{raw_data_dir}/wavs/{item_name}.wav',
-                    'ph_seq': song_info[2].split(),
-                    'ph_dur': [float(x) for x in song_info[5].split()],
-                    'spk_id': ds_id
-                }
-                assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \
-                    f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.'
-                meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict
+            raise FileNotFoundError(
+                f'transcriptions.csv not found in {raw_data_dir}. '
+                'If this is a dataset with the old transcription format, please consider '
+                'migrating it to the new format via the following command:\n'
+                'python scripts/migrate.py txt <input>'
+            )
         self.items.update(meta_data_dict)
 
     def check_coverage(self):
diff --git a/scripts/migrate.py b/scripts/migrate.py
index 4f10d6291..f1125f3d5 100644
--- a/scripts/migrate.py
+++ b/scripts/migrate.py
@@ -1,41 +1,78 @@
-import argparse
 import pathlib
 from collections import OrderedDict
 
-import torch
-
-
-parser = argparse.ArgumentParser(description='Migrate checkpoint files of MIDI-less acoustic models from old format')
-parser.add_argument('input', type=str, help='Path to the input file')
-parser.add_argument('output', type=str, help='Path to the output file')
-parser.add_argument('--overwrite', required=False, default=False,
-                    action='store_true', help='Overwrite the existing file')
-args = parser.parse_args()
-
-input_ckpt = pathlib.Path(args.input).resolve()
-output_ckpt = pathlib.Path(args.output).resolve()
-assert input_ckpt.exists(), 'The input file does not exist.'
-assert args.overwrite or not output_ckpt.exists(), \
-    'The output file already exists or is the same as the input file.\n' \
-    'This is not recommended because migration scripts may not be stable, ' \
-    'and you may be at risk of losing your model.\n' \
-    'If you are sure to OVERWRITE the existing file, please re-run this script with the \'--overwrite\' argument.'
-
-ckpt_loaded = torch.load(input_ckpt, map_location='cpu')
-if 'category' in ckpt_loaded:
-    print('This checkpoint file is already in the new format.')
-    exit(0)
-state_dict: OrderedDict = ckpt_loaded['state_dict']
-ckpt_loaded['optimizer_states'][0]['state'].clear()
-new_state_dict = OrderedDict()
-for key in state_dict:
-    if key.startswith('model.fs2'):
-        # keep model.fs2.xxx
-        new_state_dict[key] = state_dict[key]
-    else:
-        # model.xxx => model.diffusion.xxx
-        path = key.split('.', maxsplit=1)[1]
-        new_state_dict[f'model.diffusion.{path}'] = state_dict[key]
-ckpt_loaded['category'] = 'acoustic'
-ckpt_loaded['state_dict'] = new_state_dict
-torch.save(ckpt_loaded, output_ckpt)
+import click
+
+
+@click.group()
+def main():
+    pass
+
+
+@main.command(help='Migrate checkpoint files of MIDI-less acoustic models from old format')
+@click.argument('input_ckpt', metavar='INPUT')
+@click.argument('output_ckpt', metavar='OUTPUT')
+@click.option('--overwrite', is_flag=True, show_default=True, help='Overwrite the existing file')
+def ckpt(
+        input_ckpt: str,
+        output_ckpt: str,
+        overwrite: bool = False
+):
+    input_ckpt = pathlib.Path(input_ckpt).resolve()
+    output_ckpt = pathlib.Path(output_ckpt).resolve()
+    assert input_ckpt.exists(), 'The input file does not exist.'
+ assert overwrite or not output_ckpt.exists(), \ + 'The output file already exists or is the same as the input file.\n' \ + 'This is not recommended because migration scripts may not be stable, ' \ + 'and you may be at risk of losing your model.\n' \ + 'If you are sure to OVERWRITE the existing file, please re-run this script with the \'--overwrite\' argument.' + + import torch + ckpt_loaded = torch.load(input_ckpt, map_location='cpu') + if 'category' in ckpt_loaded: + print('This checkpoint file is already in the new format.') + exit(0) + state_dict: OrderedDict = ckpt_loaded['state_dict'] + ckpt_loaded['optimizer_states'][0]['state'].clear() + new_state_dict = OrderedDict() + for key in state_dict: + if key.startswith('model.fs2'): + # keep model.fs2.xxx + new_state_dict[key] = state_dict[key] + else: + # model.xxx => model.diffusion.xxx + path = key.split('.', maxsplit=1)[1] + new_state_dict[f'model.diffusion.{path}'] = state_dict[key] + ckpt_loaded['category'] = 'acoustic' + ckpt_loaded['state_dict'] = new_state_dict + torch.save(ckpt_loaded, output_ckpt) + + +@main.command(help='Migrate transcriptions.txt in old datasets to transcriptions.csv') +@click.argument('input_txt', metavar='INPUT') +def txt( + input_txt: str +): + input_txt = pathlib.Path(input_txt).resolve() + assert input_txt.exists(), 'The input file does not exist.' + with open(input_txt, 'r', encoding='utf8') as f: + utterances = f.readlines() + utterances = [u.split('|') for u in utterances] + utterances = [ + { + 'name': u[0], + 'ph_seq': u[2], + 'ph_dur': u[5] + } + for u in utterances + ] + + import csv + with open(input_txt.with_suffix('.csv'), 'w', encoding='utf8', newline='') as f: + writer = csv.DictWriter(f, fieldnames=['name', 'ph_seq', 'ph_dur']) + writer.writeheader() + writer.writerows(utterances) + + +if __name__ == '__main__': + main() From 600bf01b8a7f0cea7707b38b1e42bd33f8455a8a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 19 May 2023 02:38:00 +0800 Subject: [PATCH 386/475] Reduce duplicate calculations --- modules/fastspeech/tts_modules.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/fastspeech/tts_modules.py b/modules/fastspeech/tts_modules.py index 61bac0310..3ca36210a 100644 --- a/modules/fastspeech/tts_modules.py +++ b/modules/fastspeech/tts_modules.py @@ -125,12 +125,14 @@ def forward(self, xs, x_masks=None, infer=True): (train) FloatTensor, (infer) LongTensor: Batch of predicted durations in linear domain (B, Tmax). 
""" xs = xs.transpose(1, -1) # (B, idim, Tmax) + masks = 1 - x_masks.float() + masks_ = masks[:, None, :] for f in self.conv: xs = f(xs) # (B, C, Tmax) if x_masks is not None: - xs = xs * (1 - x_masks.float())[:, None, :] + xs = xs * masks_ xs = self.linear(xs.transpose(1, -1)) # [B, T, C] - xs = xs * (1 - x_masks.float())[:, :, None] # (B, T, C) + xs = xs * masks[:, :, None] # (B, T, C) dur_pred = self.out2dur(xs) if infer: From c7303ef30cdf7fe60b98dbc49dd26a947a2a6c41 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 21 May 2023 02:30:01 +0800 Subject: [PATCH 387/475] Finish variance model exporting --- deployment/exporters/__init__.py | 1 + deployment/exporters/acoustic_exporter.py | 16 +- deployment/exporters/variance_exporter.py | 589 ++++++++++++++++++++++ deployment/modules/diffusion.py | 62 ++- deployment/modules/fastspeech2.py | 43 ++ deployment/modules/toplevel.py | 194 ++++++- modules/fastspeech/param_adaptor.py | 4 +- scripts/export.py | 68 ++- 8 files changed, 945 insertions(+), 32 deletions(-) create mode 100644 deployment/exporters/variance_exporter.py diff --git a/deployment/exporters/__init__.py b/deployment/exporters/__init__.py index 0a3f94678..910d3e02c 100644 --- a/deployment/exporters/__init__.py +++ b/deployment/exporters/__init__.py @@ -1,2 +1,3 @@ from .acoustic_exporter import DiffSingerAcousticExporter +from .variance_exporter import DiffSingerVarianceExporter from .nsf_hifigan_exporter import NSFHiFiGANExporter diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index b2f4bf8e1..58e2bc6d7 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -92,11 +92,13 @@ def export_model(self, path: Path): print(f'| export model => {path}') def export_attachments(self, path: Path): - path_model_name = path / self.model_name for spk in self.export_spk: - self._export_spk_embed(path_model_name.with_suffix(f'.{spk[0]}.emb'), self._perform_spk_mix(spk[1])) + self._export_spk_embed( + (path / 'dummy').with_suffix(f'.{spk[0]}.emb').with_stem(self.model_name), + self._perform_spk_mix(spk[1]) + ) self._export_dictionary(path / 'dictionary.txt') - self._export_phonemes(path_model_name.with_suffix('.phonemes.txt')) + self._export_phonemes((path / 'dummy').with_suffix('.phonemes.txt').with_stem(self.model_name)) @torch.no_grad() def _torch_export_model(self): @@ -238,7 +240,7 @@ def _perform_spk_mix(self, spk_mix: Dict[str, float]): return spk_mix_embed def _optimize_fs2_graph(self, fs2: onnx.ModelProto) -> onnx.ModelProto: - print(f'Running ONNX simplifier for {self.fs2_class_name}...') + print(f'Running ONNX Simplifier for {self.fs2_class_name}...') fs2, check = onnxsim.simplify(fs2, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' print(f'| optimize graph: {self.fs2_class_name}') @@ -248,7 +250,7 @@ def _optimize_diffusion_graph(self, diffusion: onnx.ModelProto) -> onnx.ModelPro onnx_helper.model_override_io_shapes(diffusion, output_shapes={ 'mel': (1, 'n_frames', hparams['audio_num_mel_bins']) }) - print(f'Running ONNX simplifier #1 for {self.diffusion_class_name}...') + print(f'Running ONNX Simplifier #1 for {self.diffusion_class_name}...') diffusion, check = onnxsim.simplify(diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' onnx_helper.graph_fold_back_to_squeeze(diffusion.graph) @@ -258,13 +260,13 @@ def _optimize_diffusion_graph(self, diffusion: onnx.ModelProto) -> onnx.ModelPro 
alias_prefix='/diffusion/denoise_fn/cache' ) onnx_helper.graph_remove_unused_values(diffusion.graph) - print(f'Running ONNX simplifier #2 for {self.diffusion_class_name}...') + print(f'Running ONNX Simplifier #2 for {self.diffusion_class_name}...') diffusion, check = onnxsim.simplify( diffusion, include_subgraph=True ) - print(f'| optimize graph: {self.diffusion_class_name}') assert check, 'Simplified ONNX model could not be validated' + print(f'| optimize graph: {self.diffusion_class_name}') return diffusion def _merge_fs2_diffusion_graphs(self, fs2: onnx.ModelProto, diffusion: onnx.ModelProto) -> onnx.ModelProto: diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py new file mode 100644 index 000000000..291d94c45 --- /dev/null +++ b/deployment/exporters/variance_exporter.py @@ -0,0 +1,589 @@ +import shutil +from pathlib import Path +from typing import Union + +import onnx +import onnxsim +import torch + +from basics.base_exporter import BaseExporter +from deployment.modules.toplevel import DiffSingerVarianceONNX +from utils import load_ckpt, onnx_helper +from utils.hparams import hparams +from utils.phoneme_utils import locate_dictionary, build_phoneme_list +from utils.text_encoder import TokenTextEncoder + + +class DiffSingerVarianceExporter(BaseExporter): + def __init__( + self, + device: Union[str, torch.device] = 'cpu', + cache_dir: Path = None, + ckpt_steps: int = None, + ): + super().__init__(device=device, cache_dir=cache_dir) + # Basic attributes + self.model_name: str = hparams['exp_name'] + self.ckpt_steps: int = ckpt_steps + self.vocab = TokenTextEncoder(vocab_list=build_phoneme_list()) + self.model = self.build_model() + self.linguistic_encoder_cache_path = self.cache_dir / 'linguistic.onnx' + self.dur_predictor_cache_path = self.cache_dir / 'dur.onnx' + self.pitch_preprocess_cache_path = self.cache_dir / 'pitch_pre.onnx' + self.pitch_diffusion_cache_path = self.cache_dir / 'pitch.onnx' + self.variance_preprocess_cache_path = self.cache_dir / 'variance_pre.onnx' + self.variance_diffusion_cache_path = self.cache_dir / 'variance.onnx' + self.variance_postprocess_cache_path = self.cache_dir / 'variance_post.onnx' + + # Attributes for logging + self.fs2_class_name = self.model.fs2.__class__.__name__.removesuffix('ONNX') + self.dur_predictor_class_name = \ + self.model.fs2.dur_predictor.__class__.__name__ if self.model.predict_dur else None + self.pitch_denoiser_class_name = \ + self.model.pitch_predictor.denoise_fn.__class__.__name__.removesuffix('ONNX') \ + if self.model.predict_pitch else None + self.pitch_diffusion_class_name = \ + self.model.pitch_predictor.__class__.__name__.removesuffix('ONNX') \ + if self.model.predict_pitch else None + self.variance_denoiser_class_name = \ + self.model.variance_predictor.denoise_fn.__class__.__name__.removesuffix('ONNX') \ + if self.model.predict_variances else None + self.variance_diffusion_class_name = \ + self.model.variance_predictor.__class__.__name__.removesuffix('ONNX') \ + if self.model.predict_variances else None + + # Attributes for exporting + ... 
+ + def build_model(self) -> DiffSingerVarianceONNX: + model = DiffSingerVarianceONNX( + vocab_size=len(self.vocab) + ).eval().to(self.device) + load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps, required_category='variance', + prefix_in_ckpt='model', strict=True, device=self.device) + model.build_smooth_op(self.device) + return model + + def export(self, path: Path): + path.mkdir(parents=True, exist_ok=True) + self.export_model(path) + self.export_attachments(path) + + def export_model(self, path: Path): + self._torch_export_model() + linguistic_onnx = self._optimize_linguistic_graph(onnx.load(self.linguistic_encoder_cache_path)) + linguistic_path = path / f'{self.model_name}.linguistic.onnx' + onnx.save(linguistic_onnx, linguistic_path) + print(f'| export linguistic encoder => {linguistic_path}') + self.linguistic_encoder_cache_path.unlink() + if self.model.predict_dur: + dur_predictor_onnx = self._optimize_dur_predictor_graph(onnx.load(self.dur_predictor_cache_path)) + dur_predictor_path = path / f'{self.model_name}.dur.onnx' + onnx.save(dur_predictor_onnx, dur_predictor_path) + self.dur_predictor_cache_path.unlink() + print(f'| export dur predictor => {dur_predictor_path}') + if self.model.predict_pitch and False: + pitch_predictor_onnx = self._optimize_merge_pitch_predictor_graph( + onnx.load(self.pitch_preprocess_cache_path), onnx.load(self.pitch_diffusion_cache_path) + ) + pitch_predictor_path = path / f'{self.model_name}.pitch.onnx' + onnx.save(pitch_predictor_onnx, pitch_predictor_path) + self.pitch_preprocess_cache_path.unlink() + self.pitch_diffusion_cache_path.unlink() + print(f'| export pitch predictor => {pitch_predictor_path}') + if self.model.predict_variances: + variance_predictor_onnx = self._optimize_merge_variance_predictor_graph( + onnx.load(self.variance_preprocess_cache_path), + onnx.load(self.variance_diffusion_cache_path), + onnx.load(self.variance_postprocess_cache_path) + ) + variance_predictor_path = path / f'{self.model_name}.variance.onnx' + onnx.save(variance_predictor_onnx, variance_predictor_path) + self.variance_preprocess_cache_path.unlink() + self.variance_diffusion_cache_path.unlink() + self.variance_postprocess_cache_path.unlink() + print(f'| export variance predictor => {variance_predictor_path}') + + def export_attachments(self, path: Path): + self._export_dictionary(path / 'dictionary.txt') + self._export_phonemes((path / f'{self.model_name}.phonemes.txt')) + + @torch.no_grad() + def _torch_export_model(self): + # Prepare inputs for FastSpeech2 and dur predictor tracing + tokens = torch.LongTensor([[1] * 5]).to(self.device) + ph_dur = torch.LongTensor([[3, 5, 2, 1, 4]]).to(self.device) + word_div = torch.LongTensor([[2, 2, 1]]).to(self.device) + word_dur = torch.LongTensor([[8, 3, 4]]).to(self.device) + encoder_out = torch.rand(1, 5, hparams['hidden_size'], dtype=torch.float32, device=self.device) + x_masks = tokens == 0 + ph_midi = torch.LongTensor([[60] * 5]).to(self.device) + encoder_output_names = ['encoder_out', 'x_masks'] + encoder_common_axes = { + 'encoder_out': { + 1: 'n_tokens' + }, + 'x_masks': { + 1: 'n_tokens' + } + } + + print(f'Exporting {self.fs2_class_name}...') + if self.model.predict_dur: + torch.onnx.export( + self.model.view_as_linguistic_encoder(), + ( + tokens, + word_div, + word_dur + ), + self.linguistic_encoder_cache_path, + input_names=[ + 'tokens', + 'word_div', + 'word_dur' + ], + output_names=encoder_output_names, + dynamic_axes={ + 'tokens': { + 1: 'n_tokens' + }, + 'word_div': { + 1: 'n_words' + }, + 
'word_dur': { + 1: 'n_words' + }, + **encoder_common_axes + }, + opset_version=15 + ) + + print(f'Exporting {self.dur_predictor_class_name}...') + torch.onnx.export( + self.model.view_as_dur_predictor(), + ( + encoder_out, + x_masks, + ph_midi + ), + self.dur_predictor_cache_path, + input_names=[ + 'encoder_out', + 'x_masks', + 'ph_midi' + ], + output_names=[ + 'ph_dur_pred' + ], + dynamic_axes={ + 'ph_midi': { + 1: 'n_tokens' + }, + 'ph_dur_pred': { + 1: 'n_tokens' + }, + **encoder_common_axes + }, + opset_version=15 + ) + else: + torch.onnx.export( + self.model.view_as_linguistic_encoder(), + ( + tokens, + ph_dur + ), + self.linguistic_encoder_cache_path, + input_names=[ + 'tokens', + 'ph_dur' + ], + output_names=encoder_output_names, + dynamic_axes={ + 'tokens': { + 1: 'n_tokens' + }, + 'ph_dur': { + 1: 'n_tokens' + }, + **encoder_common_axes + }, + opset_version=15 + ) + + if self.model.predict_pitch and False: + # Prepare inputs for preprocessor of PitchDiffusion + note_midi = torch.FloatTensor([[60.] * 4]).to(self.device) + note_dur = torch.LongTensor([[2, 6, 3, 4]]).to(self.device) + pitch = torch.FloatTensor([[60.] * 15]).to(self.device) + retake = torch.ones_like(pitch, dtype=torch.bool) + pitch_common_io = ['pitch_cond', 'base_pitch'] + pitch_common_axes = { + 'pitch_cond': { + 1: 'n_frames' + }, + 'base_pitch': { + 1: 'n_frames' + } + } + torch.onnx.export( + self.model.view_as_pitch_preprocess(), + ( + encoder_out, + ph_dur, + note_midi, + note_dur, + pitch, + retake + ), + self.pitch_preprocess_cache_path, + input_names=[ + 'encoder_out', 'ph_dur', + 'note_midi', 'note_dur', + 'pitch', 'retake' + ], + output_names=pitch_common_io, + dynamic_axes={ + 'encoder_out': { + 1: 'n_tokens' + }, + 'ph_dur': { + 1: 'n_tokens' + }, + 'note_midi': { + 1: 'n_notes' + }, + 'note_dur': { + 1: 'n_notes' + }, + 'pitch': { + 1: 'n_frames' + }, + 'retake': { + 1: 'n_frames' + }, + **pitch_common_axes + }, + opset_version=15 + ) + + # Prepare inputs for denoiser tracing and PitchDiffusion scripting + shape = (1, 1, hparams['pitch_prediction_args']['num_pitch_bins'], 15) + noise = torch.randn(shape, device=self.device) + condition = torch.rand((1, hparams['hidden_size'], 15), device=self.device) + step = (torch.rand((1,), device=self.device) * hparams['K_step']).long() + + print(f'Tracing {self.pitch_denoiser_class_name} denoiser...') + pitch_diffusion = self.model.view_as_pitch_diffusion() + pitch_diffusion.pitch_predictor.denoise_fn = torch.jit.trace( + pitch_diffusion.pitch_predictor.denoise_fn, + ( + noise, + step, + condition + ) + ) + + print(f'Scripting {self.pitch_diffusion_class_name}...') + pitch_diffusion = torch.jit.script( + pitch_diffusion, + example_inputs=[ + ( + condition.transpose(1, 2), + pitch, + 1 # p_sample branch + ), + ( + condition.transpose(1, 2), + pitch, + 200 # p_sample_plms branch + ) + ] + ) + + print(f'Exporting {self.pitch_diffusion_class_name}...') + torch.onnx.export( + pitch_diffusion, + ( + condition.transpose(1, 2), + pitch, + 200 + ), + self.pitch_diffusion_cache_path, + input_names=[ + *pitch_common_io, 'speedup' + ], + output_names=[ + 'pitch_pred' + ], + dynamic_axes={ + **pitch_common_axes, + 'pitch_pred': { + 1: 'n_frames' + } + }, + opset_version=15 + ) + + if self.model.predict_variances: + repeat_bins = hparams['variances_prediction_args']['repeat_bins'] + + # Prepare inputs for preprocessor of MultiVarianceDiffusion + pitch = torch.FloatTensor([[60.] * 15]).to(self.device) + variances = { + v_name: torch.FloatTensor([[0.] 
* 15]).to(self.device) + for v_name in self.model.variance_prediction_list + } + retake = torch.ones_like(pitch, dtype=torch.bool) + torch.onnx.export( + self.model.view_as_variance_preprocess(), + ( + encoder_out, + ph_dur, + pitch, + variances, + retake + ), + self.variance_preprocess_cache_path, + input_names=[ + 'encoder_out', 'ph_dur', 'pitch', + *self.model.variance_prediction_list, + 'retake' + ], + output_names=[ + 'variance_cond' + ], + dynamic_axes={ + 'encoder_out': { + 1: 'n_tokens' + }, + 'ph_dur': { + 1: 'n_tokens' + }, + 'pitch': { + 1: 'n_frames' + }, + **{ + v_name: { + 1: 'n_frames' + } + for v_name in self.model.variance_prediction_list + }, + 'retake': { + 1: 'n_frames' + } + }, + opset_version=15 + ) + + # Prepare inputs for denoiser tracing and MultiVarianceDiffusion scripting + shape = (1, len(self.model.variance_prediction_list), repeat_bins, 15) + noise = torch.randn(shape, device=self.device) + condition = torch.rand((1, hparams['hidden_size'], 15), device=self.device) + step = (torch.rand((1,), device=self.device) * hparams['K_step']).long() + + print(f'Tracing {self.variance_denoiser_class_name} denoiser...') + variance_diffusion = self.model.view_as_variance_diffusion() + variance_diffusion.variance_predictor.denoise_fn = torch.jit.trace( + variance_diffusion.variance_predictor.denoise_fn, + ( + noise, + step, + condition + ) + ) + + print(f'Scripting {self.variance_diffusion_class_name}...') + variance_diffusion = torch.jit.script( + variance_diffusion, + example_inputs=[ + ( + condition.transpose(1, 2), + 1 # p_sample branch + ), + ( + condition.transpose(1, 2), + 200 # p_sample_plms branch + ) + ] + ) + + print(f'Exporting {self.variance_diffusion_class_name}...') + torch.onnx.export( + variance_diffusion, + ( + condition.transpose(1, 2), + 200 + ), + self.variance_diffusion_cache_path, + input_names=[ + 'variance_cond', 'speedup' + ], + output_names=[ + 'xs_pred' + ], + dynamic_axes={ + 'variance_cond': { + 1: 'n_frames' + }, + 'xs_pred': { + (1 if len(self.model.variance_prediction_list) == 1 else 2): 'n_frames' + } + }, + opset_version=15 + ) + + # Prepare inputs for postprocessor of MultiVarianceDiffusion + xs_shape = (1, 15) \ + if len(self.model.variance_prediction_list) == 1 \ + else (1, len(self.model.variance_prediction_list), 15) + xs_pred = torch.randn(xs_shape, dtype=torch.float32, device=self.device) + torch.onnx.export( + self.model.view_as_variance_postprocess(), + ( + xs_pred + ), + self.variance_postprocess_cache_path, + input_names=[ + 'xs_pred' + ], + output_names=[ + f'{v_name}_pred' + for v_name in self.model.variance_prediction_list + ], + dynamic_axes={ + 'xs_pred': { + (1 if len(self.model.variance_prediction_list) == 1 else 2): 'n_frames' + }, + **{ + f'{v_name}_pred': { + 1: 'n_frames' + } + for v_name in self.model.variance_prediction_list + } + }, + opset_version=15 + ) + + def _optimize_linguistic_graph(self, linguistic: onnx.ModelProto) -> onnx.ModelProto: + onnx_helper.model_override_io_shapes( + linguistic, + output_shapes={ + 'encoder_out': (1, 'n_tokens', hparams['hidden_size']) + } + ) + print(f'Running ONNX Simplifier for {self.fs2_class_name}...') + linguistic, check = onnxsim.simplify(linguistic, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + print(f'| optimize graph: {self.fs2_class_name}') + return linguistic + + def _optimize_dur_predictor_graph(self, dur_predictor: onnx.ModelProto) -> onnx.ModelProto: + onnx_helper.model_override_io_shapes( + dur_predictor, + output_shapes={ + 
'ph_dur_pred': (1, 'n_tokens') + } + ) + print(f'Running ONNX Simplifier for {self.dur_predictor_class_name}...') + dur_predictor, check = onnxsim.simplify(dur_predictor, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + print(f'| optimize graph: {self.dur_predictor_class_name}') + return dur_predictor + + def _optimize_merge_pitch_predictor_graph( + self, pitch_pre: onnx.ModelProto, pitch_diffusion: onnx.ModelProto + ) -> onnx.ModelProto: + onnx_helper.model_override_io_shapes( + pitch_pre, output_shapes={'pitch_cond': (1, 'n_frames', hparams['hidden_size'])} + ) + pitch_pre, check = onnxsim.simplify(pitch_pre, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + + onnx_helper.model_override_io_shapes( + pitch_diffusion, output_shapes={'pitch_pred': (1, 'n_frames')} + ) + print(f'Running ONNX Simplifier #1 for {self.pitch_diffusion_class_name}...') + pitch_diffusion, check = onnxsim.simplify(pitch_diffusion, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + onnx_helper.graph_fold_back_to_squeeze(pitch_diffusion.graph) + onnx_helper.graph_extract_conditioner_projections( + graph=pitch_diffusion.graph, op_type='Conv', + weight_pattern=r'pitch_predictor\.denoise_fn\.residual_layers\.\d+\.conditioner_projection\.weight', + alias_prefix='/pitch_predictor/denoise_fn/cache' + ) + onnx_helper.graph_remove_unused_values(pitch_diffusion.graph) + print(f'Running ONNX Simplifier #2 for {self.pitch_diffusion_class_name}...') + pitch_diffusion, check = onnxsim.simplify(pitch_diffusion, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + + pitch_predictor = onnx.compose.merge_models( + pitch_pre, pitch_diffusion, io_map=[ + ('pitch_cond', 'pitch_cond'), ('base_pitch', 'base_pitch') + ], prefix1='', prefix2='', doc_string='', + producer_name=pitch_pre.producer_name, producer_version=pitch_pre.producer_version, + domain=pitch_pre.domain, model_version=pitch_pre.model_version + ) + pitch_predictor.graph.name = pitch_pre.graph.name + + print(f'| optimize graph: {self.pitch_diffusion_class_name}') + return pitch_predictor + + def _optimize_merge_variance_predictor_graph( + self, var_pre: onnx.ModelProto, var_diffusion: onnx.ModelProto, var_post: onnx.ModelProto + ): + onnx_helper.model_override_io_shapes( + var_pre, output_shapes={'variance_cond': (1, 'n_frames', hparams['hidden_size'])} + ) + var_pre, check = onnxsim.simplify(var_pre, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + + onnx_helper.model_override_io_shapes( + var_diffusion, output_shapes={ + 'xs_pred': (1, 'n_frames') + if len(self.model.variance_prediction_list) == 1 + else (1, len(self.model.variance_prediction_list), 'n_frames') + } + ) + print(f'Running ONNX Simplifier #1 for {self.variance_diffusion_class_name}...') + var_diffusion, check = onnxsim.simplify(var_diffusion, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + onnx_helper.graph_fold_back_to_squeeze(var_diffusion.graph) + onnx_helper.graph_extract_conditioner_projections( + graph=var_diffusion.graph, op_type='Conv', + weight_pattern=r'variance_predictor\.denoise_fn\.residual_layers\.\d+\.conditioner_projection\.weight', + alias_prefix='/variance_predictor/denoise_fn/cache' + ) + onnx_helper.graph_remove_unused_values(var_diffusion.graph) + print(f'Running ONNX Simplifier #2 for {self.variance_diffusion_class_name}...') + var_diffusion, check = 
onnxsim.simplify(var_diffusion, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + + var_post, check = onnxsim.simplify(var_post, include_subgraph=True) + assert check, 'Simplified ONNX model could not be validated' + + var_pre_diffusion = onnx.compose.merge_models( + var_pre, var_diffusion, io_map=[('variance_cond', 'variance_cond')], + prefix1='', prefix2='', doc_string='', + producer_name=var_pre.producer_name, producer_version=var_pre.producer_version, + domain=var_pre.domain, model_version=var_pre.model_version + ) + var_pre_diffusion.graph.name = var_pre.graph.name + var_predictor = onnx.compose.merge_models( + var_pre_diffusion, var_post, io_map=[('xs_pred', 'xs_pred')], + prefix1='', prefix2='', doc_string='', + producer_name=var_pre.producer_name, producer_version=var_pre.producer_version, + domain=var_pre.domain, model_version=var_pre.model_version + ) + var_predictor.graph.name = var_pre.graph.name + return var_predictor + + # noinspection PyMethodMayBeStatic + def _export_dictionary(self, path: Path): + print(f'| export dictionary => {path}') + shutil.copy(locate_dictionary(), path) + + def _export_phonemes(self, path: Path): + self.vocab.store_to_file(path) + print(f'| export phonemes => {path}') diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index 08fe607fb..a29c1345c 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -1,9 +1,11 @@ -from typing import List +from typing import List, Tuple import torch from torch import Tensor -from modules.diffusion.ddpm import GaussianDiffusion +from modules.diffusion.ddpm import ( + GaussianDiffusion, PitchDiffusion, MultiVarianceDiffusion +) def extract(a, t): @@ -29,7 +31,7 @@ def p_sample(self, x, t, cond): # no noise when t == 0 nonzero_mask = ((t > 0).float()).reshape(1, 1, 1, 1) return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise - + def plms_get_x_pred(self, x, noise_t, t, t_prev): a_t = extract(self.alphas_cumprod, t) a_prev = extract(self.alphas_cumprod, t_prev) @@ -99,3 +101,57 @@ def forward(self, condition, speedup): x = x.permute(0, 1, 3, 2) # [B, F, M, T] => [B, F, T, M] x = self.denorm_spec(x) return x + + +class PitchDiffusionONNX(GaussianDiffusionONNX, PitchDiffusion): + def __init__(self, vmin: float, vmax: float, repeat_bins, + timesteps=1000, k_step=1000, + denoiser_type=None, denoiser_args=None, + betas=None): + self.vmin = vmin + self.vmax = vmax + super(PitchDiffusion, self).__init__( + vmin=vmin, vmax=vmax, repeat_bins=repeat_bins, + timesteps=timesteps, k_step=k_step, + denoiser_type=denoiser_type, denoiser_args=denoiser_args, + betas=betas + ) + + def denorm_spec(self, x): + d = (self.spec_max - self.spec_min) / 2. + m = (self.spec_max + self.spec_min) / 2. 
+ x = x * d + m + x = x.mean(dim=-1) + x = x.clamp(min=self.vmin, max=self.vmax) + return x + + +class MultiVarianceDiffusionONNX(GaussianDiffusionONNX, MultiVarianceDiffusion): + def __init__( + self, ranges: List[Tuple[float, float]], + clamps: List[Tuple[float | None, float | None] | None], + repeat_bins, timesteps=1000, k_step=1000, + denoiser_type=None, denoiser_args=None, + betas=None + ): + assert len(ranges) == len(clamps) + self.clamps = clamps + vmin = [r[0] for r in ranges] + vmax = [r[1] for r in ranges] + if len(vmin) == 1: + vmin = vmin[0] + if len(vmax) == 1: + vmax = vmax[0] + super(MultiVarianceDiffusion, self).__init__( + vmin=vmin, vmax=vmax, repeat_bins=repeat_bins, + timesteps=timesteps, k_step=k_step, + denoiser_type=denoiser_type, denoiser_args=denoiser_args, + betas=betas + ) + + def denorm_spec(self, x): + d = (self.spec_max - self.spec_min) / 2. + m = (self.spec_max + self.spec_min) / 2. + x = x * d + m + x = x.mean(dim=-1) + return x diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py index bf7987602..71aae48b9 100644 --- a/deployment/modules/fastspeech2.py +++ b/deployment/modules/fastspeech2.py @@ -1,12 +1,16 @@ +import copy + import torch import torch.nn as nn import torch.nn.functional as F from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic +from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams from utils.pitch_utils import ( f0_bin, f0_mel_min, f0_mel_max ) +from utils.text_encoder import PAD_INDEX def f0_to_coarse(f0): @@ -83,3 +87,42 @@ def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=N else: condition += spk_embed return condition + + +class FastSpeech2VarianceONNX(FastSpeech2Variance): + def __init__(self, vocab_size): + super().__init__(vocab_size=vocab_size) + self.lr = LengthRegulator() + + def forward_encoder_word(self, tokens, word_div, word_dur): + ph2word = self.lr(word_div) + onset = ph2word > F.pad(ph2word, [1, -1]) + onset_embed = self.onset_embed(onset.long()) + ph_word_dur = torch.gather(F.pad(word_dur, [1, 0]), 1, ph2word) + word_dur_embed = self.word_dur_embed(ph_word_dur.float()[:, :, None]) + return self.encoder(tokens, onset_embed + word_dur_embed), tokens == 0 + + def forward_encoder_phoneme(self, tokens, ph_dur): + ph_dur_embed = self.ph_dur_embed(ph_dur.float()[:, :, None]) + return self.encoder(tokens, ph_dur_embed), tokens == PAD_INDEX + + def forward_dur_predictor(self, encoder_out, x_masks, ph_midi): + midi_embed = self.midi_embed(ph_midi) + dur_cond = encoder_out + midi_embed + ph_dur = self.dur_predictor(dur_cond, x_masks=x_masks) + return ph_dur + + def view_as_encoder(self): + model = copy.deepcopy(self) + if self.predict_dur: + del model.dur_predictor + model.forward = model.forward_encoder_word + else: + model.forward = model.forward_encoder_phoneme + return model + + def view_as_dur_predictor(self): + model = copy.deepcopy(self) + del model.encoder + model.forward = model.forward_dur_predictor + return model diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 197409fcc..56f05f5b5 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -1,9 +1,15 @@ +import numpy as np import copy -from torch import Tensor, nn +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch import Tensor -from deployment.modules.diffusion import GaussianDiffusionONNX -from deployment.modules.fastspeech2 import FastSpeech2AcousticONNX +from 
deployment.modules.diffusion import ( + GaussianDiffusionONNX, PitchDiffusionONNX, MultiVarianceDiffusionONNX +) +from deployment.modules.fastspeech2 import FastSpeech2AcousticONNX, FastSpeech2VarianceONNX from modules.toplevel import DiffSingerAcoustic, DiffSingerVariance from utils.hparams import hparams @@ -62,7 +68,6 @@ def view_as_adaptor(self) -> nn.Module: del model.fs2 del model.diffusion raise NotImplementedError() - def view_as_diffusion(self) -> nn.Module: model = copy.deepcopy(self) @@ -76,5 +81,182 @@ def view_as_diffusion(self) -> nn.Module: return model -class DiffSingerVarianceOnnx(DiffSingerVariance): - pass +class DiffSingerVarianceONNX(DiffSingerVariance): + def __init__(self, vocab_size): + super().__init__(vocab_size=vocab_size) + del self.fs2 + self.fs2 = FastSpeech2VarianceONNX( + vocab_size=vocab_size + ) + self.hidden_size = hparams['hidden_size'] + if self.predict_pitch: + del self.pitch_predictor + self.smooth: nn.Conv1d = None + pitch_hparams = hparams['pitch_prediction_args'] + self.pitch_predictor = PitchDiffusionONNX( + vmin=pitch_hparams['pitch_delta_vmin'], + vmax=pitch_hparams['pitch_delta_vmax'], + repeat_bins=pitch_hparams['num_pitch_bins'], + timesteps=hparams['timesteps'], + k_step=hparams['K_step'], + denoiser_type=hparams['diff_decoder_type'], + denoiser_args=( + pitch_hparams['residual_layers'], + pitch_hparams['residual_channels'] + ) + ) + if self.predict_variances: + del self.variance_predictor + self.variance_predictor = self.build_adaptor(cls=MultiVarianceDiffusionONNX) + + def build_smooth_op(self, device): + smooth_kernel_size = round(hparams['midi_smooth_width'] * hparams['audio_sample_rate'] / hparams['hop_size']) + smooth = nn.Conv1d( + in_channels=1, + out_channels=1, + kernel_size=smooth_kernel_size, + bias=False, + padding='same', + padding_mode='replicate' + ).eval() + smooth_kernel = torch.sin(torch.from_numpy( + np.linspace(0, 1, smooth_kernel_size).astype(np.float32) * np.pi + )) + smooth_kernel /= smooth_kernel.sum() + smooth.weight.data = smooth_kernel[None, None] + self.smooth = smooth.to(device) + + def forward_linguistic_encoder_word(self, tokens, word_div, word_dur): + return self.fs2.forward_encoder_word(tokens, word_div, word_dur) + + def forward_linguistic_encoder_phoneme(self, tokens, ph_dur): + return self.fs2.forward_encoder_phoneme(tokens, ph_dur) + + def forward_dur_predictor(self, encoder_out, x_masks, ph_midi): + return self.fs2.forward_dur_predictor(encoder_out, x_masks, ph_midi) + + def forward_mel2x_gather(self, x_src, x_dur, x_dim=None): + mel2x = self.lr(x_dur) + if x_dim is not None: + x_src = F.pad(x_src, [0, 0, 1, 0]) + mel2x = mel2x[..., None].repeat([1, 1, x_dim]) + else: + x_src = F.pad(x_src, [1, 0]) + x_cond = torch.gather(x_src, 1, mel2x) + return x_cond + + def forward_pitch_preprocess( + self, encoder_out, ph_dur, note_midi, note_dur, + pitch=None, retake=None + ): + condition = self.forward_mel2x_gather(encoder_out, ph_dur, x_dim=self.hidden_size) + condition += self.retake_embed(retake.long()) + frame_midi_pitch = self.forward_mel2x_gather(note_midi, note_dur, x_dim=None) + base_pitch = self.smooth(frame_midi_pitch) + base_pitch += (pitch - base_pitch) * ~retake + pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) + return pitch_cond, base_pitch + + def forward_pitch_diffusion( + self, pitch_cond, base_pitch, speedup: int = 1 + ): + pitch_pred = self.pitch_predictor(pitch_cond, speedup) + base_pitch + return pitch_pred + + def forward_variance_preprocess( + self, encoder_out, 
ph_dur, pitch, variances: dict = None, retake=None + ): + condition = self.forward_mel2x_gather(encoder_out, ph_dur, x_dim=self.hidden_size) + condition += self.retake_embed(retake.long()) + variance_cond = condition + self.pitch_embed(pitch[:, :, None]) + non_retake = (~retake).float() + variance_embeds = [ + self.variance_embeds[v_name]((variances[v_name] * non_retake)[:, :, None]) + for v_name in self.variance_prediction_list + ] + variance_cond += torch.stack(variance_embeds, dim=-1).sum(-1) + return variance_cond + + def forward_variance_diffusion(self, variance_cond, speedup: int = 1): + xs_pred = self.variance_predictor(variance_cond, speedup) + return xs_pred + + def forward_variance_postprocess(self, xs_pred): + if self.variance_predictor.num_feats == 1: + xs_pred = [xs_pred] + else: + xs_pred = xs_pred.unbind(dim=1) + variance_pred = self.variance_predictor.clamp_spec(xs_pred) + return tuple(variance_pred) + + def view_as_linguistic_encoder(self): + model = copy.deepcopy(self) + if self.predict_pitch: + del model.pitch_predictor + if self.predict_variances: + del model.variance_predictor + model.fs2 = model.fs2.view_as_encoder() + if self.predict_dur: + model.forward = model.forward_linguistic_encoder_word + else: + model.forward = model.forward_linguistic_encoder_phoneme + return model + + def view_as_dur_predictor(self): + model = copy.deepcopy(self) + if self.predict_pitch: + del model.pitch_predictor + if self.predict_variances: + del model.variance_predictor + assert self.predict_dur + model.fs2 = model.fs2.view_as_dur_predictor() + model.forward = model.forward_dur_predictor + return model + + def view_as_pitch_preprocess(self): + model = copy.deepcopy(self) + del model.fs2 + if self.predict_pitch: + del model.pitch_predictor + if self.predict_variances: + del model.variance_predictor + model.forward = model.forward_pitch_preprocess + return model + + def view_as_pitch_diffusion(self): + model = copy.deepcopy(self) + del model.fs2 + del model.lr + if self.predict_variances: + del model.variance_predictor + assert self.predict_pitch + model.forward = model.forward_pitch_diffusion + return model + + def view_as_variance_preprocess(self): + model = copy.deepcopy(self) + del model.fs2 + if self.predict_pitch: + del model.pitch_predictor + if self.predict_variances: + del model.variance_predictor + model.forward = model.forward_variance_preprocess + return model + + def view_as_variance_diffusion(self): + model = copy.deepcopy(self) + del model.fs2 + del model.lr + if self.predict_pitch: + del model.pitch_predictor + assert self.predict_variances + model.forward = model.forward_variance_diffusion + return model + + def view_as_variance_postprocess(self): + model = copy.deepcopy(self) + del model.fs2 + if self.predict_pitch: + del model.pitch_predictor + model.forward = model.forward_variance_postprocess + return model diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py index fcdc61835..8b57ee6c1 100644 --- a/modules/fastspeech/param_adaptor.py +++ b/modules/fastspeech/param_adaptor.py @@ -18,7 +18,7 @@ def __init__(self): self.variance_prediction_list.append('breathiness') self.predict_variances = len(self.variance_prediction_list) > 0 - def build_adaptor(self): + def build_adaptor(self, cls=MultiVarianceDiffusion): ranges = [] clamps = [] @@ -37,7 +37,7 @@ def build_adaptor(self): clamps.append((0., 1.)) variances_hparams = hparams['variances_prediction_args'] - return MultiVarianceDiffusion( + return cls( ranges=ranges, clamps=clamps, 
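             # cls defaults to MultiVarianceDiffusion for training/inference;
             # the ONNX exporter passes MultiVarianceDiffusionONNX instead, so
             # both variants are built from the same ranges/clamps config.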
repeat_bins=variances_hparams['repeat_bins'], diff --git a/scripts/export.py b/scripts/export.py index 72375b911..f9a9492db 100644 --- a/scripts/export.py +++ b/scripts/export.py @@ -14,6 +14,24 @@ from utils.hparams import set_hparams, hparams +def find_exp(exp): + if not (root_dir / 'checkpoints' / exp).exists(): + for subdir in (root_dir / 'checkpoints').iterdir(): + if not subdir.is_dir(): + continue + if subdir.name.startswith(exp): + print(f'| match ckpt by prefix: {subdir.name}') + exp = subdir.name + break + else: + assert False, \ + f'There are no matching exp starting with \'{exp}\' in \'checkpoints\' folder. ' \ + 'Please specify \'--exp\' as the folder name or prefix.' + else: + print(f'| found ckpt by name: {exp}') + return exp + + @click.group() def main(): pass @@ -51,20 +69,7 @@ def acoustic( exit(-1) if freeze_gender is not None: assert -1. <= freeze_gender <= 1., 'Frozen gender must be in [-1, 1].' - if not (root_dir / 'checkpoints' / exp).exists(): - for subdir in (root_dir / 'checkpoints').iterdir(): - if not subdir.is_dir(): - continue - if subdir.name.startswith(exp): - print(f'| match ckpt by prefix: {subdir.name}') - exp = subdir.name - break - else: - assert False, \ - f'There are no matching exp starting with \'{exp}\' in \'checkpoints\' folder. ' \ - 'Please specify \'--exp\' as the folder name or prefix.' - else: - print(f'| found ckpt by name: {exp}') + exp = find_exp(exp) if out is None: out = root_dir / 'artifacts' / exp else: @@ -122,6 +127,41 @@ def acoustic( exporter.export(out) +@main.command(help='Export DiffSinger variance model to ONNX format.') +@click.option('--exp', type=str, required=True, metavar='', help='Choose an experiment to export.') +@click.option('--ckpt', type=int, required=False, metavar='', help='Checkpoint training steps.') +@click.option('--out', type=str, required=False, metavar='', help='Output directory for the artifacts.') +def variance( + exp: str, + ckpt: int = None, + out: str = None, +): + # Validate arguments + exp = find_exp(exp) + if out is None: + out = root_dir / 'artifacts' / exp + else: + out = Path(out) + out = out.resolve() + + # Load configurations + sys.argv = [ + sys.argv[0], + '--exp_name', + exp, + '--infer' + ] + set_hparams() + from deployment.exporters import DiffSingerVarianceExporter + print(f'| Exporter: {DiffSingerVarianceExporter}') + exporter = DiffSingerVarianceExporter( + device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'), + cache_dir=root_dir / 'deployment' / 'cache', + ckpt_steps=ckpt, + ) + exporter.export(out) + + @main.command(help='Export NSF-HiFiGAN vocoder model to ONNX format.') @click.option('--config', type=str, required=True, metavar='', help='Specify a config path of the vocoder.') @click.option('--out', type=str, required=False, metavar='', help='Output directory for the artifacts.') From 49931f3b04535da8d576a18a3da1359ba69fb366 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 21 May 2023 14:10:36 +0800 Subject: [PATCH 388/475] Fix logic of category check --- deployment/exporters/acoustic_exporter.py | 2 +- deployment/exporters/variance_exporter.py | 2 +- inference/ds_acoustic.py | 2 +- inference/ds_variance.py | 2 +- utils/__init__.py | 7 ++----- 5 files changed, 6 insertions(+), 9 deletions(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 58e2bc6d7..f392e1bec 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -69,7 +69,7 @@ def build_model(self) -> 
DiffSingerAcousticONNX: vocab_size=len(self.vocab), out_dims=hparams['audio_num_mel_bins'] ).eval().to(self.device) - load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps, required_category='acoustic', + load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps, prefix_in_ckpt='model', strict=True, device=self.device) return model diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index 291d94c45..1cabc4d3e 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -59,7 +59,7 @@ def build_model(self) -> DiffSingerVarianceONNX: model = DiffSingerVarianceONNX( vocab_size=len(self.vocab) ).eval().to(self.device) - load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps, required_category='variance', + load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps, prefix_in_ckpt='model', strict=True, device=self.device) model.build_smooth_op(self.device) return model diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index a28fb8c79..e31d477a4 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -51,7 +51,7 @@ def build_model(self, ckpt_steps=None): vocab_size=len(self.ph_encoder), out_dims=hparams['audio_num_mel_bins'] ).eval().to(self.device) - load_ckpt(model, hparams['work_dir'], ckpt_steps=ckpt_steps, required_category='acoustic', + load_ckpt(model, hparams['work_dir'], ckpt_steps=ckpt_steps, prefix_in_ckpt='model', strict=True, device=self.device) return model diff --git a/inference/ds_variance.py b/inference/ds_variance.py index f4291389e..c90407480 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -45,7 +45,7 @@ def build_model(self, ckpt_steps=None): model = DiffSingerVariance( vocab_size=len(self.ph_encoder) ).eval().to(self.device) - load_ckpt(model, hparams['work_dir'], ckpt_steps=ckpt_steps, required_category='variance', + load_ckpt(model, hparams['work_dir'], ckpt_steps=ckpt_steps, prefix_in_ckpt='model', strict=True, device=self.device) return model diff --git a/utils/__init__.py b/utils/__init__.py index 9a7576202..f42ed8b69 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -140,7 +140,7 @@ def filter_kwargs(dict_to_filter, kwarg_obj): def load_ckpt( cur_model, ckpt_base_dir, ckpt_steps=None, - required_category=None, prefix_in_ckpt='model', key_in_ckpt='state_dict', + prefix_in_ckpt='model', key_in_ckpt='state_dict', strict=True, device='cpu' ): if not isinstance(ckpt_base_dir, pathlib.Path): @@ -164,10 +164,7 @@ def load_ckpt( assert len(checkpoint_path) > 0, f'| ckpt not found in {ckpt_base_dir}.' 
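     # checkpoint_path holds either the single requested checkpoint or all
     # step checkpoints (expected in ascending step order), so the last entry
     # is the one to load.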
checkpoint_path = checkpoint_path[-1] ckpt_loaded = torch.load(checkpoint_path, map_location=device) - if required_category is not None: - if not isinstance(cur_model, CategorizedModule): - raise TypeError(f'The \'{required_category}\' argument can only be used ' - f'on a \'basics.base_model.CategorizedModule\'.') + if isinstance(cur_model, CategorizedModule): cur_model.check_category(ckpt_loaded.get('category')) if key_in_ckpt is None: state_dict = ckpt_loaded From fd4efd8a2e3e148e42a033646d49030a923fdbea Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 21 May 2023 22:10:41 +0800 Subject: [PATCH 389/475] Fix ONNX graph errors --- deployment/exporters/acoustic_exporter.py | 6 +- deployment/exporters/variance_exporter.py | 106 ++++++++++++++++------ deployment/modules/diffusion.py | 4 +- deployment/modules/toplevel.py | 16 +++- modules/diffusion/wavenet.py | 2 +- utils/onnx_helper.py | 14 +-- 6 files changed, 107 insertions(+), 41 deletions(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index f392e1bec..e05d0183d 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -240,7 +240,7 @@ def _perform_spk_mix(self, spk_mix: Dict[str, float]): return spk_mix_embed def _optimize_fs2_graph(self, fs2: onnx.ModelProto) -> onnx.ModelProto: - print(f'Running ONNX Simplifier for {self.fs2_class_name}...') + print(f'Running ONNX Simplifier on {self.fs2_class_name}...') fs2, check = onnxsim.simplify(fs2, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' print(f'| optimize graph: {self.fs2_class_name}') @@ -250,7 +250,7 @@ def _optimize_diffusion_graph(self, diffusion: onnx.ModelProto) -> onnx.ModelPro onnx_helper.model_override_io_shapes(diffusion, output_shapes={ 'mel': (1, 'n_frames', hparams['audio_num_mel_bins']) }) - print(f'Running ONNX Simplifier #1 for {self.diffusion_class_name}...') + print(f'Running ONNX Simplifier #1 on {self.diffusion_class_name}...') diffusion, check = onnxsim.simplify(diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' onnx_helper.graph_fold_back_to_squeeze(diffusion.graph) @@ -260,7 +260,7 @@ def _optimize_diffusion_graph(self, diffusion: onnx.ModelProto) -> onnx.ModelPro alias_prefix='/diffusion/denoise_fn/cache' ) onnx_helper.graph_remove_unused_values(diffusion.graph) - print(f'Running ONNX Simplifier #2 for {self.diffusion_class_name}...') + print(f'Running ONNX Simplifier #2 on {self.diffusion_class_name}...') diffusion, check = onnxsim.simplify( diffusion, include_subgraph=True diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index 1cabc4d3e..dfccd847c 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -31,6 +31,7 @@ def __init__( self.dur_predictor_cache_path = self.cache_dir / 'dur.onnx' self.pitch_preprocess_cache_path = self.cache_dir / 'pitch_pre.onnx' self.pitch_diffusion_cache_path = self.cache_dir / 'pitch.onnx' + self.pitch_postprocess_cache_path = self.cache_dir / 'pitch_post.onnx' self.variance_preprocess_cache_path = self.cache_dir / 'variance_pre.onnx' self.variance_diffusion_cache_path = self.cache_dir / 'variance.onnx' self.variance_postprocess_cache_path = self.cache_dir / 'variance_post.onnx' @@ -82,9 +83,11 @@ def export_model(self, path: Path): onnx.save(dur_predictor_onnx, dur_predictor_path) self.dur_predictor_cache_path.unlink() print(f'| export dur predictor 
=> {dur_predictor_path}') - if self.model.predict_pitch and False: + if self.model.predict_pitch: pitch_predictor_onnx = self._optimize_merge_pitch_predictor_graph( - onnx.load(self.pitch_preprocess_cache_path), onnx.load(self.pitch_diffusion_cache_path) + onnx.load(self.pitch_preprocess_cache_path), + onnx.load(self.pitch_diffusion_cache_path), + onnx.load(self.pitch_postprocess_cache_path) ) pitch_predictor_path = path / f'{self.model_name}.pitch.onnx' onnx.save(pitch_predictor_onnx, pitch_predictor_path) @@ -212,21 +215,12 @@ def _torch_export_model(self): opset_version=15 ) - if self.model.predict_pitch and False: + if self.model.predict_pitch: # Prepare inputs for preprocessor of PitchDiffusion note_midi = torch.FloatTensor([[60.] * 4]).to(self.device) note_dur = torch.LongTensor([[2, 6, 3, 4]]).to(self.device) pitch = torch.FloatTensor([[60.] * 15]).to(self.device) retake = torch.ones_like(pitch, dtype=torch.bool) - pitch_common_io = ['pitch_cond', 'base_pitch'] - pitch_common_axes = { - 'pitch_cond': { - 1: 'n_frames' - }, - 'base_pitch': { - 1: 'n_frames' - } - } torch.onnx.export( self.model.view_as_pitch_preprocess(), ( @@ -243,7 +237,9 @@ def _torch_export_model(self): 'note_midi', 'note_dur', 'pitch', 'retake' ], - output_names=pitch_common_io, + output_names=[ + 'pitch_cond', 'base_pitch' + ], dynamic_axes={ 'encoder_out': { 1: 'n_tokens' @@ -263,7 +259,12 @@ def _torch_export_model(self): 'retake': { 1: 'n_frames' }, - **pitch_common_axes + 'pitch_cond': { + 1: 'n_frames' + }, + 'base_pitch': { + 1: 'n_frames' + } }, opset_version=15 ) @@ -291,12 +292,10 @@ def _torch_export_model(self): example_inputs=[ ( condition.transpose(1, 2), - pitch, 1 # p_sample branch ), ( condition.transpose(1, 2), - pitch, 200 # p_sample_plms branch ) ] @@ -307,18 +306,48 @@ def _torch_export_model(self): pitch_diffusion, ( condition.transpose(1, 2), - pitch, 200 ), self.pitch_diffusion_cache_path, input_names=[ - *pitch_common_io, 'speedup' + 'pitch_cond', 'speedup' + ], + output_names=[ + 'x_pred' + ], + dynamic_axes={ + 'pitch_cond': { + 1: 'n_frames' + }, + 'x_pred': { + 1: 'n_frames' + } + }, + opset_version=15 + ) + + # Prepare inputs for postprocessor of MultiVarianceDiffusion + torch.onnx.export( + self.model.view_as_pitch_postprocess(), + ( + pitch, + pitch + ), + self.pitch_postprocess_cache_path, + input_names=[ + 'x_pred', + 'base_pitch' ], output_names=[ 'pitch_pred' ], dynamic_axes={ - **pitch_common_axes, + 'x_pred': { + 1: 'n_frames' + }, + 'base_pitch': { + 1: 'n_frames' + }, 'pitch_pred': { 1: 'n_frames' } @@ -473,7 +502,7 @@ def _optimize_linguistic_graph(self, linguistic: onnx.ModelProto) -> onnx.ModelP 'encoder_out': (1, 'n_tokens', hparams['hidden_size']) } ) - print(f'Running ONNX Simplifier for {self.fs2_class_name}...') + print(f'Running ONNX Simplifier on {self.fs2_class_name}...') linguistic, check = onnxsim.simplify(linguistic, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' print(f'| optimize graph: {self.fs2_class_name}') @@ -486,14 +515,14 @@ def _optimize_dur_predictor_graph(self, dur_predictor: onnx.ModelProto) -> onnx. 
'ph_dur_pred': (1, 'n_tokens') } ) - print(f'Running ONNX Simplifier for {self.dur_predictor_class_name}...') + print(f'Running ONNX Simplifier on {self.dur_predictor_class_name}...') dur_predictor, check = onnxsim.simplify(dur_predictor, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' print(f'| optimize graph: {self.dur_predictor_class_name}') return dur_predictor def _optimize_merge_pitch_predictor_graph( - self, pitch_pre: onnx.ModelProto, pitch_diffusion: onnx.ModelProto + self, pitch_pre: onnx.ModelProto, pitch_diffusion: onnx.ModelProto, pitch_post: onnx.ModelProto ) -> onnx.ModelProto: onnx_helper.model_override_io_shapes( pitch_pre, output_shapes={'pitch_cond': (1, 'n_frames', hparams['hidden_size'])} @@ -504,7 +533,7 @@ def _optimize_merge_pitch_predictor_graph( onnx_helper.model_override_io_shapes( pitch_diffusion, output_shapes={'pitch_pred': (1, 'n_frames')} ) - print(f'Running ONNX Simplifier #1 for {self.pitch_diffusion_class_name}...') + print(f'Running ONNX Simplifier #1 on {self.pitch_diffusion_class_name}...') pitch_diffusion, check = onnxsim.simplify(pitch_diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' onnx_helper.graph_fold_back_to_squeeze(pitch_diffusion.graph) @@ -514,13 +543,24 @@ def _optimize_merge_pitch_predictor_graph( alias_prefix='/pitch_predictor/denoise_fn/cache' ) onnx_helper.graph_remove_unused_values(pitch_diffusion.graph) - print(f'Running ONNX Simplifier #2 for {self.pitch_diffusion_class_name}...') + print(f'Running ONNX Simplifier #2 on {self.pitch_diffusion_class_name}...') pitch_diffusion, check = onnxsim.simplify(pitch_diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' + onnx_helper.model_add_prefixes(pitch_pre, node_prefix='/pre', ignored_pattern=r'.*embed.*') + onnx_helper.model_add_prefixes(pitch_pre, dim_prefix='pre.', ignored_pattern='(n_tokens)|(n_notes)|(n_frames)') + onnx_helper.model_add_prefixes(pitch_post, node_prefix='/post', ignored_pattern=None) + onnx_helper.model_add_prefixes(pitch_post, dim_prefix='post.', ignored_pattern='n_frames') + pitch_pre_diffusion = onnx.compose.merge_models( + pitch_pre, pitch_diffusion, io_map=[('pitch_cond', 'pitch_cond')], + prefix1='', prefix2='', doc_string='', + producer_name=pitch_pre.producer_name, producer_version=pitch_pre.producer_version, + domain=pitch_pre.domain, model_version=pitch_pre.model_version + ) + pitch_pre_diffusion.graph.name = pitch_pre.graph.name pitch_predictor = onnx.compose.merge_models( - pitch_pre, pitch_diffusion, io_map=[ - ('pitch_cond', 'pitch_cond'), ('base_pitch', 'base_pitch') + pitch_pre_diffusion, pitch_post, io_map=[ + ('x_pred', 'x_pred'), ('base_pitch', 'base_pitch') ], prefix1='', prefix2='', doc_string='', producer_name=pitch_pre.producer_name, producer_version=pitch_pre.producer_version, domain=pitch_pre.domain, model_version=pitch_pre.model_version @@ -546,7 +586,8 @@ def _optimize_merge_variance_predictor_graph( else (1, len(self.model.variance_prediction_list), 'n_frames') } ) - print(f'Running ONNX Simplifier #1 for {self.variance_diffusion_class_name}...') + print(f'Running ONNX Simplifier #1 on' + f' {self.variance_diffusion_class_name}...') var_diffusion, check = onnxsim.simplify(var_diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' onnx_helper.graph_fold_back_to_squeeze(var_diffusion.graph) @@ -556,13 +597,22 @@ def _optimize_merge_variance_predictor_graph( 
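             # The conditioner projections inside the denoiser are extracted and
             # cached under this alias prefix, so they are evaluated once per
             # inference instead of once per diffusion step.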
alias_prefix='/variance_predictor/denoise_fn/cache' ) onnx_helper.graph_remove_unused_values(var_diffusion.graph) - print(f'Running ONNX Simplifier #2 for {self.variance_diffusion_class_name}...') + print(f'Running ONNX Simplifier #2 on {self.variance_diffusion_class_name}...') var_diffusion, check = onnxsim.simplify(var_diffusion, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' var_post, check = onnxsim.simplify(var_post, include_subgraph=True) assert check, 'Simplified ONNX model could not be validated' + ignored_variance_names = '|'.join([f'({v_name})' for v_name in self.model.variance_prediction_list]) + onnx_helper.model_add_prefixes( + var_pre, node_prefix='/pre', ignored_pattern=fr'.*((embed)|{ignored_variance_names}).*' + ) + onnx_helper.model_add_prefixes(var_pre, dim_prefix='pre.', ignored_pattern='(n_tokens)|(n_frames)') + onnx_helper.model_add_prefixes(var_post, node_prefix='/post', ignored_pattern=None) + onnx_helper.model_add_prefixes(var_post, dim_prefix='post.', ignored_pattern='n_frames') + + print(f'Merging {self.variance_diffusion_class_name} subroutines...') var_pre_diffusion = onnx.compose.merge_models( var_pre, var_diffusion, io_map=[('variance_cond', 'variance_cond')], prefix1='', prefix2='', doc_string='', diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index a29c1345c..ba91fccd4 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -117,12 +117,14 @@ def __init__(self, vmin: float, vmax: float, repeat_bins, betas=betas ) + def clamp_spec(self, x): + return x.clamp(min=self.vmin, max=self.vmax) + def denorm_spec(self, x): d = (self.spec_max - self.spec_min) / 2. m = (self.spec_max + self.spec_min) / 2. x = x * d + m x = x.mean(dim=-1) - x = x.clamp(min=self.vmin, max=self.vmax) return x diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 56f05f5b5..e0a76b67d 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -158,9 +158,13 @@ def forward_pitch_preprocess( return pitch_cond, base_pitch def forward_pitch_diffusion( - self, pitch_cond, base_pitch, speedup: int = 1 + self, pitch_cond, speedup: int = 1 ): - pitch_pred = self.pitch_predictor(pitch_cond, speedup) + base_pitch + x_pred = self.pitch_predictor(pitch_cond, speedup) + return x_pred + + def forward_pitch_postprocess(self, x_pred, base_pitch): + pitch_pred = self.pitch_predictor.clamp_spec(x_pred) + base_pitch return pitch_pred def forward_variance_preprocess( @@ -233,6 +237,14 @@ def view_as_pitch_diffusion(self): model.forward = model.forward_pitch_diffusion return model + def view_as_pitch_postprocess(self): + model = copy.deepcopy(self) + del model.fs2 + if self.predict_variances: + del model.variance_predictor + model.forward = model.forward_pitch_postprocess + return model + def view_as_variance_preprocess(self): model = copy.deepcopy(self) del model.fs2 diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py index 5f44ced5e..de9d09eaa 100644 --- a/modules/diffusion/wavenet.py +++ b/modules/diffusion/wavenet.py @@ -117,5 +117,5 @@ def forward(self, spec, diffusion_step, cond): # This is the temporary solution since PyTorch 1.13 # does not support exporting aten::unflatten to ONNX # x = x.unflatten(dim=1, sizes=(self.n_feats, self.in_dims)) - x = x.reshape(x.shape[0], self.n_feats, self.in_dims, x.shape[2]) + x = x.reshape(-1, self.n_feats, self.in_dims, x.shape[2]) return x diff --git a/utils/onnx_helper.py b/utils/onnx_helper.py 
index cd7b6090a..e8d56d0ed 100644 --- a/utils/onnx_helper.py +++ b/utils/onnx_helper.py @@ -69,11 +69,11 @@ def model_add_prefixes( def _record_initializers_and_value_infos_recursive(subgraph): # Record names in current graph for initializer in subgraph.initializer: - if re.match(ignored_pattern, initializer.name): + if ignored_pattern is not None and re.match(ignored_pattern, initializer.name): continue initializers.add(initializer.name) for value_info in subgraph.value_info: - if re.match(ignored_pattern, value_info.name): + if ignored_pattern is not None and re.match(ignored_pattern, value_info.name): continue value_infos.add(value_info.name) for node in subgraph.node: @@ -92,7 +92,7 @@ def _add_prefixes_recursive(subgraph): # Add prefixes in current graph if initializer_prefix is not None: for initializer in subgraph.initializer: - if re.match(ignored_pattern, initializer.name): + if ignored_pattern is not None and re.match(ignored_pattern, initializer.name): continue new_name = initializer_prefix + initializer.name _verbose('| add prefix:', initializer.name, '->', new_name) @@ -101,13 +101,15 @@ def _add_prefixes_recursive(subgraph): for value_info in subgraph.value_info: if dim_prefix is not None: for dim in value_info.type.tensor_type.shape.dim: - if dim.dim_param is None or dim.dim_param == '' or re.match(ignored_pattern, dim.dim_param): + if dim.dim_param is None or dim.dim_param == '' or \ + ignored_pattern is not None and re.match(ignored_pattern, dim.dim_param): continue new_dim_param = dim_prefix + dim.dim_param _verbose('| add prefix:', dim.dim_param, '->', new_dim_param) dim.dim_param = new_dim_param - if value_info_prefix is None or re.match(ignored_pattern, value_info.name): + if value_info_prefix is None or \ + ignored_pattern is not None and re.match(ignored_pattern, value_info.name): continue new_name = value_info_prefix + value_info.name _verbose('| add prefix:', value_info.name, '->', new_name) @@ -115,7 +117,7 @@ def _add_prefixes_recursive(subgraph): if node_prefix is not None: for node in subgraph.node: - if re.match(ignored_pattern, node.name): + if ignored_pattern is not None and re.match(ignored_pattern, node.name): continue new_name = node_prefix + node.name _verbose('| add prefix:', node.name, '->', new_name) From 03928c2059b2dc90701043b427277b2096c4d139 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 22 May 2023 10:33:03 +0800 Subject: [PATCH 390/475] Clean up cache after exporting --- deployment/exporters/variance_exporter.py | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index dfccd847c..7146087f7 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -93,6 +93,7 @@ def export_model(self, path: Path): onnx.save(pitch_predictor_onnx, pitch_predictor_path) self.pitch_preprocess_cache_path.unlink() self.pitch_diffusion_cache_path.unlink() + self.pitch_postprocess_cache_path.unlink() print(f'| export pitch predictor => {pitch_predictor_path}') if self.model.predict_variances: variance_predictor_onnx = self._optimize_merge_variance_predictor_graph( From e511a86abab3bbf669f2372272b8aa0630391e6b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 25 May 2023 01:01:26 +0800 Subject: [PATCH 391/475] Fix padding values --- utils/binarizer_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index ac4e88a8c..dfa67fee3 100644 --- 
a/utils/binarizer_utils.py
+++ b/utils/binarizer_utils.py
@@ -32,7 +32,7 @@ def pad_frames(frames, hop_size, n_samples, n_expect):
         frames = frames[:rpad]
         rpad = 0
     if lpad > 0 or rpad > 0:
-        frames = np.pad(frames, [[lpad, rpad]], mode='constant')
+        frames = np.pad(frames, (lpad, rpad), mode='constant', constant_values=(frames[0], frames[-1]))
     return frames
 

From 1999fee3cc4c1c077247216c3533d44f28060014 Mon Sep 17 00:00:00 2001
From: yqzhishen 
Date: Thu, 25 May 2023 17:00:50 +0800
Subject: [PATCH 392/475] Use different values for pitch delta norm and clip

---
 configs/variance.yaml           |  9 +++++----
 deployment/modules/diffusion.py |  7 +++++--
 deployment/modules/toplevel.py  |  6 ++++--
 modules/diffusion/ddpm.py       | 13 ++++++++-----
 modules/toplevel.py             |  6 ++++--
 5 files changed, 26 insertions(+), 15 deletions(-)

diff --git a/configs/variance.yaml b/configs/variance.yaml
index 9861c5a6c..3c2e68399 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -51,10 +51,11 @@ dur_prediction_args:
   lambda_sdur_loss: 3.0
 
 pitch_prediction_args:
-  pitch_delta_vmin: -12.0 # -12.75
-  pitch_delta_vmax: 12.0 # 12.75
-  num_pitch_bins: 64
-  deviation: 0.25
+  pitd_norm_min: -8.0
+  pitd_norm_max: 8.0
+  pitd_clip_min: -12.0
+  pitd_clip_max: 12.0
+  repeat_bins: 64
   residual_layers: 20
   residual_channels: 256
 
diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index ba91fccd4..50e1c2d5a 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -104,12 +104,15 @@ def forward(self, condition, speedup):
 
 
 class PitchDiffusionONNX(GaussianDiffusionONNX, PitchDiffusion):
-    def __init__(self, vmin: float, vmax: float, repeat_bins,
+    def __init__(self, vmin: float, vmax: float,
+                 cmin: float, cmax: float, repeat_bins,
                  timesteps=1000, k_step=1000,
                  denoiser_type=None, denoiser_args=None,
                  betas=None):
         self.vmin = vmin
         self.vmax = vmax
+        self.cmin = cmin
+        self.cmax = cmax
         super(PitchDiffusion, self).__init__(
             vmin=vmin, vmax=vmax, repeat_bins=repeat_bins,
             timesteps=timesteps, k_step=k_step,
@@ -118,7 +121,7 @@ def __init__(self, vmin: float, vmax: float, repeat_bins,
         )
 
     def clamp_spec(self, x):
-        return x.clamp(min=self.vmin, max=self.vmax)
+        return x.clamp(min=self.cmin, max=self.cmax)
 
     def denorm_spec(self, x):
         d = (self.spec_max - self.spec_min) / 2. 
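
Note: this commit separates the pitch-delta norm range (pitd_norm_min/max,
mapped linearly onto the model's [-1, 1] working range) from a wider clip
range (pitd_clip_min/max, a hard bound on inputs and outputs). A minimal
sketch of the resulting mapping, assuming the linear norm_spec of the base
GaussianDiffusion (the function name below is illustrative, not repository
code):

    def norm_pitch_delta(x, vmin=-8.0, vmax=8.0, cmin=-12.0, cmax=12.0):
        x = max(cmin, min(x, cmax))  # hard clip to [cmin, cmax]
        # linear map of [vmin, vmax] onto [-1, 1]; clipped extremes reach +/-1.5
        return (x - (vmin + vmax) / 2.) / ((vmax - vmin) / 2.)

With these defaults, typical deltas within +/-8 semitones span the full
[-1, 1] range, while rarer deviations of up to +/-12 semitones survive the
clip and land outside [-1, 1] instead of compressing the common range.
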
diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index e0a76b67d..d33659d5f 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -94,8 +94,10 @@ def __init__(self, vocab_size): self.smooth: nn.Conv1d = None pitch_hparams = hparams['pitch_prediction_args'] self.pitch_predictor = PitchDiffusionONNX( - vmin=pitch_hparams['pitch_delta_vmin'], - vmax=pitch_hparams['pitch_delta_vmax'], + vmin=pitch_hparams['pitd_norm_min'], + vmax=pitch_hparams['pitd_norm_max'], + cmin=pitch_hparams['pitd_clip_min'], + cmax=pitch_hparams['pitd_clip_max'], repeat_bins=pitch_hparams['num_pitch_bins'], timesteps=hparams['timesteps'], k_step=hparams['K_step'], diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index b04229a28..428ab4c3c 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -331,12 +331,15 @@ def denorm_spec(self, x): class PitchDiffusion(RepetitiveDiffusion): - def __init__(self, vmin: float, vmax: float, repeat_bins, + def __init__(self, vmin: float, vmax: float, + cmin: float, cmax: float, repeat_bins, timesteps=1000, k_step=1000, denoiser_type=None, denoiser_args=None, betas=None): - self.vmin = vmin - self.vmax = vmax + self.vmin = vmin # norm min + self.vmax = vmax # norm max + self.cmin = cmin # clip min + self.cmax = cmax # clip max super().__init__( vmin=vmin, vmax=vmax, repeat_bins=repeat_bins, timesteps=timesteps, k_step=k_step, @@ -345,10 +348,10 @@ def __init__(self, vmin: float, vmax: float, repeat_bins, ) def norm_spec(self, x): - return super().norm_spec(x.clamp(min=self.vmin, max=self.vmax)) + return super().norm_spec(x.clamp(min=self.cmin, max=self.cmax)) def denorm_spec(self, x): - return super().denorm_spec(x).clamp(min=self.vmin, max=self.vmax) + return super().denorm_spec(x).clamp(min=self.cmin, max=self.cmax) class MultiVarianceDiffusion(RepetitiveDiffusion): diff --git a/modules/toplevel.py b/modules/toplevel.py index 30fc971da..2860ca294 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -141,8 +141,10 @@ def __init__(self, vocab_size): pitch_hparams = hparams['pitch_prediction_args'] self.base_pitch_embed = Linear(1, hparams['hidden_size']) self.pitch_predictor = PitchDiffusion( - vmin=pitch_hparams['pitch_delta_vmin'], - vmax=pitch_hparams['pitch_delta_vmax'], + vmin=pitch_hparams['pitd_norm_min'], + vmax=pitch_hparams['pitd_norm_max'], + cmin=pitch_hparams['pitd_clip_min'], + cmax=pitch_hparams['pitd_clip_max'], repeat_bins=pitch_hparams['num_pitch_bins'], timesteps=hparams['timesteps'], k_step=hparams['K_step'], From a6b168cdf468a78ebebae9bbadb20ffec53f559b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 25 May 2023 18:59:23 +0800 Subject: [PATCH 393/475] Adjust config key name --- deployment/modules/toplevel.py | 2 +- modules/toplevel.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index d33659d5f..71cbdb10a 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -98,7 +98,7 @@ def __init__(self, vocab_size): vmax=pitch_hparams['pitd_norm_max'], cmin=pitch_hparams['pitd_clip_min'], cmax=pitch_hparams['pitd_clip_max'], - repeat_bins=pitch_hparams['num_pitch_bins'], + repeat_bins=pitch_hparams['repeat_bins'], timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], diff --git a/modules/toplevel.py b/modules/toplevel.py index 2860ca294..f22f4cace 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ 
-145,7 +145,7 @@ def __init__(self, vocab_size): vmax=pitch_hparams['pitd_norm_max'], cmin=pitch_hparams['pitd_clip_min'], cmax=pitch_hparams['pitd_clip_max'], - repeat_bins=pitch_hparams['num_pitch_bins'], + repeat_bins=pitch_hparams['repeat_bins'], timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], From eb161aa905aa8f910511a9c3ad1baba419dd0459 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 25 May 2023 19:24:03 +0800 Subject: [PATCH 394/475] Make some attributes optional in binarizer according to requirements --- inference/ds_variance.py | 20 ++--- modules/toplevel.py | 12 +-- preprocessing/variance_binarizer.py | 134 ++++++++++++++++------------ training/acoustic_task.py | 2 +- training/variance_task.py | 22 ++--- 5 files changed, 101 insertions(+), 89 deletions(-) diff --git a/inference/ds_variance.py b/inference/ds_variance.py index c90407480..e7345b8a9 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -70,11 +70,11 @@ def preprocess_input(self, param): note_dur_sec = torch.from_numpy(np.array([param['note_dur'].split()], np.float32)).to(self.device) # [B=1, T_n] note_acc = torch.round(torch.cumsum(note_dur_sec, dim=1) / self.timestep + 0.5).long() note_dur = torch.diff(note_acc, dim=1, prepend=note_acc.new_zeros(1, 1)) - mel2note = self.lr(note_dur) # [B=1, T_t] - T_t = mel2note.shape[1] + mel2note = self.lr(note_dur) # [B=1, T_s] + T_s = mel2note.shape[1] print(f'Length: {T_w} word(s), {note_seq.shape[1]} note(s), {T_ph} token(s), ' - f'{T_t} frame(s), {T_t * self.timestep:.2f} second(s)') + f'{T_s} frame(s), {T_s * self.timestep:.2f} second(s)') if param.get('ph_dur'): # Get mel2ph if ph_dur is given @@ -84,8 +84,8 @@ def preprocess_input(self, param): ph_acc = torch.round(torch.cumsum(ph_dur_sec, dim=1) / self.timestep + 0.5).long() ph_dur = torch.diff(ph_acc, dim=1, prepend=ph_acc.new_zeros(1, 1)) mel2ph = self.lr(ph_dur, txt_tokens == 0) - if mel2ph.shape[1] != T_t: # Align phones with notes - mel2ph = F.pad(mel2ph, [0, T_t - mel2ph.shape[1]], value=mel2ph[0, -1]) + if mel2ph.shape[1] != T_s: # Align phones with notes + mel2ph = F.pad(mel2ph, [0, T_s - mel2ph.shape[1]], value=mel2ph[0, -1]) ph_dur = mel2ph_to_dur(mel2ph, T_ph) # Get word_dur from ph_dur and ph_num word_dur = note_dur.new_zeros(1, T_w + 1).scatter_add( @@ -104,16 +104,16 @@ def preprocess_input(self, param): batch['ph_dur'] = ph_dur batch['mel2ph'] = mel2ph - mel2word = self.lr(word_dur) # [B=1, T_t] - if mel2word.shape[1] != T_t: # Align words with notes - mel2word = F.pad(mel2word, [0, T_t - mel2word.shape[1]], value=mel2word[0, -1]) + mel2word = self.lr(word_dur) # [B=1, T_s] + if mel2word.shape[1] != T_s: # Align words with notes + mel2word = F.pad(mel2word, [0, T_s - mel2word.shape[1]], value=mel2word[0, -1]) word_dur = mel2ph_to_dur(mel2word, T_w) batch['word_dur'] = word_dur # Calculate frame-level MIDI pitch, which is a step function curve frame_midi_pitch = torch.gather( F.pad(note_seq, [1, 0]), 1, mel2note - ) # => frame-level MIDI pitch, [B=1, T_t] + ) # => frame-level MIDI pitch, [B=1, T_s] rest = (frame_midi_pitch < 0)[0].cpu().numpy() frame_midi_pitch = frame_midi_pitch[0].cpu().numpy() interp_func = interpolate.interp1d( @@ -147,7 +147,7 @@ def preprocess_input(self, param): np.array(param['f0_seq'].split(), np.float32), original_timestep=float(param['f0_timestep']), target_timestep=self.timestep, - align_length=T_t + align_length=T_s ) batch['delta_pitch'] = torch.from_numpy( 
librosa.hz_to_midi(interp_f0(f0)[0]).astype(np.float32) diff --git a/modules/toplevel.py b/modules/toplevel.py index f22f4cace..d5de0aa12 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -165,7 +165,7 @@ def __init__(self, vocab_size): def forward( self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None, - base_pitch=None, delta_pitch=None, retake=None, infer=True, **kwargs + base_pitch=None, pitch=None, retake=None, infer=True, **kwargs ): encoder_out, dur_pred_out = self.fs2( txt_tokens, midi=midi, ph2word=ph2word, @@ -193,24 +193,20 @@ def forward( if self.predict_pitch: if retake is not None: - base_pitch = base_pitch + delta_pitch * ~retake - delta_pitch = delta_pitch * retake + base_pitch = base_pitch * retake + pitch * ~retake pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) - pitch_pred_out = self.pitch_predictor(pitch_cond, delta_pitch, infer) + pitch_pred_out = self.pitch_predictor(pitch_cond, pitch - base_pitch, infer) else: pitch_pred_out = None if not self.predict_variances: return dur_pred_out, pitch_pred_out, ({} if infer else None) - if delta_pitch is None: + if pitch is None: pitch = base_pitch + pitch_pred_out - else: - pitch = base_pitch + delta_pitch condition += self.pitch_embed(pitch[:, :, None]) variance_inputs = self.collect_variance_inputs(**kwargs) - if retake is None: variance_embeds = [ self.variance_embeds[v_name](torch.zeros_like(pitch)[:, :, None]) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 2300d2ebe..2582a398b 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -26,17 +26,22 @@ 'ph_dur', # durations of phonemes, in number of frames, int64[T_ph,] 'midi', # phoneme-level mean MIDI pitch, int64[T_ph,] 'ph2word', # similar to mel2ph format, representing number of phones within each note, int64[T_ph,] - 'mel2ph', # mel2ph format representing number of frames within each phone, int64[T_t,] - 'base_pitch', # interpolated and smoothed frame-level MIDI pitch, float32[T_t,] - 'delta_pitch', # delta_pitch = actual_pitch - base_pitch, in semitones, float32[T_t,] - 'energy', # float32[T_t,] - 'breathiness', # float32[T_t,] + 'mel2ph', # mel2ph format representing number of frames within each phone, int64[T_s,] + 'base_pitch', # interpolated and smoothed frame-level MIDI pitch, float32[T_s,] + 'pitch', # actual pitch in semitones, float32[T_s,] + 'energy', # frame-level RMS, float32[T_s,] + 'breathiness', # frame-level RMS of aperiodic parts, float32[T_s,] ] class VarianceBinarizer(BaseBinarizer): def __init__(self): super().__init__(data_attrs=VARIANCE_ITEM_ATTRIBUTES) + + predict_energy = hparams['predict_energy'] + predict_breathiness = hparams['predict_breathiness'] + self.predict_variances = predict_energy or predict_breathiness + self.lr = LengthRegulator().to(self.device) smooth_kernel_size = round(hparams['midi_smooth_width'] / self.timestep) self.smooth = nn.Conv1d( @@ -63,20 +68,27 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): 'spk_id': ds_id, 'wav_fn': str(raw_data_dir / 'wavs' / f'{item_name}.wav'), 'ph_seq': utterance_label['ph_seq'].split(), - 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], - 'ph_num': [int(x) for x in utterance_label['ph_num'].split()], - 'note_seq': utterance_label['note_seq'].split(), - 'note_dur': [float(x) for x in utterance_label['note_dur'].split()], + 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()] } + assert len(temp_dict['ph_seq']) == 
len(temp_dict['ph_dur']), \ f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' - assert len(temp_dict['ph_seq']) == sum(temp_dict['ph_num']), \ - f'Sum of ph_num does not equal length of ph_seq in \'{item_name}\'.' - assert len(temp_dict['note_seq']) == len(temp_dict['note_dur']), \ - f'Lengths of note_seq and note_dur mismatch in \'{item_name}\'.' - assert any([note != 'rest' for note in temp_dict['note_seq']]), \ - f'All notes are rest in \'{item_name}\'.' + + if hparams['predict_dur']: + temp_dict['ph_num'] = [int(x) for x in utterance_label['ph_num'].split()] + assert len(temp_dict['ph_seq']) == sum(temp_dict['ph_num']), \ + f'Sum of ph_num does not equal length of ph_seq in \'{item_name}\'.' + + if hparams['predict_pitch']: + temp_dict['note_seq'] = utterance_label['note_seq'].split() + temp_dict['note_dur'] = [float(x) for x in utterance_label['note_dur'].split()] + assert len(temp_dict['note_seq']) == len(temp_dict['note_dur']), \ + f'Lengths of note_seq and note_dur mismatch in \'{item_name}\'.' + assert any([note != 'rest' for note in temp_dict['note_seq']]), \ + f'All notes are rest in \'{item_name}\'.' + meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + self.items.update(meta_data_dict) def check_coverage(self): @@ -87,58 +99,27 @@ def check_coverage(self): def process_item(self, item_name, meta_data, binarization_args): seconds = sum(meta_data['ph_dur']) length = round(seconds / self.timestep) - t_txt = len(meta_data['ph_seq']) - ph_dur_sec = torch.FloatTensor(meta_data['ph_dur']).to(self.device) - ph_acc = torch.round(torch.cumsum(ph_dur_sec, dim=0) / self.timestep + 0.5).long() - ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) - ph_num = torch.LongTensor(meta_data['ph_num']).to(self.device) - ph2word = self.lr(ph_num[None])[0] - mel2ph = get_mel2ph_torch( - self.lr, ph_dur_sec, length, self.timestep, device=self.device - ) - + T_ph = len(meta_data['ph_seq']) processed_input = { 'name': item_name, 'wav_fn': meta_data['wav_fn'], 'spk_id': meta_data['spk_id'], 'seconds': seconds, 'length': length, - 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64), - 'ph_dur': ph_dur.cpu().numpy(), - 'ph2word': ph2word.cpu().numpy(), - 'mel2ph': mel2ph.cpu().numpy(), + 'tokens': np.array(self.phone_encoder.encode(meta_data['ph_seq']), dtype=np.int64) } - # Below: calculate and interpolate frame-level MIDI pitch, which is a step function curve - mel2dur = torch.gather(F.pad(ph_dur, [1, 0], value=1), 0, mel2ph) # frame-level phone duration - note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) - mel2note = get_mel2ph_torch( - self.lr, note_dur, mel2ph.shape[0], self.timestep, device=self.device - ) - note_pitch = torch.FloatTensor( - [(librosa.note_to_midi(n, round_midi=False) if n != 'rest' else -1) for n in meta_data['note_seq']] - ).to(self.device) - frame_midi_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) # => frame-level MIDI pitch - rest = (frame_midi_pitch < 0).cpu().numpy() - frame_midi_pitch = frame_midi_pitch.cpu().numpy() - interp_func = interpolate.interp1d( - np.where(~rest)[0], frame_midi_pitch[~rest], - kind='nearest', fill_value='extrapolate' - ) - frame_midi_pitch[rest] = interp_func(np.where(rest)[0]) - frame_midi_pitch = torch.from_numpy(frame_midi_pitch).to(self.device) - - # Below: calculate phoneme-level mean MIDI pitch - ph_midi = frame_midi_pitch.new_zeros(t_txt + 1).scatter_add( - 0, mel2ph, frame_midi_pitch / mel2dur - )[1:] - - processed_input['midi'] = 
ph_midi.round().long().cpu().numpy() + ph_dur_sec = torch.FloatTensor(meta_data['ph_dur']).to(self.device) + ph_acc = torch.round(torch.cumsum(ph_dur_sec, dim=0) / self.timestep + 0.5).long() + ph_dur = torch.diff(ph_acc, dim=0, prepend=torch.LongTensor([0]).to(self.device)) + processed_input['ph_dur'] = ph_dur.cpu().numpy() - # Below: smoothen the pitch step curve as the base pitch curve - smoothed_midi_pitch = self.smooth(frame_midi_pitch[None])[0] + mel2ph = get_mel2ph_torch( + self.lr, ph_dur_sec, length, self.timestep, device=self.device + ) - processed_input['base_pitch'] = smoothed_midi_pitch.cpu().numpy() + if hparams['predict_dur'] and (hparams['predict_pitch'] or self.predict_variances): + processed_input['mel2ph'] = mel2ph.cpu().numpy() # Below: extract actual f0, convert to pitch and calculate delta pitch waveform, _ = librosa.load(meta_data['wav_fn'], sr=hparams['audio_sample_rate'], mono=True) @@ -146,8 +127,43 @@ def process_item(self, item_name, meta_data, binarization_args): if uv.all(): # All unvoiced print(f'Skipped \'{item_name}\': empty gt f0') return None - - processed_input['delta_pitch'] = librosa.hz_to_midi(f0.astype(np.float32)) - processed_input['base_pitch'] + pitch = torch.from_numpy(librosa.hz_to_midi(f0.astype(np.float32))).to(self.device) + + if hparams['predict_dur']: + ph_num = torch.LongTensor(meta_data['ph_num']).to(self.device) + ph2word = self.lr(ph_num[None])[0] + processed_input['ph2word'] = ph2word.cpu().numpy() + mel2dur = torch.gather(F.pad(ph_dur, [1, 0], value=1), 0, mel2ph) # frame-level phone duration + ph_midi = pitch.new_zeros(T_ph + 1).scatter_add( + 0, mel2ph, pitch / mel2dur + )[1:] + processed_input['midi'] = ph_midi.round().long().clamp(min=0, max=127).cpu().numpy() + + if hparams['predict_pitch']: + # Below: calculate and interpolate frame-level MIDI pitch, which is a step function curve + note_dur = torch.FloatTensor(meta_data['note_dur']).to(self.device) + mel2note = get_mel2ph_torch( + self.lr, note_dur, mel2ph.shape[0], self.timestep, device=self.device + ) + note_pitch = torch.FloatTensor( + [(librosa.note_to_midi(n, round_midi=False) if n != 'rest' else -1) for n in meta_data['note_seq']] + ).to(self.device) + frame_midi_pitch = torch.gather(F.pad(note_pitch, [1, 0], value=0), 0, mel2note) + rest = (frame_midi_pitch < 0).cpu().numpy() + frame_midi_pitch = frame_midi_pitch.cpu().numpy() + interp_func = interpolate.interp1d( + np.where(~rest)[0], frame_midi_pitch[~rest], + kind='nearest', fill_value='extrapolate' + ) + frame_midi_pitch[rest] = interp_func(np.where(rest)[0]) + frame_midi_pitch = torch.from_numpy(frame_midi_pitch).to(self.device) + + # Below: smoothen the pitch step curve as the base pitch curve + smoothed_midi_pitch = self.smooth(frame_midi_pitch[None])[0] + processed_input['base_pitch'] = smoothed_midi_pitch.cpu().numpy() + + if hparams['predict_pitch'] or self.predict_variances: + processed_input['pitch'] = pitch.cpu().numpy() # Below: extract energy if hparams['predict_energy']: diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 0bff88fb4..f8db8a269 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -88,7 +88,7 @@ def build_losses(self): ) def run_model(self, sample, infer=False): - txt_tokens = sample['tokens'] # [B, T_t] + txt_tokens = sample['tokens'] # [B, T_ph] target = sample['mel'] # [B, T_s, M] mel2ph = sample['mel2ph'] # [B, T_s] f0 = sample['f0'] diff --git a/training/variance_task.py b/training/variance_task.py index a97477d14..90808369a 100644 --- 
a/training/variance_task.py +++ b/training/variance_task.py @@ -100,14 +100,14 @@ def build_losses(self): def run_model(self, sample, infer=False): txt_tokens = sample['tokens'] # [B, T_ph] - midi = sample['midi'] # [B, T_ph] - ph2word = sample['ph2word'] # [B, T_ph] ph_dur = sample['ph_dur'] # [B, T_ph] - mel2ph = sample['mel2ph'] # [B, T_t] - base_pitch = sample['base_pitch'] # [B, T_t] - delta_pitch = sample['delta_pitch'] # [B, T_t] - energy = sample.get('energy') # [B, T_t] - breathiness = sample.get('breathiness') # [B, T_t] + ph2word = sample.get('ph2word') # [B, T_ph] + midi = sample.get('midi') # [B, T_ph] + mel2ph = sample.get('mel2ph') # [B, T_s] + base_pitch = sample.get('base_pitch') # [B, T_s] + pitch = sample.get('pitch') # [B, T_s] + energy = sample.get('energy') # [B, T_s] + breathiness = sample.get('breathiness') # [B, T_s] if (self.predict_pitch or self.predict_variances) and not infer: # randomly select continuous retaking regions @@ -123,8 +123,9 @@ def run_model(self, sample, infer=False): retake = None output = self.model( - txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, mel2ph=mel2ph, - base_pitch=base_pitch, delta_pitch=delta_pitch, + txt_tokens, midi=midi, ph2word=ph2word, + ph_dur=ph_dur, mel2ph=mel2ph, + base_pitch=base_pitch, pitch=pitch, energy=energy, breathiness=breathiness, retake=retake, infer=infer ) @@ -165,10 +166,9 @@ def _validation_step(self, sample, batch_idx): self.plot_dur(batch_idx, sample['ph_dur'], dur_pred, txt=sample['tokens']) if pitch_pred is not None: base_pitch = sample['base_pitch'] - delta_pitch = sample['delta_pitch'] self.plot_curve( batch_idx, - gt_curve=base_pitch + delta_pitch, + gt_curve=sample['pitch'], pred_curve=base_pitch + pitch_pred, base_curve=base_pitch, curve_name='pitch', From 12e2a7d449631c5769e21cce2217b48753b6e934 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 25 May 2023 23:33:26 +0800 Subject: [PATCH 395/475] Remove variance predictor integrated with acoustic model --- configs/acoustic.yaml | 14 ----- modules/fastspeech/acoustic_encoder.py | 70 ++++++++++++------------ modules/fastspeech/param_adaptor.py | 2 +- modules/toplevel.py | 74 +++----------------------- training/acoustic_task.py | 36 +++++-------- 5 files changed, 58 insertions(+), 138 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index b655421bd..43408906a 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -59,20 +59,6 @@ use_breathiness_embed: false use_key_shift_embed: false use_speed_embed: false -predict_energy: false -predict_breathiness: false - -energy_db_min: -72.0 -energy_db_max: -12.0 -breathiness_db_min: -72.0 -breathiness_db_max: -20.0 - -variances_prediction_args: - repeat_bins: 64 - residual_layers: 20 - residual_channels: 256 -lambda_var_loss: 1.0 - K_step: 1000 timesteps: 1000 max_beta: 0.02 diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py index b9218f4af..b7160975f 100644 --- a/modules/fastspeech/acoustic_encoder.py +++ b/modules/fastspeech/acoustic_encoder.py @@ -30,6 +30,21 @@ def __init__(self, vocab_size): else: raise ValueError('f0_embed_type must be \'discrete\' or \'continuous\'.') + self.variance_embed_list = [] + self.use_energy_embed = hparams.get('use_energy_embed', False) + self.use_breathiness_embed = hparams.get('use_breathiness_embed', False) + if self.use_energy_embed: + self.variance_embed_list.append('energy') + if self.use_breathiness_embed: + self.variance_embed_list.append('breathiness') + + self.use_variance_embeds = 
len(self.variance_embed_list) > 0 + if self.use_variance_embeds: + self.variance_embeds = nn.ModuleDict({ + v_name: Linear(1, hparams['hidden_size']) + for v_name in self.variance_embed_list + }) + self.use_key_shift_embed = hparams.get('use_key_shift_embed', False) if self.use_key_shift_embed: self.key_shift_embed = Linear(1, hparams['hidden_size']) @@ -42,9 +57,27 @@ def __init__(self, vocab_size): if self.use_spk_id: self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) + def forward_variance_embedding(self, condition, key_shift=None, speed=None, **variances): + if self.use_variance_embeds: + variance_embeds = torch.stack([ + self.variance_embeds[v_name](variances[v_name][:, :, None]) + for v_name in self.variance_embed_list + ], dim=-1).sum(-1) + condition += variance_embeds + + if self.use_key_shift_embed: + key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) + condition += key_shift_embed + + if self.use_speed_embed: + speed_embed = self.speed_embed(speed[:, :, None]) + condition += speed_embed + + return condition + def forward( self, txt_tokens, mel2ph, f0, energy=None, breathiness=None, - key_shift=None, speed=None, spk_embed_id=None, infer=True, + key_shift=None, speed=None, spk_embed_id=None, **kwargs ): dur = mel2ph_to_dur(mel2ph, txt_tokens.shape[1]).float() @@ -69,39 +102,10 @@ def forward( else: f0_mel = (1 + f0 / 700).log() pitch_embed = self.pitch_embed(f0_mel[:, :, None]) + condition += pitch_embed - adaptor_cond = condition + pitch_embed - mel_cond = self.forward_variance_embedding( - adaptor_cond, key_shift=key_shift, speed=speed + condition = self.forward_variance_embedding( + condition, key_shift=key_shift, speed=speed, **kwargs ) - # During training, the data augmentation parameters (GEN and VEL) - # are seen to the variance adaptor; but during inference, - # we will always send the DEFAULT parameters (GEN = 0 and VEL = 1) - # to the variance adaptor so that the prediction outputs will NOT - # be influenced by these parameters, which is more reasonable to - # most users of singing voice synthesis systems. 
- if self.use_key_shift_embed: - if infer: - key_shift = torch.zeros_like(key_shift) - key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) - adaptor_cond = adaptor_cond + key_shift_embed - - if self.use_speed_embed: - if infer: - speed = torch.ones_like(speed) - speed_embed = self.speed_embed(speed[:, :, None]) - adaptor_cond = adaptor_cond + speed_embed - - return adaptor_cond, mel_cond - - def forward_variance_embedding(self, condition, key_shift=None, speed=None): - if self.use_key_shift_embed: - key_shift_embed = self.key_shift_embed(key_shift[:, :, None]) - condition = condition + key_shift_embed - - if self.use_speed_embed: - speed_embed = self.speed_embed(speed[:, :, None]) - condition = condition + speed_embed - return condition diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py index 8b57ee6c1..9179ec691 100644 --- a/modules/fastspeech/param_adaptor.py +++ b/modules/fastspeech/param_adaptor.py @@ -9,9 +9,9 @@ class ParameterAdaptorModule(torch.nn.Module): def __init__(self): super().__init__() + self.variance_prediction_list = [] self.predict_energy = hparams.get('predict_energy', False) self.predict_breathiness = hparams.get('predict_breathiness', False) - self.variance_prediction_list = [] if self.predict_energy: self.variance_prediction_list.append('energy') if self.predict_breathiness: diff --git a/modules/toplevel.py b/modules/toplevel.py index d5de0aa12..446d55a23 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -11,7 +11,7 @@ GaussianDiffusion, PitchDiffusion ) from modules.fastspeech.acoustic_encoder import FastSpeech2Acoustic -from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST, ParameterAdaptorModule +from modules.fastspeech.param_adaptor import ParameterAdaptorModule from modules.fastspeech.tts_modules import RhythmRegulator, LengthRegulator from modules.fastspeech.variance_encoder import FastSpeech2Variance from utils.hparams import hparams @@ -28,26 +28,6 @@ def __init__(self, vocab_size, out_dims): vocab_size=vocab_size ) - if self.predict_variances: - self.variance_adaptor = self.build_adaptor() - variances_to_embed = set() - if hparams.get('use_energy_embed', False) and not self.predict_energy: - # energy is embedded but not predicted - variances_to_embed.add('energy') - if hparams.get('use_breathiness_embed', False) and not self.predict_breathiness: - # breathiness is embedded but not predicted - variances_to_embed.add('breathiness') - self.embed_variances = len(variances_to_embed) > 0 - self.variance_aware_list = [ - v_name for v_name in VARIANCE_CHECKLIST - if v_name in self.variance_prediction_list or v_name in variances_to_embed - ] - if self.embed_variances or self.predict_variances: - self.variance_embeds = nn.ModuleDict({ - v_name: Linear(1, hparams['hidden_size']) - for v_name in self.variance_aware_list - }) - self.diffusion = GaussianDiffusion( out_dims=out_dims, num_feats=1, @@ -66,56 +46,18 @@ def forward( self, txt_tokens, mel2ph, f0, key_shift=None, speed=None, spk_embed_id=None, gt_mel=None, infer=True, **kwargs ): - adaptor_cond, mel_cond = self.fs2( + condition = self.fs2( txt_tokens, mel2ph, f0, key_shift=key_shift, speed=speed, - spk_embed_id=spk_embed_id, infer=infer, **kwargs + spk_embed_id=spk_embed_id, **kwargs ) - variance_embed_inputs = { - v_name: kwargs.get(v_name) for v_name in self.variance_aware_list - } # all possible variance inputs - if infer: - if self.predict_variances: - # get variance predictor inputs - variance_preset_inputs = 
self.collect_variance_inputs(**variance_embed_inputs) - if not all([v is not None for v in variance_preset_inputs]): # need to predict some variances - variance_pred_outputs = self.collect_variance_outputs( - self.variance_adaptor(adaptor_cond, infer=True) - ) # dict of predictions - variance_embed_inputs = { - v_name: ( - variance_embed_inputs[v_name] if variance_embed_inputs[v_name] is not None - else variance_pred_outputs[v_name] - ) - for v_name in self.variance_aware_list - } # merge presets and predictions, should contain no NoneType - variance_pred_out = self.collect_variance_outputs(variance_embed_inputs) # collect from embed inputs - else: - variance_pred_out = {} + mel_pred = self.diffusion(condition, infer=True) + mel_pred *= ((mel2ph > 0).float()[:, :, None]) + return mel_pred else: - if self.predict_variances: - # use gt variances to train the predictor - variance_inputs = self.collect_variance_inputs(**variance_embed_inputs) - variance_pred_out = self.variance_adaptor(adaptor_cond, variance_inputs, infer=False) - else: - variance_pred_out = None - - if self.predict_variances or self.embed_variances: - # embed variances into mel condition - variance_embeds = torch.stack([ - self.variance_embeds[v_name](variance_embed_inputs[v_name][:, :, None]) # [B, T] => [B, T, H] - for v_name in self.variance_aware_list - ], dim=-1).sum(-1) - mel_cond += variance_embeds - - if infer: - mel_pred_out = self.diffusion(mel_cond, infer=True) - mel_pred_out *= ((mel2ph > 0).float()[:, :, None]) - else: - mel_pred_out = self.diffusion(mel_cond, gt_spec=gt_mel, infer=False) - - return mel_pred_out, variance_pred_out + x_recon, noise = self.diffusion(condition, gt_spec=gt_mel, infer=False) + return x_recon, noise class DiffSingerVariance(ParameterAdaptorModule, CategorizedModule): diff --git a/training/acoustic_task.py b/training/acoustic_task.py index f8db8a269..76c53cbd3 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -22,9 +22,9 @@ class AcousticDataset(BaseDataset): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.required_variances = {} # key: variance name, value: padding value - if hparams.get('use_energy_embed', False) or hparams.get('predict_energy', False): + if hparams.get('use_energy_embed', False): self.required_variances['energy'] = 0.0 - if hparams.get('use_breathiness_embed', False) or hparams.get('predict_breathiness', False): + if hparams.get('use_breathiness_embed', False): self.required_variances['breathiness'] = 0.0 self.need_key_shift = hparams.get('use_key_shift_embed', False) @@ -64,14 +64,11 @@ def __init__(self): if self.use_vocoder: self.vocoder: BaseVocoder = get_vocoder_cls(hparams)() self.logged_gt_wav = set() - - self.variances_to_predict = set() - if hparams['predict_energy']: - self.variances_to_predict.add('energy') - if hparams['predict_breathiness']: - self.variances_to_predict.add('breathiness') - self.predict_variances = len(self.variances_to_predict) > 0 - self.lambda_var_loss = hparams['lambda_var_loss'] + self.required_variances = [] + if hparams.get('use_energy_embed', False): + self.required_variances.append('energy') + if hparams.get('use_breathiness_embed', False): + self.required_variances.append('breathiness') def build_model(self): return DiffSingerAcoustic( @@ -82,10 +79,6 @@ def build_model(self): # noinspection PyAttributeOutsideInit def build_losses(self): self.mel_loss = DiffusionNoiseLoss(loss_type=hparams['diff_loss_type']) - if self.predict_variances: - self.var_loss = DiffusionNoiseLoss( - 
                loss_type=hparams['diff_loss_type'],
-            )

     def run_model(self, sample, infer=False):
         txt_tokens = sample['tokens']  # [B, T_ph]
@@ -93,8 +86,8 @@
         mel2ph = sample['mel2ph']  # [B, T_s]
         f0 = sample['f0']
         variances = {
-            v_name: None if infer else sample[v_name]
-            for v_name in self.variances_to_predict
+            v_name: sample[v_name]
+            for v_name in self.required_variances
         }
         key_shift = sample.get('key_shift')
         speed = sample.get('speed')
@@ -110,19 +103,14 @@
         )

         if infer:
-            mel_pred, var_pred = output
-            return mel_pred, var_pred
+            return output  # mel_pred
         else:
-            (x_recon, x_noise), var_pred_out = output
-            mel_loss = self.mel_loss(x_recon, x_noise)
+            x_recon, x_noise = output
+            mel_loss = self.mel_loss(x_recon, x_noise, nonpadding=(mel2ph > 0).unsqueeze(-1).float())
             losses = {
                 'mel_loss': mel_loss
             }

-            if self.predict_variances:
-                (v_recon, v_noise) = var_pred_out
-                losses['var_loss'] = self.lambda_var_loss * self.var_loss(v_recon, v_noise)
-
             return losses

     def on_train_start(self):

From c1f33cbf2f79c0b5f49951a77e357a472cd12e04 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 25 May 2023 23:50:28 +0800
Subject: [PATCH 396/475] Fix constant padding value

---
 utils/binarizer_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py
index dfa67fee3..61fdf53be 100644
--- a/utils/binarizer_utils.py
+++ b/utils/binarizer_utils.py
@@ -97,7 +97,7 @@ def get_breathiness_pyworld(wav_data, f0, length, hparams):
     wav_frames = (x.shape[0] + hop_size - 1) // hop_size
     f0_frames = f0.shape[0]
     if f0_frames < wav_frames:
-        f0 = np.pad(f0, [[0, wav_frames - f0_frames]], mode='constant')
+        f0 = np.pad(f0, (0, wav_frames - f0_frames), mode='constant', constant_values=(f0[0], f0[-1]))
     elif f0_frames > wav_frames:
         f0 = f0[:wav_frames]
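The one-line fix above changes what gets appended when the f0 curve is shorter than the waveform: np.pad with the default constant fills the tail with zeros, driving the curve to 0 Hz, whereas passing the boundary samples as constant_values keeps the tail flat. A small sketch of the difference (toy values, not project data):

    import numpy as np

    f0 = np.array([220.0, 221.0, 223.0])  # toy f0 curve in Hz
    pad = 3                               # frames missing at the end

    # Old behaviour: the padded tail crashes to 0 Hz.
    np.pad(f0, (0, pad), mode='constant')
    # -> [220. 221. 223.   0.   0.   0.]

    # Fixed behaviour: constant_values takes a (left, right) pair, so the
    # right padding repeats f0[-1]; the left value is unused here since
    # the left pad width is 0.
    np.pad(f0, (0, pad), mode='constant', constant_values=(f0[0], f0[-1]))
    # -> [220. 221. 223. 223. 223. 223.]

For this pad width, mode='edge' would produce the same result without explicit values.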
From c8ac07d7503e9e4a481ac61857f6e46e60db97c3 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 26 May 2023 00:55:55 +0800
Subject: [PATCH 397/475] Support variance embeds in acoustic exporter

---
 deployment/exporters/acoustic_exporter.py | 16 +++++++++++++---
 deployment/modules/fastspeech2.py         |  9 ++++++++-
 deployment/modules/toplevel.py            |  3 ++-
 preprocessing/acoustic_binarizer.py       |  6 ++----
 4 files changed, 25 insertions(+), 9 deletions(-)

diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py
index e05d0183d..610be883b 100644
--- a/deployment/exporters/acoustic_exporter.py
+++ b/deployment/exporters/acoustic_exporter.py
@@ -70,7 +70,7 @@ def build_model(self) -> DiffSingerAcousticONNX:
             out_dims=hparams['audio_num_mel_bins']
         ).eval().to(self.device)
         load_ckpt(model, hparams['work_dir'], ckpt_steps=self.ckpt_steps,
-                  prefix_in_ckpt='model', strict=True, device=self.device)
+                  prefix_in_ckpt='model', strict=False, device=self.device)
         return model

     def export(self, path: Path):
@@ -107,9 +107,13 @@ def _torch_export_model(self):
         tokens = torch.LongTensor([[1]]).to(self.device)
         durations = torch.LongTensor([[n_frames]]).to(self.device)
         f0 = torch.FloatTensor([[440.] * n_frames]).to(self.device)
+        variances = {
+            v_name: torch.zeros(1, n_frames, dtype=torch.float32, device=self.device)
+            for v_name in self.model.fs2.variance_embed_list
+        }
         kwargs: dict[str, torch.Tensor] = {}
-        arguments = (tokens, durations, f0, kwargs)
-        input_names = ['tokens', 'durations', 'f0']
+        arguments = (tokens, durations, f0, variances, kwargs)
+        input_names = ['tokens', 'durations', 'f0'] + self.model.fs2.variance_embed_list
         dynamix_axes = {
             'tokens': {
                 1: 'n_tokens'
@@ -119,6 +123,12 @@
             },
             'f0': {
                 1: 'n_frames'
+            },
+            **{
+                v_name: {
+                    1: 'n_frames'
+                }
+                for v_name in self.model.fs2.variance_embed_list
             }
         }
         if hparams.get('use_key_shift_embed', False):

diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py
index 71aae48b9..a1925b8e9 100644
--- a/deployment/modules/fastspeech2.py
+++ b/deployment/modules/fastspeech2.py
@@ -45,7 +45,7 @@ def __init__(self, vocab_size):
             self.speed_min, self.speed_max = hparams['augmentation_args']['random_time_stretching']['range']

     # noinspection PyMethodOverriding
-    def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=None):
+    def forward(self, tokens, durations, f0, variances: dict, gender=None, velocity=None, spk_embed=None):
         durations = durations * (tokens > 0)
         mel2ph = self.lr(durations)
         f0 = f0 * (mel2ph > 0)
@@ -63,6 +63,13 @@ def forward(self, tokens, durations, f0, gender=None, velocity=None, spk_embed=N
         pitch_embed = self.pitch_embed(f0_mel[:, :, None])
         condition += pitch_embed

+        if self.use_variance_embeds:
+            variance_embeds = torch.stack([
+                self.variance_embeds[v_name](variances[v_name][:, :, None])
+                for v_name in self.variance_embed_list
+            ], dim=-1).sum(-1)
+            condition += variance_embeds
+
         if hparams.get('use_key_shift_embed', False):
             if hasattr(self, 'frozen_key_shift'):
                 key_shift_embed = self.key_shift_embed(self.frozen_key_shift[:, None, None])

diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py
index 71cbdb10a..a6820da40 100644
--- a/deployment/modules/toplevel.py
+++ b/deployment/modules/toplevel.py
@@ -40,12 +40,13 @@ def forward_fs2(
             tokens: Tensor,
             durations: Tensor,
             f0: Tensor,
+            variances: dict,
             gender: Tensor = None,
             velocity: Tensor = None,
             spk_embed: Tensor = None
     ) -> Tensor:
         return self.fs2(
-            tokens, durations, f0,
+            tokens, durations, f0, variances=variances,
             gender=gender, velocity=velocity,
             spk_embed=spk_embed
         )

diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py
index 375d0d918..c5f9cc1d4 100644
--- a/preprocessing/acoustic_binarizer.py
+++ b/preprocessing/acoustic_binarizer.py
@@ -46,10 +46,8 @@ class AcousticBinarizer(BaseBinarizer):
     def __init__(self):
         super().__init__(data_attrs=ACOUSTIC_ITEM_ATTRIBUTES)
         self.lr = LengthRegulator()
-        self.need_energy = hparams.get('use_energy_embed', False) or hparams.get('predict_energy', False)
-        self.need_breathiness = (
-                hparams.get('use_breathiness_embed', False) or hparams.get('predict_breathiness', False)
-        )
+        self.need_energy = hparams.get('use_energy_embed', False)
+        self.need_breathiness = hparams.get('use_breathiness_embed', False)

     def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id):
         meta_data_dict = {}
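PATCH 397 above follows the standard torch.onnx pattern for a variable number of frame-level inputs: build one dummy [1, n_frames] tensor per embedded variance, append the variance names to input_names, and mark axis 1 of each input as dynamic. A self-contained sketch of that pattern on a toy module (all names here are illustrative, not the project's exporter):

    import torch

    class Toy(torch.nn.Module):
        def forward(self, f0, energy, breathiness):
            # stand-in for conditioning on several frame-level curves
            return f0 + energy + breathiness

    variance_names = ['energy', 'breathiness']  # assumed curve names
    n_frames = 10
    dummies = [torch.full((1, n_frames), 440.0)] + [
        torch.zeros(1, n_frames) for _ in variance_names
    ]

    torch.onnx.export(
        Toy(), tuple(dummies), 'toy.onnx',
        input_names=['f0'] + variance_names,
        output_names=['out'],
        # axis 1 of every frame-level input may vary at inference time
        dynamic_axes={name: {1: 'n_frames'} for name in ['f0'] + variance_names},
        opset_version=15,
    )

Zero is a natural dummy value for the variance curves: tracing only needs the shapes, and zeros match what the exporter feeds for disabled controls.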
From d702e49f704f00f32f6dc401a95bb9806eca864f Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 26 May 2023 01:32:49 +0800
Subject: [PATCH 398/475] Fix augmentation logging

---
 basics/base_binarizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py
index fdc812e34..f7235b47a 100644
--- a/basics/base_binarizer.py
+++ b/basics/base_binarizer.py
@@ -156,7 +156,7 @@ def process(self):
         self.process_dataset(
             'train',
             num_workers=int(self.binarization_args['num_workers']),
-            apply_augmentation=len(self.augmentation_args) > 0
+            apply_augmentation=any(args['enabled'] for args in self.augmentation_args)
         )

     def check_coverage(self):

From b855d2544e5f7320df31847ac4afa1bc8a560acd Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 26 May 2023 02:05:46 +0800
Subject: [PATCH 399/475] Fix KeyError

---
 modules/fastspeech/acoustic_encoder.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/modules/fastspeech/acoustic_encoder.py b/modules/fastspeech/acoustic_encoder.py
index b7160975f..a02ab9cd5 100644
--- a/modules/fastspeech/acoustic_encoder.py
+++ b/modules/fastspeech/acoustic_encoder.py
@@ -76,9 +76,9 @@ def forward_variance_embedding(self, condition, key_shift=None, speed=None, **va
         return condition

     def forward(
-            self, txt_tokens, mel2ph, f0, energy=None, breathiness=None,
-            key_shift=None, speed=None, spk_embed_id=None,
-            **kwargs
+            self, txt_tokens, mel2ph, f0,
+            key_shift=None, speed=None,
+            spk_embed_id=None, **kwargs
     ):
         dur = mel2ph_to_dur(mel2ph, txt_tokens.shape[1]).float()
         dur_embed = self.dur_embed(dur[:, :, None])

From ce68615ae852e3fabc38544e64af00c04bbaa38a Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 26 May 2023 02:09:29 +0800
Subject: [PATCH 400/475] Fix ValueError: not enough values to unpack

---
 training/acoustic_task.py | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)

diff --git a/training/acoustic_task.py b/training/acoustic_task.py
index 76c53cbd3..6d951a5d5 100644
--- a/training/acoustic_task.py
+++ b/training/acoustic_task.py
@@ -130,22 +130,12 @@ def _validation_step(self, sample, batch_idx):
         if batch_idx < hparams['num_valid_plots'] \
                 and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0:
-            mel_pred, var_pred = self.run_model(sample, infer=True)
+            mel_pred = self.run_model(sample, infer=True)
             if self.use_vocoder:
                 self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0'])
             self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}')

-            for name in self.variances_to_predict:
-                variance = sample[name]
-                variance_pred = var_pred[name]
-                self.plot_curve(
-                    batch_idx,
-                    gt_curve=variance,
-                    pred_curve=variance_pred,
-                    curve_name=name
-                )
-
         return outputs, sample['size']

     ############

From 488c2719064d20941d277a81d495b10fdd19f1fb Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 26 May 2023 11:01:17 +0800
Subject: [PATCH 401/475] Fix NoneType error and value unpacking error

---
 inference/ds_acoustic.py | 4 ++--
 inference/ds_variance.py | 8 ++++----
 modules/toplevel.py      | 5 ++++-
 3 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py
index e31d477a4..c39e0e20f 100644
--- a/inference/ds_acoustic.py
+++ b/inference/ds_acoustic.py
@@ -229,11 +229,11 @@ def run_model(self, sample, return_mel=False):
             )  # => [B, T, H]
         else:
             spk_mix_embed = None
-        mel_pred, _ = self.model(
+        mel_pred = self.model(
            txt_tokens, mel2ph=sample['mel2ph'], f0=sample['f0'],
            **variances, key_shift=sample.get('key_shift'), speed=sample.get('speed'),
            spk_mix_embed=spk_mix_embed, infer=True
-        )  # var_pred ignored for now
+        )
         return mel_pred

     @torch.no_grad()
diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index e7345b8a9..a52e1ce21 100644
--- a/inference/ds_variance.py
+++
b/inference/ds_variance.py @@ -149,9 +149,9 @@ def preprocess_input(self, param): target_timestep=self.timestep, align_length=T_s ) - batch['delta_pitch'] = torch.from_numpy( + batch['pitch'] = torch.from_numpy( librosa.hz_to_midi(interp_f0(f0)[0]).astype(np.float32) - ).to(self.device)[None] - base_pitch + ).to(self.device)[None] return batch @@ -164,11 +164,11 @@ def run_model(self, sample): ph_dur = sample['ph_dur'] mel2ph = sample['mel2ph'] base_pitch = sample['base_pitch'] - delta_pitch = sample.get('delta_pitch') + pitch = sample.get('pitch') dur_pred, pitch_pred, variance_pred = self.model( txt_tokens, midi=midi, ph2word=ph2word, word_dur=word_dur, ph_dur=ph_dur, - mel2ph=mel2ph, base_pitch=base_pitch, delta_pitch=delta_pitch, + mel2ph=mel2ph, base_pitch=base_pitch, pitch=pitch, retake=None, infer=True ) if dur_pred is not None: diff --git a/modules/toplevel.py b/modules/toplevel.py index 446d55a23..3418dff07 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -137,7 +137,10 @@ def forward( if retake is not None: base_pitch = base_pitch * retake + pitch * ~retake pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) - pitch_pred_out = self.pitch_predictor(pitch_cond, pitch - base_pitch, infer) + if infer: + pitch_pred_out = self.pitch_predictor(pitch_cond, infer=True) + else: + pitch_pred_out = self.pitch_predictor(pitch_cond, pitch - base_pitch, infer=False) else: pitch_pred_out = None From dd37b0c1de9967906b5c6d217a85d3035a623f7a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 26 May 2023 12:56:58 +0800 Subject: [PATCH 402/475] Fix KeyError --- training/variance_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 90808369a..903d7be22 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -27,7 +27,7 @@ def collater(self, samples): ph2word = utils.collate_nd([s['ph2word'] for s in samples], 0) mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) base_pitch = utils.collate_nd([s['base_pitch'] for s in samples], 0) - delta_pitch = utils.collate_nd([s['delta_pitch'] for s in samples], 0) + pitch = utils.collate_nd([s['pitch'] for s in samples], 0) batch.update({ 'tokens': tokens, 'ph_dur': ph_dur, @@ -35,7 +35,7 @@ def collater(self, samples): 'ph2word': ph2word, 'mel2ph': mel2ph, 'base_pitch': base_pitch, - 'delta_pitch': delta_pitch, + 'pitch': pitch, }) if hparams['predict_energy']: energy = utils.collate_nd([s['energy'] for s in samples], 0) From d4865fb662b1ebaca64cab471922ff6015425ac3 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 26 May 2023 13:31:51 +0800 Subject: [PATCH 403/475] Support multi-speaker variance training --- modules/fastspeech/variance_encoder.py | 5 ++++- modules/toplevel.py | 28 ++++++++++++++++++-------- training/variance_task.py | 5 ++++- 3 files changed, 28 insertions(+), 10 deletions(-) diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 52ef40fb5..699a399fc 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -46,13 +46,14 @@ def __init__(self, vocab_size): dur_loss_type=dur_hparams['loss_type'] ) - def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=True): + def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, spk_embed=None, infer=True): """ :param txt_tokens: (train, infer) [B, T_ph] :param midi: (train, infer) [B, T_ph] :param ph2word: (train, infer) [B, T_ph] :param 
ph_dur: (train, [infer]) [B, T_ph] :param word_dur: (infer) [B, T_w] + :param spk_embed: (train) [B, T_ph, H] :param infer: whether inference :return: encoder_out, ph_dur_pred """ @@ -72,6 +73,8 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, infer=T midi_embed = self.midi_embed(midi) # => [B, T_ph, H] dur_cond = encoder_out + midi_embed + if spk_embed is not None: + dur_cond += spk_embed ph_dur_pred = self.dur_predictor(dur_cond, x_masks=txt_tokens == PAD_INDEX, infer=infer) return encoder_out, ph_dur_pred diff --git a/modules/toplevel.py b/modules/toplevel.py index 3418dff07..9816fc776 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -68,6 +68,11 @@ def category(self): def __init__(self, vocab_size): super().__init__() self.predict_dur = hparams['predict_dur'] + + self.use_spk_id = hparams['use_spk_id'] + if self.use_spk_id: + self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) + self.fs2 = FastSpeech2Variance( vocab_size=vocab_size ) @@ -107,11 +112,17 @@ def __init__(self, vocab_size): def forward( self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None, - base_pitch=None, pitch=None, retake=None, infer=True, **kwargs + base_pitch=None, pitch=None, retake=None, spk_id=None, infer=True, **kwargs ): + if self.use_spk_id: + spk_embed = self.spk_embed(spk_id)[:, None, :] # [B,] => [B, T=1, H] + else: + spk_embed = None + encoder_out, dur_pred_out = self.fs2( txt_tokens, midi=midi, ph2word=ph2word, - ph_dur=ph_dur, word_dur=word_dur, infer=infer + ph_dur=ph_dur, word_dur=word_dur, + spk_embed=spk_embed, infer=infer ) if not self.predict_pitch and not self.predict_variances: @@ -126,12 +137,13 @@ def forward( mel2ph_ = mel2ph[..., None].repeat([1, 1, hparams['hidden_size']]) condition = torch.gather(encoder_out, 1, mel2ph_) - if self.predict_pitch or self.predict_variances: - if retake is None: - retake_embed = self.retake_embed(torch.ones_like(mel2ph)) - else: - retake_embed = self.retake_embed(retake.long()) - condition += retake_embed + if self.use_spk_id: + condition += spk_embed + if retake is None: + retake_embed = self.retake_embed(torch.ones_like(mel2ph)) + else: + retake_embed = self.retake_embed(retake.long()) + condition += retake_embed if self.predict_pitch: if retake is not None: diff --git a/training/variance_task.py b/training/variance_task.py index 903d7be22..5bd52e9bd 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -55,6 +55,8 @@ def __init__(self): super().__init__() self.dataset_cls = VarianceDataset + self.use_spk_id = hparams['use_spk_id'] + self.predict_dur = hparams['predict_dur'] if self.predict_dur: self.lambda_dur_loss = hparams['lambda_dur_loss'] @@ -99,6 +101,7 @@ def build_losses(self): ) def run_model(self, sample, infer=False): + spk_ids = sample['spk_ids'] if self.use_spk_id else None # [B,] txt_tokens = sample['tokens'] # [B, T_ph] ph_dur = sample['ph_dur'] # [B, T_ph] ph2word = sample.get('ph2word') # [B, T_ph] @@ -127,7 +130,7 @@ def run_model(self, sample, infer=False): ph_dur=ph_dur, mel2ph=mel2ph, base_pitch=base_pitch, pitch=pitch, energy=energy, breathiness=breathiness, - retake=retake, infer=infer + retake=retake, spk_id=spk_ids, infer=infer ) dur_pred, pitch_pred, variances_pred = output From 109f56558c16a85b1bb02d97aaf5f866518d7670 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 26 May 2023 16:05:09 +0800 Subject: [PATCH 404/475] Fix base_pitch is all zero when using multiprocessing binarizer on Windows --- preprocessing/variance_binarizer.py | 
43 ++++++++++++++++++----------- 1 file changed, 27 insertions(+), 16 deletions(-) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 2582a398b..f34e84372 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -34,6 +34,28 @@ ] +# This operator is used as global variable due to a PyTorch shared memory bug on Windows. +# See https://github.com/pytorch/pytorch/issues/100358 +smooth: nn.Conv1d = None + + +def build_smooth_op(kernel_size, device): + global smooth + smooth = nn.Conv1d( + in_channels=1, + out_channels=1, + kernel_size=kernel_size, + bias=False, + padding='same', + padding_mode='replicate' + ).eval().to(device) + smooth_kernel = torch.sin(torch.from_numpy( + np.linspace(0, 1, kernel_size).astype(np.float32) * np.pi + ).to(device)) + smooth_kernel /= smooth_kernel.sum() + smooth.weight.data = smooth_kernel[None, None] + + class VarianceBinarizer(BaseBinarizer): def __init__(self): super().__init__(data_attrs=VARIANCE_ITEM_ATTRIBUTES) @@ -41,22 +63,8 @@ def __init__(self): predict_energy = hparams['predict_energy'] predict_breathiness = hparams['predict_breathiness'] self.predict_variances = predict_energy or predict_breathiness - self.lr = LengthRegulator().to(self.device) - smooth_kernel_size = round(hparams['midi_smooth_width'] / self.timestep) - self.smooth = nn.Conv1d( - in_channels=1, - out_channels=1, - kernel_size=smooth_kernel_size, - bias=False, - padding='same', - padding_mode='replicate' - ).eval().to(self.device) - smooth_kernel = torch.sin(torch.from_numpy( - np.linspace(0, 1, smooth_kernel_size).astype(np.float32) * np.pi - ).to(self.device)) - smooth_kernel /= smooth_kernel.sum() - self.smooth.weight.data = smooth_kernel[None, None] + # self.smooth: nn.Conv1d = None def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): meta_data_dict = {} @@ -97,6 +105,9 @@ def check_coverage(self): @torch.no_grad() def process_item(self, item_name, meta_data, binarization_args): + if smooth is None: + build_smooth_op(round(hparams['midi_smooth_width'] / self.timestep), self.device) + seconds = sum(meta_data['ph_dur']) length = round(seconds / self.timestep) T_ph = len(meta_data['ph_seq']) @@ -159,7 +170,7 @@ def process_item(self, item_name, meta_data, binarization_args): frame_midi_pitch = torch.from_numpy(frame_midi_pitch).to(self.device) # Below: smoothen the pitch step curve as the base pitch curve - smoothed_midi_pitch = self.smooth(frame_midi_pitch[None])[0] + smoothed_midi_pitch = smooth(frame_midi_pitch[None])[0] processed_input['base_pitch'] = smoothed_midi_pitch.cpu().numpy() if hparams['predict_pitch'] or self.predict_variances: From 7da762616dc451434e98c90b3692453be9194e6b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 26 May 2023 20:10:35 +0800 Subject: [PATCH 405/475] Migrate some DS files --- ...00\345\215\212\344\270\200\345\215\212.ds" | 812 ++++++------ .../\344\270\215\350\260\223\344\276\240.ds" | 418 +++++++ "samples/\344\273\231\347\221\266.ds" | 140 +-- ...13\346\214\207\346\234\210\346\224\271.ds" | 268 ---- "samples/\345\277\203\347\226\274giegie.ds" | 194 --- .../\346\201\213\344\272\272\345\277\203.ds" | 16 +- ...64\345\206\215\350\247\201\345\225\212.ds" | 408 +++--- ...22\345\250\207\345\205\253\350\277\236.ds" | 18 +- ...07\347\232\204\347\255\224\346\241\210.ds" | 436 ------- ...37\346\230\257\346\212\261\346\255\211.ds" | 1090 ----------------- .../\351\200\215\351\201\245\344\273\231.ds" | 176 +-- 11 files changed, 1202 insertions(+), 2774 deletions(-) 
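The smoothing operator moved to module scope in PATCH 404 above is a fixed, untrainable Conv1d whose kernel is a normalized half-sine window; convolving the stepwise frame-level MIDI pitch with it produces the smooth base pitch curve. A standalone sketch of the same construction (the kernel size is chosen arbitrarily here; the binarizer derives it from midi_smooth_width and the frame timestep):

    import numpy as np
    import torch
    import torch.nn as nn

    def build_sine_smoother(kernel_size: int) -> nn.Conv1d:
        # Fixed Conv1d that smooths a curve with a half-sine window.
        smooth = nn.Conv1d(
            in_channels=1, out_channels=1, kernel_size=kernel_size,
            bias=False, padding='same', padding_mode='replicate'
        ).eval()
        kernel = torch.sin(torch.from_numpy(
            np.linspace(0, 1, kernel_size).astype(np.float32) * np.pi
        ))
        kernel /= kernel.sum()  # unit sum, so flat regions pass through unchanged
        smooth.weight.data = kernel[None, None]
        return smooth

    smoother = build_sine_smoother(kernel_size=9)
    # A pitch step from MIDI 60 to 64, shaped [1, 1, T] for Conv1d:
    pitch = torch.cat([torch.full((20,), 60.0), torch.full((20,), 64.0)])
    with torch.no_grad():
        base = smoother(pitch[None, None])[0, 0]
    print(base[17:23])  # ramps smoothly across the step instead of jumping

Building it lazily inside process_item (rather than in __init__) keeps the module out of the pickled binarizer state, which is what the referenced PyTorch Windows shared-memory issue forces here.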
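In the DS-sample migration that follows, the old phoneme-aligned fields (note_seq with one entry per phoneme, note_dur_seq, is_slur_seq, input_type) are replaced by note-aligned fields (note_seq, note_dur, note_slur) plus ph_num, which ties the flat phoneme sequence back to the notes: in the slur-free samples here it has one entry per note and sums to the phoneme count. A quick consistency check against the first sample below (a reading aid inferred from the data, not an official schema):

    sample = {
        'ph_seq': 'AP sh ir SP',      # 4 phonemes
        'ph_num': '2 1 1',            # phonemes per note; sums to 4
        'note_seq': 'rest D#4 rest',  # one entry per note, not per phoneme
        'note_dur': '0.6 0.69 0.4',
        'note_slur': '0 0 0',
    }
    ph_num = [int(n) for n in sample['ph_num'].split()]
    assert sum(ph_num) == len(sample['ph_seq'].split())
    assert len(ph_num) == len(sample['note_seq'].split()) \
        == len(sample['note_dur'].split()) == len(sample['note_slur'].split())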
create mode 100644 "samples/\344\270\215\350\260\223\344\276\240.ds" delete mode 100644 "samples/\345\267\246\346\211\213\346\214\207\346\234\210\346\224\271.ds" delete mode 100644 "samples/\345\277\203\347\226\274giegie.ds" delete mode 100644 "samples/\350\203\275\350\247\243\347\255\224\344\270\200\345\210\207\347\232\204\347\255\224\346\241\210.ds" delete mode 100644 "samples/\350\277\231\344\271\210\345\217\257\347\210\261\347\234\237\346\230\257\346\212\261\346\255\211.ds" rename "samples/\351\200\215\351\201\245\344\273\231\357\274\210\347\224\267key\357\274\211.ds" => "samples/\351\200\215\351\201\245\344\273\231.ds" (89%) diff --git "a/samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" "b/samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" index 595fe9ef5..d4a8d10ca 100644 --- "a/samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" +++ "b/samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" @@ -1,698 +1,698 @@ [ { + "offset": 32.503, "text": "AP 时 SP", "ph_seq": "AP sh ir SP", - "note_seq": "rest D#4 D#4 rest", - "note_dur_seq": "0.6 0.6899999 0.6899999 0.4", - "is_slur_seq": "0 0 0 0", - "ph_dur": "0.387434 0.212566 0.69 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.3874 0.2126 0.69 0.4", + "ph_num": "2 1 1", + "note_seq": "rest D#4 rest", + "note_dur": "0.6 0.69 0.4", + "note_slur": "0 0 0", "f0_seq": "336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 336.6 309.5 309.5 309.5 309.5 309.5 309.7 309.8 309.9 310.0 310.1 310.2 310.3 310.4 310.6 310.6 310.7 310.9 310.9 310.9 310.9 310.9 311.1 309.2 309.7 310.0 310.1 310.4 310.5 310.3 310.2 310.1 310.2 310.3 310.4 310.3 310.2 310.2 310.1 310.1 310.1 310.1 310.1 310.2 310.5 310.8 310.9 311.5 311.8 311.8 312.4 312.4 312.6 312.7 312.6 312.4 312.1 311.9 311.6 311.5 311.3 310.8 310.4 310.0 309.6 309.1 308.8 308.8 308.7 308.4 308.6 308.7 308.9 309.2 309.4 309.7 310.0 310.4 310.6 310.9 311.2 311.3 311.5 311.6 311.7 311.8 311.8 311.8 311.8 311.7 311.7 311.6 311.3 311.0 310.8 310.6 310.4 310.2 310.6 310.4 310.4 310.4 310.4 310.3 310.1 310.0 309.7 309.4 309.3 309.0 308.7 308.5 308.2 308.1 307.8 307.6 307.7 307.7 308.2 308.6 309.5 310.5 311.5 312.2 312.9 313.5 313.7 313.9 314.0 313.8 313.7 313.5 312.9 312.6 312.1 310.9 310.2 309.5 308.7 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3", - "input_type": "phoneme", - "offset": 32.503 + "f0_timestep": "0.005" }, { + "offset": 33.862, "text": "AP 间 的 微 风 吹 过 缓 缓 拂 过 了 耳 畔 AP 你 的 容 颜 SP", 
"ph_seq": "AP j ian d e w ei f eng ch ui g uo h uan h uan f u g uo l e er p an AP n i d e r ong y En SP", - "note_seq": "rest D4 D4 D#4 D#4 D4 D4 C4 C4 A#3 A#3 A#3 A#3 A#3 A#3 C4 C4 A#3 A#3 C4 C4 A#3 A#3 G#3 G3 G3 rest A#3 A#3 C4 C4 D4 D4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.3439999 0.3439999 0.345 0.345 0.173 0.173 0.1720001 0.1720001 0.345 0.345 0.1719999 0.1719999 0.345 0.345 0.1730001 0.948 0.948 0.2580001 0.1729999 0.1729999 0.1719999 0.1719999 0.1729999 0.1729999 0.6890001 0.6890001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.156001 0.119999 0.127002 0.044998 0.128002 0.044998 0.111999 0.060001 0.087852 0.085148 0.284002 0.059998 0.255 0.09 0.112999 0.060001 0.094546 0.077454 0.285002 0.059998 0.112002 0.059998 0.319999 0.138328 0.059673 0.948 0.213002 0.044998 0.128002 0.044998 0.126998 0.045002 0.147998 0.025002 0.689 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.156 0.12 0.127 0.045 0.128 0.045 0.112 0.06 0.0879 0.0851 0.284 0.06 0.255 0.09 0.113 0.06 0.0945 0.0775 0.285 0.06 0.112 0.06 0.32 0.1383 0.0597 0.948 0.213 0.045 0.128 0.045 0.127 0.045 0.148 0.025 0.689 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 1 2 1 2 2 2 2 1 1", + "note_seq": "rest D4 D#4 D4 C4 A#3 A#3 A#3 C4 A#3 C4 A#3 G#3 G3 rest A#3 C4 D4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.344 0.345 0.173 0.172 0.345 0.172 0.345 0.173 0.948 0.258 0.173 0.172 0.173 0.689 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 317.1 296.8 296.7 296.5 296.2 295.9 295.6 295.2 294.7 294.3 293.9 293.6 293.3 293.2 293.2 292.8 292.5 293.0 293.8 294.1 295.3 296.5 297.6 298.0 297.9 297.4 296.6 295.7 294.4 292.6 291.3 289.5 288.0 288.0 288.5 289.6 295.4 304.9 308.9 310.8 311.8 311.8 311.9 312.4 312.7 313.0 313.5 313.7 313.6 313.1 312.4 311.3 310.4 309.8 309.0 308.3 307.7 306.8 305.4 303.9 302.6 301.2 299.4 297.9 297.8 297.8 297.5 297.3 297.1 296.8 296.5 296.0 295.7 295.3 295.0 294.7 294.4 294.2 294.0 294.0 294.0 294.0 294.0 294.1 294.2 294.2 294.3 294.3 294.5 293.2 290.3 286.0 280.6 274.7 269.9 266.0 264.0 263.3 263.1 263.1 263.0 262.9 262.7 262.6 262.4 262.2 262.1 261.9 261.9 261.9 261.9 261.8 261.6 261.5 261.2 261.0 260.7 260.5 260.4 260.3 260.1 259.2 257.7 255.3 252.4 248.9 245.4 242.2 239.2 236.6 235.0 234.3 234.2 234.3 234.5 234.8 235.2 235.7 236.2 236.7 237.2 237.6 237.9 238.1 238.2 238.0 237.6 237.0 236.2 235.4 234.6 234.0 233.8 233.7 233.0 232.5 232.3 232.3 232.4 232.9 233.1 233.5 233.8 233.9 233.9 234.0 234.1 234.0 233.8 233.6 233.5 233.4 233.3 233.1 232.9 232.7 232.9 233.0 233.4 233.6 233.9 234.1 234.3 234.4 234.5 234.6 234.4 233.7 232.7 231.5 230.8 231.7 234.4 238.5 240.0 241.5 243.0 244.5 246.0 247.5 249.0 250.6 252.1 253.7 255.3 256.8 258.4 260.0 261.6 263.2 264.9 266.5 253.9 247.6 244.2 242.3 240.6 238.8 237.2 236.1 235.0 233.9 233.3 233.0 232.7 232.3 232.4 232.4 232.5 232.7 232.9 232.9 232.9 232.9 233.1 233.2 233.2 233.0 232.9 232.5 232.4 232.4 232.4 232.6 232.8 233.0 233.4 233.9 234.3 234.6 234.8 234.5 234.0 233.1 231.9 230.4 228.3 226.3 224.8 222.8 219.7 218.4 219.4 221.7 224.0 226.3 228.6 230.9 233.3 235.7 238.1 240.6 243.0 245.5 248.0 250.6 253.1 255.7 
258.4 261.0 253.9 246.2 243.9 242.2 240.1 238.7 237.9 236.7 235.2 234.3 233.7 233.3 232.5 232.0 231.4 230.8 230.1 229.5 228.8 228.1 227.5 227.0 226.9 227.2 226.4 226.1 217.4 234.8 235.1 235.7 236.5 237.3 238.0 238.8 241.3 247.9 256.2 261.3 263.2 263.1 263.1 263.0 263.0 262.9 262.8 262.6 262.5 262.4 262.4 262.4 261.9 260.5 258.1 254.6 250.0 245.8 242.0 238.7 236.2 234.8 247.2 247.6 247.9 248.2 248.6 248.9 249.2 249.6 249.9 250.3 249.9 246.7 244.0 241.9 240.5 238.5 237.5 236.0 234.8 234.1 233.6 232.8 232.2 231.9 231.7 231.6 231.6 231.6 231.7 231.9 232.1 232.5 232.7 232.9 233.0 233.2 233.1 232.8 232.7 232.7 232.7 232.9 233.0 233.2 233.4 233.6 233.9 234.1 234.0 233.7 233.4 232.5 231.6 230.9 230.3 229.7 228.4 226.7 224.8 222.8 221.8 221.8 223.0 226.0 229.5 233.0 236.6 240.3 244.0 247.8 251.6 262.8 262.8 262.9 263.0 263.1 263.1 263.2 263.4 263.5 263.6 263.7 263.8 263.9 263.9 264.0 263.8 263.5 262.8 262.0 261.0 259.8 258.6 257.6 256.7 256.0 255.6 255.4 254.5 253.3 251.8 250.2 249.5 249.6 249.0 248.1 245.8 242.7 240.1 238.3 237.5 237.4 237.4 237.3 237.1 237.0 236.8 236.6 236.3 236.1 235.8 235.6 235.4 235.1 234.9 234.8 234.7 234.6 234.6 234.5 234.4 234.1 233.9 233.6 233.2 232.8 232.4 232.2 232.0 231.9 231.9 231.9 232.0 232.1 232.4 232.5 232.7 232.9 232.9 233.0 232.8 232.1 230.7 228.8 226.8 224.4 221.9 219.0 216.5 214.4 212.5 210.9 209.6 209.0 209.0 209.0 209.3 209.8 210.3 210.8 211.3 211.8 212.1 212.3 212.2 212.0 211.7 211.3 210.9 210.4 209.9 209.5 209.1 208.7 208.4 208.3 208.3 208.1 207.5 206.7 205.7 204.5 203.1 201.7 199.9 198.4 197.1 195.8 194.6 193.7 193.2 192.9 193.0 197.0 198.0 199.0 200.0 201.0 202.0 203.0 204.0 205.0 206.1 207.1 208.1 209.2 210.2 210.8 209.7 208.4 206.2 203.6 201.6 199.8 198.8 198.2 197.2 196.7 196.3 195.9 195.8 195.6 195.6 195.7 195.8 195.9 195.9 195.5 195.3 195.1 194.6 194.6 194.6 194.4 194.4 194.4 194.5 194.6 194.9 195.3 195.5 195.8 196.1 196.3 196.6 196.8 197.1 197.2 197.2 197.4 197.4 197.4 197.4 197.4 197.4 197.3 197.1 197.0 196.9 196.7 196.7 196.5 196.2 196.0 195.5 195.3 195.2 194.9 194.8 194.8 194.9 195.5 195.6 195.9 196.1 196.4 196.7 197.1 197.5 198.1 198.3 198.5 198.7 198.7 198.8 198.8 198.7 198.7 198.5 198.3 198.0 197.2 197.0 196.5 196.1 195.7 195.4 194.8 194.3 193.8 193.2 192.9 192.8 192.9 193.2 193.4 193.7 194.1 194.5 195.5 196.1 196.7 197.6 198.2 198.7 199.3 199.8 200.3 200.6 200.6 200.5 200.2 200.0 199.7 199.2 198.9 198.5 198.0 197.5 196.9 196.1 195.4 194.9 194.5 194.2 193.8 193.4 193.1 192.9 192.7 192.8 193.1 193.5 194.2 194.7 195.3 196.0 196.5 197.1 197.7 198.4 198.9 199.5 200.0 200.4 200.5 200.6 200.5 200.2 200.0 199.8 199.5 199.1 198.6 198.0 197.5 197.0 196.5 196.1 195.9 195.7 195.4 195.3 194.9 194.7 194.5 194.6 194.8 195.6 197.0 197.8 198.2 198.7 199.3 199.6 199.8 199.4 199.0 197.7 196.5 194.6 192.5 191.0 190.2 189.7 189.5 189.4 189.7 191.3 192.8 194.4 196.0 197.6 199.2 200.8 202.4 204.1 205.7 207.4 209.1 210.8 212.5 214.2 216.0 217.7 219.5 221.3 223.1 224.9 226.7 228.6 230.4 232.3 234.2 236.1 238.0 240.0 241.9 243.9 245.9 247.9 249.9 251.9 230.3 230.3 230.3 230.3 230.3 230.3 230.2 230.1 230.1 230.1 230.0 230.0 230.0 230.0 230.0 230.0 230.0 230.2 230.6 231.0 231.5 232.1 232.5 232.8 232.9 233.1 233.1 233.1 233.1 233.2 233.2 233.2 233.3 233.1 232.2 230.4 227.9 224.5 221.6 219.2 217.5 216.6 214.5 215.1 221.2 227.5 234.0 240.7 247.5 254.5 261.8 260.6 261.3 261.2 261.4 261.6 261.3 260.5 259.4 258.3 257.2 256.4 255.5 254.3 253.7 253.7 255.1 256.9 259.8 262.2 264.3 265.7 269.4 273.3 276.9 280.1 282.4 284.2 285.6 287.1 287.9 288.5 288.7 289.1 289.3 290.0 290.4 
291.1 291.7 292.3 293.0 293.6 294.0 294.4 294.7 295.0 295.1 295.4 295.4 295.7 295.6 295.0 294.5 293.7 293.2 293.1 293.5 294.4 295.8 297.2 298.9 300.3 301.9 304.0 305.4 306.8 308.8 310.1 311.1 312.2 313.3 314.1 314.8 315.6 315.9 316.0 316.0 315.7 315.4 314.9 314.4 313.8 313.1 312.5 312.1 311.6 311.2 310.9 310.3 309.7 309.2 308.7 308.4 308.3 308.5 308.6 308.9 309.2 309.6 310.1 310.3 310.8 311.1 311.5 311.8 312.2 312.5 312.6 312.6 312.8 313.1 313.3 313.5 313.5 313.1 312.8 312.6 312.4 311.8 311.2 310.7 310.3 310.3 310.4 310.4 310.2 310.2 310.1 310.2 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.5 310.7 310.9 311.2 311.3 311.3 311.1 310.8 310.3 310.1 309.6 309.3 309.0 308.9 308.8 308.8 308.8 308.9 309.1 309.5 309.8 310.0 310.4 310.9 311.5 311.9 312.4 312.7 312.9 312.8 312.9 312.8 312.7 312.7 312.6 312.4 312.5 312.4 312.5 312.4 312.4 312.4 312.2 311.9 311.6 311.1 310.6 310.1 309.7 309.3 309.2 309.2 309.4 310.1 311.3 312.0 313.4 314.0 314.3 314.1 312.9 311.4 309.4 307.3 305.3 304.2 304.0 303.9 302.2 300.1 297.2 290.9 287.8 287.8 287.8 287.8 287.8 287.8 287.8 287.8 287.8 287.8 287.8 287.8", - "input_type": "phoneme", - "offset": 33.862 + "f0_timestep": "0.005" }, { + "offset": 39.379, "text": "AP 一 直 悄 悄 改 变 离 去 太 久 才 发 觉 AP 回 忆 里 面 SP", "ph_seq": "AP y i zh ir q iao q iao g ai b ian l i q v t ai j iu c ai f a j ve AP h ui y i l i m ian SP", - "note_seq": "rest D4 D4 D#4 D#4 G4 G4 D#4 D#4 D#4 D#4 D#4 D#4 D#4 D#4 F4 F4 D#4 D#4 A#3 A#3 D#4 D#4 D4 D4 D#4 D#4 rest A#3 A#3 C4 C4 D4 D4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.517 0.517 0.173 0.173 0.1720001 0.1720001 0.345 0.345 0.1729999 0.1729999 0.5170002 0.5170002 0.1719999 0.1719999 0.776 0.776 0.2590001 0.1719999 0.1719999 0.1729999 0.1729999 0.1719999 0.1719999 0.6900001 0.6900001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.230998 0.045002 0.112999 0.060001 0.083659 0.088341 0.077781 0.094219 0.128002 0.044998 0.098888 0.073112 0.427 0.09 0.083919 0.089081 0.104221 0.06778 0.284999 0.060001 0.082088 0.090912 0.427 0.09 0.102641 0.069359 0.776 0.183999 0.075001 0.126998 0.045002 0.112998 0.060001 0.100883 0.071117 0.69 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.113 0.06 0.0837 0.0883 0.0778 0.0942 0.128 0.045 0.0989 0.0731 0.427 0.09 0.0839 0.0891 0.1042 0.0678 0.285 0.06 0.0821 0.0909 0.427 0.09 0.1026 0.0694 0.776 0.184 0.075 0.127 0.045 0.113 0.06 0.1009 0.0711 0.69 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 1 1", + "note_seq": "rest D4 D#4 G4 D#4 D#4 D#4 D#4 F4 D#4 A#3 D#4 D4 D#4 rest A#3 C4 D4 D#4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.172 0.517 0.173 0.172 0.345 0.173 0.517 0.172 0.776 0.259 0.172 0.173 0.172 0.69 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 295.5 292.8 291.4 292.0 292.3 292.3 293.1 293.8 294.3 295.3 296.0 296.6 298.1 298.5 299.6 299.8 299.2 298.3 297.2 295.4 293.7 292.6 290.9 288.1 285.2 283.8 280.6 283.3 285.9 288.6 291.2 293.9 296.6 299.3 302.1 304.8 307.6 310.5 313.3 316.2 319.1 316.4 316.3 316.0 315.7 315.4 315.0 314.7 314.4 314.1 314.0 314.0 313.7 312.9 311.8 310.3 308.4 306.7 304.9 303.1 301.3 300.0 299.0 298.4 298.9 
302.9 310.2 321.3 333.6 346.8 356.7 362.3 363.7 364.7 366.5 369.1 373.0 376.8 380.8 384.7 388.7 392.1 394.3 395.9 396.2 395.9 395.5 394.7 394.2 393.9 393.1 390.7 385.3 379.1 371.5 364.1 356.5 350.8 347.4 324.7 325.0 325.2 325.4 325.7 325.9 326.2 326.4 326.6 326.9 327.1 321.5 318.0 315.7 314.0 312.7 311.8 311.2 310.6 310.3 310.0 309.8 309.7 309.9 310.0 310.1 309.8 309.1 307.5 305.8 304.6 303.5 303.8 307.8 309.2 303.9 298.8 296.7 301.6 291.7 289.1 315.4 315.2 314.9 314.6 314.1 313.7 313.1 312.7 312.5 312.4 312.3 312.2 312.2 312.2 312.0 312.0 311.9 311.8 311.8 311.8 311.8 311.8 311.8 311.9 312.1 312.2 312.2 312.2 311.1 308.1 304.4 299.7 295.4 292.2 290.2 313.4 317.6 322.0 326.4 326.2 321.5 320.8 320.6 319.9 319.5 318.8 318.0 316.9 316.0 315.0 313.9 313.1 312.3 311.7 310.9 310.1 309.7 309.3 309.2 309.3 309.3 309.3 309.5 309.7 309.8 310.1 310.2 310.2 310.3 310.7 311.0 311.3 311.6 311.8 311.9 312.3 312.6 312.6 312.7 312.7 312.9 313.1 313.2 313.3 313.3 313.1 312.8 312.6 312.3 312.1 312.0 311.8 311.7 311.7 311.7 311.7 311.7 311.5 311.6 311.7 311.7 311.7 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 312.0 312.0 312.0 312.0 312.0 312.0 311.9 311.8 311.8 311.5 311.1 310.6 310.1 312.6 312.4 312.2 312.1 311.8 311.5 311.2 310.9 310.6 310.3 310.1 310.1 310.1 310.1 310.4 310.9 311.7 312.5 313.2 313.8 314.4 314.9 315.1 313.1 313.4 313.7 314.0 314.5 315.2 315.7 316.1 316.6 316.9 317.0 315.8 312.2 307.2 301.7 296.9 294.0 294.3 295.5 297.9 301.0 304.9 309.5 315.5 321.0 326.6 332.0 337.3 342.3 345.8 348.3 349.6 350.2 350.0 349.8 349.4 348.9 348.6 348.1 347.5 347.1 346.7 346.4 346.4 348.3 347.4 345.6 343.5 340.9 337.8 333.9 330.4 327.1 323.9 320.9 318.2 316.4 315.2 314.7 312.4 311.8 311.1 310.5 309.9 309.3 308.7 309.2 309.2 309.3 311.0 312.1 312.4 312.5 312.0 311.4 310.9 310.6 310.2 310.1 309.9 309.8 309.6 309.4 309.3 309.4 309.6 309.8 310.1 310.1 310.0 309.9 309.9 309.9 309.9 309.7 309.9 310.5 311.1 311.7 312.2 312.4 312.3 312.0 311.1 309.7 307.9 306.0 303.0 300.5 297.4 294.2 288.0 282.1 277.4 274.8 273.7 271.9 269.9 267.8 265.7 263.7 261.7 259.7 257.7 255.7 253.7 251.8 249.9 247.9 246.0 244.2 242.3 240.8 240.2 238.8 237.2 235.2 233.6 232.1 231.4 231.2 231.3 231.6 232.1 232.7 233.2 233.6 233.9 234.1 233.7 232.5 230.7 228.1 224.8 221.9 219.2 216.8 214.9 214.0 280.6 283.2 285.8 288.4 291.1 293.8 296.5 299.2 303.8 310.1 311.5 314.5 314.9 315.8 315.9 315.5 314.9 314.3 313.7 313.4 312.8 312.1 311.4 310.5 309.8 309.2 308.6 308.4 308.3 308.3 308.6 309.1 309.6 310.3 310.6 310.9 311.0 311.3 311.6 311.8 312.1 312.3 312.4 312.4 312.2 312.4 312.5 312.7 312.9 312.8 312.9 312.7 312.5 312.3 312.1 312.1 312.2 312.2 312.0 312.2 312.2 312.0 312.2 312.1 311.7 311.7 311.5 311.5 311.5 311.4 311.4 311.5 311.5 311.8 312.3 312.8 313.1 313.6 313.5 313.3 312.9 312.1 311.2 310.2 308.7 306.2 302.9 299.7 296.0 291.5 287.4 283.8 285.3 286.7 288.1 289.6 291.1 292.5 294.0 295.5 297.0 298.5 300.0 301.5 305.0 304.7 304.3 303.7 303.1 302.3 301.5 300.8 300.2 299.6 299.0 298.8 298.8 298.7 298.5 298.1 297.8 297.4 297.0 296.5 296.2 295.9 295.9 295.7 294.9 293.5 290.8 288.0 285.0 281.9 278.8 276.0 274.1 272.7 272.3 297.0 300.7 304.5 308.4 312.2 316.2 320.2 324.2 325.2 320.8 316.8 316.2 315.4 314.9 314.3 313.8 313.5 313.1 312.7 312.4 311.7 311.1 310.5 309.9 309.1 308.2 307.4 306.6 306.0 305.7 305.1 304.7 304.3 304.0 303.9 303.8 304.0 304.4 305.1 305.6 306.3 307.1 308.1 309.2 310.2 311.2 312.1 312.6 312.9 313.2 313.8 313.8 313.8 313.7 313.2 312.9 312.4 311.8 311.1 310.5 309.9 309.2 308.5 307.7 306.8 305.8 304.9 304.3 303.8 303.4 303.4 303.6 
303.8 304.1 304.6 305.1 305.6 306.2 306.8 307.4 308.4 309.2 309.8 310.6 311.2 311.7 312.4 312.9 312.8 312.7 312.7 312.4 312.1 311.9 311.4 311.0 310.6 310.1 309.5 309.1 308.4 307.7 307.0 306.5 306.1 305.7 305.3 305.1 304.9 304.7 304.5 304.6 304.9 305.3 305.6 306.0 306.7 307.3 308.0 308.6 309.2 309.8 310.2 310.7 311.3 311.8 312.3 312.6 313.0 313.1 313.3 312.9 312.8 312.6 312.4 312.1 311.9 311.6 311.1 310.5 309.8 309.2 308.3 307.6 306.8 306.1 305.3 304.5 304.0 303.4 303.1 303.3 303.6 303.8 303.3 301.7 298.6 293.9 288.9 284.0 280.7 280.4 280.1 279.7 279.4 279.0 278.7 278.4 278.0 277.7 277.4 277.0 276.7 276.4 276.1 275.7 275.4 275.1 274.7 274.4 274.1 273.7 273.4 273.1 272.8 272.4 272.1 271.8 271.5 271.1 270.8 270.5 270.2 269.8 269.5 269.2 268.9 268.5 268.2 267.9 267.6 267.3 266.9 266.6 266.3 266.0 265.7 265.3 265.0 264.7 264.4 264.1 263.7 263.4 263.1 254.1 244.0 239.5 237.0 234.5 231.8 230.4 229.6 228.9 229.0 229.2 229.4 229.7 230.3 231.0 231.5 231.8 232.1 232.3 232.7 233.3 233.9 234.6 235.2 236.8 237.8 239.9 242.4 244.4 246.5 248.6 250.9 252.7 254.3 255.9 257.4 258.7 260.3 261.3 262.0 262.6 263.0 263.3 263.6 263.6 263.5 263.4 263.2 263.2 263.2 263.0 263.1 263.2 263.3 263.5 263.8 263.9 264.6 265.3 265.9 266.7 267.9 269.6 271.5 274.9 277.7 280.1 282.4 284.9 287.4 288.7 290.7 291.8 292.5 293.7 294.1 294.7 295.0 295.0 295.0 294.9 294.9 294.7 294.6 294.7 294.5 294.0 293.3 292.8 293.0 293.0 293.4 293.8 294.4 295.2 296.2 297.5 298.6 299.8 301.1 302.6 304.1 305.4 306.5 307.7 309.2 310.1 310.8 311.4 311.9 312.3 312.4 312.2 312.2 312.2 312.2 312.1 312.0 312.0 311.8 311.8 311.7 311.7 311.6 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.7 311.8 312.0 312.2 312.3 312.5 312.7 313.0 313.1 313.1 313.3 313.3 313.4 313.4 313.3 313.3 313.3 313.3 313.3 313.3 313.1 313.1 313.1 313.0 312.9 312.9 312.9 312.9 312.9 312.9 312.9 312.9 312.9 313.0 313.2 313.4 313.5 313.7 314.0 314.0 314.1 314.2 314.4 314.2 314.2 314.2 314.1 314.0 313.8 313.7 313.5 313.3 313.2 313.1 313.0 312.9 312.9 312.9 312.8 312.7 312.7 312.7 312.7 312.6 312.6 312.4 312.4 312.4 312.4 312.4 313.3 313.1 313.0 312.7 312.6 312.4 312.3 312.1 311.9 311.7 311.4 311.2 311.1 310.9 310.9 310.9 310.9 311.5 312.4 313.1 314.1 315.2 316.1 316.6 316.8 316.7 316.2 315.6 314.9 314.1 313.3 312.6 311.8 310.4 310.3 310.7 310.9 309.4 306.4 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1 305.1", - "input_type": "phoneme", - "offset": 39.379 + "f0_timestep": "0.005" }, { + "offset": 44.897, "text": "AP 多 少 片 段 还 能 一 一 浮 现 在 脑 海 SP", "ph_seq": "AP d uo sh ao p ian d uan h ai n eng y i y i f u x ian z ai n ao h ai SP", - "note_seq": "rest D4 D4 D#4 D#4 D4 D4 C4 C4 A#3 A#3 A#3 A#3 A#3 A#3 C4 C4 A#3 A#3 D#4 D#4 F4 F4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.345 0.345 0.1720001 0.1720001 0.173 0.173 0.3440001 0.3440001 0.1729999 0.1729999 0.345 0.345 0.1719999 0.1719999 0.6900001 0.6900001 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.214999 0.060001 0.113002 0.059998 0.097629 0.074371 0.121433 0.051567 0.108398 0.063602 0.255 0.09 0.300002 0.044998 0.127002 0.044998 0.084652 0.088348 0.223997 0.120003 0.111061 0.061939 0.269999 0.075001 0.092383 0.079617 0.69 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.215 0.06 0.113 0.06 0.0976 0.0744 0.1214 0.0516 0.1084 0.0636 0.255 0.09 0.3 0.045 0.127 0.045 0.0847 0.0883 0.224 0.12 0.1111 0.0619 0.27 0.075 0.0924 0.0796 0.69 0.068", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + 
"note_seq": "rest D4 D#4 D4 C4 A#3 A#3 A#3 C4 A#3 D#4 F4 G#4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.345 0.172 0.173 0.344 0.173 0.345 0.172 0.69 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 301.2 298.1 295.8 294.3 293.4 292.9 292.6 292.5 292.5 292.9 292.9 292.3 291.6 288.3 286.8 283.6 282.0 281.7 285.3 288.8 291.7 294.5 297.5 300.4 303.4 306.3 309.4 312.4 315.5 318.6 321.8 313.0 312.8 312.5 312.1 311.8 311.4 311.1 310.7 310.6 310.5 310.4 310.2 309.9 309.4 309.2 309.0 309.0 308.1 306.3 303.4 299.5 295.1 291.1 287.5 284.6 282.3 281.6 290.4 292.7 295.0 297.3 299.7 302.0 304.4 306.8 309.2 311.7 314.1 313.0 310.3 308.1 305.9 303.3 301.2 299.5 297.6 296.7 296.2 295.6 295.1 295.0 294.9 295.0 294.7 294.1 293.4 292.9 292.5 291.8 290.0 287.4 284.4 280.7 277.5 272.4 269.4 265.9 261.8 259.1 259.4 267.0 272.7 270.6 268.1 266.7 264.8 264.0 263.1 261.8 261.0 259.9 258.9 257.2 255.2 252.9 250.7 248.4 246.0 243.7 241.8 237.1 229.9 230.1 230.3 230.5 230.6 230.8 231.0 231.2 231.4 231.6 231.7 231.9 232.1 232.3 232.5 232.7 233.2 235.0 235.2 235.0 234.9 234.2 233.0 232.5 232.0 231.6 231.4 231.3 231.2 231.2 231.5 231.8 232.3 232.6 232.9 233.3 233.5 233.6 233.6 233.5 233.5 233.3 233.3 233.4 233.5 233.4 233.4 233.4 233.3 233.5 233.8 233.9 234.0 234.4 234.6 234.6 234.8 235.2 235.3 235.4 235.2 234.8 234.3 233.8 233.2 232.7 232.2 231.2 230.8 230.5 230.5 231.0 231.1 230.6 230.4 230.8 231.4 232.2 233.0 233.6 234.5 235.1 235.8 238.1 240.5 240.4 240.0 238.5 237.5 237.2 237.1 236.9 236.5 236.1 235.7 235.0 234.5 234.2 233.5 232.9 232.3 232.1 231.7 231.5 231.6 231.7 232.0 232.2 232.4 232.5 232.6 232.8 233.5 233.9 234.3 234.6 234.7 234.4 234.4 234.4 234.5 234.6 234.4 234.2 233.8 233.4 233.1 233.1 233.0 232.9 232.5 232.3 232.0 231.9 231.7 231.8 231.7 231.2 230.7 229.8 228.9 228.1 227.3 226.2 225.3 225.0 225.1 224.9 225.3 226.1 227.4 228.5 229.3 230.1 231.1 231.8 232.4 233.0 233.3 233.5 233.5 233.6 233.3 232.9 232.7 232.8 232.7 232.6 235.6 235.5 235.4 235.3 235.1 234.8 234.6 234.4 234.2 233.8 233.5 233.3 233.1 232.9 232.8 232.7 232.7 233.2 235.1 237.9 241.2 245.0 249.2 253.7 257.3 259.9 261.5 260.7 260.7 260.7 260.8 261.0 261.2 261.4 261.5 261.8 262.1 262.2 262.3 262.5 262.6 262.7 262.7 262.5 261.4 259.5 256.3 253.2 249.8 246.3 242.8 239.8 237.8 236.5 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.1 236.0 235.9 235.7 235.5 235.2 234.7 234.2 233.8 233.4 233.0 232.5 232.1 231.8 231.7 231.5 232.7 232.6 232.5 232.4 232.4 232.3 232.5 232.7 233.2 233.8 233.9 233.9 233.4 232.9 232.5 231.8 230.9 229.7 227.7 225.1 222.9 220.9 219.5 224.2 228.6 233.0 237.6 242.2 247.0 251.8 256.7 261.7 266.8 272.1 277.4 282.8 288.3 294.0 299.7 305.6 311.5 315.5 311.3 311.3 311.4 311.2 311.2 311.5 311.7 311.7 311.6 311.0 309.7 308.1 306.8 304.9 302.8 301.2 299.5 297.2 293.9 291.7 288.4 283.7 287.1 292.8 298.6 304.5 310.6 316.7 323.0 329.4 335.9 342.5 349.3 356.2 363.3 362.3 360.3 359.1 358.0 356.9 355.7 354.7 353.7 352.6 351.5 350.8 349.8 348.5 347.6 346.8 346.3 346.1 346.1 346.5 347.1 347.8 348.3 348.7 348.9 349.1 349.2 349.2 349.4 349.2 349.2 349.4 349.4 349.6 349.5 349.4 349.6 349.7 349.9 350.0 350.0 350.0 350.1 350.3 350.4 350.2 350.1 349.6 349.1 
348.6 347.8 346.9 346.2 345.6 345.2 344.9 344.7 344.2 344.5 347.1 351.9 357.0 361.3 366.8 372.1 380.0 387.4 398.5 407.8 411.3 412.9 414.1 414.8 415.7 416.6 417.2 417.5 417.5 417.5 416.9 416.0 414.8 413.5 411.3 409.0 406.8 404.4 401.7 397.7 393.5 388.4 383.1 383.7 383.9 384.4 385.2 386.2 387.3 388.2 389.3 390.3 391.2 392.0 392.5 392.7 392.8 392.7 392.7 392.7 392.7 392.7 392.7 392.6 392.4 392.4 392.4 392.2 392.2 392.2 392.1 392.0 392.0 392.0 392.0 391.8 391.8 391.8 391.8 391.8 391.8 391.8 391.8 391.8 391.8 392.1 392.4 392.8 393.1 393.5 393.9 394.3 394.7 395.1 395.5 395.8 395.9 396.0 396.0 395.9 395.9 395.9 395.6 395.6 395.5 395.3 395.2 395.1 394.9 394.7 394.5 394.5 394.3 394.3 394.3 394.3 394.3 394.3 394.4 394.6 394.9 395.3 395.6 396.1 396.5 396.7 397.0 397.2 397.3 397.4 397.2 397.0 396.8 396.4 396.1 395.8 395.4 395.1 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 394.8 394.7 394.7 394.7 394.5 394.3 394.1 393.9 393.8 393.7 393.5 393.3 393.1 392.9 392.9 392.9 392.9 392.9 393.0 393.4 394.0 394.6 395.3 395.9 396.3 396.6 396.7 396.4 395.9 395.2 394.4 393.4 392.5 391.8 391.1 390.6 390.4 390.3 390.2 390.2 390.2 390.0 390.0 389.8 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7", - "input_type": "phoneme", - "offset": 44.897 + "f0_timestep": "0.005" }, { + "offset": 48.69, "text": "AP 想 要 再 重 来 SP", "ph_seq": "AP x iang y ao z ai ch ong l ai SP", - "note_seq": "rest G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.126002 0.149998 0.126998 0.045002 0.118442 0.053558 0.093002 0.079998 0.112002 0.059998 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.126 0.15 0.127 0.045 0.1184 0.0536 0.093 0.08 0.112 0.06 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest G4 G#4 G4 F4 D#4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 416.7 393.7 393.6 393.6 393.6 393.6 393.4 393.4 393.4 393.2 393.1 393.1 393.1 393.1 393.1 389.4 389.1 389.2 389.7 390.5 391.1 392.4 393.4 394.5 395.4 396.2 397.4 398.8 400.0 401.4 402.8 403.9 405.0 405.8 406.8 407.9 409.1 411.0 412.2 413.4 414.6 416.1 417.0 417.5 417.7 417.4 416.9 416.3 415.5 414.4 413.4 412.2 409.2 407.0 404.3 400.7 396.5 392.9 389.7 386.7 384.2 386.0 389.7 391.8 393.9 398.2 397.8 397.4 396.6 395.5 394.6 393.7 393.1 392.4 392.4 392.4 393.0 393.6 394.3 394.9 395.3 395.2 394.8 394.3 393.6 393.0 392.5 392.3 391.8 390.6 388.9 386.4 383.5 380.3 376.9 373.4 368.9 365.6 362.4 359.4 356.5 354.2 352.7 351.7 351.3 351.0 350.5 350.0 349.5 349.1 349.0 349.0 348.8 348.8 348.7 348.5 348.2 347.9 347.8 347.6 347.4 347.2 347.2 347.0 346.5 345.5 344.0 341.8 339.5 337.0 334.4 331.5 328.4 325.7 323.2 320.9 318.5 316.8 315.6 314.7 314.4 314.4 314.4 314.4 314.4 314.6 314.6 314.6 314.7 314.7 314.7 314.8 314.9 314.7 314.6 314.4 314.1 313.7 313.4 313.0 312.5 312.2 312.1 311.9 311.8 311.8 311.7 311.7 311.7 311.7 311.7 311.5 311.5 311.5 311.5 311.5 311.5 311.3 311.3 311.3 311.2 311.1 311.1 310.9 310.9 310.9 310.9 311.0 311.1 311.3 311.4 311.7 312.0 312.3 312.4 312.6 312.7 312.9 312.8 312.7 312.7 312.7 312.6 312.6 312.4 312.2 
312.1 312.0 312.0 311.8 311.8 311.8 311.8 316.7 316.7 316.7 316.7 316.7 316.7 316.7 316.7 316.7 316.7 316.7 316.7", - "input_type": "phoneme", - "offset": 48.69 + "f0_timestep": "0.005" }, { + "offset": 50.069, "text": "AP 回 到 年 少 的 姿 态 AP 你 的 白 发 也 不 再 SP", "ph_seq": "AP h ui d ao n ian sh ao d e z i0 t ai AP n i d e b ai f a y E b u z ai SP", - "note_seq": "rest G4 G4 G#4 G#4 G4 G4 F4 F4 A#4 A#4 D#4 D#4 D#4 D#4 rest A#3 A#3 C4 C4 D#4 D#4 G4 G4 F4 F4 G4 G4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.3449999 0.3449999 0.1720001 0.1719999 0.1719999 0.1730001 0.1730001 0.345 0.345 0.1719999 0.1719999 0.517 0.517 0.1730001 0.1730001 0.5169997 0.5169997 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.200999 0.075001 0.11542 0.05658 0.111633 0.061367 0.098144 0.073856 0.127002 0.044998 0.096061 0.076939 0.21028 0.13472 0.345 0.127002 0.044998 0.126998 0.045002 0.094528 0.078472 0.225001 0.119999 0.112742 0.059258 0.456999 0.060001 0.085346 0.087654 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.201 0.075 0.1154 0.0566 0.1116 0.0614 0.0981 0.0739 0.127 0.045 0.0961 0.0769 0.2103 0.1347 0.345 0.127 0.045 0.127 0.045 0.0945 0.0785 0.225 0.12 0.1127 0.0593 0.457 0.06 0.0853 0.0877 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 1", + "note_seq": "rest G4 G#4 G4 F4 A#4 D#4 D#4 rest A#3 C4 D#4 G4 F4 G4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.173 0.345 0.345 0.172 0.172 0.173 0.345 0.172 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 404.6 401.6 397.9 395.5 393.1 390.1 387.9 386.8 386.9 387.3 388.4 389.5 390.9 392.0 393.1 394.3 394.8 395.1 395.1 394.7 393.3 391.8 389.6 386.8 383.4 379.5 374.4 362.6 372.5 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 415.8 417.1 416.2 415.4 414.7 414.1 413.6 413.2 412.8 412.3 411.8 411.2 410.1 409.1 407.7 406.5 405.1 404.1 402.6 399.7 397.9 398.5 398.6 399.5 400.4 398.7 396.0 394.4 393.7 393.4 393.2 392.7 392.3 391.9 391.3 390.3 389.0 387.3 384.7 381.5 378.1 373.9 368.5 363.9 363.9 363.9 363.9 363.9 363.9 363.9 363.9 364.0 364.0 364.0 364.0 364.0 364.0 364.0 364.0 364.1 359.0 355.5 353.5 351.8 350.4 350.1 349.6 349.3 349.0 348.6 348.3 348.2 348.2 348.2 347.9 347.4 345.6 343.8 341.5 339.3 337.7 333.2 335.9 337.4 330.6 332.5 345.7 350.5 355.5 375.5 396.6 418.9 442.4 467.2 464.3 464.3 463.9 465.0 466.2 467.7 468.6 469.0 469.7 470.6 469.8 462.4 455.2 448.1 441.0 434.1 427.3 420.6 414.0 407.5 401.1 394.8 388.6 382.5 376.5 370.6 364.8 359.1 353.4 347.9 342.4 337.1 331.8 326.6 321.4 317.5 312.6 310.5 310.6 310.4 310.4 310.6 310.5 310.3 310.1 309.3 309.2 309.0 308.8 308.6 308.5 308.3 308.5 308.5 308.4 308.6 309.1 309.5 310.0 310.5 310.9 311.4 311.7 312.0 312.0 312.2 312.4 312.1 311.7 311.1 310.2 309.3 308.4 307.4 306.0 302.7 298.3 299.2 300.1 301.0 301.9 302.9 303.8 304.7 305.6 306.6 307.5 308.4 309.4 310.3 311.3 312.2 313.1 314.1 315.1 311.8 311.8 311.8 311.8 311.9 312.1 312.2 312.2 312.4 312.5 312.6 312.7 312.7 312.7 312.8 312.8 312.7 312.7 312.4 312.2 311.9 311.6 311.3 310.9 310.5 310.2 309.9 309.7 
309.4 309.3 309.3 309.3 309.3 309.4 309.5 309.8 310.0 310.3 310.6 310.8 311.0 311.3 311.5 311.6 311.7 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 311.8 312.0 312.0 312.0 312.1 312.2 312.2 312.2 312.4 312.3 312.0 311.7 311.3 310.8 310.2 309.3 308.7 308.0 307.4 306.8 306.3 305.9 305.6 305.5 306.4 307.4 308.5 309.6 310.7 311.8 312.9 314.0 315.1 316.2 317.3 318.4 319.5 319.0 317.6 314.1 309.4 303.9 293.6 278.6 265.5 255.3 249.0 245.3 243.0 239.6 235.4 233.4 231.0 229.7 229.2 229.3 229.5 230.3 231.3 232.0 232.6 233.2 233.8 233.6 233.1 232.6 231.3 229.2 227.0 224.2 222.0 221.1 222.3 226.1 230.2 234.5 238.8 243.2 247.7 252.2 256.8 261.6 266.4 271.3 262.7 262.1 261.4 260.9 261.2 261.6 261.8 262.0 262.2 262.0 261.8 261.2 260.5 259.5 258.1 256.1 254.6 252.5 249.1 245.7 241.4 241.2 241.0 240.1 241.2 244.7 252.1 259.8 267.7 275.8 284.1 292.7 301.6 310.8 320.2 317.8 315.4 314.7 313.9 313.4 313.2 312.9 313.0 313.1 313.3 313.2 312.6 312.1 311.1 310.4 309.8 309.8 309.7 309.7 309.7 309.5 309.5 309.5 309.4 309.3 309.3 309.3 309.3 309.3 309.3 309.5 309.6 309.8 309.9 309.9 310.1 310.2 310.2 310.3 310.3 310.1 310.0 309.8 309.7 309.3 309.2 308.9 308.7 308.5 308.4 308.4 326.3 330.6 335.0 339.5 344.0 348.6 353.3 358.0 362.8 367.6 372.5 377.5 382.5 387.6 392.8 398.0 403.3 405.0 402.8 401.5 400.0 398.1 396.8 395.6 394.5 393.6 393.0 392.4 391.5 390.4 389.3 388.2 386.9 385.6 384.3 382.9 381.1 379.5 378.2 376.5 374.3 371.3 367.5 362.4 360.8 359.9 359.4 358.8 358.4 358.0 357.3 356.4 355.4 354.5 353.5 352.6 352.3 352.1 352.3 352.3 352.4 352.3 352.3 352.4 352.4 352.2 352.1 352.0 351.8 351.3 350.9 350.4 349.4 348.8 348.2 347.5 346.9 346.5 346.3 346.1 346.0 346.0 346.1 346.3 346.5 346.9 347.3 347.8 348.6 349.2 349.9 350.4 350.9 351.3 351.5 351.7 351.9 351.9 351.8 351.6 351.3 351.0 350.8 350.4 350.0 349.5 348.6 348.0 347.3 346.7 346.2 345.7 345.4 345.3 345.4 345.7 346.1 346.6 347.1 347.4 347.8 347.6 347.8 348.2 348.5 348.9 348.8 348.0 346.4 344.4 343.2 342.6 342.7 344.1 344.8 338.7 337.3 339.1 347.2 347.4 350.6 354.9 359.4 364.1 368.8 373.5 378.3 383.2 388.2 393.2 398.2 403.4 408.6 413.8 419.2 400.0 395.0 394.6 394.4 394.4 394.5 394.3 393.5 392.5 391.5 390.0 386.2 382.8 376.7 369.1 362.8 357.6 351.0 338.8 333.5 332.1 331.7 331.3 331.0 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.6 312.6 312.6 312.7 312.7 312.7 312.7 312.7 312.7 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.3 312.2 312.0 311.7 311.4 311.1 310.7 310.5 310.4 310.4 310.4 310.4 310.4 310.5 310.7 310.9 311.0 311.1 311.1 311.3 311.5 311.6 311.7 311.7 311.7 311.8 311.7 311.7 311.5 311.4 311.2 311.1 310.9 310.7 310.6 310.4 310.4 310.4 310.4 310.4 310.5 310.6 310.6 310.8 310.9 311.1 311.2 311.4 311.6 311.7 311.7 311.7 311.8 311.7 311.7 311.5 311.4 311.1 310.9 310.7 310.4 310.2 310.1 309.9 309.9 309.9 309.9 309.9 309.9 309.9 310.1 310.1 310.1 310.2 310.2 310.2 310.3 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.3 310.2 310.1 309.9 309.7 309.5 309.3 309.2 309.0 309.0 313.3 313.3 313.3 313.3 313.3 313.3", - "input_type": "phoneme", - "offset": 50.069 + "f0_timestep": "0.005" }, { + "offset": 54.207, "text": "AP 生 命 的 短 暂 SP", "ph_seq": "AP sh eng m ing d e d uan z an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.140998 0.135002 0.122138 0.049862 0.124721 0.048279 0.123954 0.048046 0.066833 0.105167 0.345 0.069", - 
"f0_timestep": "0.005", + "ph_dur": "0.141 0.135 0.1221 0.0499 0.1247 0.0483 0.124 0.048 0.0668 0.1052 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 337.5 335.4 330.8 325.1 320.0 314.1 312.7 311.3 309.7 309.3 309.2 309.5 310.0 310.4 310.8 311.1 311.6 311.9 312.2 312.6 313.0 313.4 314.2 315.1 316.0 317.1 318.6 320.7 322.4 324.6 327.6 331.2 333.7 336.4 340.5 344.2 347.3 348.6 349.7 349.8 350.2 350.6 350.8 350.8 351.2 351.4 351.7 352.2 352.7 353.1 353.3 353.1 352.2 351.5 350.5 349.8 348.3 346.8 344.5 340.7 338.4 336.6 335.2 334.0 333.5 332.5 333.9 343.1 357.1 371.5 386.6 387.7 388.8 390.2 391.8 392.7 393.1 393.0 392.6 392.2 391.7 391.1 390.6 390.3 390.2 390.0 390.0 390.0 389.7 388.8 387.0 385.7 383.4 379.1 379.2 383.5 385.3 389.5 394.6 391.4 390.1 396.2 402.4 408.7 413.6 412.4 413.6 413.6 415.0 415.6 415.9 415.7 415.2 414.6 414.2 413.4 412.1 410.7 409.1 405.9 402.8 398.5 392.9 389.3 385.6 382.7 380.0 378.7 383.3 388.9 391.3 393.6 396.0 398.5 400.9 403.3 405.8 408.3 410.8 406.8 405.9 405.3 405.4 405.6 405.4 405.0 404.7 404.3 403.5 402.2 400.7 399.3 396.7 395.4 393.5 391.5 389.3 387.8 386.9 386.3 386.2 386.2 386.3 386.8 387.5 388.1 389.1 390.3 391.1 392.2 393.2 394.1 395.2 395.9 396.8 397.3 397.6 397.8 397.7 397.2 396.6 395.6 394.5 393.1 391.5 390.2 389.1 388.7 388.9 389.4 390.0 390.6 391.5 392.3 392.9 393.5 393.9 394.0 394.0 393.8 392.3 390.4 387.4 383.7 377.7 371.1 363.6 351.8 343.6 341.8 341.8 341.8 341.8 341.8 341.8 341.8 341.8 341.8 341.8 341.8 341.8", - "input_type": "phoneme", - "offset": 54.207 + "f0_timestep": "0.005" }, { + "offset": 55.586, "text": "AP 生 活 的 阻 碍 SP", "ph_seq": "AP sh eng h uo d e z u ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.155997 0.120003 0.112002 0.059998 0.128002 0.044998 0.092616 0.079384 0.127998 0.390002 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.156 0.12 0.112 0.06 0.128 0.045 0.0926 0.0794 0.128 0.39 0.069", + "ph_num": "2 2 2 2 1 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.4 330.5 330.8 328.7 322.5 318.0 315.0 312.7 310.4 309.2 308.6 308.4 308.3 308.4 309.0 310.0 310.7 311.2 311.7 312.0 311.8 311.4 310.3 308.4 306.0 303.0 300.5 298.4 299.0 304.4 309.9 315.5 321.1 326.9 332.8 338.8 344.8 351.0 348.5 348.2 348.8 349.5 350.3 350.9 351.1 350.9 350.8 350.6 350.4 350.6 350.6 350.6 350.6 350.2 350.2 350.1 349.7 349.5 348.9 347.8 346.2 344.3 342.5 341.1 339.6 338.4 337.9 340.8 328.1 342.0 352.3 361.1 369.6 373.6 375.6 377.9 380.2 382.7 385.0 386.4 387.0 386.6 384.4 382.4 380.2 377.1 371.5 355.0 354.1 357.8 
361.4 365.2 368.9 372.7 376.5 380.4 384.3 388.3 392.2 396.3 400.3 404.5 408.6 412.8 417.0 421.3 422.3 420.1 418.6 418.3 418.1 418.0 417.8 417.3 416.7 415.9 415.3 414.5 414.0 413.5 413.2 413.0 412.8 412.8 412.8 412.6 412.4 412.0 411.5 410.5 408.8 407.5 405.6 403.6 401.6 399.9 398.8 398.2 398.0 397.9 397.9 397.6 397.1 396.7 396.6 396.8 396.8 397.0 396.6 396.3 395.7 395.2 394.3 393.6 392.9 392.1 391.2 390.0 388.5 387.1 385.3 383.9 382.8 381.5 380.2 379.4 378.8 378.6 378.9 379.5 380.5 382.0 383.5 385.1 387.0 389.1 390.6 391.8 393.1 393.8 394.3 394.1 393.6 392.8 391.9 390.5 389.1 387.6 386.0 384.3 382.3 381.1 380.1 379.8 380.1 380.8 382.1 384.2 385.4 386.4 387.2 386.8 386.0 385.1 383.8 379.9 370.4 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7", - "input_type": "phoneme", - "offset": 55.586 + "f0_timestep": "0.005" }, { + "offset": 56.966, "text": "AP 已 经 逝 去 的 五 分 之 三 SP 还 有 多 少 安 排 SP", "ph_seq": "AP y i j ing sh ir q v d e w u f en zh ir s an SP h ai y ou d uo sh ao an p ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D4 D4 A#3 A#3 rest D#4 D#4 D#4 D#4 F4 F4 G4 G4 G#4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.3440001 0.3440001 0.3449998 0.3449998 0.1730001 0.1719999 0.1719999 0.1720002 0.1720002 0.1729999 0.1729999 0.1720002 0.1720002 0.1729999 0.3439999 0.3439999 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.230002 0.044998 0.113002 0.059998 0.072936 0.099064 0.10701 0.06599 0.10773 0.06427 0.312594 0.032406 0.088149 0.083851 0.416079 0.101921 0.184363 0.159637 0.345 0.097999 0.075001 0.126483 0.045517 0.126998 0.045002 0.089736 0.083263 0.127002 0.149559 0.068439 0.344 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.23 0.045 0.113 0.06 0.0729 0.0991 0.107 0.066 0.1077 0.0643 0.3126 0.0324 0.0881 0.0839 0.4161 0.1019 0.1844 0.1596 0.345 0.098 0.075 0.1265 0.0455 0.127 0.045 0.0897 0.0833 0.127 0.1496 0.0684 0.344 0.068", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 1 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 D4 A#3 rest D#4 D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.172 0.518 0.344 0.345 0.173 0.172 0.172 0.173 0.172 0.173 0.344 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 305.2 305.7 306.6 307.8 309.2 310.8 311.8 312.5 313.0 313.1 313.1 313.2 313.4 313.7 314.0 314.2 314.5 314.8 315.0 315.2 315.3 315.5 315.5 315.2 313.9 311.9 309.1 305.3 301.9 298.5 295.5 292.7 290.9 290.0 320.1 322.6 325.0 327.5 330.0 332.6 335.1 337.7 340.3 342.9 346.1 347.8 348.3 348.5 348.8 349.2 349.8 350.3 350.5 350.6 350.4 349.4 348.4 346.0 343.2 340.7 337.5 333.7 329.1 328.1 332.9 337.7 342.6 347.6 352.7 357.8 363.0 368.3 373.6 379.1 384.6 390.2 395.9 399.4 399.1 398.6 398.0 397.2 396.3 395.4 394.4 393.6 392.8 392.0 391.5 391.3 391.1 390.2 387.9 383.9 379.7 375.2 370.7 366.6 362.8 360.7 360.0 360.4 362.2 365.3 369.3 374.1 379.5 386.8 392.8 398.6 403.9 408.9 412.7 414.8 417.0 416.2 416.0 414.5 414.3 414.3 414.3 414.5 414.8 415.0 414.9 414.8 414.7 414.5 414.1 413.6 413.0 412.0 409.8 407.5 404.6 402.8 400.5 397.6 390.9 387.9 393.8 398.5 397.0 400.1 406.4 
406.1 405.7 405.4 404.7 402.8 403.0 402.8 403.0 403.2 403.2 403.1 402.7 402.1 401.3 400.0 398.2 396.3 394.1 392.6 391.3 389.8 389.1 388.7 388.8 389.2 390.0 390.6 391.4 392.0 392.4 392.9 393.4 393.7 394.1 394.5 394.8 395.0 394.7 394.7 395.0 395.0 394.8 394.6 394.2 393.8 393.4 392.8 392.1 391.3 390.7 389.8 388.9 387.9 386.0 384.4 382.5 380.8 378.5 376.4 374.2 371.3 368.1 364.7 361.1 357.1 352.8 350.2 348.2 347.0 347.2 347.7 348.9 350.2 351.4 352.5 353.0 352.8 351.8 349.5 345.5 340.2 336.6 330.8 325.2 327.7 327.9 328.0 328.1 328.2 328.4 328.5 328.6 328.7 328.9 329.0 329.1 329.3 329.4 329.5 329.6 329.8 329.9 330.0 330.1 330.3 330.4 330.5 326.5 323.8 323.3 322.7 322.1 321.4 320.3 319.5 318.4 316.9 315.8 314.6 313.5 312.4 311.3 310.6 309.7 309.3 310.0 309.9 309.8 309.7 310.1 310.3 310.5 310.9 311.2 311.3 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.6 311.9 312.2 312.4 312.4 312.5 312.4 312.4 312.3 312.0 311.8 311.8 311.6 311.5 311.5 310.8 311.1 311.5 311.9 312.4 312.7 312.9 312.9 312.8 313.2 313.5 313.7 313.5 313.1 312.3 311.5 310.6 309.4 307.9 306.1 304.2 300.9 298.3 294.8 290.8 287.6 284.9 284.8 287.1 287.6 288.0 288.5 288.9 289.4 289.8 290.2 290.7 291.1 291.6 292.0 292.5 292.9 293.4 293.8 294.3 294.7 295.2 295.7 296.1 296.6 303.5 303.3 301.7 301.2 300.2 299.5 298.7 297.3 295.9 294.9 294.1 293.2 292.6 292.2 291.8 291.6 291.3 291.5 291.6 291.6 292.2 292.8 293.4 293.9 294.5 294.9 295.1 295.3 295.5 295.2 294.2 292.9 291.6 289.7 286.8 282.8 279.0 273.7 273.3 268.0 264.9 264.4 263.8 263.3 262.7 262.2 261.6 261.1 260.6 260.0 259.5 258.9 258.4 257.8 257.3 256.8 256.2 255.7 255.2 254.6 254.1 253.6 253.0 252.5 252.0 251.5 250.9 246.7 244.6 243.6 242.6 241.8 241.4 240.7 239.8 239.1 238.3 237.5 236.7 235.9 235.5 234.8 234.0 233.5 232.6 232.3 231.8 231.7 231.6 231.5 231.5 231.7 232.0 232.5 232.9 233.2 233.8 234.2 234.3 235.0 235.7 236.0 236.5 236.8 237.1 237.4 237.4 237.3 236.7 236.2 235.9 235.5 234.2 233.6 233.1 232.0 231.7 231.5 231.6 232.2 232.8 233.1 233.4 233.6 233.8 233.8 233.5 232.8 232.0 230.9 230.3 230.1 230.1 230.1 229.5 228.9 227.2 224.1 224.1 226.9 229.7 232.6 235.5 238.4 241.4 244.4 247.4 250.5 253.6 256.8 260.0 263.2 266.5 269.8 273.2 276.6 280.0 283.5 287.0 290.6 294.2 297.9 301.6 305.4 309.2 313.0 316.9 320.9 324.8 328.9 326.4 319.3 315.6 312.0 309.2 307.2 306.1 305.8 305.6 306.0 306.7 307.4 308.7 309.5 310.2 311.4 312.3 312.7 313.1 313.2 312.7 312.5 312.0 311.7 311.8 311.8 311.7 311.7 311.5 311.3 311.1 311.0 310.8 310.6 310.6 310.5 310.2 310.2 310.1 310.2 310.4 310.6 310.9 311.4 311.6 311.8 311.6 311.0 310.0 308.4 306.5 304.2 301.7 298.0 294.9 292.3 288.0 285.3 288.6 288.3 284.4 281.2 281.9 288.4 299.9 311.6 352.9 352.7 352.3 351.7 351.1 350.6 350.2 349.7 349.6 349.6 349.8 350.2 350.7 351.3 351.6 351.8 351.7 351.0 349.8 348.5 347.1 345.5 344.4 343.4 343.0 355.5 360.3 365.1 370.0 374.9 379.9 385.0 390.2 395.4 400.7 406.0 411.5 403.2 400.9 399.1 397.0 395.5 393.8 392.4 391.7 391.1 390.6 390.3 390.1 389.9 389.7 389.5 389.5 389.6 389.7 390.1 390.6 391.5 392.4 393.2 394.3 395.4 396.2 396.9 397.9 399.5 400.8 402.6 405.2 406.8 408.6 409.8 410.8 411.8 412.4 413.4 414.1 414.8 415.5 415.8 416.1 416.2 415.9 415.4 414.9 414.3 413.4 412.2 410.6 408.7 406.3 403.9 401.4 398.3 394.4 389.9 386.1 383.3 380.9 382.1 385.3 388.6 391.8 395.1 398.4 401.8 405.2 412.3 414.3 412.2 411.7 411.7 411.8 411.9 411.7 411.7 410.9 410.1 408.2 406.2 403.2 400.7 398.2 395.3 393.4 391.9 390.8 390.0 389.4 389.1 389.2 389.6 390.4 391.3 392.4 394.0 395.4 396.6 398.6 400.2 401.5 402.8 403.9 404.8 
405.1 405.0 404.4 403.7 402.6 401.4 400.0 398.7 397.1 395.7 394.7 394.0 393.6 393.1 392.9 393.0 393.2 393.6 394.1 394.3 394.7 395.0 395.0 394.6 393.4 392.0 390.0 386.5 382.2 377.4 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5", - "input_type": "phoneme", - "offset": 56.966 + "f0_timestep": "0.005" }, { + "offset": 61.103, "text": "AP 有 多 少 期 待 SP", "ph_seq": "AP y ou d uo sh ao q i d ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.230994 0.045006 0.119617 0.053383 0.08388 0.08812 0.084026 0.088974 0.127002 0.044998 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.1196 0.0534 0.0839 0.0881 0.084 0.089 0.127 0.045 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.173 0.172 0.173 0.172 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 284.0 284.5 285.6 287.3 289.8 292.2 294.9 297.7 300.6 303.7 306.2 308.3 309.8 310.9 311.4 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.3 310.6 309.6 308.3 306.7 305.2 303.1 300.9 297.9 288.1 295.7 302.1 301.1 301.4 307.3 314.6 322.1 329.8 337.7 345.7 354.0 357.5 348.0 347.7 348.2 347.8 349.2 349.4 349.3 349.2 348.6 347.5 346.1 343.3 339.0 334.4 331.0 329.7 329.4 332.7 336.0 339.4 342.8 346.2 349.7 353.2 356.7 360.3 363.9 367.5 371.2 374.9 378.7 382.5 386.3 388.2 388.2 388.6 389.4 390.2 391.2 392.2 393.4 394.4 395.3 396.1 396.9 397.4 397.6 397.4 396.4 394.7 392.1 389.1 385.7 382.2 378.2 374.5 371.4 368.7 366.8 365.1 364.8 392.4 394.9 397.4 399.9 402.4 404.9 407.5 410.1 410.3 413.2 413.9 414.6 415.4 415.9 416.2 416.3 416.1 415.9 415.5 414.9 414.1 412.4 411.0 409.4 407.0 404.6 402.7 400.2 396.1 398.7 394.0 382.9 381.3 388.7 398.6 402.7 406.3 415.9 415.5 415.0 414.6 403.6 401.9 402.8 404.1 405.4 406.5 407.5 407.7 407.3 406.7 406.0 405.1 403.7 401.3 399.8 397.9 395.6 394.1 392.5 391.5 390.5 389.9 389.5 389.1 388.8 388.9 389.3 390.2 391.3 392.2 393.4 394.5 396.1 397.4 398.8 400.3 401.3 401.7 402.0 402.1 401.9 401.4 400.3 399.3 398.2 397.2 396.2 395.4 394.5 393.7 392.7 391.7 391.0 390.7 390.6 391.0 391.7 392.3 392.8 392.9 392.7 391.8 390.6 389.7 388.4 387.1 384.7 385.0 377.7 372.1 370.6 370.6 370.6 370.6 370.6 370.6 370.6 370.6 370.6 370.6 370.6 370.6 370.6", - "input_type": "phoneme", - "offset": 61.103 + "f0_timestep": "0.005" }, { + "offset": 62.483, "text": "AP 过 去 的 梦 何 时 捡 起 来 SP", "ph_seq": "AP g uo q v d e m eng h e sh ir j ian q i l ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 A#4 A#4 G4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.345 0.345 0.3439999 0.3439999 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.216002 0.059998 0.092616 0.079384 0.127002 0.044998 0.113002 0.059998 0.091998 0.080002 0.224997 0.120003 0.090358 0.082642 0.355257 0.161743 0.255004 0.089996 0.344 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.216 0.06 0.0926 0.0794 0.127 0.045 0.113 0.06 0.092 0.08 0.225 0.12 0.0904 0.0826 0.3553 0.1617 0.255 0.09 0.344 0.069", + 
"ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 A#4 G4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.173 0.517 0.345 0.344 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 324.3 309.0 309.0 309.0 309.1 309.2 309.4 309.6 309.9 310.2 310.4 310.7 310.9 311.0 311.2 311.3 311.3 311.5 311.0 310.2 308.8 306.9 305.0 302.9 300.9 298.6 297.0 295.9 295.3 295.1 295.7 297.4 300.5 304.4 308.6 313.4 318.7 324.8 329.8 334.5 338.5 342.0 344.0 345.1 345.8 346.0 346.4 346.9 347.3 347.7 348.2 348.5 348.9 349.4 349.7 350.1 350.2 350.2 349.7 349.0 348.2 346.6 344.8 343.0 340.7 336.0 328.3 324.3 323.5 323.0 328.0 337.7 347.6 357.9 368.5 379.4 390.6 391.3 392.4 392.4 392.4 392.4 392.6 392.8 392.9 393.2 393.6 393.8 394.0 394.3 394.6 394.9 395.2 395.4 395.9 396.1 396.2 396.4 396.6 396.8 396.8 396.8 397.0 397.4 398.8 400.9 403.8 407.3 410.4 413.4 415.8 417.6 418.3 418.3 418.0 417.7 417.2 416.6 416.0 415.5 414.9 414.7 414.6 413.8 411.0 406.5 400.5 394.0 387.1 382.6 380.3 386.7 387.4 388.1 388.8 389.5 390.2 391.0 391.7 392.4 393.1 393.8 394.6 395.3 396.0 395.9 396.1 396.7 397.3 397.8 398.0 398.3 398.1 397.7 397.1 396.3 395.4 394.4 393.2 391.8 390.8 390.0 389.3 389.0 388.8 388.6 388.5 388.4 388.4 388.6 388.6 388.6 388.8 389.1 389.6 390.3 391.0 391.8 392.7 393.4 394.0 394.2 394.0 393.9 393.4 392.4 391.1 389.0 385.5 381.8 377.8 371.4 366.0 360.4 355.3 352.6 352.6 352.5 352.4 352.3 352.3 352.2 352.1 352.0 351.9 351.9 351.8 351.7 351.4 351.3 351.2 351.0 350.9 350.7 350.5 350.2 350.0 349.8 349.6 349.4 349.3 349.2 349.2 349.2 349.4 349.7 350.0 350.0 349.6 347.8 344.4 340.3 335.6 330.3 324.4 319.6 315.8 313.0 311.4 311.5 311.2 310.8 310.5 310.1 309.8 309.4 309.1 308.7 308.5 308.8 308.4 309.0 309.7 310.4 311.5 313.0 313.8 314.1 314.3 314.3 314.1 313.6 312.6 311.9 311.1 310.2 309.4 309.0 308.4 308.3 308.3 308.3 308.4 308.6 308.8 309.0 309.4 309.8 310.2 310.5 311.1 311.6 312.0 312.5 312.7 312.7 312.7 312.7 312.7 312.9 313.0 313.0 312.8 312.6 312.3 311.8 311.4 311.1 310.6 310.2 309.9 309.4 309.3 309.5 309.9 310.4 311.2 312.0 312.6 313.3 314.1 314.7 315.1 315.3 315.0 314.2 312.7 310.2 307.4 306.8 306.4 309.5 314.5 318.9 323.4 327.9 332.5 337.2 341.9 346.8 351.6 356.6 361.6 366.7 371.8 377.1 382.4 387.7 393.2 398.7 404.3 410.0 415.8 421.6 427.5 433.6 439.7 445.8 452.1 458.5 464.9 470.2 471.2 472.0 472.9 473.6 473.8 473.3 472.8 471.9 470.9 470.2 468.3 466.7 465.4 464.4 463.4 462.9 462.5 462.4 462.6 462.9 463.4 463.9 464.8 465.4 465.6 466.3 466.7 466.9 467.4 467.5 467.9 468.4 468.8 469.0 469.1 469.1 469.3 469.8 470.0 470.3 470.4 470.2 469.9 469.1 468.1 466.9 465.6 463.3 461.1 458.6 454.7 450.8 447.4 443.8 440.4 436.4 432.6 427.5 420.3 416.5 412.7 409.3 404.4 401.1 399.0 398.1 398.2 399.2 400.9 403.7 406.6 407.2 407.2 406.7 406.3 406.1 405.9 405.3 404.4 403.7 402.8 401.8 400.7 399.5 398.2 396.3 395.1 393.5 392.2 390.9 389.7 388.6 387.7 387.5 387.3 387.8 388.4 389.3 390.8 392.7 394.5 396.2 397.7 399.5 400.5 401.6 402.7 403.2 403.4 403.1 402.3 401.3 400.1 398.7 397.2 395.9 394.8 393.8 392.7 392.1 391.5 391.1 390.9 390.4 390.4 390.2 389.8 389.7 389.4 389.1 388.8 388.4 387.8 387.0 385.8 385.0 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9 383.9", - 
"input_type": "phoneme", - "offset": 62.483 + "f0_timestep": "0.005" }, { + "offset": 65.241, "text": "AP 年 轻 的 牵 绊 SP", "ph_seq": "AP n ian q ing d e q ian b an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.095371 0.077629 0.146998 0.025002 0.080516 0.091484 0.127994 0.045006 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.0954 0.0776 0.147 0.025 0.0805 0.0915 0.128 0.045 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 295.7 296.1 297.2 298.5 300.2 302.2 304.3 306.3 307.9 309.2 310.1 310.6 310.6 310.9 311.4 312.0 312.8 313.4 313.8 314.3 314.5 314.4 314.3 314.1 314.0 313.8 313.7 313.5 313.5 313.3 312.7 311.9 310.7 309.3 307.4 305.6 303.8 302.1 300.4 298.9 297.8 296.9 296.5 330.4 332.9 335.4 337.9 340.4 343.0 344.9 344.2 343.2 345.6 347.4 348.8 350.0 350.0 349.8 349.5 349.3 349.2 349.1 348.9 348.7 348.6 348.6 348.6 349.1 349.4 349.9 350.6 350.8 350.8 350.6 350.1 349.2 348.2 347.2 346.9 346.8 347.6 350.0 353.3 357.6 363.0 369.0 374.4 379.5 384.6 387.6 389.2 389.9 389.7 389.7 389.7 389.7 389.6 389.5 389.4 389.3 389.3 389.3 389.3 389.1 388.6 388.1 387.5 386.8 397.9 399.8 401.8 403.7 405.6 407.6 409.5 411.5 413.5 415.5 417.5 415.5 413.5 414.3 415.2 415.9 416.5 416.7 416.5 416.0 415.3 414.3 412.8 412.2 411.0 409.4 406.5 403.2 401.3 400.7 399.5 402.1 401.5 400.3 398.5 396.2 393.7 391.1 391.1 391.1 391.3 391.5 391.7 391.9 392.1 392.2 392.2 392.2 392.3 392.2 392.1 392.0 391.8 391.5 391.2 390.9 390.6 390.3 390.1 389.9 389.5 389.5 389.3 389.3 389.3 389.3 389.3 389.4 389.7 390.0 390.3 390.6 391.1 391.3 391.5 391.5 391.7 391.7 391.5 391.5 391.5 391.3 391.1 390.9 390.7 390.6 390.6 390.4 390.4 390.4 390.4 390.5 390.7 391.0 391.4 391.8 392.0 392.1 392.4 392.3 392.2 392.2 392.0 391.8 391.6 391.4 391.2 391.1 391.1 390.8 390.0 388.9 388.0 387.3 381.3 368.1 368.1 368.1 368.1 368.1 368.1 368.1 368.1 368.1 368.1 368.1 368.1", - "input_type": "phoneme", - "offset": 65.241 + "f0_timestep": "0.005" }, { + "offset": 66.621, "text": "AP 老 去 的 无 奈 SP", "ph_seq": "AP l ao q v d e w u n ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.081439 0.090561 0.127002 0.044998 0.128002 0.044998 0.092448 0.079552 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.0814 0.0906 0.127 0.045 0.128 0.045 0.0924 0.0796 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 315.5 311.1 311.1 311.1 311.2 311.5 311.8 312.1 312.2 312.7 313.0 313.4 313.7 314.0 314.3 314.6 
314.7 314.9 314.9 315.1 315.0 314.8 314.7 314.5 314.3 314.0 313.7 313.5 313.4 313.2 313.1 312.9 312.0 310.2 307.4 304.7 302.1 300.0 298.6 320.6 323.3 325.9 328.6 331.3 334.1 336.8 339.6 342.4 345.2 348.4 347.8 347.5 347.9 348.2 348.8 349.4 349.8 350.1 350.2 350.0 349.7 349.4 349.0 349.0 348.8 348.7 348.6 345.4 336.9 334.7 334.1 334.5 340.0 347.9 355.9 364.2 372.6 381.2 390.0 399.1 408.3 417.7 396.1 394.0 393.0 392.1 391.7 391.8 391.9 392.1 392.1 391.9 391.6 390.9 390.0 389.3 388.5 387.9 387.8 387.7 387.9 388.3 388.9 389.7 391.2 393.0 394.8 397.0 399.9 402.0 403.9 405.8 407.5 408.6 410.2 411.7 412.9 413.9 414.9 416.1 417.2 417.9 418.3 418.8 419.1 418.9 418.9 418.4 418.0 417.1 415.5 414.5 413.2 412.0 409.7 407.0 405.2 403.3 401.3 399.2 397.0 396.8 396.6 395.4 394.1 392.9 392.7 393.3 394.2 394.0 394.0 394.0 394.0 394.0 394.0 393.9 393.8 393.8 393.8 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.4 393.4 393.2 392.9 392.6 392.4 392.0 391.8 391.4 391.2 390.8 390.5 390.4 390.4 390.4 390.4 390.6 390.9 391.3 391.8 392.4 392.8 393.1 393.3 393.5 393.4 393.2 393.1 392.8 392.4 392.0 391.4 391.0 390.7 390.5 390.4 390.4 390.2 390.2 390.2 390.2 390.1 390.0 389.9 389.7 389.7 389.7 389.7 389.7 389.6 389.4 389.2 388.9 388.5 388.1 387.7 387.3 387.1 386.9 386.8 376.9 376.9 376.9 376.9 376.9 376.9 376.9", - "input_type": "phoneme", - "offset": 66.621 + "f0_timestep": "0.005" }, { + "offset": 68.0, "text": "AP 生 命 重 复 着 因 果 循 环 SP 还 有 谁 的 身 影 能 永 远 如 此 的 伟 岸 啊 SP", "ph_seq": "AP sh eng m ing ch ong f u zh e y in g uo x vn h uan SP h ai y ou sh ei d e sh en y ing n eng y ong y van r u c i0 d e w ei an an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D4 D4 A#3 A#3 rest D#4 D#4 D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 A#4 A#4 G4 G4 F4 F4 D#4 D#4 F4 F4 F4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.3439999 0.3439999 0.1730001 0.1730001 0.517 0.517 0.345 0.345 0.345 0.345 0.1719999 0.1719999 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.3449998 0.3449998 0.3450003 0.3450003 0.3449998 0.3449998 0.3440003 0.3440003 0.3449998 0.3449998 0.3450003 0.3450003 0.3449998 0.3449998 0.1719999 0.1719999 1.552 0.3449998 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.140998 0.135002 0.111995 0.060005 0.100597 0.072403 0.112002 0.059998 0.098232 0.074768 0.299002 0.044998 0.128002 0.044998 0.351999 0.165001 0.224997 0.120003 0.345 0.095851 0.076149 0.127002 0.044998 0.096523 0.076477 0.127002 0.044998 0.112995 0.060005 0.145061 0.026939 0.254996 0.090004 0.300002 0.044998 0.300002 0.044998 0.287711 0.05629 0.193709 0.151291 0.299994 0.045006 0.277243 0.067757 0.127002 1.552 0.389998 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.141 0.135 0.112 0.06 0.1006 0.0724 0.112 0.06 0.0982 0.0748 0.299 0.045 0.128 0.045 0.352 0.165 0.225 0.12 0.345 0.0959 0.0761 0.127 0.045 0.0965 0.0765 0.127 0.045 0.113 0.06 0.1451 0.0269 0.255 0.09 0.3 0.045 0.3 0.045 0.2877 0.0563 0.1937 0.1513 0.3 0.045 0.2772 0.0678 0.127 1.552 0.39 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 D4 A#3 rest D#4 D#4 F4 G4 G#4 G4 F4 D#4 A#4 G4 F4 D#4 F4 F4 G4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.344 0.173 0.517 0.345 0.345 0.172 0.172 0.173 0.172 0.173 0.172 0.345 0.345 0.345 0.344 0.345 0.345 0.345 0.172 1.552 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 311.0 310.9 310.9 310.8 310.7 310.6 310.4 310.1 309.9 309.8 309.7 309.6 309.5 309.5 309.5 309.4 309.3 309.3 309.3 309.3 309.3 309.2 309.2 309.2 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.3 310.5 312.5 315.1 318.4 322.1 327.0 331.4 335.8 340.1 344.4 348.2 351.0 353.1 354.3 354.6 354.5 354.3 354.0 353.6 353.2 352.8 352.3 352.0 351.9 351.9 351.3 349.8 348.4 347.4 347.8 349.0 351.0 353.9 357.7 361.6 366.0 370.4 375.8 380.4 384.5 388.1 391.2 393.8 395.2 395.6 395.4 395.0 394.6 394.1 393.5 392.9 392.3 391.8 391.3 391.3 391.2 391.1 391.1 390.9 390.6 390.5 390.1 389.9 389.6 389.4 389.1 389.1 388.9 388.8 388.9 389.3 390.6 392.5 395.3 398.4 401.5 404.8 408.2 412.0 414.3 416.3 418.3 418.0 417.7 417.3 416.9 416.6 416.5 416.5 416.5 416.5 416.5 416.5 416.5 415.7 413.4 409.3 405.0 399.9 396.1 393.8 391.7 391.3 390.9 390.6 390.3 390.0 390.0 390.0 390.0 390.3 390.7 391.2 391.8 392.5 393.2 393.6 394.0 394.2 394.5 394.3 394.1 393.9 393.7 393.4 393.0 392.6 392.4 392.0 392.0 389.5 389.5 389.7 390.1 390.6 391.2 391.8 392.2 392.6 393.0 393.2 393.4 393.4 393.6 393.6 393.6 393.6 393.6 393.6 393.7 393.7 393.6 393.4 393.2 393.1 392.8 392.1 391.4 390.6 389.6 388.6 387.1 385.0 382.6 379.7 376.9 373.4 370.3 367.4 364.7 362.0 359.2 357.0 355.1 354.1 353.2 352.7 351.7 350.0 349.4 348.4 347.4 346.9 346.9 347.1 347.4 347.8 348.3 349.0 349.2 348.5 347.0 344.2 342.2 340.6 338.7 336.4 334.3 331.6 328.3 324.7 322.2 319.9 315.8 309.4 307.8 315.1 315.2 316.6 317.9 319.3 320.7 322.0 323.4 324.8 319.8 316.7 316.0 315.5 315.0 315.3 315.5 315.7 315.8 315.7 314.0 313.9 313.8 313.6 313.3 312.9 312.6 312.3 312.0 313.1 313.0 312.8 312.7 312.6 312.5 312.3 312.0 311.7 311.5 311.2 310.9 310.7 310.5 310.2 309.9 309.8 309.7 309.6 309.5 309.5 309.5 309.5 309.7 310.0 310.4 310.8 311.2 311.7 312.1 312.6 313.2 313.7 314.2 314.6 314.9 315.1 315.2 315.4 314.5 314.8 315.2 315.4 314.1 313.7 312.7 311.5 310.0 307.8 305.0 301.7 298.0 294.4 293.0 293.5 293.9 294.4 294.9 295.3 295.8 296.3 296.8 297.2 297.7 298.2 298.7 299.1 299.6 300.1 300.6 301.1 301.5 302.0 302.5 303.0 303.5 304.0 304.4 304.9 305.4 305.9 306.4 306.9 307.4 307.9 307.6 304.2 301.6 300.5 300.5 300.5 300.1 299.4 298.6 297.7 296.6 295.6 294.8 294.1 293.2 292.6 292.5 292.3 292.5 292.6 292.8 293.1 293.2 293.2 293.2 293.3 293.7 294.3 295.1 295.9 296.3 296.6 296.7 296.7 296.4 296.0 294.5 293.5 292.3 291.0 289.3 287.1 284.7 281.5 277.8 274.4 271.8 270.9 270.1 271.6 270.7 269.8 268.9 268.0 267.2 266.3 265.4 264.5 263.6 262.8 261.9 261.0 260.1 259.3 258.4 257.6 256.7 255.9 255.0 254.2 253.3 248.5 243.0 239.0 238.0 237.4 236.5 235.5 234.9 234.2 233.9 233.6 233.2 233.1 232.7 232.3 232.2 231.9 231.7 231.6 231.6 231.5 231.7 231.8 231.9 231.9 232.2 232.3 232.5 232.9 232.9 233.2 233.2 233.4 233.6 233.8 234.0 234.2 234.1 233.8 233.6 232.9 232.3 231.9 231.3 231.1 231.0 230.9 231.2 231.6 232.1 232.8 233.5 233.9 234.3 234.4 234.2 233.9 233.2 231.8 230.9 229.9 229.0 227.9 226.8 225.5 223.6 221.7 218.7 214.5 213.4 215.9 218.5 221.0 223.6 226.3 229.0 231.7 234.4 237.2 240.0 242.8 245.7 248.6 251.5 254.5 257.5 260.5 263.6 266.7 269.9 273.1 276.3 279.6 282.9 286.2 289.6 293.0 296.5 300.0 303.5 307.1 310.7 313.1 310.4 308.9 307.4 305.9 304.9 304.8 304.6 304.9 305.9 306.9 
308.0 309.3 310.4 311.2 311.7 311.8 311.6 311.1 310.2 309.9 309.6 309.7 310.0 310.3 311.0 311.7 312.1 312.3 312.4 312.5 312.5 311.1 311.1 311.2 311.3 311.5 311.6 311.9 312.1 312.4 312.6 312.8 313.1 313.3 313.4 313.6 313.7 313.8 313.9 313.5 311.8 308.0 304.0 300.7 298.6 298.1 298.8 300.7 303.1 306.4 310.2 315.0 320.0 324.8 329.6 334.2 339.3 342.7 345.4 347.3 348.4 348.4 347.8 346.7 345.3 343.8 342.7 341.8 341.5 341.4 341.6 341.7 342.0 342.3 342.6 343.0 343.4 343.7 343.9 344.0 344.2 344.9 346.7 350.2 355.1 360.5 366.6 373.0 380.4 386.4 391.3 395.2 397.7 398.8 398.6 398.2 397.7 396.9 396.1 395.3 394.6 394.0 393.4 393.2 393.0 392.4 391.3 389.8 388.6 387.8 387.5 387.7 388.3 389.3 390.6 392.1 393.9 395.9 398.4 400.7 402.8 405.1 407.6 409.8 411.7 413.4 414.8 416.0 416.6 417.1 417.2 417.2 417.3 417.5 417.5 417.7 417.9 418.1 418.4 418.8 419.0 419.2 419.4 419.4 419.6 419.6 419.8 419.7 419.2 418.2 416.3 414.1 411.7 409.1 406.2 403.4 401.0 398.8 397.1 395.7 395.1 398.2 397.7 397.1 396.6 396.2 395.7 395.0 394.5 394.3 393.9 393.4 392.8 392.4 392.4 392.2 392.2 392.1 392.0 391.9 391.7 391.5 391.3 391.3 391.3 391.1 390.4 389.2 387.7 385.5 382.6 379.4 376.2 372.9 369.1 365.0 361.3 357.9 354.7 351.4 348.6 346.4 344.6 343.2 342.0 341.9 341.8 342.0 342.5 343.1 343.8 344.4 345.3 346.2 347.1 347.8 348.4 348.9 349.4 349.4 349.6 349.6 349.7 349.8 349.8 350.0 350.2 350.5 350.7 350.9 351.0 351.1 351.3 351.3 351.4 351.3 351.1 351.0 350.8 350.4 350.0 349.5 348.9 348.4 347.9 347.4 346.9 346.3 346.0 345.7 345.5 345.3 345.2 346.2 346.5 346.6 346.8 347.1 347.3 347.7 348.2 348.6 349.1 349.4 350.1 350.2 350.6 350.8 350.8 350.6 350.2 349.7 348.9 348.2 347.2 345.9 344.7 343.3 341.4 338.9 337.1 334.8 332.7 330.6 328.2 324.6 321.5 319.5 317.6 314.9 312.7 311.4 310.5 309.9 309.7 309.5 309.7 310.2 310.8 311.2 311.8 312.6 313.2 313.7 314.1 314.2 314.3 314.2 313.9 313.5 313.2 312.9 312.1 311.8 311.5 311.1 310.8 310.6 310.5 310.4 310.6 311.0 311.5 311.8 312.1 312.4 312.4 312.6 312.6 312.6 312.7 312.6 312.7 313.0 313.2 313.5 313.7 313.7 313.7 313.7 313.7 313.6 313.4 313.0 312.6 311.8 311.3 311.3 311.5 313.7 314.3 315.8 318.8 322.3 326.5 331.3 337.2 344.0 350.6 357.6 364.9 373.4 380.8 387.8 394.6 401.1 407.5 412.2 415.8 418.3 419.9 420.6 420.3 420.1 419.5 418.9 418.2 417.6 416.9 416.1 415.5 415.1 414.7 414.6 414.8 415.6 417.2 420.3 424.6 429.1 434.1 439.5 445.4 451.6 456.8 461.5 465.4 468.9 471.0 472.0 469.4 469.4 469.3 469.1 469.1 469.1 468.9 468.1 466.9 465.1 462.9 459.6 456.1 452.5 448.6 444.1 439.1 434.7 430.3 426.0 421.3 417.1 413.4 409.9 406.8 403.7 401.8 400.2 399.0 398.3 394.9 394.9 396.0 397.5 398.4 399.1 399.5 399.5 399.3 399.0 398.6 398.3 397.9 397.9 397.6 397.0 396.5 395.8 395.0 394.0 393.0 392.1 391.0 390.0 389.4 388.8 388.4 388.6 389.0 389.6 390.3 391.0 391.7 393.0 394.1 395.3 396.6 398.0 399.4 400.0 400.1 399.2 397.3 395.2 393.7 390.9 387.5 391.3 399.6 396.1 396.4 400.2 400.4 398.5 396.6 394.8 392.9 391.0 389.2 387.3 385.5 383.7 381.9 380.1 378.3 376.5 374.7 372.9 371.2 369.4 367.7 366.0 364.2 362.5 360.8 359.1 357.4 356.2 356.3 355.7 355.0 354.6 354.5 354.4 354.2 353.7 353.0 352.3 351.3 350.5 349.7 349.0 348.7 348.0 347.9 347.8 347.8 348.1 348.3 348.5 348.7 348.8 348.9 349.0 348.8 348.6 348.6 348.6 348.8 349.2 349.7 350.2 350.7 351.1 351.3 351.0 350.5 349.9 348.8 346.5 344.4 342.0 338.1 335.4 332.9 330.8 326.2 322.8 325.5 327.0 328.7 333.6 337.2 336.3 335.4 334.5 333.6 332.7 331.8 330.9 330.0 329.1 328.2 327.3 319.1 316.2 315.8 315.1 314.7 314.9 314.7 314.5 314.0 313.5 312.7 312.0 311.1 310.4 309.7 309.1 308.5 308.3 
308.3 308.4 308.8 309.0 309.1 309.3 309.2 309.2 309.3 309.3 309.3 309.6 309.9 310.0 310.2 310.2 310.3 310.3 310.3 310.5 310.6 310.8 310.9 310.8 310.5 310.1 309.6 309.0 308.5 308.4 308.4 308.3 308.4 308.6 308.9 309.2 309.8 310.5 311.3 312.2 313.3 314.3 317.5 320.6 324.6 328.9 333.3 336.6 339.0 341.7 344.2 346.0 348.1 349.8 350.6 351.4 351.9 351.9 351.5 350.6 349.6 349.0 348.4 348.0 347.7 347.4 347.1 346.9 346.5 346.1 345.8 345.6 344.7 344.0 343.4 342.2 341.4 340.9 340.7 341.1 342.3 343.6 345.3 347.2 348.9 350.5 351.7 352.8 353.5 354.1 354.3 354.5 354.5 354.3 354.1 354.0 353.7 353.2 352.5 351.7 350.8 350.0 349.2 348.3 347.6 346.5 345.8 345.2 344.4 344.1 343.9 343.8 343.9 344.2 344.7 345.3 345.9 346.5 347.4 348.0 349.6 350.4 351.3 352.3 352.6 353.2 353.4 353.5 353.4 353.2 352.9 352.3 351.6 350.9 350.2 349.3 348.6 347.8 347.0 346.0 345.5 344.9 344.2 343.9 343.8 343.7 343.8 344.2 344.5 345.2 346.2 346.9 347.6 348.6 349.2 349.9 350.4 350.8 351.1 351.4 351.3 351.3 351.1 350.6 350.1 349.6 349.1 348.4 347.7 347.2 346.9 346.4 346.1 345.9 345.5 345.3 345.1 345.1 345.2 345.3 345.5 345.9 346.4 347.4 348.2 349.2 349.8 350.4 350.8 351.1 351.4 351.5 351.4 351.1 350.8 350.5 350.1 349.5 348.7 348.1 347.4 346.3 345.6 345.1 344.6 344.0 343.7 343.5 343.4 343.7 344.0 344.4 344.9 345.4 345.9 346.4 347.1 347.6 348.5 349.1 349.7 350.2 350.6 351.3 351.9 352.3 352.6 352.5 352.1 351.5 350.9 350.1 349.6 348.8 348.2 347.6 347.1 346.9 346.4 345.9 345.6 345.1 344.6 344.2 343.8 343.8 343.9 344.3 344.6 345.1 345.6 346.1 346.7 347.5 347.9 348.4 349.0 349.7 350.3 350.8 351.1 351.6 351.9 352.1 352.0 351.9 351.5 351.1 350.7 350.3 349.8 349.2 348.6 348.0 347.4 346.8 346.0 345.5 344.9 344.6 344.4 344.4 344.4 344.6 344.8 345.1 345.5 345.9 346.3 346.9 347.4 347.9 348.4 348.8 349.4 349.7 350.0 350.4 350.4 350.4 350.1 349.7 349.5 349.1 348.8 348.6 348.3 348.2 348.0 347.7 347.4 347.0 346.8 346.6 346.8 346.8 347.0 347.3 347.4 347.4 347.4 347.4 347.4 347.5 347.8 348.1 348.2 348.3 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.3 348.2 348.2 348.3 348.5 348.7 348.9 349.3 349.5 349.6 350.0 350.2 350.4 350.4 350.4 350.3 350.4 350.5 350.7 350.9 351.0 350.9 351.3 351.2 351.0 351.2 351.0 350.8 350.8 350.6 350.8 351.0 351.2 351.3 351.4 351.5 351.6 352.0 352.3 352.8 353.1 353.3 353.3 353.1 353.1 352.9 352.6 352.0 351.3 350.7 350.1 349.7 349.5 349.9 350.7 352.1 353.7 355.5 357.8 361.5 365.0 369.5 374.0 379.5 384.7 389.9 395.4 400.1 403.1 405.1 406.8 407.5 407.5 406.5 405.1 403.5 401.7 398.6 396.1 393.8 391.5 389.1 386.7 384.5 382.5 381.5 381.1 381.5 382.4 383.7 385.0 386.7 389.5 391.6 393.8 397.0 399.2 401.4 403.1 404.5 405.4 405.6 405.4 404.5 403.5 402.3 401.0 398.6 396.5 395.0 393.3 392.0 391.5 390.5 389.6 389.3 388.9 388.6 388.6 388.7 388.9 389.1 389.5 390.4 391.5 394.3 396.9 399.8 404.5 409.9 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7 411.7", - "input_type": "phoneme", - "offset": 68.0 + "f0_timestep": "0.005" }, { + "offset": 76.276, "text": "AP 你 的 生 命 给 了 我 一 半 SP", "ph_seq": "AP n i d e sh eng m ing g ei l e w o y i b an SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.1719999 0.1719999 0.5180001 0.5180001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127002 0.044998 0.074512 0.098488 0.127002 0.044998 0.109599 0.062401 0.2927 0.052299 0.113399 0.059601 0.472002 0.044998 0.10316 0.06884 
0.518 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.127 0.045 0.0745 0.0985 0.127 0.045 0.1096 0.0624 0.2927 0.0523 0.1134 0.0596 0.472 0.045 0.1032 0.0688 0.518 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.173 0.517 0.172 0.518 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 269.8 286.5 286.4 286.2 286.1 285.9 285.6 285.5 285.2 285.0 284.9 284.8 284.8 284.0 282.0 279.1 275.6 271.7 267.6 263.1 259.7 257.2 255.4 254.5 255.3 256.6 257.6 258.6 260.3 261.3 261.8 262.2 262.5 262.7 262.7 262.7 262.5 262.1 261.5 260.1 258.7 257.1 257.8 258.5 263.1 267.5 271.8 276.2 280.7 285.2 289.8 291.6 292.6 296.8 302.4 307.2 310.8 312.0 312.0 312.0 312.0 311.8 311.8 311.8 311.7 311.7 311.5 311.5 311.5 311.4 311.0 310.5 309.9 309.0 308.5 308.2 325.7 329.0 332.2 335.5 338.8 342.2 345.6 349.0 352.5 356.0 359.5 363.1 365.0 360.3 357.9 355.5 354.2 353.2 352.9 352.4 351.7 351.3 350.7 350.0 349.6 349.4 349.2 349.1 348.6 347.6 346.6 345.0 343.9 342.0 340.2 338.2 335.7 334.0 332.6 330.8 328.6 326.7 325.1 323.2 321.5 321.3 321.2 318.6 316.0 314.8 313.8 312.8 312.0 310.9 309.6 308.1 306.4 305.0 304.1 303.7 304.1 305.6 308.1 311.8 314.0 316.0 317.6 318.3 317.5 315.9 314.4 313.0 311.4 313.1 329.1 345.9 363.5 382.1 401.6 422.0 443.5 466.2 462.1 462.2 463.6 465.5 467.5 469.1 470.4 471.4 472.0 472.4 472.4 471.4 470.2 468.9 466.4 465.1 464.1 463.3 462.8 462.5 462.4 462.5 462.8 463.1 463.7 464.8 465.5 466.4 467.8 468.9 469.8 470.4 470.5 470.1 469.4 468.3 466.2 463.5 460.9 457.7 454.1 450.4 445.8 440.3 432.2 420.6 409.6 397.2 389.3 381.6 373.3 362.9 353.2 345.3 337.7 331.1 325.8 321.9 318.2 315.1 313.1 312.2 311.8 312.7 314.3 315.8 317.6 317.5 316.1 314.7 313.7 312.9 312.5 312.0 311.6 311.5 311.5 311.5 311.3 311.0 310.3 309.4 308.4 307.6 306.5 305.3 304.4 303.3 301.9 300.9 300.0 299.0 298.1 297.2 296.5 295.4 294.7 294.2 293.7 293.5 293.6 293.7 293.9 294.1 294.3 294.5 295.0 295.3 295.7 296.0 296.2 296.5 296.7 296.7 296.6 296.6 296.6 296.6 296.6 296.3 296.0 295.5 295.2 295.0 294.5 294.2 293.8 293.7 293.4 293.2 292.9 292.7 292.6 292.6 292.8 293.0 293.2 293.6 293.9 294.2 294.4 294.7 295.0 295.0 295.0 295.0 295.1 295.3 295.4 295.4 295.3 295.2 295.0 295.0 294.9 294.7 294.7 294.5 294.7 294.7 294.8 294.9 294.7 294.6 294.6 294.6 294.6 294.6 294.5 294.3 294.3 294.0 294.2 294.0 294.0 294.0 293.9 293.8 293.9 293.9 293.7 293.6 293.3 292.9 292.8 292.4 292.3 292.0 291.4 290.8 290.1 289.5 288.4 287.5 286.1 284.5 282.4 281.0 280.9 282.9 288.9 293.2 295.0 297.6 300.2 302.6 305.1 307.4 309.2 310.5 311.6 312.3 312.7 312.6 312.3 311.7 310.6 309.1 307.8 306.7 305.1 303.0 301.4 297.9 292.1 280.0 278.0 281.4 286.1 290.9 295.8 300.7 305.7 310.8 316.0 321.3 326.7 332.1 337.7 343.3 349.1 347.8 347.9 349.7 350.8 351.9 352.9 354.1 354.9 355.2 355.7 355.0 354.3 353.1 351.7 349.8 347.3 345.5 343.5 342.1 341.2 340.4 339.7 339.2 338.9 338.8 339.3 340.2 341.1 342.0 343.2 344.2 345.2 346.5 347.6 348.6 349.7 350.8 351.5 351.9 352.1 352.1 351.8 351.3 350.6 349.6 348.6 347.3 345.9 344.8 343.5 342.2 341.3 340.4 339.7 339.1 338.5 338.5 338.5 339.0 339.5 340.5 341.6 343.0 344.4 345.5 347.1 348.5 349.6 350.4 350.8 350.6 350.1 349.6 349.3 349.0 348.8 348.7 348.6 348.8 349.1 349.1 347.6 345.1 341.9 338.0 338.2 342.2 342.2 
342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2 342.2", - "input_type": "phoneme", - "offset": 76.276 + "f0_timestep": "0.005" }, { + "offset": 79.035, "text": "AP 你 的 爱 也 给 了 我 一 半 SP", "ph_seq": "AP n i d e ai y E g ei l e w o y i b an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.517 0.517 0.173 0.173 0.517 0.517 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.229994 0.045006 0.147998 0.025002 0.127002 0.18526 0.032738 0.10419 0.06781 0.300002 0.044998 0.109721 0.062279 0.472002 0.044998 0.101482 0.071518 0.517 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.23 0.045 0.148 0.025 0.127 0.1853 0.0327 0.1042 0.0678 0.3 0.045 0.1097 0.0623 0.472 0.045 0.1015 0.0715 0.517 0.068", + "ph_num": "2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.172 0.517 0.173 0.517 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.6 252.2 251.3 250.5 250.9 251.1 250.9 250.3 249.1 247.1 244.4 241.3 238.7 234.9 231.2 228.7 227.1 226.5 226.9 227.9 229.1 230.2 231.2 232.4 233.3 233.6 233.7 233.9 233.4 232.6 231.4 229.8 228.1 226.6 225.1 223.1 219.8 216.8 213.0 208.7 218.6 229.0 239.8 251.2 263.1 275.5 288.6 302.3 307.5 307.4 308.3 309.4 310.5 311.6 312.0 312.4 312.7 312.7 312.7 312.4 312.0 312.0 312.3 312.5 312.9 313.4 313.4 313.2 313.0 312.5 312.3 311.7 311.2 310.6 310.1 310.1 310.3 311.0 312.7 314.7 317.5 321.3 324.6 327.8 331.7 335.1 338.5 342.8 344.8 347.0 348.5 350.0 350.7 350.8 349.9 348.8 347.7 346.4 344.8 342.7 340.8 338.5 336.3 334.1 331.5 328.8 324.7 323.2 321.0 317.8 315.4 313.3 311.8 310.8 310.2 309.8 309.7 309.7 309.7 309.9 310.2 310.8 311.4 311.8 311.3 310.3 309.0 307.6 305.9 304.0 302.4 303.1 307.6 306.7 308.4 311.1 313.8 316.5 319.3 322.0 324.8 327.6 330.5 333.3 336.2 339.2 342.1 345.1 348.0 351.1 354.1 353.6 354.3 355.0 354.6 354.4 354.0 353.5 352.9 352.3 351.3 350.6 349.8 348.8 347.9 347.1 346.3 345.6 345.1 344.5 344.3 344.2 344.2 344.2 344.3 344.6 344.8 345.1 345.5 345.8 346.1 346.5 346.9 347.2 347.6 347.8 347.8 347.9 348.0 348.3 348.9 349.6 350.2 350.9 351.5 351.8 351.7 351.1 350.1 348.5 346.6 344.5 341.5 338.5 335.5 332.3 328.9 325.2 321.9 318.7 315.7 312.6 309.9 307.7 305.8 304.2 302.8 302.1 301.8 302.0 302.8 304.5 307.2 310.3 312.9 314.6 315.5 314.5 314.3 313.8 313.4 312.8 312.0 311.4 310.9 310.6 310.6 309.7 308.1 305.8 303.5 301.2 299.5 298.8 297.6 296.9 296.7 297.3 297.8 297.8 297.4 297.4 297.4 297.3 297.0 296.8 296.7 296.8 297.0 297.1 297.1 297.1 297.1 297.1 297.4 297.1 296.7 296.4 296.0 295.5 294.9 294.3 293.7 293.1 292.6 292.2 292.0 291.9 291.6 291.6 291.6 291.6 291.8 291.8 292.1 292.1 292.1 292.3 292.6 292.9 293.2 293.7 294.1 294.3 294.7 295.1 295.2 295.6 295.7 295.4 295.2 295.0 294.6 294.4 294.1 293.7 293.5 293.3 293.2 293.2 293.2 293.3 293.3 293.2 293.0 293.0 292.9 292.8 292.9 292.9 292.9 293.1 293.2 293.3 293.7 293.7 293.9 294.0 294.1 294.3 294.2 294.3 294.3 294.3 294.3 294.2 294.1 294.0 293.9 293.9 294.0 294.0 294.0 293.9 294.2 
294.6 295.6 297.0 298.2 299.3 300.7 302.0 303.2 304.5 305.8 307.0 308.3 309.5 310.3 310.9 311.1 311.1 311.2 311.3 311.3 311.5 311.6 311.8 311.8 311.8 311.9 311.6 310.3 308.1 304.5 301.1 297.2 292.9 289.4 286.7 284.9 283.9 284.0 284.6 285.7 287.2 289.0 291.6 294.3 297.0 299.9 303.1 306.3 308.9 311.5 313.6 315.6 316.9 317.6 318.0 317.8 317.5 317.0 316.4 315.7 315.0 314.2 313.1 312.0 311.1 310.2 309.2 308.3 307.5 306.9 306.3 305.8 305.6 305.6 305.7 306.3 307.1 308.3 309.7 311.5 313.3 315.0 316.9 318.4 319.6 320.6 321.2 321.6 321.6 321.2 320.6 319.9 319.1 318.0 316.7 315.5 314.3 313.0 311.6 310.3 309.3 308.3 307.5 306.8 306.3 306.1 307.3 307.7 308.4 309.3 310.1 311.1 312.2 313.7 314.6 315.5 316.5 317.1 317.7 318.2 318.5 318.5 318.3 317.9 317.2 316.6 315.7 314.9 313.7 313.2 312.7 312.3 312.2 312.1 312.1 311.7 310.1 308.6 306.3 301.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5 298.5", - "input_type": "phoneme", - "offset": 79.035 + "f0_timestep": "0.005" }, { + "offset": 81.793, "text": "AP 夏 天 不 懂 冬 日 的 严 寒 SP", "ph_seq": "AP x ia t ian b u d ong d ong r ir d e y En h an SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.3439999 0.3439999 0.1730001 0.1730001 0.517 0.517 0.1730001 0.1730001 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.156005 0.119995 0.083545 0.088455 0.099949 0.073051 0.127002 0.044998 0.083355 0.089645 0.299002 0.044998 0.147999 0.025002 0.476091 0.040909 0.107418 0.065582 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.156 0.12 0.0835 0.0885 0.0999 0.0731 0.127 0.045 0.0834 0.0896 0.299 0.045 0.148 0.025 0.4761 0.0409 0.1074 0.0656 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.344 0.173 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.3 285.1 274.1 270.4 268.4 266.6 265.4 264.4 263.2 262.3 261.5 260.9 260.3 258.7 257.4 256.2 253.6 251.6 249.5 248.5 250.5 254.3 258.2 262.2 266.2 270.2 274.4 278.6 282.8 287.1 291.5 296.0 300.5 305.1 309.8 309.8 309.3 309.3 309.6 310.1 310.2 310.3 310.6 310.7 311.2 311.5 311.5 311.3 310.7 310.0 309.7 309.3 308.9 308.5 307.6 306.3 304.6 302.8 301.3 300.7 299.8 299.9 307.2 314.7 322.4 330.3 338.3 346.6 355.1 352.3 349.8 349.3 349.2 349.6 350.1 350.2 350.4 350.4 350.4 350.2 350.0 349.9 349.4 348.9 348.2 346.6 344.1 340.8 337.0 332.8 328.8 325.2 323.2 321.4 322.0 322.7 323.4 324.1 324.8 325.5 326.2 326.9 327.6 328.3 327.4 323.9 320.6 318.7 316.7 315.6 314.6 313.5 312.7 311.6 310.6 309.9 309.4 309.6 310.0 310.5 311.3 312.0 312.4 313.1 313.9 314.0 314.7 314.6 312.7 326.7 341.4 356.7 372.6 389.3 406.7 424.9 444.0 463.8 478.3 471.4 469.7 468.3 465.3 465.1 464.4 463.9 463.7 463.9 464.0 464.0 463.4 462.4 461.4 460.2 458.9 458.1 457.6 457.7 458.2 458.6 459.5 460.4 461.4 462.1 463.2 464.2 464.7 465.1 465.8 466.3 466.8 467.4 467.9 468.6 468.9 469.3 469.7 469.7 469.9 469.9 469.6 469.0 468.2 467.0 465.6 464.2 462.3 459.3 456.7 
453.4 448.7 444.9 439.7 433.3 426.4 419.8 411.5 403.8 396.9 389.9 381.8 373.1 366.4 358.0 352.8 347.7 342.6 337.6 332.7 327.9 323.1 318.4 313.7 309.2 308.0 309.0 309.9 310.6 310.6 310.6 310.3 309.8 309.2 308.4 306.5 304.6 302.8 300.5 299.1 299.1 299.8 300.6 301.4 302.1 302.9 303.7 304.5 305.2 306.0 295.5 295.6 295.7 296.0 296.5 296.9 297.3 297.8 298.4 298.9 299.1 299.4 299.6 299.8 299.7 299.4 298.9 298.1 297.2 296.4 295.3 294.1 292.9 291.9 291.0 290.2 289.5 289.1 289.0 291.2 291.5 291.8 291.8 292.5 292.6 292.8 293.3 293.7 294.2 294.6 294.7 294.8 294.8 294.7 294.7 294.8 294.7 294.5 294.3 294.2 294.3 294.1 294.0 294.1 294.1 294.1 294.3 294.3 294.1 293.8 293.8 293.7 293.8 293.8 294.0 294.3 294.3 294.3 294.3 294.3 294.4 294.5 294.7 294.9 295.2 295.5 295.6 295.7 296.2 295.9 295.7 295.0 294.0 292.7 290.9 289.1 287.1 285.6 284.6 284.2 283.5 281.2 276.2 271.8 270.1 266.3 270.2 276.0 281.9 288.0 294.2 300.6 307.1 313.7 313.7 310.5 309.7 310.3 310.8 311.6 312.4 313.7 314.6 314.4 314.2 313.3 312.4 311.4 310.0 308.1 306.2 303.7 300.4 294.5 286.8 283.3 287.3 288.0 290.0 292.6 296.0 300.1 305.3 311.0 316.6 322.3 328.3 335.2 340.4 345.3 349.5 353.2 355.7 357.1 357.5 357.3 356.8 356.3 355.3 354.3 353.2 351.9 350.5 349.0 347.6 346.3 345.0 343.6 342.4 341.5 340.8 340.3 339.7 339.7 339.8 340.3 341.1 342.2 343.5 345.0 346.7 348.8 350.4 352.0 353.4 354.6 355.4 355.7 355.9 355.7 355.2 354.7 354.0 353.2 352.1 351.0 350.0 348.8 347.6 346.4 345.3 344.4 343.5 342.7 342.0 341.6 341.4 341.4 341.4 341.9 342.8 343.8 344.9 346.1 347.5 349.1 350.4 351.6 352.8 353.9 354.6 355.0 355.3 355.2 355.0 354.7 354.3 353.7 353.2 352.5 351.8 351.0 350.4 349.9 349.4 348.9 348.6 348.6 348.6 348.6 348.6 348.6 348.8 348.8 348.9 349.0 349.1 349.3 349.4 349.6 349.6 349.8 349.8 349.9 350.0 350.0 350.0 350.2 330.0 330.0 330.0 330.0 330.0 330.0 330.0 330.0 330.0 330.0", - "input_type": "phoneme", - "offset": 81.793 + "f0_timestep": "0.005" }, { + "offset": 84.552, "text": "AP 未 曾 想 过 人 生 的 辛 酸 SP", "ph_seq": "AP w ei c eng x iang g uo r en sh eng d e x in s uan SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 G4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.1719999 0.1719999 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.112002 0.059998 0.097285 0.074715 0.111576 0.061424 0.127002 0.044998 0.219237 0.125763 0.126994 0.045006 0.368006 0.149994 0.084575 0.087425 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.112 0.06 0.0973 0.0747 0.1116 0.0614 0.127 0.045 0.2192 0.1258 0.127 0.045 0.368 0.15 0.0846 0.0874 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 G4 G#4 G4 G4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.172 0.518 0.172 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.2 258.1 253.5 248.8 244.4 240.9 238.1 235.7 233.7 232.4 231.7 231.2 231.3 231.4 231.9 232.7 233.6 235.0 236.3 237.2 237.9 238.4 238.4 238.1 237.4 236.1 234.2 233.2 231.7 228.9 228.3 227.8 227.8 231.2 236.7 242.4 248.2 254.2 260.3 266.6 273.0 279.5 286.2 293.1 300.2 307.4 308.9 309.5 309.5 310.2 311.1 312.0 312.6 312.7 312.5 311.7 
310.5 308.9 306.5 302.8 300.0 298.0 294.2 299.0 303.0 306.8 310.7 314.5 318.5 322.5 326.5 330.6 334.7 338.9 343.2 347.5 351.8 356.2 360.7 359.2 357.3 354.6 352.7 351.4 350.2 349.5 349.2 349.2 349.0 349.1 349.2 349.2 348.5 347.8 346.8 345.4 343.0 339.9 336.0 330.4 324.3 319.2 314.8 310.3 305.9 302.3 297.4 289.7 295.1 300.7 306.3 312.1 317.9 317.7 313.4 312.0 311.5 310.7 310.3 310.3 310.0 309.6 309.3 309.2 309.3 309.3 309.5 309.7 309.2 308.6 307.8 306.8 305.5 304.0 302.9 302.8 303.4 304.8 307.4 309.2 312.6 316.4 319.3 322.8 328.6 334.0 338.2 341.9 345.8 348.8 350.9 352.4 352.9 353.0 352.9 352.4 352.1 351.9 351.5 351.3 350.9 350.4 349.8 349.1 348.5 348.1 347.7 347.1 347.0 346.7 346.4 346.4 346.6 346.9 347.6 348.0 348.6 349.3 349.8 350.6 351.8 352.9 353.7 354.3 354.8 354.8 354.3 353.5 351.9 350.0 347.7 345.0 338.7 333.6 330.8 328.6 329.9 333.5 337.2 341.0 344.7 348.5 352.4 356.3 360.2 364.2 368.2 372.3 376.4 380.5 384.7 389.0 393.3 397.6 402.0 406.5 406.4 392.2 392.2 392.2 392.2 392.6 393.4 393.7 394.0 394.0 393.7 393.4 392.9 392.3 391.8 391.2 390.3 388.8 387.9 387.9 388.5 388.4 388.0 387.0 385.6 383.5 381.7 380.8 379.0 379.7 387.0 390.6 376.7 386.6 396.8 407.2 418.0 416.7 418.2 419.9 420.7 421.5 422.0 422.1 422.0 421.7 421.0 419.6 418.1 416.3 414.2 412.4 411.0 409.6 407.9 407.0 406.3 405.7 405.7 406.1 406.6 407.4 408.3 409.4 410.6 412.0 413.1 414.1 414.9 416.0 416.6 416.9 417.2 417.4 417.6 417.6 417.3 417.1 416.8 416.3 415.5 415.0 414.6 414.1 413.4 412.7 412.1 411.2 410.9 410.3 409.9 409.6 409.2 409.2 409.4 409.9 410.7 411.7 412.9 413.8 414.8 415.6 416.7 417.3 417.5 417.3 416.4 414.9 412.8 409.6 405.7 401.9 397.1 390.1 384.1 384.1 384.1 384.1 384.1 384.1 384.0 384.0 384.0 384.0 384.0 383.9 383.9 383.9 383.9 383.9 383.8 383.8 383.8 383.8 383.8 383.8 383.7 383.7 386.5 389.1 390.1 391.5 391.8 391.7 391.3 390.8 390.2 389.6 388.6 387.5 385.5 383.8 381.9 379.1 374.5 369.8 366.4 363.0 362.3 364.5 367.8 371.1 374.4 377.7 381.1 384.5 387.9 391.4 394.9 398.4 402.0 405.6 409.2 412.9 413.3 409.2 406.7 404.8 403.6 403.2 402.9 402.6 401.8 400.7 399.5 398.1 396.7 395.0 393.1 391.7 390.6 390.2 390.2 390.3 390.9 391.5 392.2 393.2 394.3 395.2 396.0 396.7 397.4 398.0 398.3 398.5 398.7 398.8 398.5 398.2 397.6 397.0 396.3 395.4 394.7 394.0 392.9 391.6 390.7 390.0 389.3 388.8 388.6 388.7 388.9 389.1 389.4 390.0 390.6 391.3 391.9 392.9 393.7 394.3 395.2 396.0 396.7 397.3 397.9 398.2 398.5 398.6 398.4 398.3 398.2 397.8 397.2 396.5 396.0 395.4 394.6 393.7 392.9 392.1 391.5 391.3 391.3 391.3 391.1 391.1 391.1 391.3 391.3 391.3 391.3 391.1 390.9 390.6 390.2 389.7 389.4 388.8 387.4 385.6 383.3 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9", - "input_type": "phoneme", - "offset": 84.552 + "f0_timestep": "0.005" }, { + "offset": 87.31, "text": "AP 曾 经 有 过 多 少 的 遗 憾 SP", "ph_seq": "AP c eng j ing y ou g uo d uo sh ao d e y i h an SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.1720001 0.1720001 0.517 0.517 0.1730001 0.1730001 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.170997 0.105003 0.090526 0.082474 0.126994 0.045006 0.108653 0.063347 0.113002 0.059998 0.215758 0.129242 0.127002 0.044998 0.479982 0.037018 0.102054 0.070946 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.171 0.105 0.0905 0.0825 0.127 0.045 0.1087 0.0633 0.113 0.06 0.2158 0.1292 0.127 0.045 0.48 0.037 0.1021 0.0709 0.517 
0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.172 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 278.8 261.6 261.7 261.8 262.0 262.1 262.3 262.6 262.8 262.9 263.0 263.2 263.2 262.8 262.0 260.7 258.8 256.7 254.7 252.6 250.7 248.5 247.2 246.3 245.8 263.7 267.0 270.4 273.7 277.2 280.7 284.2 287.7 291.4 295.0 298.7 302.5 306.3 310.3 313.7 311.3 309.7 310.1 310.1 309.8 309.4 309.1 309.0 309.1 309.2 309.2 309.5 309.8 310.2 310.5 310.9 311.2 312.0 312.8 313.9 315.4 317.0 318.2 319.3 320.9 322.8 324.6 327.1 330.6 333.2 336.0 338.5 341.1 343.4 345.9 347.6 348.5 349.0 349.6 350.2 350.5 350.5 349.9 348.9 347.4 345.2 343.0 340.2 338.7 336.4 333.7 334.8 330.0 323.3 318.2 316.8 317.5 318.1 318.8 319.5 320.2 320.8 321.5 322.2 322.9 323.6 324.3 325.0 318.4 315.7 314.5 313.6 313.4 313.6 313.4 313.1 312.9 312.4 311.5 310.6 308.6 306.0 305.5 305.1 305.7 309.8 310.9 306.6 299.7 301.3 314.5 328.2 342.6 357.5 373.2 389.5 406.5 419.5 421.1 426.0 432.6 441.1 449.2 455.8 460.4 462.4 462.7 463.3 464.0 464.8 465.6 466.1 465.6 464.6 462.9 461.2 460.2 460.0 460.1 460.8 461.7 462.7 463.8 465.0 465.9 466.6 467.3 469.1 469.4 469.4 468.7 466.7 464.5 462.1 459.3 455.3 450.3 445.3 437.8 430.3 426.1 422.0 417.9 413.9 409.9 405.9 402.0 398.1 394.3 390.5 386.7 383.0 379.3 375.6 372.0 368.4 364.8 361.3 357.8 354.4 351.0 347.6 344.2 340.9 337.6 334.3 331.1 311.5 311.3 311.0 310.6 310.1 309.7 309.2 308.8 308.6 308.6 308.7 309.2 309.7 310.4 311.1 311.6 312.1 312.4 312.5 312.0 310.6 308.7 306.4 303.8 300.8 297.8 295.3 293.2 291.5 290.2 289.8 288.1 289.2 290.6 292.2 294.0 295.6 297.0 298.3 299.5 299.9 297.6 297.9 299.8 300.1 300.2 300.1 299.8 299.2 298.5 298.0 297.6 297.3 296.8 296.3 295.7 295.0 294.2 293.3 293.0 292.5 291.7 291.4 291.1 291.0 291.1 291.1 291.2 291.5 291.8 292.3 292.8 293.3 293.8 294.4 294.9 295.3 295.6 295.7 295.5 295.4 295.4 295.4 295.4 295.4 295.3 295.0 295.0 295.0 294.7 294.7 294.5 294.4 294.3 294.1 294.0 294.0 293.9 294.0 294.2 294.5 294.7 294.9 294.9 295.0 295.0 295.0 295.2 295.2 294.9 295.0 294.9 294.7 294.3 293.7 292.5 290.8 288.1 285.3 282.8 276.4 274.3 275.1 275.9 276.7 277.5 278.3 279.1 279.9 280.7 281.5 282.3 283.1 283.9 284.7 285.5 286.3 287.1 288.0 294.9 301.5 304.8 306.7 308.0 309.0 309.9 310.5 310.5 310.2 309.2 306.2 303.1 297.8 294.3 290.5 286.2 283.9 286.0 289.1 292.3 295.4 298.7 301.9 305.2 308.6 311.9 315.4 318.8 322.3 325.8 329.4 333.0 336.6 340.3 344.0 347.8 351.6 351.8 350.8 351.8 352.3 352.8 353.4 353.7 353.9 353.7 353.2 352.3 351.3 350.2 348.8 347.1 345.5 344.1 342.8 341.7 341.1 340.7 340.7 340.9 341.1 341.7 342.4 343.3 344.5 345.8 347.0 348.0 348.8 349.6 350.2 350.6 351.1 351.5 351.7 351.9 351.6 351.3 350.8 350.3 349.6 348.6 347.6 346.3 345.2 344.3 343.5 342.6 341.8 341.1 340.6 340.2 340.1 340.1 340.2 340.9 341.4 342.0 343.1 344.4 345.4 346.4 347.5 348.7 349.8 350.6 351.2 351.4 351.0 350.5 349.2 348.2 347.5 347.1 347.3 347.6 347.8 347.6 346.6 345.4 344.1 343.8 344.1 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6 343.6", - 
"input_type": "phoneme", - "offset": 87.31 + "f0_timestep": "0.005" }, { + "offset": 90.069, "text": "AP 最 后 全 都 微 笑 着 释 然 SP", "ph_seq": "AP z ui h ou q van d ou w ei x iao zh e sh ir r an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.1719999 0.1719999 0.5180001 0.5180001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.128791 0.147209 0.100299 0.071701 0.101535 0.071465 0.127002 0.044998 0.110835 0.061165 0.207366 0.137634 0.092571 0.080429 0.345423 0.171577 0.115245 0.056755 0.518 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.1288 0.1472 0.1003 0.0717 0.1015 0.0715 0.127 0.045 0.1108 0.0612 0.2074 0.1376 0.0926 0.0804 0.3454 0.1716 0.1152 0.0568 0.518 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.173 0.517 0.172 0.518 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 247.9 237.2 237.0 236.7 236.2 235.8 235.3 235.0 234.8 234.7 234.7 234.7 234.7 234.7 234.6 234.6 234.6 234.6 234.6 234.6 234.4 233.4 231.5 228.9 225.7 222.6 220.5 219.2 219.0 221.2 225.4 231.9 240.2 250.1 263.4 275.0 286.1 296.4 306.0 310.9 313.1 312.9 312.7 312.6 312.1 311.7 311.4 311.1 311.1 311.1 311.1 311.1 311.3 311.3 311.3 311.5 310.5 308.2 305.1 301.1 298.4 297.0 316.1 319.7 323.4 327.0 330.7 334.5 338.3 342.1 346.0 349.9 353.9 357.9 360.6 358.1 355.6 353.9 352.7 352.0 351.3 350.9 350.7 350.5 350.2 349.8 349.4 348.9 348.0 346.9 345.6 344.0 342.7 340.9 338.2 333.8 330.5 325.6 319.9 314.8 310.2 306.1 301.0 293.5 290.6 295.9 305.5 315.4 325.7 322.3 320.1 317.7 315.8 313.7 312.4 311.2 310.1 309.3 309.2 309.5 309.9 310.3 310.5 310.6 310.8 310.6 310.6 310.3 310.1 310.2 311.5 312.4 313.7 315.3 317.4 319.9 323.2 326.3 329.5 332.9 336.7 340.0 343.2 346.0 348.4 350.7 352.3 353.3 353.6 353.5 353.2 352.7 352.2 351.5 350.6 349.8 349.2 348.7 348.2 348.0 348.0 348.1 348.4 349.0 349.5 350.2 350.6 351.3 351.6 351.7 351.7 351.0 349.7 348.9 348.6 347.8 345.9 342.8 339.1 334.9 330.5 325.2 321.2 317.7 315.0 313.3 312.6 313.2 313.7 314.0 314.3 314.5 314.4 314.4 314.3 314.1 314.0 313.8 313.7 313.5 313.4 313.2 313.1 313.1 313.1 313.0 312.8 312.5 312.2 311.7 311.2 310.9 310.7 310.6 310.6 310.6 310.6 310.6 310.6 310.6 310.6 310.6 310.6 310.6 310.4 310.0 309.3 308.0 306.5 304.9 303.0 300.8 298.6 296.6 294.6 292.6 290.6 289.2 288.0 287.0 286.4 286.1 286.4 287.0 288.1 289.8 291.5 293.2 294.7 296.1 297.0 297.4 297.3 297.1 296.8 296.3 295.9 295.4 294.9 294.0 293.4 293.0 292.5 292.1 291.7 291.6 291.1 291.3 291.1 291.3 291.5 291.7 292.0 292.3 292.7 293.2 293.6 294.1 294.6 294.9 295.2 295.4 295.4 295.5 295.4 295.4 295.2 294.9 294.9 294.7 294.6 294.4 294.0 293.7 293.3 293.0 292.8 292.6 292.6 292.8 293.2 293.5 294.1 294.5 295.1 295.7 296.3 296.6 297.0 297.2 297.2 297.2 296.8 296.1 294.2 291.7 290.3 285.5 284.6 280.6 280.4 281.1 281.8 282.6 283.3 284.1 284.9 285.6 286.4 287.1 287.9 288.7 289.4 290.2 291.0 291.7 292.5 293.3 294.1 294.9 295.6 296.4 297.2 298.0 298.8 299.6 300.4 301.2 302.0 302.8 308.9 310.2 310.2 310.8 311.1 311.0 
310.8 310.6 310.0 309.3 308.6 307.4 305.8 304.6 303.3 301.8 300.5 299.3 298.3 298.7 299.5 300.8 302.3 303.8 305.0 306.3 308.0 309.8 311.3 313.0 313.4 314.1 314.7 315.2 315.5 315.3 315.3 315.2 315.2 315.5 315.4 314.7 313.9 313.0 312.5 312.2 311.7 311.1 310.1 309.0 308.2 307.1 306.4 305.6 304.8 304.1 303.4 303.1 303.1 303.2 303.7 304.2 304.9 306.2 307.4 308.6 309.5 310.8 312.0 312.8 313.8 314.5 314.9 315.2 315.6 316.1 316.1 315.7 315.1 314.4 313.5 312.7 311.3 310.4 309.5 308.2 307.2 306.3 305.5 304.7 304.0 303.7 303.5 303.2 303.3 303.5 303.8 304.5 305.1 306.0 307.0 308.1 309.0 309.8 310.6 311.3 311.9 312.5 312.8 313.0 313.1 312.9 312.9 312.6 312.1 311.5 311.2 310.6 310.2 309.8 309.1 308.2 307.3 306.6 306.1 305.7 305.3 304.8 304.4 303.2 300.7 298.3 295.8 291.3 286.5 285.0 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3 285.3", - "input_type": "phoneme", - "offset": 90.069 + "f0_timestep": "0.005" }, { + "offset": 92.828, "text": "SP 是 否 SP 爱 就 是 要 被 他 人 欺 诈 未 来 SP", "ph_seq": "SP sh ir f ou SP ai j iu sh ir y ao b ei t a r en q i zh a w ei l ai SP", - "note_seq": "rest F4 F4 D#4 D#4 rest A#3 G4 G4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.275 0.345 0.345 0.561 0.561 0.1289999 0.345 0.345 0.345 0.3440001 0.3440001 0.3449998 0.3449998 0.1730001 0.1730001 0.517 0.517 0.345 0.345 0.6890001 0.6890001 0.3449998 0.3449998 0.1730003 0.1730003 0.3439999 0.3439999 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.08 0.195 0.225005 0.119995 0.561 0.084002 0.271476 0.118523 0.224997 0.120003 0.299002 0.044998 0.300002 0.044998 0.104336 0.068665 0.472002 0.044998 0.17208 0.17292 0.565305 0.123695 0.300002 0.044998 0.103535 0.069466 0.344 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.08 0.195 0.225 0.12 0.561 0.084 0.2715 0.1185 0.225 0.12 0.299 0.045 0.3 0.045 0.1043 0.0687 0.472 0.045 0.1721 0.1729 0.5653 0.1237 0.3 0.045 0.1035 0.0695 0.344 0.068", + "ph_num": "2 2 1 1 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 D#4 rest A#3 G4 G4 G#4 G4 F4 D#4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.275 0.345 0.561 0.129 0.345 0.345 0.344 0.345 0.173 0.517 0.345 0.689 0.345 0.173 0.344 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 369.8 353.5 353.3 353.1 352.5 352.1 351.7 351.2 350.7 350.4 350.2 350.2 350.0 350.0 350.0 350.0 350.0 349.8 349.8 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.7 349.8 350.2 350.6 350.9 351.3 351.5 351.7 351.7 351.7 350.4 347.9 344.9 341.3 337.0 332.2 327.3 323.0 319.1 315.8 312.7 310.9 310.2 311.2 311.8 312.3 312.8 313.4 313.9 314.4 315.0 315.5 316.0 316.6 317.1 317.7 318.2 318.7 319.3 319.8 320.4 320.9 321.5 322.0 322.6 323.1 323.7 324.2 324.5 319.7 316.4 316.3 316.4 316.4 317.0 317.2 316.8 316.4 315.9 314.7 314.0 312.8 311.6 310.2 308.9 308.0 307.0 306.1 305.5 304.6 303.6 302.9 302.4 302.3 302.4 302.6 303.3 304.7 305.8 306.9 308.0 309.2 310.3 311.3 312.1 312.9 313.5 313.9 314.0 314.0 313.8 313.5 312.7 311.8 310.9 309.8 308.8 307.8 306.9 306.0 305.2 304.5 304.0 303.4 303.3 303.1 303.3 303.8 304.4 305.3 306.6 307.6 308.4 309.4 310.5 311.5 312.3 313.0 313.4 313.6 313.4 313.1 312.6 312.0 311.7 309.5 309.2 308.4 307.4 306.0 304.4 302.3 
299.8 297.4 294.5 291.7 288.1 284.6 281.3 277.9 274.3 270.1 266.7 263.2 259.8 256.2 252.7 249.7 246.8 244.1 241.2 238.9 236.8 235.0 233.2 231.8 230.6 229.9 229.3 229.0 229.0 229.3 229.7 230.4 231.2 232.1 232.9 233.6 234.1 234.5 263.7 261.4 259.1 256.9 254.6 252.4 250.2 248.0 245.8 243.6 241.5 239.4 237.3 235.2 233.1 231.1 229.1 229.7 230.4 231.4 232.0 232.3 232.5 232.6 232.6 232.5 232.4 232.2 232.0 231.7 231.7 231.7 232.1 232.3 232.0 232.0 231.7 231.7 231.7 231.8 231.9 232.1 232.2 232.3 232.4 232.5 232.9 233.1 233.5 233.8 233.8 233.7 233.5 233.1 232.5 232.0 231.5 230.8 229.5 227.7 225.7 223.6 220.0 216.6 220.4 226.3 232.4 238.6 245.0 251.6 258.3 265.3 272.4 279.7 287.2 268.5 269.9 273.6 279.2 286.5 297.2 307.3 317.5 327.7 338.5 345.5 350.5 352.9 353.5 355.1 358.1 362.9 367.8 373.2 378.5 384.0 388.7 392.0 393.8 394.4 394.3 394.0 393.7 393.3 392.9 392.7 392.4 390.9 390.6 390.6 390.6 390.9 391.5 392.1 392.9 393.5 394.1 394.3 393.8 393.9 394.3 394.4 394.8 395.2 395.4 395.4 395.6 394.7 392.0 388.8 384.8 380.2 374.5 370.1 365.8 362.4 359.7 357.8 377.6 378.8 380.0 381.2 382.4 383.6 384.8 386.0 387.2 388.5 389.7 390.9 392.2 393.4 394.6 395.9 397.2 397.6 395.9 395.5 395.9 396.3 396.3 396.3 396.0 395.3 394.4 393.7 392.7 391.8 391.0 390.2 389.6 389.1 388.8 388.8 388.7 388.8 388.8 389.0 389.5 389.7 390.0 390.4 390.6 390.8 391.0 391.2 391.3 391.5 391.8 392.1 392.2 392.2 392.3 392.4 392.6 392.7 392.7 392.8 392.8 392.7 392.7 392.6 392.4 392.1 391.7 391.5 391.1 390.9 390.9 391.1 391.5 391.9 392.4 393.1 394.2 395.4 396.9 398.6 400.5 403.2 406.1 409.5 412.4 414.4 415.8 417.4 419.2 420.8 421.6 421.6 421.3 420.7 420.2 419.6 418.8 418.0 417.1 415.7 414.9 414.1 413.5 412.4 412.0 411.8 411.5 411.3 411.2 411.4 411.7 412.3 412.8 413.3 414.0 414.4 414.8 414.8 414.8 414.8 414.8 414.3 414.1 413.8 413.4 413.5 413.6 413.8 414.3 414.7 414.8 414.5 413.9 412.9 411.7 410.5 408.9 408.1 407.3 405.9 404.3 401.5 397.6 392.2 384.9 380.5 379.2 377.4 382.9 398.8 401.0 401.2 400.4 398.7 396.9 395.0 392.8 391.1 389.8 389.3 389.3 389.5 390.0 390.8 391.5 392.2 393.0 393.4 393.6 393.4 391.9 389.3 385.7 381.3 375.7 371.1 366.8 363.4 360.6 359.6 339.7 339.1 338.4 337.8 337.2 336.5 335.9 335.3 334.7 336.2 340.8 344.6 345.4 345.9 346.6 348.4 350.0 352.1 353.8 354.9 354.9 354.1 352.9 351.7 350.6 349.6 348.6 347.7 347.1 346.9 347.0 347.1 347.3 347.6 347.6 347.8 347.8 348.2 348.4 348.7 348.9 349.1 349.3 349.6 349.9 350.1 350.2 350.2 350.2 350.4 350.4 350.3 350.0 349.9 349.7 349.5 349.4 349.3 349.2 349.2 349.3 349.6 349.9 350.0 350.0 350.0 350.0 350.2 350.2 350.4 350.6 350.8 351.0 351.1 351.4 351.5 351.5 351.4 351.3 351.2 351.0 351.0 350.6 350.2 349.7 349.0 348.5 347.8 346.9 345.7 344.0 341.8 338.8 336.0 332.9 329.4 325.7 320.6 318.6 316.7 314.4 313.2 312.4 312.3 312.5 313.0 313.6 314.4 315.5 316.3 316.9 317.5 317.8 317.5 317.3 316.7 316.4 315.8 315.3 314.7 314.3 313.9 313.4 312.8 312.3 311.5 310.6 310.0 309.7 309.7 309.9 310.1 309.9 310.1 310.6 311.1 312.0 312.9 313.6 314.1 314.6 314.7 314.3 313.7 312.9 311.2 309.5 307.7 303.8 300.9 299.2 297.6 296.6 300.2 302.4 303.0 303.6 304.1 304.7 305.3 305.9 306.4 307.0 307.6 308.2 308.7 309.3 309.9 310.5 311.1 311.7 312.2 312.8 313.4 314.0 314.6 315.2 315.8 316.4 317.0 317.6 318.2 319.6 318.7 317.6 318.3 319.1 319.6 320.1 320.2 320.0 319.7 319.0 318.0 316.9 315.7 314.0 312.4 310.9 309.9 308.6 307.4 306.8 306.3 306.0 306.2 306.4 306.8 307.5 307.9 308.6 309.5 310.8 311.8 312.9 313.8 314.6 315.2 315.7 316.2 316.4 316.4 316.3 314.8 314.7 314.7 314.7 314.6 314.5 314.4 314.4 314.4 314.4 314.4 315.1 316.2 
318.7 321.7 325.0 328.8 332.7 336.8 340.4 343.3 345.3 346.7 347.0 346.4 345.3 343.9 341.5 338.9 336.1 333.2 330.2 327.1 324.0 321.4 318.9 316.8 315.4 314.4 314.0 313.8 313.8 313.8 313.9 314.0 314.1 314.2 314.3 314.4 314.5 314.6 314.7 314.7 314.7 314.0 314.0 314.0 315.0 313.1 313.0 312.5 311.8 311.0 309.8 308.7 307.6 305.4 303.7 300.2 298.0 293.9 287.8 284.7 284.7 291.6 295.2 295.4 295.6 295.8 296.1 296.3 296.5 296.7 297.0 297.2 297.4 297.6 297.9 298.1 298.3 298.6 298.8 299.0 299.2 299.5 299.7 299.4 298.7 299.1 298.9 298.3 297.6 296.7 295.9 295.4 295.1 294.7 294.0 293.3 292.8 292.4 291.9 291.7 291.8 291.3 291.3 291.5 291.6 292.0 292.1 292.1 292.1 292.0 292.1 292.3 292.4 292.5 292.6 292.5 292.8 292.8 293.0 293.1 293.2 293.3 293.3 293.5 293.6 293.8 293.8 293.8 293.8 293.8 293.7 293.8 293.8 293.7 293.5 293.2 292.9 292.8 292.8 293.5 294.6 295.8 297.5 298.3 298.6 299.1 299.8 300.6 301.6 302.8 304.1 305.3 306.6 308.0 309.5 310.6 311.7 312.6 313.3 314.0 314.3 314.5 314.4 314.1 313.7 313.2 312.6 311.7 310.9 310.1 309.2 308.2 307.3 306.6 306.0 305.6 305.1 305.1 305.1 305.4 306.0 306.8 307.9 309.2 310.6 312.0 313.5 314.9 316.0 316.9 317.5 317.8 318.0 317.8 317.4 317.1 316.5 315.9 315.1 314.3 313.5 312.6 311.8 310.8 310.0 309.4 308.8 308.2 307.8 307.6 307.6 307.7 308.0 308.6 309.4 310.6 311.8 313.2 314.6 316.2 317.7 318.9 320.1 321.0 321.7 322.0 322.2 321.9 321.2 320.2 319.1 317.9 316.8 315.7 315.0 314.6 314.5 314.4 314.4 314.4 314.4 314.2 314.2 314.1 314.0 314.0 313.8 313.8 313.7 313.5 313.5 313.4 313.3 313.3 313.1 313.1 313.1 313.1 313.1 313.3 313.3 313.3 313.3 313.3 313.3 313.3 313.3 313.3 313.3", - "input_type": "phoneme", - "offset": 92.828 + "f0_timestep": "0.005" }, { + "offset": 98.172, "text": "AP 却 不 会 悔 改 SP", "ph_seq": "AP q ve b u h ui h ui g ai SP", - "note_seq": "rest D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.173 0.173 0.344 0.344 1.724 1.724 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.140998 0.135002 0.113743 0.059258 0.10345 0.06855 0.085529 0.087471 0.264563 0.079437 1.724 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.141 0.135 0.1137 0.0593 0.1035 0.0685 0.0855 0.0875 0.2646 0.0794 1.724 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 D#4 D4 D#4 rest", + "note_dur": "0.276 0.173 0.172 0.173 0.344 1.724 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 337.7 329.4 322.8 318.4 313.6 311.0 309.6 308.8 308.9 309.4 310.1 311.2 312.0 312.9 313.7 314.2 313.8 312.9 311.3 309.1 306.4 303.3 299.7 294.9 289.8 287.5 291.5 299.6 308.0 316.7 325.5 334.6 344.0 353.6 363.5 356.9 351.3 350.1 349.6 349.4 349.5 349.8 350.1 350.3 350.4 350.1 349.6 348.4 346.8 345.0 341.9 338.0 333.6 329.5 325.7 321.4 319.1 319.6 320.9 322.2 323.5 324.8 326.1 327.4 328.7 330.0 331.4 332.7 334.0 335.4 328.8 317.9 313.8 312.3 310.7 309.8 309.4 308.8 307.5 306.5 304.9 301.7 298.3 295.2 290.4 284.4 278.0 271.9 267.4 266.8 268.1 270.5 272.9 275.4 277.9 280.4 282.9 285.4 288.0 290.6 293.2 295.8 298.5 301.2 297.9 293.2 292.6 292.5 293.4 294.7 295.4 295.8 295.4 294.8 294.0 293.4 292.9 292.2 291.9 291.6 291.3 291.4 291.7 292.0 292.2 292.3 292.3 292.3 292.2 292.3 292.5 292.3 292.4 292.4 292.3 292.3 292.4 292.7 
293.2 293.6 294.0 294.1 294.0 293.8 293.3 293.0 291.9 291.1 289.3 287.6 285.9 283.8 281.5 279.3 276.5 271.3 269.8 273.1 276.4 279.7 283.1 286.5 290.0 293.5 297.1 300.7 304.4 308.0 311.8 315.6 319.4 323.3 327.2 331.2 319.0 315.7 315.2 313.9 313.5 313.5 313.5 313.7 314.2 314.4 314.4 314.2 313.8 313.4 313.0 312.4 312.2 311.7 311.5 311.1 310.8 310.6 310.3 310.0 309.7 309.4 309.2 309.1 308.9 308.8 308.8 309.0 309.2 309.6 309.9 310.0 310.3 310.7 311.0 311.5 312.0 312.4 312.6 312.6 312.7 312.7 312.8 312.9 313.1 312.9 312.6 312.7 312.6 312.6 312.4 312.3 311.8 311.5 311.1 310.5 309.9 309.7 309.5 309.3 309.3 309.2 309.2 309.2 309.2 309.3 309.3 309.5 309.6 309.8 310.1 310.4 310.7 311.0 311.5 311.9 312.0 312.2 312.2 312.2 312.3 312.4 312.5 312.7 312.7 312.7 312.7 312.6 312.4 312.0 311.8 311.5 311.2 310.9 310.8 310.6 310.6 310.7 310.9 310.9 310.9 311.0 311.2 311.5 311.7 311.7 311.6 311.5 311.5 311.5 311.5 311.1 310.8 310.5 310.1 310.1 310.1 310.1 310.3 310.5 310.6 310.9 311.1 311.2 311.5 311.5 311.3 311.5 311.5 311.6 311.7 311.6 311.4 311.4 311.5 311.4 311.3 311.2 311.1 311.1 311.4 311.7 311.7 311.8 311.8 312.1 312.2 312.3 312.5 312.6 312.6 312.7 312.5 312.3 312.0 311.8 311.7 311.7 311.4 311.1 310.8 310.4 310.0 309.6 309.4 309.4 309.6 309.7 309.8 309.9 310.1 310.3 310.4 310.9 311.3 311.6 312.0 312.3 312.5 312.8 313.0 313.1 313.1 313.0 313.0 313.1 313.1 313.1 312.9 312.7 312.7 312.4 312.2 311.9 311.6 311.4 311.2 310.9 310.8 310.7 310.5 310.4 310.2 310.0 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 310.0 310.1 310.2 310.3 310.7 311.3 311.8 312.2 312.7 313.3 313.6 314.1 314.5 314.7 314.6 314.4 314.0 315.0 314.7 314.2 313.5 312.7 311.8 310.9 310.1 309.2 308.2 307.3 306.6 306.1 305.8 305.6 305.6 305.7 306.1 306.6 307.2 307.9 308.9 309.9 310.9 311.9 313.0 314.0 315.1 315.9 316.6 317.4 317.9 318.3 318.4 318.5 318.3 317.8 317.1 316.2 315.2 314.0 312.6 311.1 309.8 308.4 307.1 305.7 304.7 303.8 303.1 302.5 302.3 302.3 302.6 303.1 304.2 305.4 307.0 308.8 310.6 312.4 314.2 316.2 317.6 318.7 319.6 320.2 320.6 320.3 319.8 319.0 317.8 316.6 315.1 313.3 311.7 310.2 308.8 307.4 306.3 305.6 305.2 305.1 305.3 305.7 306.5 307.6 308.8 310.3 311.8 313.1 314.5 315.8 316.7 317.4 317.8 318.0 317.8 317.5 317.2 316.6 315.9 315.2 314.4 313.6 312.6 312.0 311.3 310.6 310.1 309.7 309.5 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.3", - "input_type": "phoneme", - "offset": 98.172 + "f0_timestep": "0.005" }, { + "offset": 120.779, "text": "SP AP 没 SP", "ph_seq": "SP AP m ei SP", - "note_seq": "rest rest D#4 D#4 rest", - "note_dur_seq": "0.2 0.4 0.6899999 0.6899999 0.4", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.154994 0.4 0.045006 0.69 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.155 0.4 0.045 0.69 0.4", + "ph_num": "1 2 1 1", + "note_seq": "rest rest D#4 rest", + "note_dur": "0.2 0.4 0.69 0.4", + "note_slur": "0 0 0 0", "f0_seq": "291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 284.9 289.7 290.5 292.5 295.5 299.2 302.6 305.4 307.6 308.7 
309.0 309.1 309.3 309.6 309.9 310.2 310.4 310.6 310.4 310.4 310.4 310.4 310.3 310.2 310.2 310.1 309.9 309.9 309.8 309.7 309.7 309.5 309.5 309.5 309.5 309.5 309.1 309.2 309.3 309.6 309.8 310.1 310.2 310.2 310.6 310.9 311.1 311.1 311.1 311.1 311.1 311.3 311.5 311.5 311.5 311.7 311.7 311.7 311.9 312.0 312.3 312.6 312.7 312.9 312.8 312.6 312.4 312.2 312.2 311.9 311.6 311.4 310.8 310.6 310.4 310.0 309.7 309.4 309.0 309.0 309.0 309.0 309.1 309.3 309.5 309.8 310.1 310.3 310.7 311.1 311.8 312.2 312.7 313.1 313.5 313.6 313.7 313.7 313.4 313.2 312.8 312.4 312.1 311.6 311.0 310.4 309.8 309.2 308.7 308.1 307.5 307.1 306.8 306.7 306.8 306.8 306.9 307.2 307.6 308.0 308.5 309.1 309.5 310.2 311.1 311.6 312.5 313.0 313.4 313.7 314.0 314.3 314.3 314.1 314.0 314.0 313.8 313.8 313.5 313.5 312.6 312.1 311.3 311.0 310.8 310.4 309.8 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5 309.5", - "input_type": "phoneme", - "offset": 120.779 + "f0_timestep": "0.005" }, { + "offset": 122.138, "text": "AP 有 任 何 的 不 凡 也 没 有 夺 目 光 彩 AP 我 却 明 白 SP", "ph_seq": "AP y ou r en h e d e b u f an y E m ei y ou d uo m u g uang c ai AP w o q ve m ing b ai SP", - "note_seq": "rest D4 D4 D#4 D#4 D4 D4 C4 C4 A#3 A#3 A#3 A#3 A#3 A#3 C4 C4 A#3 A#3 C4 C4 A#3 A#3 G#3 G#3 G3 G3 rest A#3 A#3 C4 C4 D4 D4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.345 0.345 0.173 0.173 0.1720001 0.1720001 0.345 0.345 0.1719999 0.1719999 0.345 0.345 0.1719999 0.1719999 0.9490001 0.9490001 0.2580001 0.1729999 0.1729999 0.1719999 0.1719999 0.1729999 0.1729999 0.6890001 0.6890001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127002 0.044998 0.095623 0.077377 0.12987 0.04213 0.100841 0.071159 0.224997 0.120003 0.300002 0.044998 0.113002 0.059998 0.127002 0.044998 0.300002 0.044998 0.109698 0.062302 0.281783 0.063217 0.086337 0.085663 0.949 0.212987 0.045013 0.082256 0.090744 0.112002 0.059998 0.097408 0.075592 0.689 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.127 0.045 0.0956 0.0774 0.1299 0.0421 0.1008 0.0712 0.225 0.12 0.3 0.045 0.113 0.06 0.127 0.045 0.3 0.045 0.1097 0.0623 0.2818 0.0632 0.0863 0.0857 0.949 0.213 0.045 0.0823 0.0907 0.112 0.06 0.0974 0.0756 0.689 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 1 1", + "note_seq": "rest D4 D#4 D4 C4 A#3 A#3 A#3 C4 A#3 C4 A#3 G#3 G3 rest A#3 C4 D4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.345 0.173 0.172 0.345 0.172 0.345 0.172 0.949 0.258 0.173 0.172 0.173 0.689 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 314.0 312.4 304.2 298.8 295.4 294.2 293.5 293.2 293.0 292.8 292.6 292.5 292.5 292.5 292.7 293.0 293.6 294.2 295.0 295.4 296.2 296.6 296.7 296.5 295.9 
295.0 293.7 292.3 291.4 291.1 291.2 291.6 292.6 294.0 295.0 295.9 297.4 298.7 300.8 302.9 304.8 307.1 308.9 310.2 311.2 311.7 312.0 312.2 312.0 312.0 312.0 312.0 312.0 311.7 311.1 310.5 309.4 308.2 307.0 305.7 303.7 300.5 297.5 294.1 290.5 287.1 284.6 285.5 287.8 290.2 292.6 295.1 297.5 300.0 302.5 305.0 307.5 310.1 312.6 312.7 309.1 306.3 303.9 302.3 301.1 299.8 298.2 297.1 296.3 295.6 295.1 294.6 294.1 293.6 293.0 292.3 291.5 289.6 287.6 285.0 282.5 279.0 275.4 269.7 262.3 254.9 252.9 252.0 245.8 239.9 245.6 255.5 265.8 272.8 265.4 265.2 264.5 263.9 263.3 262.5 261.4 260.3 259.2 257.8 255.7 253.3 250.6 247.4 244.3 239.8 234.1 228.5 226.4 227.3 231.0 233.9 236.4 238.9 241.5 244.1 246.7 249.4 252.1 254.8 257.6 260.3 263.1 255.1 246.7 243.2 240.9 238.8 237.0 235.7 234.6 233.7 232.7 232.0 231.6 231.1 230.7 230.9 231.3 231.6 232.1 232.6 232.8 233.0 233.3 233.6 233.9 234.1 234.4 234.7 235.0 235.3 235.6 235.9 236.2 236.3 236.4 236.4 236.2 235.9 235.2 234.0 232.1 230.1 228.2 226.3 224.8 224.3 226.3 227.6 228.9 230.3 231.6 232.9 234.3 235.7 237.0 238.4 239.8 241.2 242.6 244.0 245.4 246.8 248.2 249.7 251.1 252.6 254.0 255.5 257.0 258.5 260.0 251.7 245.2 244.0 242.9 241.6 240.4 239.0 237.8 236.6 235.6 234.7 234.3 233.2 232.3 231.7 231.4 231.3 231.2 231.1 231.2 231.3 231.4 231.7 232.0 231.9 232.3 232.4 232.4 232.4 232.4 232.5 232.5 232.6 232.7 232.9 233.1 233.2 233.2 233.0 232.7 232.5 232.5 232.7 232.5 232.7 232.5 232.3 232.2 232.0 231.9 232.1 232.1 232.1 232.3 232.4 232.4 232.5 232.5 232.7 232.5 232.5 232.2 231.8 231.7 231.7 231.8 232.2 232.8 233.3 233.5 234.0 234.3 234.4 234.8 234.4 234.2 233.6 232.9 232.7 232.7 232.7 232.7 232.5 232.5 232.3 232.1 231.8 231.5 231.4 231.5 231.7 232.0 232.0 232.4 232.9 234.1 235.8 237.9 240.5 243.0 245.1 247.7 251.7 257.3 259.7 262.5 262.5 262.5 262.9 263.2 263.2 263.1 263.0 262.5 262.1 261.7 261.0 260.2 259.4 258.3 257.7 256.6 255.7 254.7 253.0 251.3 248.9 247.0 245.2 243.5 242.5 242.0 241.6 241.1 240.5 239.8 238.7 237.6 237.1 236.3 235.8 235.2 234.7 234.2 233.8 233.2 232.8 232.7 232.7 232.8 232.8 232.7 232.5 232.4 232.1 231.9 231.9 231.8 231.7 231.7 231.5 231.3 231.2 231.2 231.1 231.2 231.2 231.2 231.1 231.1 231.3 231.3 231.6 232.0 232.4 232.9 233.2 233.3 233.2 233.0 232.5 232.0 231.5 230.8 229.6 228.4 227.0 224.6 222.6 220.7 218.8 217.7 220.4 224.0 227.7 231.4 235.2 239.0 242.9 246.9 250.9 255.0 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.9 263.7 263.5 263.3 262.9 262.6 262.3 262.1 262.0 261.8 261.5 260.9 260.1 258.9 257.6 256.0 254.3 252.2 250.3 248.4 246.1 243.9 242.0 240.3 238.6 237.0 235.4 234.3 233.4 232.8 232.4 232.1 232.1 232.2 232.2 232.4 232.6 232.9 233.1 233.3 233.6 233.9 234.2 234.4 234.6 234.9 235.1 235.3 235.4 235.4 235.5 235.4 235.4 235.3 235.2 235.1 235.0 235.0 234.9 234.8 234.8 234.8 234.7 234.4 233.7 232.9 231.6 230.2 228.6 227.0 225.2 223.3 221.4 219.7 218.1 216.4 215.2 214.1 213.4 212.8 212.6 212.5 212.4 212.2 212.0 211.7 211.4 211.1 210.8 210.4 210.2 209.9 209.7 209.6 209.5 209.4 209.4 209.3 209.3 209.3 209.2 209.2 209.1 209.1 209.1 209.0 208.9 208.9 208.9 208.9 208.9 208.4 207.5 205.7 203.8 201.4 199.0 196.6 195.0 193.5 193.2 201.3 202.3 203.2 204.2 205.2 206.1 207.1 208.1 209.1 210.0 211.0 208.3 207.3 205.4 204.0 201.7 200.3 199.4 198.5 197.8 197.2 196.5 196.3 195.8 195.5 195.5 195.1 195.2 195.1 195.1 195.2 195.1 195.0 194.8 194.5 194.3 194.0 193.8 193.7 193.7 193.6 193.9 194.0 194.4 194.8 195.2 195.5 195.9 196.5 196.9 197.2 197.5 197.6 197.4 197.3 197.2 197.0 196.8 196.6 196.2 195.9 195.5 195.2 194.9 194.4 
194.0 193.7 193.4 193.1 192.8 192.6 192.5 192.4 192.4 192.4 192.5 192.7 193.0 193.3 193.7 194.2 194.6 195.1 195.7 196.2 196.7 197.2 197.7 198.0 198.5 198.8 199.0 199.1 199.3 199.3 199.1 198.7 198.4 197.8 197.2 196.6 195.8 195.2 194.5 193.8 193.2 192.7 192.3 192.0 191.9 191.9 191.9 192.2 192.6 193.0 193.6 194.3 195.1 195.8 196.6 197.5 198.2 198.9 199.5 200.1 200.6 201.0 201.2 201.2 201.0 200.8 200.2 199.6 198.8 198.1 197.2 196.2 195.4 194.7 194.0 193.4 193.0 192.8 192.7 192.7 193.0 193.3 193.7 194.2 194.8 195.4 196.0 196.5 197.0 197.5 197.9 198.2 198.3 198.4 198.3 198.2 198.0 197.7 197.4 197.1 196.8 196.4 196.1 195.7 195.4 195.2 195.0 194.9 194.9 194.9 194.9 194.9 195.1 195.2 195.3 195.5 195.7 195.8 196.0 196.1 196.2 196.3 196.3 196.4 196.3 196.0 195.7 195.1 194.5 193.9 193.2 192.4 191.7 191.1 190.3 189.7 189.2 188.7 188.4 188.2 188.1 206.5 207.7 208.9 210.1 211.4 212.6 213.9 215.1 216.4 217.7 219.0 220.2 221.5 222.8 224.2 225.5 226.8 228.1 229.5 230.8 232.2 233.6 234.9 236.3 237.7 239.1 240.5 241.9 243.3 244.8 246.2 234.8 234.7 234.6 234.6 234.5 234.4 234.2 234.1 233.9 233.7 233.6 233.4 233.3 233.1 233.0 232.8 232.7 232.7 232.6 232.5 232.5 232.6 232.7 232.9 233.2 233.4 233.6 233.3 232.8 231.6 230.3 228.1 226.1 224.1 222.3 220.6 219.3 218.3 239.5 242.0 244.4 247.0 249.5 252.1 254.6 257.3 259.9 262.6 265.3 268.4 267.6 266.4 264.8 263.3 262.5 262.1 261.8 261.3 261.0 260.7 260.2 260.1 259.9 259.7 258.8 258.0 258.3 258.8 260.3 261.4 261.9 262.6 264.4 266.2 268.5 270.8 272.9 275.6 278.1 280.7 282.4 284.9 287.6 289.3 290.7 291.6 292.2 293.2 293.9 294.4 294.9 295.3 295.7 296.1 296.2 296.2 296.3 296.3 295.8 294.9 293.8 292.4 291.0 289.3 287.2 284.5 282.1 279.8 276.6 274.8 273.3 271.9 275.0 275.7 278.3 282.4 287.4 292.8 298.5 304.2 309.9 313.8 316.3 317.5 317.3 317.2 316.9 316.6 316.2 315.7 315.3 314.7 314.1 313.5 312.9 312.3 311.7 311.2 310.8 310.3 309.9 309.6 309.4 309.3 309.3 309.4 309.5 309.7 309.9 310.4 310.9 311.3 311.8 312.2 312.7 313.0 313.5 313.7 314.0 314.2 314.3 314.3 314.2 314.1 314.0 313.7 313.5 313.2 312.9 312.6 312.3 312.0 311.6 311.5 311.2 311.0 310.9 310.9 310.9 310.9 310.9 311.1 311.3 311.4 311.6 311.7 312.0 312.2 312.4 312.5 312.7 312.7 312.7 312.9 312.7 312.7 312.7 312.7 312.7 312.7 312.7 312.6 312.6 312.6 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.6 312.7 312.9 313.1 313.1 313.2 313.4 313.5 313.7 313.7 313.7 313.8 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.5 313.5 313.5 313.5 313.5 313.5 313.5 313.5 313.5 313.2 312.5 311.6 310.3 308.7 306.6 304.7 302.8 301.1 299.2 297.6 296.3 295.4 294.9 295.0 295.0 295.0 295.0 295.0 295.0 295.0 295.0 295.0 295.0 295.0", - "input_type": "phoneme", - "offset": 122.138 + "f0_timestep": "0.005" }, { + "offset": 127.655, "text": "AP 你 的 意 义 所 在 别 人 都 无 法 替 代 AP 就 算 有 时 SP", "ph_seq": "AP n i d e y i y i s uo z ai b ie r en d ou w u f a t i d ai AP j iu s uan y ou sh ir SP", - "note_seq": "rest D4 D4 D#4 D#4 G4 G4 D#4 D#4 D#4 D#4 D#4 D#4 D#4 D#4 F4 F4 D#4 D#4 A#3 A#3 D#4 D#4 D4 D4 D#4 D#4 rest A#3 A#3 C4 C4 D4 D4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.517 0.517 0.173 0.173 0.1720001 0.1720001 0.345 0.345 0.1719999 0.1719999 0.5180001 0.5180001 0.1719999 0.1719999 0.7970002 0.7970002 0.2379999 0.1719999 0.1719999 0.1719999 0.1719999 0.1729999 0.1729999 0.6890001 0.6890001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127002 0.044998 0.127987 0.045013 0.127002 
0.044998 0.10345 0.06955 0.095676 0.076324 0.472002 0.044998 0.107875 0.065125 0.126269 0.045731 0.300002 0.044998 0.112002 0.059998 0.402201 0.115799 0.085422 0.086578 0.797 0.133004 0.104996 0.078326 0.093674 0.127002 0.044998 0.083141 0.089859 0.689 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.127 0.045 0.128 0.045 0.127 0.045 0.1035 0.0696 0.0957 0.0763 0.472 0.045 0.1079 0.0651 0.1263 0.0457 0.3 0.045 0.112 0.06 0.4022 0.1158 0.0854 0.0866 0.797 0.133 0.105 0.0783 0.0937 0.127 0.045 0.0831 0.0899 0.689 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 1 1", + "note_seq": "rest D4 D#4 G4 D#4 D#4 D#4 D#4 F4 D#4 A#3 D#4 D4 D#4 rest A#3 C4 D4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.172 0.517 0.173 0.172 0.345 0.172 0.518 0.172 0.797 0.238 0.172 0.172 0.173 0.689 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 273.8 278.7 281.5 283.2 285.1 287.1 288.8 291.0 293.6 294.6 295.2 295.7 293.8 292.1 291.3 290.0 290.5 291.2 292.1 293.2 294.4 295.9 297.8 299.4 300.4 301.1 301.6 301.5 299.0 298.5 297.6 296.4 295.0 293.6 292.5 291.7 291.3 291.4 292.0 293.5 295.1 297.3 299.8 302.7 305.4 307.9 310.3 312.1 313.4 314.1 314.3 314.1 313.8 313.5 313.1 312.6 311.9 311.2 310.5 310.0 309.5 309.0 308.5 308.2 308.0 307.9 308.0 308.4 309.1 310.1 311.8 314.2 317.5 321.7 325.5 337.6 340.9 350.3 361.8 368.1 373.5 378.2 382.3 385.9 388.6 391.0 393.1 393.1 393.1 393.1 393.2 393.4 393.4 393.4 393.5 393.6 393.6 393.7 393.5 392.5 390.3 387.5 383.6 378.4 373.1 367.7 361.8 355.5 348.7 342.8 337.3 332.0 326.8 322.5 319.1 316.3 314.2 312.7 312.6 312.6 312.6 312.6 312.6 312.6 312.6 312.6 312.6 312.6 312.6 312.6 312.2 311.2 309.3 306.5 303.7 300.8 298.3 296.3 295.3 295.1 295.5 296.4 298.1 300.0 302.1 304.2 306.3 308.3 309.7 310.6 311.0 311.0 310.9 310.9 310.9 310.9 310.8 310.8 310.6 310.6 310.6 310.6 310.6 310.1 308.7 306.8 304.4 301.3 297.9 294.7 291.6 288.8 286.0 284.0 282.8 282.1 292.2 296.0 299.8 303.7 307.6 311.6 315.7 319.8 323.9 328.1 323.2 321.9 321.2 319.7 319.5 318.7 318.0 316.8 316.1 315.5 314.7 313.7 312.8 311.6 310.6 309.5 308.7 308.3 308.3 308.3 308.4 308.7 308.8 309.0 309.4 309.6 309.9 310.2 310.5 310.8 311.0 311.1 311.1 311.4 311.7 311.7 311.7 311.7 311.7 311.8 311.8 311.8 311.8 311.6 311.5 311.5 311.5 311.3 311.3 311.3 311.3 311.5 311.5 311.5 311.6 311.5 311.6 311.6 311.5 311.5 311.5 311.5 311.7 311.9 312.0 312.0 312.2 312.4 312.4 314.3 314.2 314.2 314.2 314.1 313.9 313.8 313.6 313.5 313.4 313.2 313.1 313.0 312.9 312.9 312.9 312.8 312.0 310.5 308.2 305.4 302.4 299.1 295.7 292.2 289.5 287.2 285.7 284.6 285.0 285.7 287.2 289.0 292.1 295.3 298.4 301.7 305.1 307.3 309.4 310.6 311.4 311.3 310.9 310.8 310.7 310.6 310.5 310.3 310.1 309.9 309.7 309.6 309.4 309.2 309.1 309.0 309.0 309.0 309.5 310.4 312.7 315.3 318.8 322.9 327.9 332.3 336.7 340.6 344.2 347.1 348.8 349.8 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.7 350.4 350.2 349.4 347.3 343.5 338.3 332.9 327.2 321.7 316.1 312.0 309.3 307.8 311.8 311.8 311.8 311.8 311.9 312.0 312.2 312.4 312.5 312.6 312.7 312.9 313.1 313.2 313.3 313.3 313.3 313.4 313.4 313.3 313.3 313.3 313.2 313.0 312.8 312.7 312.5 312.3 312.2 312.1 311.9 311.7 311.6 311.5 311.5 311.5 311.5 313.1 313.4 313.6 313.4 313.2 312.9 312.3 311.3 310.4 309.2 
308.0 306.2 304.6 303.0 300.5 297.5 294.8 291.6 287.3 283.9 279.6 274.2 268.4 261.3 256.3 249.4 244.4 240.9 237.2 233.1 230.7 230.1 230.2 231.0 232.1 233.0 233.8 234.8 235.4 235.5 234.8 233.6 231.2 229.3 228.5 227.5 230.3 235.2 238.7 242.4 246.0 249.8 253.6 257.4 261.3 265.3 269.3 273.4 277.5 281.7 286.0 290.4 294.8 299.2 303.8 308.4 313.1 317.8 322.6 327.5 332.5 323.0 321.5 321.4 321.0 321.5 321.9 321.7 320.9 319.7 317.8 315.9 314.4 313.2 311.8 310.6 309.7 308.8 307.9 307.2 306.7 306.5 306.6 306.8 307.2 307.9 308.4 308.9 309.6 310.1 310.9 311.5 312.2 312.6 312.9 313.1 313.3 313.3 313.3 313.3 313.3 313.1 312.9 312.6 312.0 311.6 311.2 310.9 310.3 309.9 309.6 309.0 308.8 308.8 308.9 309.1 309.4 309.9 310.4 311.0 311.4 311.7 312.0 312.5 313.1 313.4 313.8 313.7 313.2 312.3 310.9 309.4 308.3 306.8 305.9 305.7 305.4 305.1 304.9 304.6 304.3 304.1 303.8 303.5 303.3 303.0 302.7 302.5 302.2 301.9 301.7 301.4 301.1 300.9 300.6 300.3 300.1 299.8 299.5 299.3 299.0 298.7 298.5 298.2 296.8 294.9 293.7 293.4 293.6 294.0 294.2 294.5 294.3 293.5 292.8 291.8 291.1 290.7 289.5 288.1 286.0 282.4 277.6 272.7 269.4 273.1 276.1 279.2 282.3 285.4 288.6 291.8 295.1 298.4 301.7 305.1 308.5 311.9 315.4 318.0 317.0 316.4 315.8 315.8 316.0 316.0 316.3 316.5 316.6 316.3 315.9 315.1 314.1 313.0 312.0 311.0 309.9 309.2 308.8 308.0 307.4 306.9 306.7 306.7 306.5 306.7 307.0 307.3 307.7 308.4 309.2 309.9 310.4 311.1 311.6 312.2 312.7 313.3 314.0 314.2 314.3 314.5 314.4 314.3 314.1 313.7 313.3 312.8 312.5 312.1 311.8 311.5 311.1 310.8 310.5 310.1 309.6 309.2 308.8 308.3 308.1 308.3 308.6 308.9 309.2 309.3 309.3 309.5 309.7 309.8 310.3 310.7 311.1 311.2 311.4 311.6 311.8 311.9 312.0 312.0 312.0 311.8 311.8 311.6 311.5 311.3 310.9 310.8 310.6 310.5 310.2 310.0 309.7 309.4 309.3 309.1 309.0 309.0 309.0 309.0 308.8 309.0 309.0 309.1 309.2 309.0 308.9 308.8 308.7 308.7 308.8 308.8 308.8 308.8 308.6 308.8 308.8 308.8 310.4 310.4 310.4 310.6 310.7 310.9 311.0 311.2 311.3 311.5 311.7 311.7 311.8 311.8 311.8 311.8 311.9 312.0 312.1 312.2 312.2 312.4 312.5 312.6 312.6 312.7 312.7 312.7 312.7 312.9 312.8 312.7 312.5 312.3 312.1 311.8 311.4 311.1 310.8 310.4 310.1 309.9 309.6 309.5 309.3 309.3 294.4 293.5 292.7 291.9 291.1 290.3 289.5 288.7 287.9 287.1 286.3 285.5 284.7 283.9 283.1 282.3 281.5 280.8 280.0 279.2 278.4 277.7 276.9 276.1 275.3 274.6 273.8 273.1 272.3 271.5 270.8 270.0 269.3 234.1 234.0 234.0 234.0 233.9 233.9 233.8 233.7 233.6 233.6 233.5 233.3 233.2 233.2 233.1 233.1 233.1 233.1 233.0 232.8 232.4 231.7 230.9 229.9 228.9 227.7 226.6 225.5 224.7 223.7 223.0 222.6 222.4 244.2 246.8 249.4 252.0 254.7 257.4 260.1 262.9 265.6 268.4 269.4 265.1 263.7 263.4 263.1 262.5 262.2 261.4 260.7 260.0 259.4 258.9 258.5 258.5 258.6 258.6 259.1 259.7 260.6 263.4 263.9 265.1 266.6 268.4 270.5 273.1 275.4 278.4 281.1 283.7 286.4 288.6 290.8 292.5 293.7 294.4 294.9 294.7 294.6 294.5 294.4 294.2 294.2 294.2 294.2 294.3 294.8 295.2 295.6 295.8 296.0 295.7 294.7 293.2 291.4 289.0 287.0 284.6 282.7 281.5 280.4 306.0 307.2 308.4 309.7 310.9 298.3 298.6 299.5 300.5 302.0 303.6 305.4 307.2 308.9 310.3 311.7 312.7 313.2 313.4 313.3 313.1 313.0 312.8 312.6 312.2 311.8 311.4 310.9 310.6 310.2 309.8 309.6 309.4 309.1 309.0 309.0 309.0 309.0 309.2 309.3 309.6 309.9 310.2 310.6 310.9 311.2 311.5 311.6 311.7 311.8 311.7 311.7 311.5 311.4 311.2 311.1 310.9 310.5 310.2 310.1 309.8 309.6 309.3 309.1 308.9 308.8 308.6 308.5 308.4 308.4 308.4 308.5 308.7 309.0 309.4 309.7 310.1 310.8 311.3 311.8 312.2 312.6 313.0 313.3 313.6 313.7 313.8 313.7 313.5 313.4 313.1 
312.8 312.4 312.1 311.7 311.4 310.9 310.4 310.1 309.8 309.4 309.1 309.0 309.0 309.0 309.0 309.2 309.5 310.0 310.5 311.1 311.7 312.4 313.0 313.6 314.0 314.3 314.6 314.8 314.8 314.7 314.4 314.1 313.7 313.0 312.5 311.9 311.4 310.9 310.5 310.2 309.9 309.9 309.9 309.9 309.9 310.1 310.2 310.4 310.5 310.8 311.0 311.2 311.5 311.7 311.8 312.0 312.1 312.2 312.3 304.0 304.0 304.0 304.0 304.0 304.0 304.0 304.0 304.0 304.0", - "input_type": "phoneme", - "offset": 127.655 + "f0_timestep": "0.005" }, { + "offset": 133.172, "text": "AP 产 生 几 许 不 快 你 也 很 快 就 抛 开 SP", "ph_seq": "AP ch an sh eng j i x v b u k uai n i y E h en k uai j iu p ao k ai SP", - "note_seq": "rest D4 D4 D#4 D#4 D4 D4 C4 C4 A#3 A#3 A#3 A#3 A#3 A#3 C4 C4 A#3 A#3 D#4 D#4 F4 F4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.345 0.345 0.1719999 0.1719999 0.1720001 0.1720001 0.345 0.345 0.1729999 0.1729999 0.3440001 0.3440001 0.1730001 0.1730001 0.6459999 0.6459999 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.156005 0.119995 0.113002 0.059998 0.08623 0.08577 0.092739 0.080261 0.147006 0.024994 0.22238 0.12262 0.284987 0.060013 0.127002 0.044998 0.111987 0.060013 0.260558 0.084442 0.092998 0.080002 0.239004 0.104996 0.095516 0.077484 0.646 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.156 0.12 0.113 0.06 0.0862 0.0858 0.0927 0.0803 0.147 0.025 0.2224 0.1226 0.285 0.06 0.127 0.045 0.112 0.06 0.2606 0.0844 0.093 0.08 0.239 0.105 0.0955 0.0775 0.646 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 D#4 D4 C4 A#3 A#3 A#3 C4 A#3 D#4 F4 G#4 G4 rest", + "note_dur": "0.276 0.173 0.172 0.173 0.172 0.345 0.345 0.172 0.172 0.345 0.173 0.344 0.173 0.646 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 307.2 294.0 293.8 293.8 293.8 293.8 293.8 293.7 293.6 293.5 293.5 293.5 293.5 293.5 293.5 293.5 293.5 293.6 293.7 293.9 294.0 294.0 294.1 294.2 294.3 293.8 292.6 290.5 287.8 285.3 283.0 281.5 280.7 281.5 284.2 288.6 294.4 301.5 307.2 311.2 313.2 313.7 313.7 313.7 313.6 313.5 313.5 313.3 313.3 313.2 313.1 313.0 312.9 312.9 312.9 312.9 312.1 309.9 306.4 302.1 297.1 293.2 290.3 289.1 288.8 289.0 289.3 289.8 290.6 291.3 292.0 292.8 293.7 294.4 295.0 295.5 295.9 296.0 296.2 296.0 295.9 295.6 295.1 294.7 294.3 293.8 293.4 293.1 293.0 293.0 292.4 290.8 288.3 285.2 281.6 277.8 273.5 270.4 267.8 266.0 265.0 264.8 264.8 264.9 265.0 265.0 265.1 265.2 265.4 265.5 265.6 265.6 265.6 265.7 265.6 265.6 265.5 265.4 265.3 265.2 265.1 264.9 264.8 264.8 264.8 264.6 263.6 262.1 260.1 257.4 253.8 250.4 247.2 244.0 241.0 238.2 236.3 235.1 234.6 234.4 234.4 234.4 234.5 234.6 234.6 234.6 234.7 234.7 234.7 234.7 234.8 234.7 234.7 234.6 234.5 234.3 234.1 234.0 233.9 233.8 233.6 233.6 233.6 233.6 233.6 233.7 233.8 233.9 234.0 234.2 234.4 234.5 234.7 234.9 235.0 235.1 235.2 235.4 235.4 235.5 232.9 231.8 229.6 228.0 228.2 229.4 232.8 241.5 241.6 241.7 241.8 241.9 242.0 242.1 242.2 242.3 242.4 242.6 242.7 242.8 242.9 243.0 243.1 243.2 243.3 243.4 243.5 243.7 243.8 243.9 244.0 244.1 244.2 242.6 240.5 239.5 239.3 239.5 239.6 239.4 238.4 237.0 236.1 235.0 234.4 233.9 233.5 232.9 232.4 232.1 232.0 231.9 232.1 232.3 232.5 233.1 233.2 
233.2 233.4 233.2 233.2 233.4 233.6 233.7 233.8 233.8 233.9 233.9 234.1 234.3 234.6 234.8 234.8 234.6 234.5 234.4 234.3 234.0 233.4 232.7 231.9 231.5 231.2 230.6 230.0 230.1 230.3 230.4 230.8 230.7 230.6 230.7 231.2 231.8 232.0 232.1 232.3 232.3 232.5 233.2 233.8 234.0 234.0 233.9 233.5 233.5 233.2 233.2 233.3 233.1 232.9 232.7 232.6 232.5 232.4 232.2 232.1 231.9 231.4 230.9 230.5 230.0 229.7 229.4 229.2 229.6 230.4 232.5 236.3 240.5 244.0 247.3 251.6 254.7 257.2 258.9 260.9 262.4 263.5 265.0 265.1 265.3 265.2 264.6 263.8 262.7 261.4 259.7 257.7 256.1 254.2 251.1 248.4 245.5 242.7 240.6 236.8 232.7 230.1 230.1 231.1 232.0 233.0 234.0 235.1 236.1 237.1 238.1 239.1 240.1 241.2 242.2 239.8 236.9 235.6 234.8 233.8 232.7 231.4 230.0 228.7 228.0 227.9 228.5 229.5 230.1 230.7 231.0 231.5 231.8 232.0 232.2 232.3 232.4 232.5 232.4 232.2 232.0 232.1 232.1 232.1 232.5 232.8 233.0 233.4 233.9 234.4 234.6 234.6 234.5 234.4 234.2 233.8 232.9 231.6 230.3 228.2 226.2 223.7 221.3 218.9 217.6 220.8 225.1 229.5 233.9 238.5 243.1 247.8 252.6 257.5 262.5 267.6 272.8 278.1 283.5 288.9 294.6 300.3 306.1 308.9 310.2 311.4 311.5 311.4 311.3 311.3 311.3 311.2 311.1 311.1 311.1 311.0 310.9 310.9 310.9 310.9 310.5 309.9 309.1 308.0 306.8 305.7 304.7 303.7 303.1 302.6 328.6 332.8 337.0 341.3 345.7 350.0 354.5 356.0 352.6 349.3 349.6 349.9 350.0 350.7 351.3 351.3 351.3 351.1 350.6 350.1 349.3 348.4 347.6 347.1 347.0 346.8 346.8 347.0 347.5 347.8 348.0 348.2 348.4 348.4 348.4 348.1 347.8 347.6 347.5 347.3 347.5 347.7 348.1 348.7 349.4 350.0 350.5 351.0 351.2 351.3 350.8 350.2 349.1 347.3 344.6 341.3 337.5 333.8 329.6 326.3 322.1 319.3 321.7 320.5 325.3 331.8 338.4 345.1 352.0 359.0 366.1 373.4 380.8 388.4 396.1 403.9 407.0 410.5 414.2 414.1 414.0 413.8 413.6 413.4 413.2 413.0 412.8 412.6 412.4 412.4 412.4 412.0 410.9 409.2 407.2 404.6 401.3 398.4 395.5 393.0 390.5 388.6 387.4 386.8 398.9 397.5 396.1 394.7 386.3 386.5 387.1 388.0 389.2 390.4 391.7 392.9 394.0 394.9 395.6 396.0 396.0 395.8 395.4 394.9 394.3 393.6 392.9 392.1 391.4 390.6 389.9 389.2 388.6 387.9 387.5 387.3 387.3 387.3 387.5 387.8 388.3 389.1 389.9 390.7 391.8 392.9 393.9 394.9 395.9 396.8 397.5 398.1 398.6 398.9 399.2 399.1 398.8 398.1 397.3 396.4 395.3 394.1 392.7 391.3 389.9 388.5 387.1 385.8 384.6 383.9 383.2 382.6 382.4 382.4 382.6 382.9 383.5 384.3 385.2 386.2 387.4 388.8 390.1 391.3 392.9 394.3 395.4 396.4 397.3 398.0 398.7 398.9 399.2 399.1 398.7 398.2 397.7 397.0 396.1 395.0 393.9 392.9 391.9 390.9 389.7 389.0 388.4 387.8 387.4 387.3 387.3 387.5 388.1 388.8 389.8 390.8 391.8 392.9 394.0 394.7 395.3 395.7 396.1 396.0 395.8 395.5 395.3 395.0 394.5 394.0 393.5 392.9 392.4 391.9 391.4 390.9 390.4 390.1 389.9 389.6 389.4 389.3 388.6 388.6 388.6 388.6 388.6 388.6 388.6 388.6 388.6 388.6 388.6", - "input_type": "phoneme", - "offset": 133.172 + "f0_timestep": "0.005" }, { + "offset": 136.931, "text": "AP 虽 然 很 平 淡 SP", "ph_seq": "AP s ui r an h en p ing d an SP", - "note_seq": "rest G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 rest", - "note_dur_seq": "0.31 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.077", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.156741 0.153259 0.128002 0.044998 0.09595 0.07605 0.082271 0.090729 0.105197 0.066803 0.345 0.077", - "f0_timestep": "0.005", + "ph_dur": "0.1567 0.1533 0.128 0.045 0.0959 0.0761 0.0823 0.0907 0.1052 0.0668 0.345 0.077", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest G4 G#4 G4 F4 D#4 rest", + "note_dur": "0.31 0.173 0.172 0.173 0.172 0.345 0.077", + "note_slur": "0 0 0 0 0 0 0", 
"f0_seq": "423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 423.3 396.0 395.9 395.6 395.3 395.1 394.6 394.0 393.7 393.3 392.8 392.2 391.7 391.3 391.1 390.8 390.6 390.5 390.0 390.6 391.3 392.1 392.7 393.1 393.1 392.9 392.3 391.5 391.0 391.1 391.7 392.4 393.6 395.6 397.5 400.0 402.8 405.6 407.7 409.9 412.0 413.8 415.4 416.7 418.0 418.6 418.5 418.1 417.8 417.4 416.9 416.5 416.0 415.3 414.5 413.2 412.2 411.2 410.2 408.9 407.3 405.5 402.8 399.6 395.6 392.8 394.5 396.2 398.0 399.7 401.4 403.2 405.0 406.7 408.5 410.3 403.8 396.3 395.4 394.6 393.7 393.4 392.3 391.8 391.1 390.6 390.2 389.8 389.4 388.6 387.5 386.0 384.5 383.5 382.4 380.2 376.2 372.4 367.2 362.6 356.3 352.1 347.5 337.9 338.9 339.8 340.8 341.7 342.7 343.7 344.6 345.6 346.5 347.1 347.4 348.8 349.4 349.9 350.0 350.0 349.9 349.8 349.7 349.3 348.4 346.6 344.1 341.7 338.9 336.0 331.1 327.9 323.2 317.7 311.8 306.8 301.1 294.7 289.7 293.2 293.7 294.9 296.2 297.9 300.0 302.6 305.1 307.6 309.9 312.4 314.7 316.6 317.9 319.0 319.5 319.6 319.3 318.8 318.0 317.1 316.0 314.7 313.5 312.1 310.6 309.5 308.3 307.1 306.1 305.4 304.8 304.5 304.6 304.9 305.6 306.8 308.1 309.9 311.8 313.7 315.4 317.1 319.0 320.0 320.7 321.1 320.9 320.4 319.5 318.2 316.8 315.1 313.4 311.7 310.1 308.9 308.0 307.3 307.0 307.0 307.1 307.3 307.5 307.8 308.3 308.8 309.2 309.8 310.3 310.8 311.3 311.8 312.2 312.5 312.7 312.9 313.0 296.4 294.9 294.9 294.9 294.9 294.9 294.9 294.9 294.9 294.9 294.9 294.9 294.9", - "input_type": "phoneme", - "offset": 136.931 + "f0_timestep": "0.005" }, { + "offset": 138.345, "text": "AP 没 有 多 少 的 波 澜 SP 却 想 时 间 过 更 慢 SP", "ph_seq": "AP m ei y ou d uo sh ao d e b o l an SP q ve x iang sh ir j ian g uo g eng m an SP", - "note_seq": "rest G4 G4 G#4 G#4 G4 G4 F4 F4 A#4 A#4 D#4 D#4 D#4 D#4 rest A#3 A#3 C4 C4 D#4 D#4 G4 G4 F4 F4 G4 G4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.344 0.344 0.173 0.1719999 0.1719999 0.1730001 0.1730001 0.3439999 0.3439999 0.1730001 0.1730001 0.517 0.517 0.1730001 0.1730001 0.5169997 0.5169997 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127002 0.044998 0.133144 0.039856 0.094791 0.077209 0.139255 0.032745 0.097591 0.075409 0.24562 0.09938 0.344 0.083004 0.089996 0.102466 0.069534 0.077999 0.095001 0.253668 0.090332 0.09338 0.07962 0.457002 0.059998 0.103313 0.069687 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.127 0.045 0.1331 0.0399 0.0948 0.0772 0.1393 0.0327 0.0976 0.0754 0.2456 0.0994 0.344 0.083 0.09 0.1025 0.0695 0.078 0.095 0.2537 0.0903 0.0934 0.0796 0.457 0.06 0.1033 0.0697 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 1", + "note_seq": "rest G4 G#4 G4 F4 A#4 D#4 D#4 rest A#3 C4 D#4 G4 F4 G4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.173 0.345 0.344 0.173 0.172 0.173 0.344 0.173 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 405.8 389.7 389.7 389.7 389.8 389.9 390.0 
390.2 390.3 390.4 390.6 390.8 391.0 391.2 391.3 391.4 391.6 391.7 391.8 392.0 392.1 392.2 392.2 392.3 392.4 388.8 389.7 390.5 391.3 391.8 392.1 392.3 392.5 392.8 393.2 393.8 394.6 395.2 395.6 396.3 396.9 398.3 400.0 402.3 404.0 405.6 407.3 409.0 410.3 411.5 412.7 414.5 416.3 417.5 418.0 418.2 418.0 418.4 418.2 417.8 417.3 416.6 415.5 414.5 413.4 412.1 410.7 409.2 407.9 406.8 405.6 404.5 403.7 403.1 402.7 402.6 402.4 402.1 401.6 400.9 400.1 399.1 398.1 397.1 396.1 395.2 394.4 393.9 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.6 393.6 392.1 390.0 385.5 380.5 375.3 370.5 365.7 362.3 359.9 362.9 363.3 363.7 364.2 364.6 365.0 365.4 365.9 366.3 366.7 367.1 362.2 357.3 354.9 352.8 351.0 349.8 348.9 348.2 347.7 346.9 346.3 345.8 345.8 346.0 346.8 347.7 348.1 347.7 346.5 344.7 343.4 342.4 342.8 345.6 346.0 343.4 350.0 363.3 377.1 391.4 406.2 421.7 437.7 454.3 461.7 463.7 465.2 466.4 467.7 468.4 468.5 468.2 467.2 465.7 463.5 458.6 454.5 449.7 445.2 439.6 434.9 421.3 408.8 402.0 396.0 390.1 384.3 378.6 373.0 367.5 362.0 356.7 351.4 346.2 341.0 336.0 331.0 326.1 321.9 318.1 315.0 312.4 310.2 308.4 306.8 305.6 304.8 304.4 304.1 304.2 304.5 304.9 305.6 306.3 306.8 307.4 308.2 309.0 310.0 310.7 311.2 311.5 311.7 311.8 311.8 311.8 311.7 311.5 311.3 311.2 311.2 311.4 311.7 311.9 312.1 312.5 312.8 313.1 313.1 313.3 313.3 313.0 312.6 311.8 310.8 309.6 308.5 307.6 306.9 306.1 305.0 303.7 302.6 303.2 303.0 303.0 303.4 304.4 305.4 306.5 308.2 309.6 311.0 312.9 315.4 318.8 321.9 322.9 323.1 323.2 322.8 322.5 321.7 321.0 320.1 319.0 318.2 317.3 316.2 314.8 313.4 312.1 310.8 309.3 308.4 307.6 306.8 306.3 306.1 306.4 307.2 307.8 308.7 309.8 311.0 312.5 314.1 315.9 317.3 318.7 320.1 321.0 322.0 322.3 322.4 322.3 321.8 321.1 319.8 318.6 317.1 315.7 314.1 312.7 311.6 310.6 310.2 309.5 309.2 308.6 307.4 306.0 304.4 302.5 300.4 297.8 295.4 292.8 289.8 287.1 284.6 282.1 279.7 277.1 275.1 273.4 272.0 270.6 269.7 269.1 268.8 283.9 282.8 281.6 280.5 279.3 278.2 277.0 275.9 274.8 273.6 272.5 271.4 270.3 269.2 268.1 267.0 265.9 264.8 263.7 262.6 261.6 260.5 259.4 258.4 257.3 256.2 255.2 254.1 253.1 252.1 251.0 241.8 238.7 237.4 235.8 235.0 234.5 233.8 233.4 232.6 231.7 231.1 229.6 228.4 225.8 221.9 220.4 218.7 219.5 222.1 224.8 227.4 230.1 232.9 235.6 238.4 241.3 244.1 247.0 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.3 262.0 261.4 260.5 259.6 258.5 257.2 255.8 254.6 253.5 252.5 251.7 251.0 250.7 274.1 276.4 278.6 280.9 283.2 285.5 287.8 290.2 292.5 294.9 297.3 304.8 312.7 310.8 311.1 311.5 311.3 311.5 311.3 311.0 310.9 310.8 310.3 310.0 309.6 309.3 309.2 309.4 309.6 309.8 309.9 310.2 310.6 310.8 311.1 311.5 311.8 312.3 312.6 313.0 313.2 313.2 312.7 311.8 310.9 309.7 308.1 306.0 303.4 301.2 296.6 293.3 291.9 291.2 289.8 290.3 293.1 297.0 300.9 304.9 308.9 313.0 317.1 321.3 325.6 329.9 334.2 338.6 343.1 347.7 352.3 356.9 361.6 366.4 371.3 376.2 381.1 386.2 391.3 393.9 388.4 389.3 390.4 391.1 392.2 392.8 393.1 393.1 393.0 392.6 391.9 391.3 390.6 389.5 388.2 385.8 382.8 380.6 377.8 374.9 371.5 368.0 363.7 360.8 358.1 353.3 352.3 354.7 357.1 359.5 362.0 364.4 366.9 366.3 353.7 352.5 352.3 351.8 352.3 353.2 354.3 354.9 355.1 355.0 354.6 353.8 353.1 352.2 350.8 349.7 348.8 348.2 347.4 347.3 346.9 346.8 346.7 346.5 346.4 346.5 346.7 346.9 347.1 347.2 347.3 347.4 347.7 348.0 348.5 348.9 349.5 349.9 350.2 350.4 350.6 350.7 350.9 351.0 351.0 350.8 350.4 350.0 349.6 349.2 348.9 348.7 348.5 348.4 348.4 348.4 348.4 348.4 348.4 348.2 348.0 348.0 347.8 347.9 348.0 348.2 
348.4 348.8 349.2 349.9 350.6 351.0 351.3 351.1 350.5 349.8 349.3 348.8 348.4 348.0 347.5 346.4 345.4 344.3 343.2 342.2 341.9 345.1 345.5 339.7 338.7 341.9 341.6 346.3 350.6 354.9 359.4 363.8 368.4 372.9 377.6 382.3 387.0 391.9 393.4 392.9 392.8 392.5 392.7 392.9 392.6 392.1 391.8 391.4 390.6 390.1 389.1 387.6 385.9 383.8 381.7 379.0 374.4 368.8 362.6 355.5 350.0 344.6 339.2 331.0 324.8 319.0 313.9 309.9 307.1 305.1 304.3 305.9 307.5 308.4 309.8 310.5 310.8 311.8 312.6 313.7 314.4 315.1 315.6 315.4 315.0 314.2 313.2 312.4 311.4 310.4 309.4 308.1 306.7 304.8 303.7 302.5 301.1 300.5 300.0 299.8 300.1 300.5 301.2 302.4 303.8 305.3 306.6 308.1 309.4 310.4 311.6 312.7 313.5 314.1 314.3 314.7 314.5 314.0 313.1 312.0 311.1 309.9 308.4 307.3 305.9 304.7 303.5 302.6 302.2 301.7 301.7 301.7 301.9 302.5 303.5 304.4 305.6 306.7 307.9 309.2 310.1 311.3 312.0 312.5 312.7 312.6 312.3 311.9 311.0 309.8 308.7 307.5 306.2 305.4 304.8 304.2 304.1 304.7 305.8 306.8 308.2 309.3 309.8 309.9 309.8 309.3 308.8 308.4 308.1 307.8 308.2 309.0 310.7 314.7 318.8 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1 320.1", - "input_type": "phoneme", - "offset": 138.345 + "f0_timestep": "0.005" }, { + "offset": 142.483, "text": "AP 生 命 的 短 暂 SP", "ph_seq": "AP sh eng m ing d e d uan z an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.126006 0.149994 0.112002 0.059998 0.124484 0.047516 0.128002 0.044998 0.089358 0.082642 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.126 0.15 0.112 0.06 0.1245 0.0475 0.128 0.045 0.0894 0.0826 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 329.2 327.0 319.3 316.5 313.3 311.3 310.2 309.5 309.0 308.7 308.7 309.1 309.7 310.4 311.4 312.3 313.1 313.7 314.0 314.2 314.2 314.2 314.3 314.5 314.9 315.7 316.3 317.5 319.0 320.3 322.0 324.2 327.1 329.7 332.6 336.9 340.2 342.8 345.5 347.4 348.1 349.2 350.2 351.0 351.6 351.9 352.1 352.3 352.7 353.2 353.6 353.6 353.1 352.3 351.5 350.8 350.2 349.2 347.8 345.2 341.4 337.3 333.2 329.5 326.2 323.4 322.9 324.2 329.1 345.8 363.3 381.7 385.7 387.0 388.0 390.1 391.5 392.6 392.8 392.7 392.6 392.4 392.3 392.2 391.9 391.5 391.2 390.8 390.4 390.1 389.8 389.4 389.0 388.6 388.4 388.1 387.9 387.6 387.3 387.3 387.3 387.3 416.7 416.7 416.7 416.8 417.0 417.0 417.2 417.2 417.2 417.2 417.4 417.2 417.2 417.0 416.7 416.4 416.2 415.9 415.7 415.5 415.5 414.1 409.5 400.9 390.8 381.7 378.4 373.0 376.1 379.1 382.2 385.3 388.4 391.6 394.8 398.0 401.2 401.4 399.2 398.7 400.3 402.4 405.0 407.2 407.9 407.9 407.5 406.5 405.1 403.9 402.6 400.1 397.5 395.5 393.5 391.2 389.2 387.6 386.4 385.6 385.6 385.9 386.6 387.5 388.8 390.3 392.4 393.6 394.9 396.1 397.4 398.5 399.4 399.9 400.0 399.8 399.5 399.3 398.2 397.5 396.3 394.7 393.6 392.6 391.5 390.7 390.1 389.7 389.5 389.6 389.8 390.2 391.1 391.6 392.0 392.4 392.4 392.3 391.3 389.3 386.0 382.9 378.5 371.4 359.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 
351.3 351.3 351.3", - "input_type": "phoneme", - "offset": 142.483 + "f0_timestep": "0.005" }, { + "offset": 143.862, "text": "AP 生 活 的 阻 碍 SP", "ph_seq": "AP sh eng h uo d e z u ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.3439999 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.156005 0.119995 0.111987 0.060013 0.128002 0.044998 0.091998 0.080002 0.128002 0.388998 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.156 0.12 0.112 0.06 0.128 0.045 0.092 0.08 0.128 0.389 0.069", + "ph_num": "2 2 2 2 1 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.344 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 322.3 318.1 315.7 313.7 312.5 311.7 311.2 311.1 311.4 311.8 312.1 312.2 311.9 310.9 309.5 307.4 304.9 301.6 298.5 295.6 291.2 287.2 288.0 293.0 298.1 303.3 308.6 314.0 319.5 325.1 330.8 336.6 342.4 348.4 349.7 347.8 347.0 347.6 348.8 349.5 349.7 349.8 350.0 349.9 349.8 349.7 349.5 349.4 349.2 348.9 348.7 348.5 348.2 348.0 347.8 347.6 347.3 347.1 346.9 346.8 346.8 346.8 347.0 347.8 349.4 352.3 356.0 359.9 364.6 369.6 375.3 380.2 384.8 388.7 392.1 393.7 394.9 394.8 394.7 394.4 394.2 393.9 393.7 393.6 393.6 393.1 392.4 391.2 389.9 388.7 387.9 387.4 384.6 387.7 390.7 393.8 397.0 400.1 403.3 406.5 409.8 413.0 414.5 414.7 415.1 415.3 415.4 416.0 416.7 417.5 417.7 417.7 417.3 416.8 416.3 415.7 414.7 413.7 413.2 412.9 412.6 412.4 412.4 412.2 411.6 411.2 410.3 408.2 406.2 403.8 401.5 398.8 396.1 393.7 391.7 390.9 391.0 391.5 392.2 392.8 393.4 393.7 394.3 394.8 395.1 395.6 395.7 395.6 392.1 391.9 391.4 390.9 390.3 389.5 388.6 387.9 387.0 386.0 385.1 384.3 383.6 382.8 382.3 382.0 381.8 381.7 381.9 382.5 383.3 384.4 385.6 387.2 388.9 390.4 391.9 393.4 394.9 395.9 396.6 397.1 397.2 397.0 396.8 396.1 395.5 395.0 394.3 393.4 392.4 391.6 390.6 389.9 389.0 388.4 387.9 387.6 387.3 387.3 387.3 387.5 388.1 388.6 389.2 390.0 391.1 392.0 392.8 393.6 394.3 394.9 395.2 395.2 360.5 360.5 360.5 360.5 360.5 360.5 360.5 360.5", - "input_type": "phoneme", - "offset": 143.862 + "f0_timestep": "0.005" }, { + "offset": 145.241, "text": "AP 已 经 逝 去 的 五 分 之 三 SP 还 有 多 少 安 排 SP", "ph_seq": "AP y i j ing sh ir q v d e w u f en zh ir s an SP h ai y ou d uo sh ao an p ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D4 D4 A#3 A#3 rest D#4 D#4 D#4 D#4 F4 F4 G4 G4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.1720001 0.1720001 0.517 0.517 0.345 0.345 0.345 0.345 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.1729999 0.3439999 0.3439999 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.113002 0.059998 0.079333 0.092667 0.112002 0.059998 0.128002 0.044998 0.300002 0.044998 0.094104 0.077896 0.404741 0.112259 0.208785 0.136215 0.345 0.097003 0.074997 0.128002 0.044998 0.115451 0.056549 0.087398 0.085602 0.127002 0.147121 0.070877 0.344 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.113 0.06 0.0793 0.0927 0.112 0.06 0.128 0.045 0.3 
0.045 0.0941 0.0779 0.4047 0.1123 0.2088 0.1362 0.345 0.097 0.075 0.128 0.045 0.1155 0.0565 0.0874 0.0856 0.127 0.1471 0.0709 0.344 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 1 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 D4 A#3 rest D#4 D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.172 0.517 0.345 0.345 0.172 0.173 0.172 0.173 0.172 0.173 0.344 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 300.5 295.6 296.4 298.1 300.4 303.5 306.0 308.4 310.2 311.4 311.8 311.9 312.0 312.2 312.4 312.6 312.8 313.0 313.1 313.3 313.5 312.7 311.2 308.8 305.9 303.1 300.7 298.8 297.8 298.0 299.0 301.2 303.8 307.2 311.2 316.5 321.6 326.6 331.7 336.9 342.0 346.0 349.2 351.5 352.9 353.5 353.3 353.1 352.9 352.6 352.3 351.9 351.6 351.4 351.3 351.1 350.6 349.6 348.0 346.0 343.8 341.6 339.4 337.4 336.0 335.2 334.8 335.8 338.6 343.4 349.6 356.4 363.9 371.5 380.2 386.3 390.8 393.6 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 393.9 391.3 387.6 383.0 377.9 372.7 368.9 366.6 365.5 365.9 366.9 368.7 371.4 374.9 378.5 382.4 387.7 392.0 396.6 400.9 405.3 409.2 412.2 414.6 416.0 416.8 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 417.0 416.3 413.9 410.3 405.4 400.1 394.9 391.5 389.9 389.7 389.9 390.1 390.5 391.2 391.8 392.3 392.9 393.4 393.8 394.0 394.3 394.1 393.7 393.4 392.9 392.1 391.4 390.9 390.1 389.2 388.8 388.4 388.1 387.9 388.7 388.6 388.7 389.0 389.4 389.6 390.1 390.5 390.9 391.5 392.0 392.4 392.4 392.8 392.7 392.7 392.6 392.2 392.0 391.9 391.8 391.7 391.4 391.0 390.4 389.1 388.2 387.2 386.0 384.3 382.5 380.3 377.7 374.7 372.0 369.8 367.2 363.6 360.6 357.0 352.6 350.0 347.6 346.4 345.3 344.9 345.0 345.8 346.9 349.4 349.4 349.5 349.7 349.8 350.0 350.2 350.2 350.2 350.4 350.2 349.3 347.9 346.2 344.2 341.8 338.7 335.8 333.0 330.3 327.5 325.0 323.1 321.4 320.3 319.5 318.5 319.3 320.0 320.7 321.5 322.2 323.0 323.7 324.5 325.2 322.5 316.1 315.7 316.3 316.6 316.7 316.7 316.7 316.3 315.7 314.9 313.9 312.9 312.1 311.5 310.6 309.9 309.4 309.0 309.0 309.0 309.0 309.2 309.5 309.8 310.1 310.5 310.8 310.9 311.0 311.1 311.1 311.0 311.1 311.1 311.1 311.3 311.5 311.3 311.5 311.5 311.3 311.3 311.2 311.1 310.9 310.8 310.8 310.7 310.6 310.8 311.1 310.9 311.3 311.5 311.5 311.8 312.2 312.5 312.9 313.1 313.0 312.9 312.6 312.2 311.3 310.3 309.1 307.7 306.4 304.0 301.8 299.3 295.5 291.7 289.0 287.4 288.0 290.3 290.6 290.9 291.2 291.6 291.9 292.2 292.6 292.9 293.2 293.6 293.9 294.2 294.6 294.9 295.2 295.6 295.9 296.3 296.6 296.9 297.3 297.6 297.9 298.3 298.6 296.0 295.2 296.1 297.9 298.9 299.1 298.3 297.2 296.4 295.6 294.9 293.9 293.3 292.6 292.1 291.7 291.5 291.4 291.1 291.2 291.4 291.6 292.1 292.6 293.0 293.7 294.0 294.2 294.5 294.5 294.5 294.0 293.2 291.5 290.0 287.8 284.7 279.8 274.2 268.2 262.9 260.2 251.5 251.1 250.7 250.3 250.0 249.6 249.2 248.8 248.5 248.1 247.7 247.3 247.0 246.6 246.2 245.8 245.5 245.1 244.7 244.4 244.0 243.6 243.3 242.9 242.5 242.2 241.8 239.6 236.8 236.0 235.8 235.4 235.3 235.5 235.4 235.1 235.1 234.9 234.7 234.1 233.6 233.2 232.4 231.7 231.3 230.6 230.2 229.9 229.6 229.3 229.2 229.1 229.1 229.2 229.2 229.4 229.7 230.2 230.7 231.2 231.7 232.5 233.2 233.7 233.9 234.0 234.2 234.0 233.6 233.4 232.8 232.3 
232.1 231.8 231.4 231.1 231.1 231.2 231.4 231.5 231.6 231.6 231.2 230.7 229.2 227.7 226.7 225.1 223.7 222.7 221.0 218.5 215.7 211.2 203.8 201.2 205.8 208.7 211.5 214.5 217.4 220.4 223.4 226.5 229.6 232.8 236.0 239.3 242.6 245.9 249.3 252.7 256.2 259.7 263.3 266.9 270.6 274.3 278.1 281.9 285.8 289.8 293.8 297.8 301.9 306.1 310.3 314.6 318.9 323.3 327.7 321.5 316.6 313.7 310.9 307.9 306.4 305.7 305.3 305.5 306.1 307.1 308.3 309.2 310.4 311.5 312.2 312.4 312.1 311.7 310.8 310.1 309.7 309.2 308.9 308.8 308.9 309.1 309.5 309.9 310.1 310.4 310.4 310.4 310.7 311.1 311.4 311.7 311.7 311.7 311.7 311.8 312.1 312.3 312.4 312.3 311.8 311.5 310.6 309.0 307.4 305.5 303.6 300.8 298.9 296.7 292.0 287.8 288.3 290.9 291.8 295.6 301.5 307.5 313.6 319.9 326.2 332.7 339.3 346.1 347.7 347.4 347.9 348.3 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.0 350.0 349.8 349.1 348.1 346.6 345.1 343.2 341.3 339.0 337.1 335.5 334.2 333.1 332.3 332.1 337.9 351.6 371.5 388.5 397.9 398.5 398.3 398.1 397.8 397.4 396.8 396.4 395.8 395.2 394.6 394.0 393.5 393.1 392.7 392.5 392.4 388.8 388.8 389.1 389.5 390.0 390.5 391.0 391.4 391.5 391.8 391.8 392.2 393.0 394.7 396.2 398.1 400.8 403.1 405.4 407.5 409.3 411.0 412.2 413.3 414.3 415.5 416.3 416.7 417.2 417.2 417.1 416.6 416.0 415.3 414.3 413.0 412.0 410.8 409.1 405.4 401.9 400.0 396.4 391.3 386.6 380.7 374.1 369.0 358.7 359.0 360.6 362.9 366.0 369.7 373.6 377.7 381.7 386.6 390.0 392.9 395.2 396.7 397.5 397.2 397.0 396.7 396.2 395.6 394.9 394.1 393.4 392.6 391.7 390.9 390.2 389.4 388.7 388.1 387.7 387.3 387.3 387.3 387.5 387.9 388.7 389.7 391.1 392.8 394.3 395.9 397.4 399.0 400.2 401.2 401.9 402.4 402.5 402.2 401.7 401.2 400.4 399.5 398.4 397.2 396.2 395.2 394.0 392.9 392.1 391.4 390.9 390.5 390.4 390.0 389.1 387.6 385.5 383.2 380.6 378.1 375.3 373.0 371.3 370.1 369.3 356.2 356.2 356.2 356.2 356.2 356.2 356.2 356.2 356.2 356.2 356.2 356.2", - "input_type": "phoneme", - "offset": 145.241 + "f0_timestep": "0.005" }, { + "offset": 149.379, "text": "AP 有 多 少 期 待 SP", "ph_seq": "AP y ou d uo sh ao q i d ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.123577 0.049423 0.083591 0.088409 0.079089 0.092911 0.104473 0.068527 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.1236 0.0494 0.0836 0.0884 0.0791 0.0929 0.1045 0.0685 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 326.2 308.9 308.8 308.6 308.4 308.2 307.9 307.7 307.6 307.4 307.4 307.4 307.4 307.5 307.7 308.0 308.3 308.6 309.0 309.4 309.9 310.2 310.5 310.7 310.8 310.9 310.7 310.2 309.2 308.1 306.9 305.3 303.5 301.9 300.4 299.1 297.8 296.8 296.4 296.2 297.5 301.8 307.8 315.1 323.2 331.9 340.7 346.6 350.0 347.3 352.8 352.7 352.7 352.7 352.5 352.3 352.3 352.3 352.3 352.2 351.7 349.8 347.8 345.2 342.2 339.0 335.6 332.8 330.4 328.8 327.6 359.2 364.9 370.8 376.7 382.8 388.9 395.2 394.8 394.7 394.7 394.5 394.3 393.9 393.7 393.4 392.9 392.6 392.4 392.2 392.0 391.8 391.8 391.8 391.2 390.0 388.1 385.7 383.0 379.9 376.2 373.0 370.1 367.6 
365.2 363.6 362.9 386.1 389.3 392.6 395.8 399.1 402.4 405.7 409.1 412.5 413.7 413.9 414.7 415.6 416.5 417.5 418.0 417.9 417.2 416.0 414.8 413.2 410.1 407.5 404.6 400.3 395.2 386.6 374.4 367.4 371.2 374.9 378.7 382.6 386.4 390.3 394.3 398.3 402.3 406.4 391.8 391.9 392.6 393.1 394.1 394.9 395.9 396.8 397.4 397.7 397.9 398.0 397.8 397.5 397.0 396.4 395.6 394.9 394.0 393.2 392.4 391.7 390.9 390.1 389.5 389.1 388.8 388.6 388.6 388.7 389.1 389.4 390.1 391.0 391.9 392.9 394.0 395.0 396.1 397.0 397.9 398.5 399.1 399.6 399.8 399.9 399.7 399.1 398.3 397.2 396.1 394.8 393.5 392.2 390.9 389.7 389.0 388.4 388.0 387.9 387.9 388.2 388.4 388.8 389.3 389.8 390.3 390.8 391.3 391.7 392.0 392.1 392.3 360.7 360.7 360.7 360.7 360.7 360.7 360.7 360.7 360.7 360.7 360.7 360.7", - "input_type": "phoneme", - "offset": 149.379 + "f0_timestep": "0.005" }, { + "offset": 150.759, "text": "AP 过 去 的 梦 何 时 捡 起 来 SP", "ph_seq": "AP g uo q v d e m eng h e sh ir j ian q i l ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 A#4 A#4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.3440001 0.3440001 0.3449998 0.3449998 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.215002 0.059998 0.085109 0.087891 0.115726 0.056274 0.113002 0.059998 0.096942 0.075058 0.20941 0.13559 0.098483 0.073517 0.337397 0.180603 0.254004 0.089996 0.345 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.215 0.06 0.0851 0.0879 0.1157 0.0563 0.113 0.06 0.0969 0.0751 0.2094 0.1356 0.0985 0.0735 0.3374 0.1806 0.254 0.09 0.345 0.068", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 A#4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.172 0.518 0.344 0.345 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 333.8 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.6 311.7 311.7 311.7 311.7 311.7 311.7 311.7 311.7 311.7 311.8 311.5 310.8 309.4 307.8 306.1 304.1 302.1 299.7 297.9 296.4 295.3 294.4 294.0 317.4 311.7 314.2 319.7 327.3 336.2 342.8 346.5 347.9 348.2 349.0 350.0 351.0 351.9 352.5 351.1 351.6 352.1 352.4 352.7 353.1 353.5 353.7 353.8 354.0 353.8 352.6 350.8 348.4 345.7 343.1 341.6 340.8 341.4 343.6 347.4 352.7 360.0 367.0 374.0 380.6 387.1 391.8 394.3 395.4 395.2 395.0 394.4 394.0 393.5 392.9 392.2 391.7 391.3 391.1 390.9 390.5 390.2 390.0 389.7 389.2 388.8 388.3 387.7 387.4 387.4 387.9 389.6 391.3 392.0 393.4 395.2 397.3 399.7 402.6 405.1 407.1 409.9 412.9 414.3 414.9 415.4 416.1 416.5 416.7 417.0 417.0 416.4 415.1 413.2 411.1 408.5 404.8 400.7 396.2 390.9 385.3 384.2 386.4 388.7 391.0 393.3 395.7 398.0 400.4 402.7 405.1 407.5 409.9 412.4 414.8 408.8 400.8 398.9 399.1 398.9 399.1 399.5 399.8 400.0 399.6 399.1 398.2 397.2 396.2 395.0 393.7 392.7 391.6 390.6 389.6 389.1 389.1 388.9 389.3 389.7 390.2 391.3 392.3 393.4 394.4 395.3 396.1 396.9 397.5 397.7 397.5 396.8 395.0 392.9 389.5 384.5 378.4 372.4 366.7 358.4 356.1 355.9 355.6 355.3 355.0 354.7 354.4 354.2 353.9 353.6 353.3 353.0 352.8 352.5 352.2 351.9 351.6 351.4 351.1 350.8 349.6 349.6 349.6 349.8 350.2 350.4 350.6 351.0 351.3 351.7 351.9 352.2 352.4 352.6 352.7 352.8 352.5 351.4 349.5 346.6 342.4 338.2 333.6 328.9 
323.9 318.9 314.7 311.1 308.3 305.7 304.4 303.8 304.3 304.8 305.2 305.7 306.2 306.6 307.1 307.6 308.6 308.9 309.0 310.1 310.9 311.7 312.5 313.1 313.5 313.7 313.3 312.9 312.6 312.1 311.1 310.4 309.4 308.4 307.9 307.4 307.0 306.8 306.8 306.9 307.1 307.4 307.4 307.7 308.0 308.3 308.6 308.9 309.3 309.5 309.6 310.0 310.2 310.5 310.8 310.9 310.9 310.7 310.5 310.2 310.0 309.9 309.6 309.2 309.0 308.7 308.8 308.9 308.9 308.7 308.5 308.3 308.5 308.8 309.8 310.5 310.9 311.4 311.7 311.8 312.1 312.8 312.9 312.7 312.3 311.4 310.1 308.9 307.0 304.8 303.3 300.9 299.1 304.4 309.8 315.3 320.8 326.5 332.2 338.1 344.1 350.1 356.3 362.6 369.0 375.5 382.1 388.9 395.7 402.7 409.8 417.0 424.4 431.9 439.5 447.2 455.1 463.2 469.9 472.1 472.7 473.2 473.1 473.0 473.2 473.0 472.4 471.7 470.9 469.7 468.3 467.2 466.0 465.0 464.3 463.8 463.5 463.9 464.4 465.1 465.9 466.4 467.0 467.4 468.1 468.5 469.0 469.3 469.5 469.8 469.9 470.1 470.2 470.2 470.2 470.2 470.2 469.9 469.5 469.1 468.5 468.0 467.1 466.1 464.3 462.4 459.9 456.4 453.0 448.9 443.7 439.7 435.7 431.9 427.5 421.8 413.5 407.2 401.2 397.2 394.5 392.1 390.4 389.7 389.9 392.2 396.5 403.2 406.5 408.9 408.6 408.4 408.4 408.4 408.4 408.4 408.3 407.7 406.8 405.5 404.0 402.2 400.3 398.6 396.3 395.0 393.6 392.2 391.3 390.5 389.6 389.0 388.8 389.1 389.6 390.5 391.5 392.5 393.8 395.9 397.5 399.1 400.7 402.1 403.1 403.6 403.9 403.9 403.6 402.9 402.1 401.2 400.2 398.8 397.4 395.9 394.7 393.0 392.2 391.7 391.3 391.5 391.8 392.4 393.1 393.4 393.3 392.8 391.9 391.0 390.2 389.3 389.0 388.6 384.2 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3 383.3", - "input_type": "phoneme", - "offset": 150.759 + "f0_timestep": "0.005" }, { + "offset": 153.517, "text": "AP 年 轻 的 牵 绊 SP", "ph_seq": "AP n ian q ing d e q ian b an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.079876 0.093124 0.127002 0.044998 0.082965 0.089035 0.110622 0.062378 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.0799 0.0931 0.127 0.045 0.083 0.089 0.1106 0.0624 0.345 0.069", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 352.7 313.8 313.8 313.8 313.9 314.0 314.1 314.2 314.2 314.4 314.4 314.4 314.5 314.7 314.8 315.0 315.3 315.5 315.6 315.9 316.0 316.1 316.2 316.3 316.2 315.7 314.8 313.4 311.8 310.1 308.4 306.5 304.8 303.6 302.6 301.9 302.0 302.4 304.0 306.1 308.9 312.1 316.1 320.7 325.0 329.2 333.6 338.2 341.7 344.6 346.9 348.6 349.3 349.4 349.6 349.7 349.9 350.1 350.5 350.8 351.2 351.5 351.8 352.0 352.1 352.1 352.1 351.5 350.6 349.6 348.4 347.1 345.5 344.4 343.6 343.1 339.3 337.7 336.8 337.4 338.9 342.7 354.1 366.1 378.6 384.4 385.0 386.3 388.3 390.1 391.1 391.6 391.5 390.1 388.0 385.0 381.7 376.9 372.4 364.7 367.7 371.0 373.3 375.5 377.8 380.1 382.4 384.7 387.0 389.4 391.7 394.1 396.5 398.9 401.3 403.7 406.1 408.6 411.0 412.6 412.8 413.1 413.6 414.1 414.7 415.4 416.1 416.3 416.5 416.1 415.5 414.5 413.4 411.7 409.8 407.7 404.6 397.8 390.1 385.9 382.0 375.1 370.4 367.0 364.2 358.2 355.0 358.8 360.1 362.5 366.6 
370.9 375.7 380.6 385.8 390.1 393.1 395.2 396.1 395.9 395.5 395.0 394.3 393.5 392.5 391.5 390.4 389.1 387.9 387.0 385.9 384.8 383.9 383.2 382.6 382.2 382.2 382.2 382.3 383.0 383.7 384.8 386.1 387.7 389.3 390.8 392.3 393.8 395.4 396.6 397.5 398.3 398.7 399.1 398.9 398.3 397.5 396.7 395.6 394.4 393.2 392.2 391.3 390.5 389.9 389.7 389.7 389.7 389.8 390.0 390.0 390.2 390.6 390.8 391.0 391.2 391.5 391.8 392.0 392.2 392.4 392.6 392.7 392.7 392.8 373.0 373.0 373.0 373.0 373.0 373.0 373.0 373.0 373.0 373.0", - "input_type": "phoneme", - "offset": 153.517 + "f0_timestep": "0.005" }, { + "offset": 154.897, "text": "AP 老 去 的 无 奈 SP", "ph_seq": "AP l ao q v d e w u n ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.21015 0.06485 0.075985 0.097015 0.11336 0.05864 0.128002 0.044998 0.103091 0.068909 0.345 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.2102 0.0649 0.076 0.097 0.1134 0.0586 0.128 0.045 0.1031 0.0689 0.345 0.068", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.068", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 359.3 313.0 312.9 312.7 312.6 312.3 312.0 311.8 311.6 311.5 311.1 310.9 310.7 310.6 310.6 310.6 310.6 310.7 310.9 311.1 311.4 311.7 312.0 312.4 312.7 313.1 313.5 313.8 314.1 314.4 314.7 315.0 315.4 315.6 315.7 315.8 315.9 315.8 314.7 312.0 308.4 303.6 298.6 294.4 291.0 288.5 287.5 288.6 291.7 296.6 303.5 312.1 320.4 328.5 335.8 342.7 346.3 343.2 344.6 346.0 347.1 348.0 349.0 349.8 350.5 350.7 351.0 351.0 350.8 350.8 350.5 349.9 349.0 348.3 347.5 346.3 344.5 342.4 340.1 338.0 334.8 328.7 317.7 316.0 316.2 317.0 320.7 330.7 347.2 365.2 384.2 390.9 390.3 390.5 390.8 391.2 391.6 392.0 392.5 392.9 392.9 392.7 392.3 391.5 390.9 390.1 389.5 389.1 388.6 388.1 387.6 387.5 387.9 388.7 390.3 391.8 393.7 396.3 398.4 401.0 404.4 407.3 409.4 411.2 413.0 414.3 415.5 416.7 417.5 418.0 418.4 418.4 418.4 418.0 417.8 417.2 416.5 416.0 415.4 414.9 414.5 413.8 413.0 412.1 411.4 410.8 410.1 409.4 408.2 407.0 403.2 402.1 401.2 399.7 398.5 397.7 396.9 395.7 395.8 398.8 398.6 398.4 398.2 397.9 397.6 397.2 397.0 397.0 396.7 396.2 395.5 394.5 393.4 391.5 389.9 388.2 386.4 384.5 382.7 381.3 380.2 379.3 378.6 378.3 378.2 378.4 379.0 380.0 381.1 382.3 383.6 385.4 387.0 388.6 390.2 391.5 393.1 394.3 395.1 395.7 396.2 396.2 396.0 395.4 394.6 393.4 392.2 391.0 389.7 388.3 387.1 386.2 385.6 385.2 385.0 385.0 385.2 385.7 386.3 387.0 387.8 388.5 389.3 390.4 391.2 391.9 392.7 393.3 393.7 394.1 394.3 363.0 363.0 363.0 363.0 363.0 363.0 363.0 363.0 363.0 363.0 363.0", - "input_type": "phoneme", - "offset": 154.897 + "f0_timestep": "0.005" }, { + "offset": 156.276, "text": "AP 生 命 重 复 着 因 果 循 环 SP 还 有 谁 的 身 影 能 永 远 如 此 的 伟 岸 啊 SP", "ph_seq": "AP sh eng m ing ch ong f u zh e y in g uo x vn h uan SP h ai y ou sh ei d e sh en y ing n eng y ong y van r u c i0 d e w ei an an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D4 D4 A#3 A#3 rest D#4 D#4 D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 A#4 A#4 G4 G4 F4 F4 D#4 D#4 F4 F4 F4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 
0.345 0.345 0.345 0.345 0.1719999 0.1719999 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.3449998 0.3449998 0.3450003 0.3450003 0.3439999 0.3439999 0.3450003 0.3450003 0.3449998 0.3449998 0.3450003 0.3450003 0.3449998 0.3449998 0.1719999 0.1719999 1.552 0.3449998 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.141005 0.134995 0.112002 0.059998 0.113002 0.059998 0.111987 0.060013 0.091998 0.080002 0.300002 0.044998 0.107982 0.065018 0.351991 0.165009 0.213668 0.131332 0.345 0.081988 0.090012 0.126254 0.045746 0.102916 0.070084 0.127002 0.044998 0.087948 0.085052 0.127002 0.044998 0.239989 0.105011 0.300002 0.044998 0.298986 0.045013 0.299987 0.045013 0.197112 0.147888 0.300002 0.044998 0.300002 0.044998 0.127002 1.552 0.389998 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.141 0.135 0.112 0.06 0.113 0.06 0.112 0.06 0.092 0.08 0.3 0.045 0.108 0.065 0.352 0.165 0.2137 0.1313 0.345 0.082 0.09 0.1263 0.0457 0.1029 0.0701 0.127 0.045 0.0879 0.0851 0.127 0.045 0.24 0.105 0.3 0.045 0.299 0.045 0.3 0.045 0.1971 0.1479 0.3 0.045 0.3 0.045 0.127 1.552 0.39 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 D4 A#3 rest D#4 D#4 F4 G4 G#4 G4 F4 D#4 A#4 G4 F4 D#4 F4 F4 G4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.173 0.517 0.345 0.345 0.172 0.172 0.173 0.172 0.173 0.172 0.345 0.345 0.344 0.345 0.345 0.345 0.345 0.172 1.552 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 343.8 311.2 311.1 311.1 311.1 310.9 310.8 310.6 310.5 310.3 310.2 310.0 309.8 309.7 309.5 309.5 309.4 309.3 309.3 309.3 309.3 309.5 310.2 310.9 311.8 313.0 314.6 316.3 318.0 319.9 321.8 324.2 326.4 328.6 330.8 333.2 335.5 337.5 339.5 341.4 343.4 344.8 345.9 347.0 347.9 348.4 348.7 348.8 348.8 348.9 349.1 349.4 349.6 349.8 350.0 350.1 350.2 350.4 347.9 343.3 338.2 335.6 336.4 338.8 343.1 348.6 354.8 361.7 368.7 377.3 383.6 388.8 392.8 395.3 396.1 396.1 396.1 396.1 396.1 396.2 396.3 396.3 396.3 396.3 396.3 396.5 396.1 395.0 393.6 391.8 389.8 387.7 385.7 384.4 383.7 383.6 384.2 385.3 387.5 390.1 393.1 396.3 400.1 404.2 407.7 411.0 414.0 416.9 418.9 420.1 420.8 420.7 420.5 420.1 419.7 419.3 418.7 418.2 417.6 417.1 416.9 416.7 416.7 416.0 414.6 412.6 410.2 407.2 403.9 401.1 398.6 396.7 395.2 394.8 394.6 394.4 394.2 394.0 393.7 393.4 392.9 392.7 392.5 392.3 392.2 392.2 392.2 392.2 392.2 392.2 392.2 392.2 392.3 392.4 392.4 392.4 392.4 392.6 392.7 392.7 392.7 392.7 392.7 392.7 392.7 390.9 391.1 391.4 391.7 392.1 392.3 392.5 392.9 393.1 393.1 393.1 393.1 393.1 392.9 392.9 392.8 392.7 392.7 392.7 392.7 392.6 392.4 392.4 392.2 392.0 391.5 391.2 390.2 389.4 388.7 387.6 385.9 384.3 382.6 380.4 378.1 375.9 373.8 371.1 367.4 364.9 362.3 358.6 356.4 354.7 353.0 351.3 350.2 349.5 349.3 349.5 349.7 349.9 350.2 350.5 350.6 350.8 350.2 349.4 348.3 346.5 343.7 340.9 338.2 335.1 332.3 330.1 327.4 322.4 318.3 311.1 295.4 294.6 298.7 302.8 307.0 311.2 315.5 319.8 324.2 328.7 333.2 332.4 326.0 323.5 321.7 318.6 318.1 317.5 317.2 316.6 316.1 315.4 314.9 314.4 313.7 312.8 312.0 311.2 310.6 310.3 309.9 
309.5 309.3 309.0 308.8 309.0 309.0 309.0 309.0 308.9 309.0 309.5 310.1 310.4 310.8 311.0 311.2 311.5 311.8 312.2 312.2 312.2 312.1 311.9 311.8 311.7 311.6 311.3 311.1 310.8 310.5 309.9 309.6 309.3 309.2 309.2 309.6 309.9 310.3 311.0 311.7 312.7 313.7 314.3 314.7 315.0 315.1 315.2 315.7 315.6 315.5 314.2 311.1 307.6 304.9 302.3 299.0 297.9 298.1 298.6 299.1 299.5 300.0 300.5 301.0 301.5 302.0 302.5 303.0 303.6 304.1 304.6 305.1 305.6 306.1 306.6 307.1 307.6 308.1 308.6 309.1 309.7 310.2 310.7 311.2 308.3 306.1 304.8 303.8 302.8 301.8 300.7 299.6 298.5 297.4 296.5 295.5 294.5 293.8 293.7 293.7 293.7 293.9 294.1 294.4 294.7 295.0 295.3 295.4 295.5 295.7 295.5 295.4 295.2 295.0 295.1 295.2 295.5 295.8 295.9 295.9 295.8 295.5 295.0 294.3 293.6 292.4 290.8 289.1 287.6 284.0 281.1 275.7 273.2 269.4 265.6 264.4 264.0 263.6 263.2 262.9 262.5 262.1 261.7 261.4 261.0 260.6 260.2 259.9 259.5 259.1 258.8 258.4 258.0 257.6 254.5 249.3 247.8 246.8 245.8 245.6 244.9 243.8 242.7 241.4 240.3 238.6 237.8 236.9 235.8 234.8 233.9 233.1 232.6 232.0 231.4 231.1 230.8 230.9 231.2 231.5 231.8 232.3 232.8 233.4 234.0 234.6 235.2 235.8 236.3 236.7 237.1 237.5 237.6 237.7 237.4 237.1 236.3 235.7 234.7 233.6 232.9 232.5 232.2 232.2 232.7 233.3 233.9 234.5 235.0 235.1 235.1 234.6 234.5 234.1 233.5 232.8 232.1 231.5 230.7 230.0 229.6 229.0 228.4 228.0 227.1 229.2 231.9 234.7 237.4 240.2 243.1 245.9 248.9 251.8 254.8 257.8 260.8 263.9 267.0 270.2 273.4 276.6 279.9 283.2 286.5 289.9 293.3 296.8 300.3 303.8 307.4 311.1 292.8 293.0 293.6 294.6 296.0 297.8 299.7 301.6 303.5 305.5 307.3 308.5 309.4 310.0 310.2 310.1 310.1 310.1 310.1 310.1 310.1 310.0 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 310.1 310.1 310.1 310.1 310.1 310.2 310.2 310.3 310.4 310.6 310.7 310.9 311.1 311.4 311.5 311.7 311.8 312.0 312.0 312.2 313.0 312.4 311.2 309.5 307.7 305.8 303.9 301.8 300.1 298.9 298.1 297.6 325.9 328.6 331.3 334.0 336.8 339.5 342.3 345.1 348.0 350.8 348.5 348.0 347.9 347.8 348.1 348.5 348.7 348.9 349.0 352.2 352.1 352.0 351.7 351.5 351.1 350.6 350.2 349.8 349.3 348.7 348.2 347.9 347.4 346.9 346.4 346.1 345.8 345.6 345.4 345.4 346.4 351.5 364.5 381.2 391.5 395.3 395.2 395.2 395.0 394.8 394.4 394.2 393.9 393.5 393.1 392.9 392.7 392.5 392.3 392.2 392.2 394.7 394.7 394.6 394.0 392.4 390.8 389.1 387.8 387.3 424.5 427.3 430.1 433.0 435.8 438.7 419.6 419.6 419.6 419.6 419.7 419.9 419.9 419.9 419.9 419.9 420.0 420.0 419.8 419.6 419.4 419.2 418.8 418.3 418.0 417.8 417.6 417.5 417.4 417.0 416.0 414.5 412.4 410.1 407.6 405.1 402.6 399.8 397.7 395.9 394.4 393.3 392.9 392.8 392.7 392.6 392.4 392.1 391.8 391.6 391.2 390.8 390.4 390.1 389.9 389.5 389.3 389.1 389.1 389.1 389.9 389.7 389.5 389.5 389.5 389.7 390.2 390.6 390.0 390.1 390.4 390.9 391.4 392.0 392.4 393.0 393.3 393.5 393.7 393.1 391.4 388.7 385.3 381.1 375.7 370.9 366.1 361.5 356.9 353.3 350.6 348.9 348.0 348.0 348.2 348.6 349.0 349.6 350.2 350.7 351.3 351.9 352.2 352.4 352.4 352.3 352.3 352.3 352.3 352.3 352.3 352.1 352.1 352.1 352.1 352.1 351.9 351.9 351.9 351.9 351.9 351.9 351.9 354.7 353.9 353.2 352.6 352.1 351.3 350.6 350.0 349.4 348.8 348.4 347.9 347.5 347.2 347.1 347.2 347.3 347.7 347.9 348.0 348.5 348.6 349.2 349.7 350.2 350.4 350.7 351.0 350.9 350.8 350.7 350.3 349.6 349.1 348.2 346.9 345.5 344.2 342.9 340.7 338.7 337.0 334.8 332.3 329.8 327.2 324.5 321.6 319.4 317.7 316.3 315.5 314.7 314.6 314.2 314.0 313.9 313.3 312.6 312.2 311.9 311.8 311.9 312.0 311.8 311.8 311.6 311.3 311.1 310.9 310.8 310.6 310.2 309.8 309.2 308.6 308.2 308.1 308.1 308.2 308.5 308.7 309.0 
309.4 309.5 309.7 309.7 309.7 309.8 310.1 310.4 310.5 310.6 310.6 310.4 310.1 309.8 309.6 309.5 309.5 309.5 309.7 309.9 309.7 310.0 310.2 310.5 311.1 311.8 314.6 315.6 318.2 321.5 326.2 331.7 338.8 346.5 354.3 362.6 370.9 381.1 389.0 396.6 403.4 409.7 414.7 417.8 419.6 420.0 419.8 419.6 419.4 419.2 419.0 418.9 419.2 420.6 422.9 425.8 429.3 433.4 438.7 443.4 447.9 452.4 456.7 460.4 463.0 464.8 465.5 465.6 465.8 466.0 466.3 466.9 467.6 468.1 468.5 468.8 469.0 469.3 470.8 470.8 470.8 471.0 470.5 470.2 469.5 468.6 467.5 466.5 465.4 463.5 461.5 459.5 456.7 453.6 450.5 447.6 444.9 440.7 436.5 432.1 427.2 423.7 420.3 416.2 411.9 408.6 406.1 403.4 401.6 401.2 401.2 401.7 402.3 402.1 401.9 400.7 399.7 399.0 398.1 397.6 397.0 396.3 395.4 394.5 393.8 393.1 392.1 391.3 390.3 389.5 388.4 387.5 386.9 386.4 386.4 386.5 386.9 387.6 388.4 389.4 390.6 391.7 392.4 393.0 393.4 393.9 394.5 394.7 394.3 393.3 391.6 389.1 386.1 382.6 379.4 375.4 372.2 375.7 383.9 382.9 381.8 380.8 379.7 378.6 377.6 376.6 375.5 374.5 373.4 372.4 371.4 370.3 369.3 368.3 367.3 366.2 365.2 364.2 363.2 362.2 361.2 360.2 359.2 358.2 357.6 356.8 355.7 354.9 354.1 353.5 353.1 352.6 352.1 351.1 350.4 350.1 349.0 348.5 347.9 347.6 347.3 347.1 347.0 347.1 347.3 347.5 347.7 347.8 347.9 348.0 348.0 347.8 347.8 348.0 348.2 348.4 348.7 348.9 349.2 349.7 350.1 350.3 350.4 350.4 349.9 349.0 347.3 345.4 343.3 341.3 338.7 336.3 332.0 326.6 321.5 317.2 314.3 312.4 310.4 308.4 307.4 313.0 310.4 314.5 316.0 316.9 317.7 318.6 319.5 320.4 321.2 322.1 322.0 321.4 321.0 319.9 319.6 319.0 317.8 316.4 314.6 313.7 312.6 311.4 310.8 310.3 310.0 310.0 310.1 310.3 310.7 311.1 311.5 311.8 312.3 312.6 312.7 312.7 312.6 312.4 312.2 311.9 311.8 311.9 312.1 312.3 312.6 312.6 312.6 312.6 312.6 312.4 312.2 312.1 311.7 311.5 311.4 311.1 310.9 310.9 310.8 310.8 311.0 311.5 312.2 312.9 314.2 316.0 318.2 320.1 322.2 325.1 327.0 328.7 331.4 334.7 337.8 340.5 342.4 345.2 347.4 349.2 350.6 351.3 351.6 351.9 352.1 351.8 351.4 350.9 350.4 350.0 349.7 349.3 348.8 348.4 348.0 347.6 347.2 346.6 346.3 345.9 345.5 345.2 344.9 344.8 345.1 345.7 346.6 348.2 349.5 350.9 352.1 353.0 353.7 354.0 354.5 354.7 354.9 354.8 354.7 354.5 354.1 353.7 353.3 352.9 352.5 351.9 351.5 350.8 350.3 349.6 349.0 348.3 347.4 346.7 346.2 345.8 345.4 345.2 345.2 345.2 345.3 345.7 346.0 346.7 347.4 348.4 349.3 350.4 351.3 351.9 352.5 352.9 353.2 353.3 353.2 353.1 353.0 352.6 352.1 351.5 350.8 350.0 349.0 348.4 347.6 346.5 345.7 344.9 344.1 343.3 342.6 342.2 342.2 342.1 342.3 342.6 343.0 344.0 345.2 346.2 347.5 348.8 349.8 350.7 351.8 352.7 353.2 353.6 353.8 353.7 353.5 353.1 352.7 352.1 351.2 349.8 349.0 348.2 347.1 346.1 345.3 344.4 343.4 342.6 341.9 341.4 341.4 341.5 341.6 342.1 342.8 343.5 344.2 345.2 346.1 347.0 347.8 348.5 349.1 349.6 350.2 350.8 351.1 351.3 351.3 351.3 351.1 350.9 350.7 350.3 350.0 349.6 349.2 348.7 348.3 348.0 347.7 347.4 347.0 346.6 346.4 346.2 346.2 346.0 346.0 346.0 346.0 346.1 346.4 346.9 347.5 348.0 348.3 348.5 348.8 349.1 349.2 349.4 349.4 349.4 349.4 349.4 349.6 349.6 349.6 349.6 349.5 349.4 349.3 349.4 349.5 349.6 349.6 349.4 349.3 349.1 348.7 348.5 348.3 348.1 347.8 347.5 347.4 347.2 347.0 347.1 347.3 347.4 347.8 347.8 348.0 348.0 348.2 348.4 348.4 348.5 348.6 348.6 348.7 348.9 349.0 349.1 349.4 349.6 349.6 349.8 350.0 350.2 350.4 350.3 350.2 350.0 349.6 349.1 348.5 348.0 347.5 347.0 346.6 346.6 346.4 346.4 346.4 346.6 346.7 347.0 347.1 347.4 348.0 348.7 349.2 349.7 350.1 350.2 350.3 350.4 350.4 350.2 350.1 349.8 349.7 349.4 349.0 348.5 347.9 347.5 347.1 347.0 347.1 347.3 
347.4 347.5 347.6 347.6 347.6 347.8 347.8 348.0 348.1 348.3 348.4 348.4 348.4 348.5 348.7 348.8 349.1 349.2 349.6 350.2 350.6 350.8 351.0 351.2 351.3 351.3 351.3 351.1 351.0 350.8 350.6 350.6 350.6 350.6 350.8 350.8 351.0 351.2 351.6 351.8 352.1 352.4 352.7 353.1 353.6 353.9 354.2 354.3 354.3 354.5 354.7 354.9 354.7 354.5 354.3 354.0 353.6 353.4 353.0 352.5 352.0 351.5 351.2 351.0 351.1 351.3 351.9 353.3 355.1 357.7 360.6 365.4 369.9 374.1 379.5 385.5 390.9 396.1 400.6 405.1 409.2 412.0 413.9 414.3 413.9 413.2 411.7 409.7 406.9 403.3 399.9 397.0 394.3 391.5 388.6 387.0 385.7 384.7 384.4 384.6 385.3 386.6 389.0 391.6 395.2 398.2 401.0 403.7 406.3 409.0 410.6 411.8 412.2 412.2 412.2 411.2 409.8 408.6 407.0 404.4 402.2 400.0 397.6 395.7 394.4 393.1 391.8 390.9 390.3 390.0 389.5 389.3 389.1 388.9 388.8 388.7 388.1 386.8 383.4 376.8 366.9 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6", - "input_type": "phoneme", - "offset": 156.276 + "f0_timestep": "0.005" }, { + "offset": 164.552, "text": "AP 无 论 我 选 择 何 种 未 来 SP", "ph_seq": "AP w u l un w o x van z e h e zh ong w ei l ai SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.1719999 0.1719999 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127002 0.044998 0.127002 0.044998 0.086071 0.086929 0.101001 0.070999 0.22499 0.12001 0.091998 0.080002 0.472987 0.045013 0.112002 0.059998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.127 0.045 0.127 0.045 0.0861 0.0869 0.101 0.071 0.225 0.12 0.092 0.08 0.473 0.045 0.112 0.06 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.172 0.518 0.172 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 295.2 259.6 259.5 259.5 259.5 259.5 259.5 259.4 259.4 259.4 259.3 259.2 259.2 259.2 259.2 259.2 259.3 259.6 260.3 261.4 262.4 263.4 264.4 265.4 266.0 266.3 265.9 265.0 263.7 262.1 259.7 258.6 257.9 258.3 260.0 261.6 264.2 267.4 271.0 274.5 278.6 283.2 286.8 290.0 293.5 295.8 298.4 302.4 305.3 307.2 308.4 309.0 309.4 309.4 309.0 308.6 308.4 308.2 308.1 308.2 308.6 308.9 309.2 310.0 310.8 311.4 312.6 313.5 314.8 316.6 319.1 322.0 324.8 328.7 333.3 336.6 339.2 341.8 344.5 346.0 347.6 349.0 350.0 350.7 351.4 352.1 352.3 352.1 351.4 349.9 348.2 346.1 343.4 340.2 336.8 333.3 328.5 321.9 317.5 317.0 317.5 318.1 318.6 319.2 319.7 320.2 320.8 321.3 321.9 322.4 323.0 323.5 324.1 324.6 325.2 325.7 326.3 321.3 317.3 315.8 314.5 313.4 312.7 311.9 311.1 310.1 309.4 308.8 308.3 307.6 307.2 306.9 306.4 305.6 304.4 303.4 302.5 302.9 304.9 311.5 322.0 327.6 338.6 349.9 361.6 373.6 386.1 399.0 412.3 426.1 440.3 455.1 462.0 463.7 465.1 466.0 468.1 469.9 471.3 473.4 474.3 474.5 473.9 472.9 471.6 469.7 467.6 465.6 464.3 463.5 462.9 463.0 463.3 463.6 464.2 465.0 465.9 466.6 467.8 468.2 468.7 469.1 469.3 469.5 469.8 470.2 470.5 470.4 470.2 469.7 469.0 468.1 466.3 464.6 462.1 459.7 455.9 452.7 449.1 444.7 439.1 433.9 428.7 420.4 405.4 393.0 380.0 374.8 369.6 364.6 359.6 354.7 349.8 345.0 
340.3 335.7 331.1 326.5 322.1 317.7 313.5 310.7 310.5 310.7 310.6 310.3 309.5 308.6 307.7 306.0 304.5 303.2 301.8 300.5 300.4 301.4 302.4 303.4 304.5 305.5 306.5 307.5 308.5 309.6 310.6 311.7 312.7 313.7 314.8 315.8 316.9 318.0 319.0 320.1 321.2 309.5 306.0 304.5 303.5 303.0 302.5 302.0 301.0 299.8 298.5 297.2 295.7 294.5 293.8 292.9 292.6 292.3 292.1 292.0 291.8 291.5 291.4 291.2 291.0 290.9 290.7 290.7 290.8 290.9 291.2 291.5 291.7 291.8 291.9 292.3 292.7 293.1 293.4 293.5 293.6 293.7 294.0 294.3 294.4 294.5 294.3 294.1 294.0 294.2 293.8 294.0 294.0 294.0 294.0 293.8 293.6 293.2 292.9 292.8 292.7 292.9 293.1 293.4 293.7 294.0 294.2 294.2 294.3 294.5 294.6 294.9 295.3 295.5 295.5 295.5 295.5 295.5 295.5 295.2 295.1 294.9 294.6 294.2 293.9 293.7 293.5 293.5 293.4 293.6 294.3 295.7 297.1 298.8 301.4 303.7 306.1 307.9 309.7 310.9 311.2 311.3 311.6 311.7 311.7 311.8 311.9 312.2 312.6 312.8 312.9 312.8 312.4 312.2 312.0 312.0 311.7 311.2 310.6 309.9 309.0 308.1 307.4 306.6 306.0 305.7 305.8 307.4 310.9 312.6 315.5 319.1 321.9 325.3 330.0 335.4 340.6 345.5 351.0 355.2 357.5 359.1 360.0 360.3 360.2 359.9 358.9 358.4 357.6 356.2 355.1 354.0 352.6 350.6 348.5 346.7 345.0 343.5 342.2 340.9 340.1 338.6 338.3 338.5 338.9 339.7 340.9 342.6 344.6 346.5 348.1 349.7 351.3 352.5 353.7 354.7 356.6 357.8 358.6 359.0 359.0 358.7 358.2 357.1 355.7 354.2 352.4 350.5 349.0 347.5 345.9 344.3 342.9 342.2 341.4 341.3 341.3 341.4 341.9 342.4 343.3 344.8 346.1 347.4 348.9 350.3 351.5 352.5 353.7 354.9 355.5 356.1 356.4 356.2 355.7 354.9 353.9 352.6 351.5 350.2 349.1 348.4 347.9 347.2 347.0 347.0 347.2 347.6 348.0 348.4 348.8 349.3 349.3 349.0 348.4 347.4 346.3 344.7 342.1 338.3 333.1 325.5 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4 312.4", - "input_type": "phoneme", - "offset": 164.552 + "f0_timestep": "0.005" }, { + "offset": 167.31, "text": "AP 无 论 你 心 中 如 何 不 安 SP", "ph_seq": "AP w u l un n i x in zh ong r u h e b u an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.1720001 0.1720001 0.517 0.517 0.1730001 0.1730001 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.113002 0.059998 0.127002 0.044998 0.083011 0.088989 0.093029 0.079971 0.300002 0.044998 0.092609 0.079391 0.451936 0.065063 0.128002 0.561998 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.113 0.06 0.127 0.045 0.083 0.089 0.093 0.08 0.3 0.045 0.0926 0.0794 0.4519 0.0651 0.128 0.562 0.069", + "ph_num": "2 2 2 2 2 2 2 2 1 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.172 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 256.4 246.5 239.5 237.5 235.9 234.4 233.1 232.3 231.6 230.7 229.9 229.9 229.9 230.4 231.1 231.9 232.5 233.1 233.9 234.6 234.9 235.0 235.0 234.8 234.3 233.6 233.0 232.5 232.0 232.0 232.1 232.6 234.0 235.3 237.1 240.5 244.7 248.8 253.2 258.6 267.2 278.1 290.0 296.3 298.9 300.5 301.3 302.6 304.1 305.1 306.2 307.0 307.5 307.7 307.7 307.8 308.1 308.6 309.5 310.3 310.9 312.3 314.0 314.5 314.9 315.1 315.1 315.5 315.9 316.8 318.2 319.9 
322.5 325.8 328.2 330.8 333.8 338.3 342.2 344.6 346.8 347.8 348.7 349.4 350.0 350.2 349.9 348.7 346.9 345.1 343.2 341.3 337.0 333.5 329.9 328.1 327.6 326.9 326.1 325.3 324.5 323.7 323.0 322.2 321.4 320.6 319.8 319.1 318.3 317.5 316.8 316.0 311.8 311.8 312.5 312.9 313.1 313.0 312.7 312.1 311.5 310.5 309.3 307.2 304.7 301.6 297.2 297.0 295.2 293.4 296.4 288.8 277.0 273.7 275.8 282.9 290.1 297.5 305.1 312.9 320.9 329.1 337.5 346.1 355.0 364.0 373.3 371.8 366.1 363.5 361.1 359.2 357.9 356.7 355.5 354.3 353.5 352.5 351.5 350.8 350.0 349.2 348.4 348.1 348.0 348.0 348.1 348.4 348.7 349.2 349.6 350.1 350.2 350.7 350.8 351.0 351.0 351.0 351.0 351.0 351.0 351.0 351.1 351.2 351.0 350.8 350.8 350.4 349.7 348.8 347.5 345.4 344.1 342.1 339.9 337.4 335.2 333.7 331.7 329.6 328.1 326.7 325.5 323.4 321.7 320.1 318.2 316.6 315.5 314.8 314.3 314.0 313.9 313.8 313.6 313.2 312.6 311.7 311.0 310.4 310.2 310.3 310.4 310.3 309.8 308.6 306.8 304.4 301.0 297.3 292.9 287.9 283.2 279.2 278.2 280.1 281.9 283.8 285.7 287.6 289.5 291.5 293.4 295.4 297.4 299.4 301.4 303.4 305.4 307.5 309.5 307.2 306.3 304.4 303.0 301.7 300.6 299.8 299.0 298.2 297.6 296.8 295.9 295.0 294.2 293.4 292.7 292.2 291.8 291.7 291.8 291.9 292.1 292.1 292.1 292.2 292.4 292.6 292.7 292.9 293.2 293.4 293.7 293.8 294.1 294.5 294.7 295.0 295.2 295.2 295.2 295.2 295.2 295.2 295.1 295.0 295.0 294.8 294.7 294.5 294.5 294.5 294.5 294.7 294.5 294.4 294.1 293.8 293.4 293.0 292.6 292.4 292.3 292.3 292.4 292.9 293.2 294.1 294.5 294.9 295.0 295.0 294.9 294.8 294.7 294.5 294.0 293.3 292.1 290.0 288.0 285.6 284.3 282.7 281.3 283.5 286.5 289.6 292.7 295.8 299.0 302.2 305.4 308.7 312.0 315.3 318.7 313.6 313.5 313.4 313.3 313.1 312.8 312.6 312.4 312.1 311.8 311.6 311.3 311.2 311.0 310.9 310.9 309.7 309.5 309.5 309.5 309.7 309.7 309.8 310.2 310.4 310.7 311.2 311.2 311.1 310.7 309.4 308.2 307.2 306.5 306.5 307.0 307.6 308.4 309.5 310.2 311.2 312.1 312.7 313.5 314.4 315.1 315.6 316.1 316.1 315.8 315.7 315.3 314.7 314.3 313.7 312.9 312.0 311.2 310.2 308.8 307.6 306.5 305.5 304.4 303.8 303.3 302.7 302.8 302.9 303.4 303.8 304.3 304.8 305.5 306.5 307.6 308.4 309.4 310.4 311.3 312.0 312.6 312.9 312.7 312.6 312.2 311.6 311.1 310.3 309.3 308.6 307.9 307.0 306.3 305.6 304.8 304.3 304.0 303.8 304.0 304.2 304.5 305.0 306.0 307.1 308.2 309.1 309.9 310.8 311.5 312.2 312.8 313.0 313.1 313.0 312.6 312.0 311.0 309.9 308.8 308.0 307.4 306.8 306.4 306.4 306.6 307.4 308.3 309.5 310.9 312.2 312.9 313.1 312.9 312.6 312.2 311.9 311.8 311.9 312.2 312.6 312.8 313.1 311.9 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4", - "input_type": "phoneme", - "offset": 167.31 + "f0_timestep": "0.005" }, { + "offset": 170.069, "text": "AP 最 后 还 是 放 弃 了 愤 慨 SP", "ph_seq": "AP z ui h ou h ai sh ir f ang q i l e f en k ai SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.1719999 0.1719999 0.5180001 0.5180001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.170989 0.105011 0.112002 0.059998 0.112987 0.060013 0.112002 0.059998 0.085269 0.086731 0.225005 0.119995 0.113003 0.059998 0.397005 0.119995 0.112002 0.059998 0.518 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.171 0.105 0.112 0.06 0.113 0.06 0.112 0.06 0.0853 0.0867 0.225 0.12 0.113 0.06 0.397 0.12 0.112 0.06 0.518 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + 
"note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.173 0.517 0.172 0.518 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 274.5 271.8 269.4 267.6 265.8 264.9 263.7 263.1 263.1 263.6 264.0 264.4 264.6 264.3 263.3 261.8 260.2 258.6 256.9 255.8 256.3 257.5 264.4 266.6 268.7 269.2 266.4 270.1 272.9 276.3 283.7 293.1 301.8 308.3 311.3 311.5 310.2 309.5 309.2 309.3 310.2 311.3 311.9 312.7 312.3 311.8 311.1 310.3 309.4 308.6 308.1 307.7 307.6 307.0 305.8 303.8 301.9 300.1 298.4 297.0 296.3 316.1 324.1 332.3 346.6 346.6 346.6 346.7 346.9 347.1 347.5 347.7 348.0 348.2 348.4 348.5 348.6 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.8 348.7 347.8 345.6 342.0 338.2 334.4 330.8 327.5 325.1 324.0 324.9 324.7 324.5 324.3 324.0 323.8 323.6 321.2 318.0 314.7 313.7 311.3 309.7 308.1 306.3 303.2 300.1 296.7 291.3 286.7 287.5 294.6 302.0 309.5 317.3 325.2 333.3 341.6 350.2 358.9 367.9 377.1 386.5 396.1 406.0 416.1 426.5 437.2 448.1 459.3 470.8 468.9 469.5 469.9 470.2 470.7 470.9 471.0 471.2 470.9 470.2 469.3 467.7 466.6 465.2 464.3 463.5 462.9 462.5 462.4 462.4 462.6 463.2 464.0 464.7 465.5 466.4 467.0 467.6 467.7 467.5 467.2 467.0 466.5 465.9 464.3 462.6 459.8 455.4 449.2 443.6 437.8 434.8 434.0 433.4 437.0 431.3 425.7 420.2 414.7 409.3 404.0 398.8 393.6 388.5 383.4 378.5 373.5 368.7 363.9 359.2 354.5 349.9 345.4 340.9 336.5 332.1 327.8 323.5 319.3 315.9 314.1 313.1 312.5 312.6 312.9 312.7 312.5 312.4 311.8 311.7 311.2 310.2 309.5 308.2 307.0 305.7 304.4 303.1 301.5 299.8 298.6 297.6 296.9 296.1 295.7 295.2 294.7 294.5 294.8 295.2 296.0 297.5 299.8 301.1 300.4 300.0 298.9 297.9 297.5 297.0 296.9 296.7 296.4 296.0 295.5 294.7 294.4 294.0 293.6 293.2 292.6 292.1 291.7 291.3 290.9 290.6 290.4 290.0 289.8 289.8 289.9 290.5 291.1 291.8 292.5 293.1 293.7 294.4 294.9 295.3 295.9 296.0 296.0 295.9 295.7 295.7 295.6 295.4 295.1 294.8 294.6 294.3 293.7 293.4 293.0 292.6 292.2 291.9 291.8 291.4 291.3 291.4 291.7 292.3 292.9 293.6 294.4 295.1 295.4 295.4 294.9 293.9 292.8 291.6 288.6 287.0 283.9 281.0 276.5 272.7 269.6 267.8 269.0 270.7 272.5 274.3 276.1 277.9 279.7 281.5 283.4 285.2 287.1 289.0 290.9 292.8 294.7 296.6 298.6 300.5 302.5 304.5 306.5 308.5 310.5 312.5 314.6 315.4 311.7 310.9 311.1 311.0 311.3 311.5 311.3 310.6 309.3 307.6 304.9 299.6 294.4 289.6 287.9 285.8 290.1 294.1 298.1 302.2 306.3 310.5 314.7 319.0 323.3 327.8 332.2 336.8 341.3 346.0 350.7 355.5 360.4 359.0 360.1 359.6 358.6 358.3 357.7 356.9 356.0 355.0 353.9 352.8 351.6 350.4 349.0 347.9 346.0 344.9 343.7 342.8 342.1 341.7 341.5 341.2 341.3 341.5 341.9 343.0 343.8 344.8 346.2 347.0 347.8 348.3 349.2 349.7 350.2 350.7 350.8 351.0 351.3 351.5 351.5 351.4 350.9 350.3 349.5 348.6 347.8 346.8 346.0 345.4 344.6 343.9 343.4 343.1 342.9 342.7 342.8 342.8 343.0 343.3 343.6 343.8 344.0 344.4 344.7 345.3 345.9 346.4 346.9 347.3 347.6 347.9 348.1 348.5 348.6 348.4 348.4 348.4 348.6 348.8 349.0 349.4 349.7 350.2 350.8 350.9 350.8 350.2 349.0 348.5 347.6 344.0 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3 339.3", - "input_type": "phoneme", - "offset": 170.069 + "f0_timestep": "0.005" }, { + "offset": 172.828, "text": 
"AP 相 信 我 已 能 独 自 承 担 SP", "ph_seq": "AP x iang x in w o y i n eng d u z i0 ch eng d an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.1719999 0.1719999 0.517 0.517 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.169058 0.105942 0.105983 0.067017 0.127002 0.044998 0.128002 0.044998 0.112002 0.059998 0.300002 0.044998 0.098071 0.073929 0.335749 0.182251 0.124164 0.047836 0.517 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.1691 0.1059 0.106 0.067 0.127 0.045 0.128 0.045 0.112 0.06 0.3 0.045 0.0981 0.0739 0.3357 0.1823 0.1242 0.0478 0.517 0.068", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 G4 G#4 G4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.172 0.518 0.172 0.517 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 246.4 245.8 244.4 242.3 240.3 237.8 236.1 234.6 233.5 233.0 232.7 232.1 231.0 229.6 227.6 225.4 223.0 220.3 218.0 219.8 223.5 228.0 232.6 237.3 242.0 246.9 251.8 256.9 262.0 267.3 272.7 278.1 283.7 289.4 295.2 304.4 302.0 304.3 307.1 309.1 310.1 310.5 310.6 310.8 311.2 311.7 312.4 312.6 312.6 312.7 312.2 312.5 312.9 313.7 314.9 316.3 317.9 320.0 322.2 324.2 326.3 328.3 330.4 332.1 333.6 336.4 338.6 341.2 344.3 346.7 348.6 350.0 351.3 352.4 353.0 353.2 353.0 352.8 352.3 351.8 351.0 350.4 349.4 348.3 346.9 345.7 344.5 342.7 340.7 339.0 337.0 334.4 331.9 328.9 324.7 321.5 319.3 317.3 315.7 314.7 314.0 313.2 312.1 311.2 310.7 310.4 310.4 310.4 310.4 310.6 310.6 310.8 310.8 310.9 310.9 311.1 311.3 311.3 311.3 311.1 310.7 310.1 309.5 308.9 308.3 307.5 307.4 308.0 309.5 311.0 311.9 313.0 315.0 317.7 321.0 325.9 330.2 333.8 340.0 347.4 350.4 352.7 353.1 352.7 352.7 352.3 351.9 351.7 351.3 350.9 350.4 349.6 349.0 348.3 347.7 347.3 346.9 346.6 346.4 346.4 346.4 346.6 347.1 347.5 348.0 348.6 349.2 349.6 349.9 350.1 350.3 350.5 350.6 350.4 350.2 350.0 350.2 350.1 350.0 350.0 350.5 351.2 351.7 352.1 352.5 352.8 353.0 352.7 352.3 351.2 350.0 347.7 345.8 343.7 340.1 336.7 333.4 329.5 325.5 322.8 321.2 320.3 323.5 333.5 343.9 354.7 365.7 377.1 378.2 380.2 381.5 383.2 384.3 385.8 387.6 389.4 391.1 392.0 392.0 391.3 390.0 387.5 384.9 382.2 379.3 376.3 373.7 371.9 370.6 370.3 365.9 369.2 372.6 376.1 379.5 383.0 386.5 390.1 393.7 397.3 400.9 404.6 408.3 409.3 410.2 411.9 413.5 415.4 417.5 419.5 421.1 421.9 422.0 421.6 420.7 419.9 418.7 416.7 415.5 414.3 413.3 412.2 411.5 411.0 410.8 411.0 411.3 411.6 412.0 412.7 412.9 413.4 413.9 414.6 415.2 415.6 416.3 416.8 417.2 417.6 417.8 418.0 418.0 418.2 418.1 417.9 417.7 417.4 417.2 417.0 416.7 416.1 415.5 415.0 414.5 414.2 414.0 413.8 413.7 413.9 414.2 414.6 415.4 416.0 416.6 416.9 417.6 418.1 418.7 419.3 419.4 418.9 417.9 416.1 413.4 411.0 408.3 405.0 400.7 392.2 397.9 399.3 391.3 400.8 401.8 400.7 400.6 400.1 399.7 399.2 398.7 398.2 397.7 397.2 396.8 396.3 395.8 395.3 394.8 394.3 393.9 393.4 392.9 392.4 391.9 391.5 390.5 389.5 391.4 391.8 392.6 393.1 393.1 393.1 393.1 392.9 392.9 392.7 392.4 391.7 390.7 389.6 387.3 384.9 383.5 382.0 379.2 376.7 372.9 368.3 365.8 361.1 355.9 
343.6 345.2 348.3 352.8 357.9 363.4 369.8 377.1 383.5 389.1 393.8 397.7 400.4 401.4 401.2 400.7 399.9 399.0 397.9 396.7 395.4 393.6 392.1 390.6 389.2 387.9 386.7 385.8 385.0 384.5 384.4 384.5 384.9 385.7 386.9 388.2 389.7 391.2 393.2 394.8 396.3 397.7 398.8 399.6 400.0 400.2 400.0 399.8 399.1 398.5 397.9 397.1 396.2 395.3 394.3 393.1 392.2 391.1 390.3 389.4 388.6 388.0 387.4 387.0 386.9 386.8 387.0 387.5 388.3 389.4 390.7 392.1 393.6 395.6 397.2 398.5 399.9 401.1 401.9 402.4 402.8 402.6 402.2 401.6 400.8 399.8 398.8 397.5 396.3 395.2 394.0 392.8 391.7 390.7 389.9 389.3 388.8 388.8 388.8 388.8 388.9 389.1 389.1 389.3 389.5 389.7 389.9 390.1 390.2 390.3 390.4 390.4 390.6 323.6 323.6 323.6 323.6 323.6 323.6 323.6 323.6 323.6 323.6 323.6", - "input_type": "phoneme", - "offset": 172.828 + "f0_timestep": "0.005" }, { + "offset": 175.586, "text": "AP 就 算 脾 气 总 是 特 别 坏 SP", "ph_seq": "AP j iu s uan p i q i z ong sh ir t e b ie h uai SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.345 0.345 0.1720001 0.1720001 0.517 0.517 0.1730001 0.1730001 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.186004 0.089996 0.112002 0.059998 0.104412 0.068588 0.091998 0.080002 0.101833 0.071167 0.22499 0.12001 0.112002 0.059998 0.472002 0.044998 0.113003 0.059998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.186 0.09 0.112 0.06 0.1044 0.0686 0.092 0.08 0.1018 0.0712 0.225 0.12 0.112 0.06 0.472 0.045 0.113 0.06 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.345 0.172 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 262.5 260.0 260.7 260.3 260.8 261.2 261.2 261.3 261.5 261.9 262.4 262.5 262.1 260.4 258.0 255.3 252.1 247.2 245.5 245.6 245.0 247.0 251.0 255.1 259.2 263.4 267.6 272.0 276.4 280.8 285.4 290.0 294.6 299.4 304.2 309.2 308.7 309.2 309.6 310.4 310.9 311.4 311.6 311.3 311.1 310.9 310.8 310.2 309.9 309.3 308.3 307.4 306.6 305.5 304.3 302.2 299.3 295.5 292.1 288.8 285.9 286.4 293.8 301.4 309.2 317.1 325.3 333.7 342.3 344.8 344.8 345.9 347.4 349.0 349.9 350.4 350.5 349.7 348.8 347.4 344.1 341.3 337.8 331.7 324.6 316.4 309.7 307.5 307.7 307.9 308.2 308.4 308.6 308.8 309.0 309.2 309.4 309.7 309.9 310.1 310.3 310.5 310.7 311.0 311.2 312.3 312.8 312.5 312.7 313.1 312.9 312.9 312.6 312.1 311.8 311.2 310.5 309.4 307.1 304.4 302.6 301.2 300.3 303.3 311.7 320.3 329.1 338.2 347.6 357.1 367.0 377.1 387.6 398.3 409.3 420.6 432.2 444.1 456.4 465.6 462.9 462.9 463.2 464.2 465.2 465.9 466.6 467.0 467.4 467.2 466.4 465.8 465.0 464.1 463.5 463.0 462.6 462.4 462.6 463.0 463.5 464.1 464.8 465.3 466.0 467.0 467.8 468.5 469.4 469.7 469.7 469.4 468.7 467.7 466.7 465.1 462.1 458.9 454.2 448.6 440.0 432.9 428.3 421.4 415.9 411.2 406.6 402.0 397.5 393.0 388.6 384.3 379.9 375.7 371.5 367.3 363.2 359.1 355.1 313.3 313.1 313.1 313.1 313.0 312.8 312.7 312.5 312.4 312.4 312.2 312.2 312.2 312.2 312.2 312.4 312.6 313.0 313.4 313.7 314.0 314.2 314.3 313.5 310.9 306.8 302.1 296.8 291.1 285.1 281.1 278.1 276.5 
284.6 284.9 285.2 285.4 285.7 286.0 286.3 286.6 286.8 287.1 287.4 284.5 284.9 285.3 286.3 288.3 290.4 292.1 293.5 294.6 295.2 295.5 295.3 294.8 294.3 293.8 293.0 292.6 292.1 291.6 291.3 291.1 290.9 290.9 290.9 290.8 290.9 291.1 291.0 291.3 291.5 291.6 292.3 292.5 292.8 293.1 293.3 293.5 293.7 293.7 293.7 293.8 293.8 293.6 293.5 293.3 293.5 293.3 293.3 293.5 293.6 294.0 294.1 294.0 294.0 293.8 293.7 293.8 293.8 293.6 293.4 293.3 293.3 293.5 293.8 294.1 294.4 294.8 295.0 294.9 294.3 294.6 295.0 295.5 296.0 296.6 297.1 297.4 297.6 297.3 296.8 295.9 294.9 293.7 292.3 290.8 289.0 287.6 286.2 284.9 283.8 283.0 282.5 282.2 285.2 291.2 297.2 303.4 307.7 312.0 316.4 320.8 325.3 319.5 311.1 310.2 310.1 309.9 310.8 311.6 312.0 312.7 313.3 313.3 313.0 312.1 310.2 308.0 305.0 300.0 295.4 292.0 288.0 282.1 276.9 281.9 287.0 292.3 297.6 303.0 308.5 314.1 319.8 325.6 331.5 337.5 343.6 349.9 356.2 359.1 357.2 357.2 357.4 357.6 357.8 358.0 357.8 357.1 356.4 355.2 353.7 352.2 350.4 348.6 346.9 345.2 343.8 342.1 341.3 340.3 339.9 339.7 339.5 339.6 340.1 340.8 341.6 342.9 344.2 345.1 345.8 346.8 347.6 348.6 349.4 350.1 350.6 351.0 351.3 351.3 351.1 350.7 350.1 349.4 348.6 347.6 346.6 345.4 344.4 343.4 342.5 341.8 341.0 340.3 339.6 339.2 339.1 339.1 339.2 339.5 340.1 341.0 341.7 342.4 343.4 344.7 345.8 346.8 347.8 348.6 349.3 350.0 350.7 350.8 350.7 350.2 349.2 348.4 347.7 347.0 346.9 347.0 346.8 346.3 345.6 344.5 343.6 341.9 340.1 338.9 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5 338.5", - "input_type": "phoneme", - "offset": 175.586 + "f0_timestep": "0.005" }, { + "offset": 178.345, "text": "AP 就 算 整 天 抱 怨 着 不 满 SP", "ph_seq": "AP j iu s uan zh eng t ian b ao y van zh e b u m an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.1719999 0.1719999 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.171004 0.104996 0.102725 0.069275 0.11195 0.06105 0.127002 0.044998 0.119281 0.052719 0.299987 0.045013 0.113003 0.059998 0.457002 0.059998 0.112002 0.059998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.171 0.105 0.1027 0.0693 0.1119 0.0611 0.127 0.045 0.1193 0.0527 0.3 0.045 0.113 0.06 0.457 0.06 0.112 0.06 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.173 0.517 0.172 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 243.3 242.7 240.1 238.9 237.0 235.8 234.9 234.3 233.7 233.6 233.8 234.1 234.2 233.8 233.1 231.8 229.9 227.2 224.7 223.0 220.4 222.9 227.9 233.0 238.2 243.6 249.0 254.6 260.3 266.2 272.1 278.2 284.5 290.8 297.4 304.0 303.9 304.4 305.8 307.4 308.8 309.5 309.9 310.2 310.6 310.9 311.3 311.9 312.0 311.6 310.9 308.5 305.9 304.9 301.6 299.1 295.3 291.8 286.6 280.6 272.8 273.8 281.3 288.6 296.1 303.8 311.6 319.7 328.0 336.5 345.2 350.6 350.2 348.7 348.9 349.3 349.7 350.1 350.2 350.0 349.5 348.7 347.1 344.1 340.4 336.4 334.2 331.2 325.1 319.8 
316.2 311.4 308.4 303.9 300.8 302.1 303.3 304.5 305.8 307.0 308.3 309.5 310.8 312.0 309.8 308.4 308.2 308.5 309.4 310.5 311.3 311.7 312.0 312.5 313.0 313.3 313.5 313.2 312.6 311.5 310.2 308.6 307.4 306.0 303.3 300.8 298.2 295.5 292.4 288.4 285.0 280.0 272.3 266.3 275.6 279.6 291.3 305.5 320.4 336.1 352.5 358.6 357.4 356.6 356.4 356.1 355.8 355.3 354.8 353.9 352.5 351.5 350.3 349.1 347.9 346.8 345.9 345.5 345.5 345.7 345.9 346.3 346.8 347.3 347.8 348.5 349.0 349.8 350.2 350.7 351.0 351.2 351.4 351.6 351.8 351.9 351.9 351.8 351.6 351.1 350.5 349.9 349.0 348.2 347.2 346.0 345.0 343.4 341.6 339.5 337.1 334.1 330.2 325.9 321.0 316.0 312.9 309.9 307.9 306.3 305.9 306.2 306.7 307.3 307.8 308.6 309.6 310.6 311.4 312.3 313.3 314.2 315.3 316.7 317.7 318.4 318.4 317.8 316.9 315.4 312.8 309.7 305.2 299.5 293.6 288.3 283.4 279.2 277.5 276.3 276.7 278.4 280.6 282.9 285.2 287.6 289.9 292.3 294.7 297.1 299.6 302.0 304.8 307.1 307.1 306.3 306.3 306.3 305.7 304.8 303.1 301.3 299.7 298.5 297.2 296.0 295.0 293.9 293.0 292.6 292.1 292.0 292.0 292.1 292.0 292.1 292.0 292.1 292.1 292.1 292.4 292.9 293.4 293.8 294.0 294.2 294.5 294.9 295.2 295.5 295.7 295.7 295.6 295.4 295.0 294.8 294.5 294.2 293.9 293.8 293.8 293.9 294.0 294.0 294.0 294.2 294.2 294.0 293.9 293.7 293.6 293.3 293.0 292.8 292.8 292.8 292.9 293.2 293.4 293.7 293.7 294.0 294.3 294.5 294.8 295.1 295.5 295.8 296.0 295.6 294.5 293.0 291.5 291.2 290.6 293.6 306.0 309.7 295.0 294.3 296.0 298.3 300.6 303.0 305.3 307.7 310.1 312.5 314.9 317.4 319.8 322.3 324.8 327.3 329.9 319.3 315.0 314.3 313.8 313.6 313.5 313.4 313.2 312.9 312.7 312.5 312.2 312.0 311.2 310.6 310.0 309.0 308.4 307.9 307.3 306.5 305.9 305.2 304.3 304.0 303.8 303.5 303.5 303.8 304.4 305.7 307.9 309.4 311.6 315.3 317.9 318.4 318.3 318.4 318.3 318.2 318.0 317.7 316.9 316.6 316.4 315.8 315.3 314.7 314.0 312.7 311.9 311.2 310.3 309.5 308.8 307.9 306.8 306.1 306.0 306.0 306.6 307.2 307.6 308.4 309.9 311.2 312.5 313.8 314.8 316.0 317.1 318.3 319.3 319.8 320.2 320.4 320.2 319.9 319.0 318.0 317.0 315.9 314.7 313.5 312.3 311.0 310.1 309.3 308.6 308.3 307.8 307.6 307.6 307.6 307.9 308.5 309.3 310.6 311.5 312.4 313.6 314.7 315.7 316.6 317.6 318.2 318.8 319.0 318.9 318.8 318.0 317.4 316.4 315.2 314.0 312.8 312.1 311.6 311.3 311.3 311.4 311.7 312.0 312.4 312.2 311.9 311.4 310.6 309.8 309.2 308.7 308.7 309.0 309.0 308.2 306.4 302.5 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9", - "input_type": "phoneme", - "offset": 178.345 + "f0_timestep": "0.005" }, { + "offset": 181.103, "text": "AP 却 也 怎 么 都 无 法 放 弃 这 孤 独 港 湾 SP", "ph_seq": "AP q ve y E z en m e d ou w u f a f ang q i zh e g u d u g ang w an SP", - "note_seq": "rest F4 F4 D#4 D#4 A#3 A#3 G4 G4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D#4 D#4 D4 D4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.345 0.345 0.69 0.69 0.345 0.345 0.344 0.344 0.345 0.345 0.345 0.345 0.1729999 0.1729999 0.5170002 0.5170002 0.3449998 0.3449998 0.5170002 0.5170002 0.1719999 0.1719999 0.3449998 0.3449998 0.1720004 0.1720004 1.207 1.207 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.126006 0.149994 0.300002 0.044998 0.540006 0.149994 0.240004 0.104996 0.290121 0.053879 0.300002 0.044998 0.254988 0.090012 0.091259 0.081741 0.397005 0.119995 0.237227 0.107773 0.457003 0.059998 0.112002 0.059998 0.285002 0.059998 0.098361 0.073639 1.207 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.126 0.15 0.3 0.045 0.54 0.15 0.24 0.105 0.2901 0.0539 0.3 0.045 0.255 0.09 0.0913 0.0817 0.397 0.12 
0.2372 0.1078 0.457 0.06 0.112 0.06 0.285 0.06 0.0984 0.0736 1.207 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 D#4 A#3 G4 G4 G#4 G4 F4 D#4 D#4 D4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.345 0.69 0.345 0.344 0.345 0.345 0.173 0.517 0.345 0.517 0.172 0.345 0.172 1.207 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.2 370.5 357.7 354.3 351.6 348.6 347.7 347.4 347.0 346.7 346.2 346.0 345.9 346.0 346.1 346.3 346.7 347.2 347.7 348.1 348.3 348.4 348.4 348.2 348.1 348.2 348.2 348.4 348.7 349.0 349.2 349.4 349.6 349.8 350.2 350.7 351.0 351.3 351.5 351.5 351.3 350.6 350.1 349.0 348.2 347.4 346.5 345.4 344.1 342.8 341.2 339.4 337.3 334.9 331.5 326.2 322.7 320.0 314.7 306.4 301.2 296.0 292.7 291.2 291.0 291.8 293.7 295.4 297.7 301.1 303.2 305.4 307.1 309.2 310.5 311.9 312.9 313.5 314.0 314.5 314.9 315.3 315.5 315.6 315.1 314.6 314.1 313.5 312.6 311.8 310.9 309.3 308.4 307.7 307.1 306.7 306.4 306.3 306.4 306.7 307.2 307.6 308.1 309.3 309.9 310.5 311.3 311.9 312.1 312.5 313.0 313.2 313.5 313.7 313.6 313.4 313.0 312.7 312.3 311.8 311.1 310.2 309.5 308.9 308.6 308.2 307.7 307.5 307.2 307.1 307.2 307.2 307.6 308.0 308.7 307.8 306.5 304.6 302.3 298.8 295.4 291.6 287.7 283.2 278.3 273.9 269.4 264.9 260.1 255.8 251.9 248.1 244.4 241.5 238.9 236.9 234.9 233.8 233.0 232.7 232.7 232.8 233.1 233.4 233.8 234.2 234.8 235.3 235.7 236.0 236.3 236.5 236.7 236.6 236.2 235.4 234.5 233.6 232.7 231.9 231.6 281.7 279.4 277.1 274.8 272.5 270.2 268.0 265.8 263.6 261.4 259.2 257.0 254.9 252.8 250.7 248.6 246.6 244.5 242.5 240.5 238.5 236.5 234.5 232.6 230.7 228.7 226.8 225.0 223.7 224.5 225.7 226.3 227.9 229.1 229.6 230.1 230.1 230.3 230.6 230.8 231.3 231.6 231.7 231.9 231.9 231.9 232.1 232.1 232.1 232.1 232.1 232.1 232.0 232.1 232.2 232.4 232.3 232.4 232.5 232.5 232.6 232.8 233.0 233.3 233.4 233.5 233.4 233.4 233.1 233.2 233.1 233.0 232.9 232.8 232.6 232.3 232.1 231.9 231.9 232.0 232.4 232.7 233.6 234.7 236.3 238.1 240.2 242.6 245.7 248.1 251.0 256.1 264.7 276.7 292.1 306.3 315.9 321.4 326.0 332.6 337.1 340.9 345.1 350.0 353.2 355.7 358.1 360.8 363.4 365.9 368.6 371.4 373.3 374.9 377.4 378.6 379.9 381.2 382.6 384.1 385.4 387.3 388.8 390.1 391.2 392.2 393.1 393.4 393.6 393.9 394.1 394.3 394.3 394.3 394.3 394.3 394.1 393.7 393.0 391.7 389.9 388.3 387.0 385.6 383.9 380.9 372.6 362.7 359.3 356.3 352.6 348.0 346.7 350.7 357.6 364.6 371.8 379.1 386.5 394.1 401.8 409.7 417.8 426.0 401.6 400.8 400.5 400.1 399.8 399.4 398.8 398.2 397.5 396.8 395.9 395.2 394.3 393.1 391.8 390.9 389.9 389.1 389.1 389.2 389.5 390.0 390.5 391.0 391.2 391.6 392.0 392.2 392.8 393.2 393.4 393.5 393.6 393.8 394.0 394.2 394.3 394.3 394.3 394.3 394.2 394.0 394.0 393.8 393.8 393.6 393.1 392.7 392.2 391.7 391.1 390.7 390.2 389.6 389.4 389.2 389.2 389.6 390.0 390.7 392.1 393.8 395.9 398.4 400.9 404.9 407.2 409.3 411.7 413.5 415.1 416.5 417.6 418.5 419.1 419.5 419.5 419.1 418.7 418.1 416.7 415.7 415.0 414.3 413.5 413.1 412.7 412.1 411.5 411.1 410.7 410.6 410.8 410.8 410.8 410.8 410.9 411.1 411.4 411.9 412.3 412.7 413.1 413.5 413.9 414.3 414.8 415.2 415.6 415.8 416.2 416.5 416.5 416.5 416.6 415.8 414.0 411.4 407.7 403.5 399.2 395.0 390.6 385.9 382.8 380.2 378.4 377.4 385.1 386.3 
387.5 388.7 389.9 391.1 392.3 393.6 394.8 396.0 395.3 390.9 392.1 392.6 392.2 392.4 390.9 388.6 385.0 382.8 380.3 381.7 383.5 381.9 380.4 378.8 377.3 375.7 374.2 372.6 371.1 369.6 368.1 366.6 365.1 363.6 362.1 360.6 359.1 357.6 356.2 354.7 353.3 351.8 350.4 349.3 349.6 350.6 350.2 350.7 351.2 351.4 351.9 352.5 352.8 352.8 352.4 351.8 351.0 350.2 349.5 348.7 348.0 347.4 347.0 347.0 347.2 347.2 347.2 347.2 347.2 347.3 347.5 347.7 348.0 348.3 348.5 348.8 349.0 349.0 349.2 349.2 349.4 349.6 349.8 350.0 350.2 350.3 350.3 350.1 349.9 349.6 349.2 349.0 348.8 348.8 348.8 348.8 348.8 348.7 348.5 348.4 348.4 348.5 348.9 349.4 349.9 350.3 350.6 350.6 350.6 350.1 349.4 348.6 347.1 344.5 341.7 338.1 333.3 328.9 324.8 321.0 316.3 309.2 303.4 303.8 304.2 304.7 305.1 305.5 305.9 306.3 306.8 307.2 307.6 308.0 308.4 308.9 309.3 309.7 310.1 310.6 311.0 311.4 311.8 312.3 312.7 313.1 313.6 313.2 311.7 310.6 311.1 311.6 312.0 312.2 312.0 311.8 311.7 311.0 310.6 310.3 310.0 309.5 309.2 309.0 308.9 308.9 309.1 309.2 309.2 309.6 309.9 310.2 310.4 310.6 310.4 310.6 310.9 311.2 311.8 312.3 312.5 312.3 311.7 310.9 309.6 308.3 306.5 303.7 301.4 296.7 292.0 279.1 273.0 267.0 261.7 259.7 258.5 261.4 266.7 269.5 272.2 275.0 277.8 280.7 283.5 286.4 289.4 292.3 295.3 298.3 301.4 304.5 307.6 310.7 313.9 314.6 312.3 313.4 313.9 315.1 316.3 316.9 317.2 317.3 317.2 316.7 316.3 315.2 314.2 313.2 312.1 310.9 310.1 309.5 308.9 308.8 308.6 308.4 308.3 308.4 308.6 308.9 309.2 309.6 309.9 310.3 310.8 311.3 311.7 311.9 312.1 312.5 312.9 313.3 313.3 313.4 313.3 313.1 313.0 312.7 312.4 312.0 311.7 311.4 311.3 311.1 310.9 310.8 310.4 310.2 310.2 310.4 310.4 310.4 310.6 310.9 311.1 311.3 311.5 311.4 311.3 311.3 311.5 311.6 312.1 312.2 312.2 312.2 311.8 311.3 310.5 309.6 308.7 307.3 305.2 302.8 300.1 296.6 291.8 288.3 282.8 280.3 274.5 274.1 275.0 276.4 277.7 279.0 280.4 281.7 283.0 284.4 285.8 287.1 288.5 289.9 291.3 292.7 294.1 295.5 296.9 294.1 292.9 292.1 291.7 292.2 293.1 293.9 294.6 294.9 294.8 294.3 293.2 291.5 290.2 286.6 284.4 281.2 277.3 272.9 268.1 263.1 258.7 256.6 255.9 255.0 258.7 266.7 273.0 279.4 286.0 292.7 299.6 298.0 296.8 297.1 297.2 297.8 298.4 298.3 298.0 297.4 296.7 295.9 294.7 293.7 292.8 292.1 291.6 291.4 291.3 291.3 291.4 291.6 291.9 292.1 292.5 292.9 293.0 293.2 293.4 293.5 293.8 294.3 294.8 295.0 295.4 295.8 296.1 296.1 295.9 295.3 294.6 294.0 293.2 291.9 290.6 288.4 284.5 280.8 276.9 272.9 270.2 268.3 269.2 272.0 274.8 277.6 280.4 283.3 286.2 289.2 292.1 295.1 298.1 301.2 304.3 307.4 310.5 313.7 316.9 320.2 323.5 320.6 313.0 312.2 311.8 311.5 312.2 312.2 312.5 312.9 312.9 313.2 313.2 312.8 312.2 311.8 311.2 310.7 310.2 309.8 309.3 308.3 307.7 307.0 306.8 306.8 307.3 308.1 309.2 310.5 311.7 313.1 314.2 314.7 315.2 315.5 315.6 315.3 315.3 315.0 314.6 314.6 314.4 314.1 313.9 313.6 313.0 312.5 311.8 311.1 310.6 310.1 309.7 309.5 309.1 308.8 308.5 308.3 308.3 308.4 308.6 308.8 309.3 309.7 310.1 310.6 310.9 311.2 311.5 312.1 312.4 312.9 313.3 313.5 313.5 313.5 313.6 313.7 313.7 313.7 313.6 313.3 312.9 312.5 312.1 311.7 311.7 311.2 310.8 310.5 310.2 309.9 309.8 309.7 309.7 309.7 309.8 309.9 310.1 310.4 310.8 311.1 311.4 311.7 311.8 312.2 312.4 312.4 312.4 312.4 312.4 312.5 312.7 312.7 312.6 312.7 312.6 312.3 312.0 311.5 311.3 310.9 310.9 310.8 310.5 310.4 310.4 310.6 310.9 311.3 311.7 311.7 311.7 312.0 311.8 312.0 312.0 311.9 311.7 311.5 311.5 311.5 311.4 311.4 311.5 311.5 311.7 311.7 311.5 311.3 311.3 311.1 311.1 311.0 310.9 310.9 310.9 310.9 310.8 310.7 310.5 310.4 310.2 310.2 310.3 310.5 310.8 310.9 311.3 311.7 
312.1 312.4 312.8 313.0 313.3 313.6 313.8 314.3 314.1 313.8 313.3 312.8 312.2 311.6 310.8 310.0 309.2 308.5 307.8 307.1 306.5 305.9 305.4 305.1 305.0 304.9 304.9 305.2 305.7 306.5 307.4 308.4 309.5 311.0 312.3 313.4 314.4 315.5 316.5 317.1 317.6 317.8 317.7 317.4 316.8 316.0 315.1 313.8 312.6 311.3 310.1 308.8 307.7 306.7 305.8 305.3 305.0 304.9 304.9 305.2 305.8 306.4 307.2 308.1 309.0 310.3 311.3 312.5 313.5 314.6 315.6 316.3 316.8 317.3 317.7 317.8 317.7 317.2 316.6 315.8 314.9 313.8 312.7 311.5 310.3 309.0 308.1 307.3 306.6 306.1 305.8 305.8 305.8 306.1 306.6 307.3 308.1 309.0 309.9 310.8 311.7 312.7 313.5 314.3 314.8 315.3 315.5 315.7 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.5", - "input_type": "phoneme", - "offset": 181.103 + "f0_timestep": "0.005" }, { + "offset": 197.331, "text": "SP AP 年 轻 的 牵 绊 SP", "ph_seq": "SP AP n ian q ing d e q ian b an SP", - "note_seq": "rest rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.2 0.4 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.345 0.345 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.155002 0.4 0.044998 0.101489 0.070511 0.120571 0.052429 0.090854 0.081146 0.118648 0.054352 0.345 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.155 0.4 0.045 0.1015 0.0705 0.1206 0.0524 0.0909 0.0811 0.1186 0.0544 0.345 0.4", + "ph_num": "1 2 2 2 2 2 1 1", + "note_seq": "rest rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.2 0.4 0.172 0.173 0.172 0.173 0.345 0.4", + "note_slur": "0 0 0 0 0 0 0 0", "f0_seq": "358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 358.2 309.3 309.3 309.5 309.8 310.0 310.3 310.6 310.8 311.1 311.1 311.2 311.1 311.1 311.0 310.8 310.6 310.5 310.3 310.2 310.0 309.9 309.9 309.8 309.7 309.5 309.2 308.8 308.4 307.9 307.4 306.9 306.5 306.0 305.5 305.2 305.0 304.9 305.0 305.8 307.4 309.9 313.0 317.2 321.8 326.4 331.1 335.8 340.9 344.5 347.5 349.7 351.2 351.7 351.7 351.7 351.7 351.7 351.8 351.9 351.9 351.9 352.1 352.1 352.1 352.1 352.2 352.1 351.7 351.0 349.8 348.6 347.3 345.9 344.5 343.1 342.0 341.2 340.7 339.9 339.9 347.1 364.1 387.7 387.7 388.1 388.7 389.4 390.3 391.2 391.9 392.5 392.9 393.1 393.1 392.6 391.8 390.6 388.6 386.7 384.7 382.7 380.5 378.4 376.6 374.9 373.6 372.9 372.6 393.1 395.3 397.5 399.7 401.9 404.2 406.4 408.7 411.0 413.3 415.6 415.5 415.5 415.5 415.5 415.5 415.8 415.8 415.9 416.0 416.0 416.0 416.1 416.3 415.9 414.8 413.2 411.2 409.0 406.3 403.2 400.7 398.0 395.8 393.6 391.9 390.9 390.4 390.4 390.6 391.2 392.0 392.9 394.0 395.2 396.2 397.2 398.3 399.2 399.8 400.2 400.4 400.3 399.9 399.4 398.8 398.2 397.0 396.1 395.1 394.0 392.8 391.7 390.7 389.8 388.8 388.2 387.6 387.3 387.3 387.3 387.5 388.0 388.7 389.5 390.4 391.4 392.6 393.5 394.4 395.2 395.7 396.1 396.3 396.4 396.0 395.3 394.5 393.4 392.2 391.3 390.3 389.6 389.2 389.1 389.1 389.1 389.1 389.2 389.3 389.4 389.6 389.7 389.9 390.1 390.3 390.5 390.7 390.9 390.9 391.1 391.3 391.3 391.3 391.3 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 
363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4 363.4", - "input_type": "phoneme", - "offset": 197.331 + "f0_timestep": "0.005" }, { + "offset": 199.035, "text": "AP 老 去 的 无 奈 SP", "ph_seq": "AP l ao q v d e w u n ai SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.230002 0.044998 0.094829 0.078171 0.127002 0.044998 0.128002 0.044998 0.112002 0.059998 0.345 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.23 0.045 0.0948 0.0782 0.127 0.045 0.128 0.045 0.112 0.06 0.345 0.068", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.068", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 335.0 314.3 314.2 313.7 313.1 312.3 311.6 310.8 310.1 309.4 309.0 308.8 308.8 308.9 309.0 309.3 309.9 310.3 310.8 311.3 311.9 312.4 312.8 313.1 313.4 313.6 313.8 313.4 312.6 311.3 309.6 307.6 305.2 303.0 301.1 299.5 298.3 297.8 297.8 298.7 300.7 304.4 308.5 313.3 318.5 324.4 330.6 335.9 340.7 344.6 347.8 349.4 350.0 350.0 350.0 350.2 350.2 350.4 350.5 350.7 350.8 350.8 350.8 351.0 350.6 349.8 348.7 347.2 345.3 343.2 341.2 339.1 337.1 335.2 333.7 332.5 331.7 331.3 374.3 378.3 382.4 386.5 390.7 394.9 395.8 392.2 391.2 391.1 390.9 391.5 391.8 392.0 392.0 391.5 391.2 390.9 390.4 389.9 389.3 388.6 388.1 387.9 388.0 388.7 390.4 392.5 395.0 397.4 400.7 403.0 405.0 407.4 409.4 411.1 412.3 413.3 414.3 415.5 416.5 417.5 418.0 418.4 418.8 419.2 419.4 419.4 419.4 419.0 418.4 418.0 417.5 416.7 415.9 415.0 413.6 412.5 411.5 410.1 408.9 407.5 406.5 404.9 403.3 401.5 399.2 395.6 399.1 399.1 399.2 399.3 399.3 399.5 399.7 399.9 400.1 400.2 400.2 400.3 400.3 400.1 399.6 398.8 397.9 396.8 395.6 394.4 392.9 391.4 390.0 388.8 387.5 386.4 385.6 384.9 384.4 384.2 384.2 384.3 384.9 385.5 386.4 387.5 388.9 390.3 391.6 392.9 394.0 395.4 396.4 397.0 397.5 397.8 397.8 397.4 396.8 396.1 395.2 394.0 392.7 391.7 390.8 389.9 389.2 388.6 388.4 388.4 388.7 389.3 390.0 391.0 391.8 392.7 393.4 393.7 394.0 393.5 392.4 391.0 389.2 387.6 386.8 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6 378.6", - "input_type": "phoneme", - "offset": 199.035 + "f0_timestep": "0.005" }, { + "offset": 200.414, "text": "AP 生 命 重 复 着 因 果 循 环 SP 还 有 谁 的 身 影 能 永 远 如 此 的 伟 岸 啊 SP", "ph_seq": "AP sh eng m ing ch ong f u zh e y in g uo x vn h uan SP h ai y ou sh ei d e sh en y ing n eng y ong y van r u c i0 d e w ei an an SP", - "note_seq": "rest D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D4 D4 A#3 A#3 rest D#4 D#4 D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 A#4 A#4 G4 G4 F4 F4 D#4 D#4 F4 F4 F4 G4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.345 0.345 0.3439999 0.3439999 0.1730001 0.1719999 0.1719999 0.1730001 0.1730001 
0.1719999 0.1719999 0.1730001 0.1730001 0.1719999 0.1719999 0.3449998 0.3449998 0.3450003 0.3450003 0.3439999 0.3439999 0.3450003 0.3450003 0.3449998 0.3449998 0.3450003 0.3450003 0.3449998 0.3449998 0.1719999 0.1719999 1.552 0.3449998 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.125991 0.150009 0.112002 0.059998 0.094104 0.077896 0.113002 0.059998 0.103641 0.068359 0.300002 0.044998 0.116405 0.056595 0.352007 0.164993 0.22499 0.12001 0.344 0.079998 0.093002 0.126987 0.045013 0.092586 0.080414 0.127002 0.044998 0.082454 0.090546 0.127002 0.044998 0.240004 0.104996 0.300002 0.044998 0.299002 0.044998 0.300002 0.044998 0.211836 0.133163 0.300002 0.044998 0.287642 0.057358 0.127002 1.552 0.389998 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.126 0.15 0.112 0.06 0.0941 0.0779 0.113 0.06 0.1036 0.0684 0.3 0.045 0.1164 0.0566 0.352 0.165 0.225 0.12 0.344 0.08 0.093 0.127 0.045 0.0926 0.0804 0.127 0.045 0.0825 0.0905 0.127 0.045 0.24 0.105 0.3 0.045 0.299 0.045 0.3 0.045 0.2118 0.1332 0.3 0.045 0.2876 0.0574 0.127 1.552 0.39 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1 1 1", + "note_seq": "rest D#4 F4 G4 G#4 G4 F4 D#4 D4 A#3 rest D#4 D#4 F4 G4 G#4 G4 F4 D#4 A#4 G4 F4 D#4 F4 F4 G4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.173 0.517 0.345 0.344 0.173 0.172 0.173 0.172 0.173 0.172 0.345 0.345 0.344 0.345 0.345 0.345 0.345 0.172 1.552 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 361.3 314.7 314.6 314.5 314.4 314.4 314.2 314.1 314.0 314.0 313.8 313.8 313.8 313.7 313.7 313.4 313.2 313.0 312.7 312.4 312.0 311.7 311.3 311.0 310.8 310.5 310.4 310.2 310.2 310.3 310.8 312.1 313.7 315.7 317.9 321.1 324.1 327.2 330.3 333.6 337.1 339.9 342.3 344.4 346.2 347.3 348.0 348.2 348.3 348.5 348.8 349.4 349.6 349.8 350.0 349.5 348.0 346.0 343.7 341.3 339.0 336.9 335.8 335.8 336.7 340.2 345.0 351.7 359.4 366.8 374.1 381.1 387.5 390.7 392.2 392.0 392.0 392.0 391.9 391.8 391.7 390.4 390.4 390.4 390.4 390.4 390.4 390.2 390.2 390.2 390.1 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.4 392.3 395.0 398.3 402.2 405.4 408.4 410.5 407.7 411.1 412.9 413.6 413.6 413.8 414.0 414.3 414.6 415.0 415.3 415.5 415.8 416.0 416.2 416.0 415.2 413.9 412.2 410.1 407.2 404.3 401.5 398.7 395.6 392.7 390.5 388.4 386.9 385.7 385.3 386.3 387.2 388.2 389.1 390.1 391.0 392.0 392.9 393.9 396.5 397.8 397.9 398.3 398.8 399.4 399.8 400.0 400.2 399.7 399.1 398.1 396.6 395.4 394.3 393.4 392.6 391.8 391.3 391.0 391.0 391.2 391.6 392.0 392.5 392.9 393.4 393.8 394.3 394.5 394.6 394.6 394.6 394.7 394.7 394.8 394.9 394.7 394.7 394.7 394.8 395.0 394.9 394.7 394.3 394.0 393.1 392.1 391.0 389.5 387.4 385.1 383.0 380.5 376.8 373.6 370.4 366.6 363.6 360.5 358.1 356.4 355.2 354.1 353.1 352.2 351.4 350.3 349.2 348.3 347.5 347.0 346.8 346.8 347.0 347.4 348.0 348.4 348.6 348.0 346.6 344.9 342.3 339.5 336.9 334.3 331.9 328.8 325.8 322.7 319.4 316.9 315.1 312.4 312.8 315.2 317.6 320.1 322.6 325.1 327.6 330.2 332.7 332.5 321.2 319.3 318.0 316.5 316.2 316.1 315.9 315.5 315.2 314.7 314.3 313.8 313.2 312.0 310.9 310.1 309.6 309.2 309.2 309.0 308.8 308.8 308.8 309.1 309.4 309.6 310.0 310.3 310.6 310.8 
311.0 311.1 311.2 311.5 311.5 311.7 311.7 311.5 311.2 311.0 310.7 310.4 310.1 309.9 309.6 309.5 309.5 309.5 309.7 310.3 310.8 311.3 312.1 312.9 313.5 314.0 314.3 314.1 313.7 312.9 311.7 310.0 308.4 306.0 302.6 300.2 298.3 299.0 299.3 299.6 299.8 300.1 300.4 300.6 300.9 301.1 301.4 301.6 301.9 302.1 302.4 302.6 302.9 303.1 303.4 303.7 303.9 304.2 304.4 304.7 304.9 305.2 305.5 305.7 306.0 306.2 306.5 306.7 307.0 307.3 307.5 307.8 308.0 303.5 300.7 300.5 300.4 300.2 300.3 299.8 299.4 298.8 297.7 296.4 295.9 295.0 294.3 293.9 293.5 293.3 293.3 293.4 293.4 293.3 293.3 293.4 293.5 293.6 293.7 294.0 294.1 294.2 294.3 294.6 295.1 295.5 295.7 295.7 295.5 295.2 294.9 294.5 293.7 292.9 292.3 290.8 289.3 287.0 284.8 281.2 278.5 275.9 273.3 270.2 267.1 263.0 259.1 258.8 258.5 258.2 257.9 257.7 257.4 257.1 256.8 256.5 256.2 255.9 255.7 255.4 255.1 250.8 245.2 243.4 242.2 240.6 239.3 237.4 236.4 235.8 235.2 234.9 234.9 234.5 234.3 234.0 233.4 232.9 232.6 232.2 232.1 231.8 231.6 231.7 231.3 230.8 230.7 230.5 230.5 230.7 231.0 231.4 231.8 232.2 232.7 233.1 233.4 233.8 234.3 234.9 235.1 235.1 235.0 234.7 234.5 234.1 233.6 232.9 232.5 232.1 231.9 231.9 232.0 232.1 232.4 232.8 232.9 233.4 233.6 233.6 233.8 233.7 233.2 232.3 230.9 229.3 228.1 226.7 225.0 223.6 222.1 219.1 217.2 217.1 219.5 222.0 224.4 226.9 229.4 232.0 234.6 237.2 239.8 242.5 245.2 247.9 250.7 253.5 256.3 259.1 262.0 264.9 267.9 270.9 273.9 276.9 280.0 283.1 286.3 289.4 292.7 295.9 299.2 302.5 305.9 309.3 312.7 309.3 307.1 305.1 304.3 303.8 303.6 303.8 304.4 305.0 305.8 306.9 308.3 309.3 310.6 311.3 311.5 311.5 311.3 310.9 310.6 310.1 309.9 309.9 310.1 310.4 310.9 311.2 311.5 311.3 311.1 311.3 311.1 311.0 310.7 310.2 309.9 309.6 309.3 308.9 308.8 308.8 308.8 309.2 309.8 310.5 311.1 311.7 312.3 312.6 312.7 312.2 311.0 309.2 307.0 304.7 302.4 300.2 298.0 296.6 295.8 338.0 341.6 345.2 348.8 352.4 356.1 359.8 356.0 352.0 351.3 350.9 350.8 350.6 350.6 350.4 350.4 350.6 350.4 350.6 350.5 349.8 349.3 348.8 348.2 347.9 347.5 346.6 345.6 344.8 343.3 340.5 335.1 339.5 336.6 337.9 345.4 345.3 347.6 357.6 372.7 388.4 394.6 392.4 394.3 391.4 391.3 391.3 391.3 391.3 391.1 391.1 390.9 390.9 390.9 390.9 390.9 390.6 389.9 388.8 387.5 385.8 384.2 382.2 380.3 378.9 377.8 376.8 376.2 399.8 401.7 403.5 405.4 407.3 409.2 414.2 421.1 418.0 417.8 417.8 417.3 417.7 418.0 417.8 417.7 417.3 416.9 416.4 415.8 415.3 414.7 414.2 414.0 413.8 413.5 413.0 412.6 412.2 411.0 409.8 408.2 406.1 403.5 401.9 401.0 400.3 399.9 399.5 399.2 399.1 398.9 398.5 397.9 397.1 396.6 395.9 395.5 395.2 394.6 394.2 393.9 393.5 393.1 392.8 392.4 392.0 391.5 391.0 390.7 390.2 390.0 389.8 389.5 389.5 389.5 392.0 391.3 390.3 388.7 387.0 384.4 381.8 378.9 375.9 372.6 369.1 366.0 363.0 360.1 357.0 354.4 352.4 350.6 349.1 347.9 347.2 347.2 347.0 346.7 346.1 345.3 344.5 343.6 342.7 341.8 340.7 340.0 339.4 338.8 338.4 338.3 338.4 338.9 339.7 341.0 342.8 344.6 346.4 348.3 350.2 352.0 353.3 354.2 354.9 355.1 354.9 354.7 354.5 354.2 353.9 353.4 352.9 352.4 352.1 351.6 351.1 350.8 350.5 350.4 350.4 346.9 346.7 346.7 346.9 347.1 347.5 348.0 348.3 348.8 349.2 349.6 349.8 350.0 350.4 350.9 351.3 351.6 351.8 351.8 351.5 351.1 350.6 349.8 348.8 347.6 346.5 345.0 343.2 340.7 338.5 336.3 333.8 331.9 329.7 327.4 324.5 321.6 319.3 316.9 314.4 312.7 310.9 309.9 309.0 309.0 309.1 309.3 309.6 309.8 310.1 310.6 311.2 311.7 312.2 312.7 313.1 312.9 312.9 313.1 313.1 313.0 312.7 312.6 312.2 311.7 311.1 310.5 309.8 309.5 309.4 309.5 309.9 310.2 311.0 311.1 311.3 311.7 311.8 311.8 312.0 312.1 312.0 312.0 312.1 312.0 312.1 
312.2 312.2 312.2 312.4 312.4 312.0 311.7 311.2 310.8 310.6 310.5 310.3 310.1 309.9 310.0 310.4 311.2 312.0 315.2 316.7 319.2 322.8 327.5 334.4 341.3 348.6 356.6 365.6 375.2 383.8 392.0 399.8 407.7 413.8 418.4 421.8 423.7 424.2 423.9 423.4 422.6 422.0 421.0 420.2 419.5 419.1 418.9 419.1 419.6 420.9 423.3 426.0 429.1 432.7 436.9 441.4 445.7 449.8 453.7 457.8 461.0 463.5 465.4 466.6 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.8 467.0 465.8 464.6 463.2 461.4 459.3 457.2 454.7 452.1 447.1 443.1 439.2 434.9 432.0 428.5 426.6 427.0 427.6 429.6 427.9 418.7 409.6 402.2 400.2 400.2 400.0 399.8 399.5 398.8 398.3 397.7 397.0 396.5 396.2 396.0 395.5 395.0 394.5 393.7 393.0 392.3 391.5 390.5 389.7 389.4 389.3 389.5 390.0 390.5 391.3 392.3 392.3 392.1 391.8 391.3 390.8 390.5 390.0 389.6 389.3 389.3 389.3 389.6 390.2 390.9 391.6 392.2 392.7 393.0 392.9 392.0 390.5 388.6 387.3 374.3 373.5 372.8 372.0 371.3 370.5 369.8 369.1 368.3 367.6 366.8 366.1 365.4 364.6 363.9 363.2 362.4 361.7 361.0 360.3 359.5 358.8 358.1 357.2 353.9 352.0 352.0 352.4 352.7 352.6 352.3 351.8 351.3 350.6 349.8 348.7 347.9 347.6 347.0 346.7 346.4 346.2 346.2 346.3 346.7 347.1 347.2 347.2 347.2 347.0 346.8 346.8 346.9 347.2 347.5 347.7 347.9 349.2 349.0 348.9 348.7 348.5 348.3 347.8 347.6 347.4 347.1 346.9 346.8 346.8 346.9 347.1 347.4 347.6 347.8 347.9 346.6 344.4 341.1 336.7 332.9 328.2 324.5 321.5 319.3 318.7 322.3 321.0 319.8 318.7 317.5 316.4 315.7 315.7 315.3 316.0 316.0 315.9 315.3 314.8 314.0 313.1 312.3 311.7 310.9 310.2 309.7 309.2 309.0 309.2 309.2 309.3 309.5 309.7 309.7 309.8 310.0 309.9 309.9 309.9 309.9 310.0 310.2 310.6 310.8 310.9 310.9 311.0 311.3 311.5 311.8 311.8 311.8 311.7 311.4 310.8 310.3 309.8 309.1 308.9 308.8 308.9 309.4 309.6 310.1 310.8 311.5 312.8 314.4 316.3 318.6 320.9 323.4 326.2 329.6 332.9 336.0 339.9 343.2 346.0 347.7 349.8 351.0 351.9 352.4 352.6 352.3 351.7 351.0 350.5 350.0 349.5 349.0 348.6 348.4 347.8 347.6 347.2 347.2 347.2 347.2 346.9 346.5 345.7 344.7 343.7 342.7 342.0 341.8 342.5 343.7 345.1 346.2 347.2 348.8 350.3 351.6 352.7 353.4 353.9 354.2 354.4 354.4 354.2 354.1 353.8 353.6 353.1 352.6 351.7 350.8 350.0 349.0 348.2 347.6 347.1 346.5 346.0 345.7 345.3 344.8 344.6 344.4 344.6 345.2 345.6 346.4 347.2 347.7 348.6 349.5 350.6 351.7 352.3 352.8 353.2 353.4 353.5 353.5 353.2 352.9 352.5 352.1 351.4 350.7 350.0 349.1 348.3 347.6 346.9 346.4 346.1 345.8 345.5 345.4 345.6 345.8 346.0 346.5 347.1 347.9 348.5 349.0 349.5 349.9 350.2 350.6 351.1 351.3 351.5 351.6 351.5 351.0 350.8 350.5 349.8 349.5 349.0 348.6 348.3 348.1 347.9 347.5 347.1 346.7 346.6 346.6 346.7 347.0 347.0 347.6 348.0 348.4 349.2 350.0 350.6 351.1 351.7 352.1 352.3 352.4 352.3 352.2 351.9 351.7 351.3 350.6 350.3 349.8 349.0 348.4 347.8 347.0 346.2 345.6 345.1 344.8 344.7 344.8 344.8 345.1 345.2 345.5 346.2 346.7 347.4 348.1 348.8 349.3 350.0 350.8 351.8 352.5 352.9 353.3 353.5 353.5 353.3 352.9 352.4 351.8 351.1 350.4 349.7 349.0 348.4 347.9 347.5 347.1 346.6 346.1 345.8 345.4 345.2 344.8 344.8 345.0 345.5 345.7 346.2 346.8 347.3 347.9 348.5 349.2 350.2 350.7 351.3 351.9 352.3 352.6 352.9 353.1 353.2 353.2 353.0 352.5 352.0 351.4 350.7 350.0 349.6 349.2 348.5 348.0 347.4 346.7 346.3 345.9 345.5 345.4 345.4 345.5 345.8 346.3 346.7 347.1 347.4 347.9 348.2 348.8 349.4 350.0 350.4 350.7 351.0 351.2 351.3 351.3 351.1 350.9 350.6 350.4 350.0 349.8 349.4 348.9 348.4 348.3 348.1 347.9 347.8 347.7 347.6 347.7 347.8 347.9 348.1 348.3 348.5 348.6 348.9 349.0 349.2 349.4 349.6 349.8 349.8 349.7 349.5 349.4 349.4 349.4 349.4 
349.4 349.2 349.2 349.2 349.2 349.4 349.6 349.6 349.8 349.8 349.8 350.0 350.3 350.5 350.6 350.7 350.9 351.0 351.1 351.5 351.5 351.7 351.8 351.9 351.9 351.9 351.7 351.5 351.3 351.3 351.3 351.3 351.5 351.5 351.7 351.9 352.1 352.3 352.6 352.7 353.0 353.1 353.1 352.9 352.7 352.3 351.8 351.3 350.7 350.1 349.6 349.2 349.2 349.6 350.6 351.8 353.4 356.4 359.9 363.6 367.8 372.6 377.9 383.3 389.0 395.0 398.8 402.3 404.6 407.5 408.8 409.5 409.2 408.5 407.2 405.1 402.6 399.7 396.9 394.3 391.7 389.8 387.7 385.0 383.7 383.3 383.1 383.7 384.4 385.5 387.3 389.2 391.0 392.9 395.3 398.0 400.0 402.5 404.2 405.5 406.3 406.3 405.6 404.8 403.9 402.3 400.5 398.5 396.6 395.1 394.4 393.8 393.2 392.5 392.1 391.5 391.0 390.2 389.6 388.6 388.4 388.7 389.7 391.1 393.0 395.3 397.8 401.0 406.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4", - "input_type": "phoneme", - "offset": 200.414 + "f0_timestep": "0.005" }, { + "offset": 208.69, "text": "AP 你 的 生 命 给 了 我 一 半 SP", "ph_seq": "AP n i d e sh eng m ing g ei l e w o y i b an SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.1719999 0.1719999 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127002 0.044998 0.10625 0.06575 0.128002 0.044998 0.127002 0.044998 0.284987 0.060013 0.127002 0.044998 0.473002 0.044998 0.112002 0.059998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.127 0.045 0.1062 0.0658 0.128 0.045 0.127 0.045 0.285 0.06 0.127 0.045 0.473 0.045 0.112 0.06 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.172 0.518 0.172 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 267.0 262.2 262.2 262.2 262.2 262.2 262.2 262.2 262.3 262.4 262.4 262.4 262.4 262.4 262.4 262.4 262.5 262.5 262.5 262.5 262.6 262.8 262.8 262.8 262.9 263.0 262.1 260.1 257.1 254.0 251.5 249.8 249.8 250.8 253.2 256.8 262.3 268.1 274.3 281.0 288.0 295.3 301.2 306.0 310.2 312.0 312.7 312.5 312.4 312.1 311.8 311.5 311.1 310.9 310.8 310.8 310.8 310.9 311.0 311.2 311.5 311.7 311.7 311.2 309.3 305.6 301.8 299.1 323.3 326.4 329.5 332.7 335.8 339.0 342.3 345.5 348.8 352.2 355.5 358.9 355.3 353.0 352.2 351.5 351.2 351.0 351.0 351.0 351.0 350.8 350.4 350.2 350.0 349.5 348.9 348.0 347.0 345.8 344.4 342.6 340.9 339.3 337.6 335.5 334.4 333.1 331.5 330.6 328.7 326.5 324.2 322.4 321.4 321.0 320.0 318.3 316.4 314.7 313.4 312.8 312.2 311.6 311.2 310.8 310.5 310.2 310.2 310.2 310.5 310.7 310.9 311.1 311.3 312.0 312.6 313.2 313.7 313.5 312.7 311.2 310.2 309.5 321.7 337.9 354.8 372.7 391.4 411.1 431.7 445.3 447.6 451.1 453.1 455.5 457.6 460.0 462.7 464.7 466.7 467.2 467.2 466.8 466.3 465.8 465.1 464.4 463.7 463.2 463.0 463.3 463.6 464.3 464.7 465.4 465.6 465.8 466.3 466.8 467.3 467.8 468.3 468.8 469.1 469.2 469.3 469.0 468.3 467.6 466.2 465.0 463.1 460.5 457.3 453.9 450.5 446.1 440.9 435.1 426.7 414.0 403.3 393.4 383.7 373.6 363.2 352.9 340.7 331.8 325.2 319.0 312.1 307.3 304.8 303.5 302.9 303.2 305.7 306.8 306.9 307.4 307.2 307.4 308.3 
309.2 310.3 311.3 312.2 312.9 313.5 313.5 313.1 312.5 311.4 310.2 309.3 308.4 306.8 306.0 304.8 303.4 302.0 300.7 299.4 297.6 295.9 294.6 293.2 291.9 291.0 290.5 290.5 291.0 291.5 292.0 292.8 293.2 293.8 294.3 294.9 295.4 295.9 296.4 296.7 297.0 297.1 297.4 297.3 297.4 297.1 296.7 296.3 296.0 295.7 295.2 294.7 294.2 293.7 293.4 293.2 292.9 292.6 292.2 291.8 291.6 291.6 291.6 292.0 292.4 292.6 293.0 293.3 293.6 293.9 294.2 294.3 294.6 294.7 294.6 294.9 294.7 294.9 294.9 294.5 294.4 294.2 294.1 294.0 294.0 294.1 294.2 294.1 294.0 294.0 293.8 294.0 294.0 294.0 294.0 293.9 294.1 294.2 294.2 294.2 294.1 294.0 294.0 294.1 294.2 294.2 293.8 293.8 293.5 293.0 292.6 291.9 291.6 291.1 290.9 290.6 289.9 288.8 287.8 286.7 286.0 285.1 283.8 281.2 280.0 286.3 288.8 292.8 298.8 300.9 302.3 304.0 305.8 307.6 309.4 310.9 314.7 314.7 314.7 314.7 314.8 315.0 315.1 315.1 315.1 315.3 314.9 313.9 312.4 310.4 308.0 305.1 302.4 300.0 297.8 295.7 294.3 293.6 311.8 315.2 318.6 322.0 325.5 329.1 332.6 336.2 339.9 343.3 345.6 348.1 349.4 351.5 353.0 353.9 354.4 354.9 355.1 354.4 353.2 351.6 349.5 347.6 345.9 344.0 342.1 340.8 339.5 338.2 337.5 337.0 337.1 337.4 338.2 339.1 340.7 342.1 343.4 345.0 346.5 347.6 348.7 349.8 350.8 351.5 351.9 352.1 351.9 351.7 351.3 350.3 349.2 348.0 346.5 345.1 343.9 342.8 341.5 340.0 338.5 337.7 336.8 336.4 336.4 336.2 336.6 337.5 338.6 340.0 341.6 343.0 344.4 345.8 347.5 348.7 349.8 350.8 351.7 351.5 350.6 349.6 348.6 347.8 347.1 346.7 346.4 346.2 346.2 345.6 344.3 340.8 332.0 321.0 309.3 308.7 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6", - "input_type": "phoneme", - "offset": 208.69 + "f0_timestep": "0.005" }, { + "offset": 211.448, "text": "AP 你 的 爱 也 给 了 我 一 半 SP", "ph_seq": "AP n i d e ai y E g ei l e w o y i b an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.173 0.173 0.172 0.172 0.172 0.173 0.173 0.345 0.345 0.1720001 0.1720001 0.517 0.517 0.1730001 0.1730001 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.231002 0.044998 0.127987 0.045013 0.127002 0.171985 0.045013 0.128002 0.044998 0.300002 0.044998 0.127002 0.044998 0.472002 0.044998 0.113003 0.059998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.231 0.045 0.128 0.045 0.127 0.172 0.045 0.128 0.045 0.3 0.045 0.127 0.045 0.472 0.045 0.113 0.06 0.517 0.069", + "ph_num": "2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.173 0.172 0.172 0.173 0.345 0.172 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 239.6 238.2 238.2 238.6 239.3 240.1 240.5 240.3 239.7 238.3 235.4 232.9 230.9 229.1 226.2 225.1 225.3 226.1 227.3 228.8 230.2 231.5 232.3 232.9 233.3 233.1 232.7 231.9 230.7 228.8 226.7 224.9 221.5 218.5 216.3 216.4 216.9 223.0 234.1 245.8 258.0 270.9 284.4 298.6 308.9 301.6 302.9 305.1 305.5 307.1 308.5 309.4 310.0 310.4 310.8 311.0 311.2 311.3 311.3 311.5 311.7 311.7 311.8 312.1 312.2 312.3 312.3 312.0 311.7 311.2 310.8 310.4 310.0 309.6 309.5 310.3 311.8 316.5 320.8 323.7 327.0 329.7 333.3 336.7 340.3 343.0 345.1 347.1 
348.7 349.9 350.5 350.8 350.5 349.8 349.0 347.4 346.1 344.2 342.2 340.3 338.3 336.7 334.7 332.4 330.2 327.5 323.4 318.0 315.8 314.0 311.1 309.7 309.1 308.9 308.9 309.2 309.6 310.2 310.9 311.3 311.8 312.4 313.0 313.0 312.4 310.2 308.4 307.3 306.1 304.3 305.6 309.0 305.5 303.1 305.3 308.2 311.1 314.1 317.1 320.0 323.1 326.1 329.2 332.3 335.5 338.6 341.8 345.1 348.3 349.4 349.6 350.5 351.3 352.1 353.3 353.8 354.1 354.1 353.7 353.5 353.2 352.5 351.4 350.2 349.4 348.5 347.7 347.3 346.9 346.7 346.9 347.1 347.4 347.9 348.2 348.7 349.1 349.5 349.8 350.0 350.3 350.6 350.9 351.3 351.5 351.7 351.7 351.7 351.6 351.3 350.9 350.4 349.8 349.0 348.0 346.9 345.5 344.1 342.6 340.6 337.9 335.6 333.0 330.7 328.5 324.3 319.5 316.4 313.0 310.1 306.3 303.8 302.3 301.6 301.7 302.5 306.0 312.3 315.3 314.7 314.6 313.1 312.2 312.4 312.4 312.5 312.8 312.9 312.9 312.7 312.3 311.8 311.0 309.9 309.1 308.1 306.2 304.7 303.3 301.3 299.5 297.9 296.9 296.3 296.2 296.6 297.1 297.6 298.0 297.7 297.5 297.1 296.7 296.7 296.7 296.7 296.7 296.7 296.6 296.3 296.3 296.3 296.2 296.3 296.4 296.3 296.1 295.7 294.9 294.0 293.3 292.9 292.5 292.2 292.0 291.9 291.8 291.8 292.0 292.2 292.3 292.3 292.1 292.1 292.3 292.5 293.0 293.3 293.3 293.5 293.6 293.8 294.1 294.3 294.6 294.9 295.0 294.9 294.8 294.7 294.6 294.3 293.9 293.7 293.4 293.2 293.2 293.3 293.4 293.5 293.5 293.5 293.4 293.3 293.3 293.3 293.3 293.5 293.7 293.7 293.7 293.7 293.8 293.9 294.0 294.2 294.3 294.3 294.3 294.2 294.0 293.8 293.3 293.0 292.6 292.6 292.5 292.6 292.8 292.6 292.8 293.0 293.0 293.0 292.9 292.9 293.3 294.0 295.1 296.0 297.2 299.3 300.8 302.4 304.3 305.9 307.4 308.6 309.6 310.4 310.9 311.4 309.9 309.7 309.4 309.0 308.4 307.7 306.9 306.1 305.3 304.4 303.4 302.6 301.9 301.4 300.9 300.5 300.5 300.5 300.7 301.1 301.5 302.3 303.1 304.0 305.0 306.0 307.2 308.6 309.7 310.8 311.8 313.0 314.0 314.8 315.5 315.9 316.4 316.6 316.7 316.5 315.9 315.5 314.7 313.8 312.7 311.6 310.5 309.3 308.3 307.2 306.3 305.5 305.0 304.6 304.4 304.4 304.5 305.0 305.8 306.5 307.7 308.9 310.0 311.1 312.3 313.4 314.2 315.0 315.5 315.8 315.7 315.5 315.2 314.6 313.7 312.7 311.8 310.9 309.7 308.6 307.7 306.8 306.0 305.2 304.7 304.5 304.4 304.4 304.6 305.1 305.7 306.3 307.0 308.1 309.1 310.1 310.9 311.9 312.8 313.4 314.0 314.3 314.5 314.7 314.5 314.3 314.2 313.7 313.4 312.8 312.3 311.8 311.2 310.5 309.9 309.3 308.7 308.1 307.6 307.3 307.0 306.7 306.3 306.3 306.3 306.5 306.9 307.6 308.4 309.3 310.2 311.0 311.6 312.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0", - "input_type": "phoneme", - "offset": 211.448 + "f0_timestep": "0.005" }, { + "offset": 214.207, "text": "AP 夏 天 不 懂 冬 日 的 严 寒 SP", "ph_seq": "AP x ia t ian b u d ong d ong r ir d e y En h an SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.1719999 0.1719999 0.5180001 0.5180001 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.15599 0.12001 0.112002 0.059998 0.128002 0.044998 0.11748 0.05452 0.127002 0.044998 0.300002 0.044998 0.128002 0.044998 0.472002 0.044998 0.100848 0.071152 0.518 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.156 0.12 0.112 0.06 0.128 0.045 0.1175 0.0545 0.127 0.045 0.3 0.045 0.128 0.045 0.472 0.045 0.1008 0.0712 0.518 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.172 0.345 0.173 0.517 0.172 0.518 0.069", 
+ "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 278.6 262.5 262.4 262.4 262.2 262.0 261.8 261.6 261.3 261.1 261.0 260.9 260.9 260.9 260.9 261.1 261.5 261.9 262.4 262.6 262.9 262.9 262.6 261.9 261.0 259.8 258.4 257.1 255.7 254.5 253.3 252.4 251.9 276.5 280.3 284.1 288.0 291.9 295.9 300.0 304.1 307.4 307.6 308.1 308.3 309.3 309.7 310.3 311.4 312.1 312.5 312.4 312.0 311.0 309.3 307.7 306.6 306.1 305.4 303.7 302.1 300.2 298.6 297.5 301.8 307.6 313.6 319.6 325.8 332.0 350.0 349.8 349.7 349.5 349.3 349.0 348.8 348.6 348.4 348.4 348.4 348.8 349.4 349.9 350.3 350.5 350.5 350.2 349.8 349.3 348.6 347.8 347.3 346.9 346.8 346.5 345.5 344.1 342.4 341.1 328.5 330.0 332.8 335.7 338.5 314.3 314.2 314.0 313.8 313.7 313.4 313.1 312.7 312.4 312.2 312.0 311.8 311.8 312.4 311.7 310.8 310.0 309.5 309.3 310.0 310.4 310.4 310.6 310.6 310.7 310.1 308.5 306.7 302.5 300.6 307.3 318.5 333.9 350.0 366.8 384.5 403.1 422.5 442.9 464.2 467.2 465.8 463.3 462.5 462.5 462.1 462.1 462.1 462.2 462.3 461.8 461.3 460.4 459.5 458.6 457.9 457.2 456.9 456.8 457.5 458.5 459.5 460.4 461.3 462.5 463.4 464.3 465.0 465.6 466.3 467.1 467.7 468.2 468.5 469.0 469.5 469.8 469.9 469.8 469.5 468.9 467.9 467.0 465.9 464.8 463.2 461.3 459.3 454.5 453.6 451.3 447.8 442.7 436.0 428.9 421.1 412.5 402.1 392.5 383.3 374.1 364.7 355.0 347.0 339.7 333.3 326.9 321.7 317.8 314.9 312.9 311.8 311.8 311.7 311.6 311.5 311.3 311.2 311.0 310.7 310.5 310.3 310.2 310.2 310.2 309.9 309.1 307.7 305.7 302.9 300.2 297.2 294.1 290.6 287.4 284.6 282.3 280.2 278.5 277.7 302.6 303.8 303.2 301.4 301.8 302.3 302.9 303.5 303.4 303.0 302.4 301.3 299.8 298.6 297.4 296.3 295.4 294.6 293.8 292.9 292.1 291.7 291.1 290.9 290.8 290.6 290.6 290.5 290.6 290.6 290.9 291.3 291.7 292.2 292.8 293.4 293.8 294.6 295.1 295.5 295.9 296.0 296.0 295.9 295.8 295.5 295.1 294.8 294.5 294.5 294.5 294.6 294.7 294.7 294.7 294.7 294.7 294.5 294.5 294.5 294.4 294.5 294.5 294.5 294.7 294.9 294.9 295.1 295.2 295.2 295.4 295.4 295.4 295.5 295.5 295.7 295.7 295.6 295.7 295.4 294.9 294.3 293.5 292.3 291.4 290.2 289.0 287.5 286.3 285.1 282.8 281.2 279.5 277.1 273.5 269.8 267.8 267.3 268.4 272.3 278.7 286.1 292.1 296.2 300.4 304.4 305.7 307.7 308.9 310.0 310.9 311.8 311.8 311.8 311.8 311.8 311.8 311.5 310.6 309.5 307.8 305.9 304.0 302.1 300.2 298.1 296.7 295.7 295.0 294.7 319.7 323.3 326.8 330.4 334.1 337.8 341.5 345.3 349.1 353.0 356.9 356.3 355.0 356.2 357.7 358.7 359.7 360.1 360.0 359.4 358.7 358.0 357.0 355.3 353.8 352.2 350.7 349.1 348.2 347.5 347.0 346.8 346.9 347.4 348.0 348.6 349.6 350.6 351.7 353.1 354.1 355.3 356.2 356.8 357.2 357.5 357.6 357.5 357.3 356.9 356.4 355.8 355.1 354.0 353.3 352.6 351.3 350.6 349.9 349.0 348.3 347.7 347.2 346.8 346.9 347.4 347.8 348.4 349.6 350.4 351.3 352.1 352.9 353.7 354.7 355.7 356.5 357.2 357.9 358.4 358.6 356.1 355.8 355.4 354.6 353.9 352.9 352.0 350.8 349.6 348.7 347.9 347.0 346.2 345.7 345.5 345.4 345.4 345.9 346.7 347.5 348.4 349.4 350.4 351.3 351.8 352.0 352.2 351.9 351.3 350.6 350.0 349.2 348.1 347.2 346.5 345.9 345.4 289.1 289.1 289.1 289.1 289.1 289.1 289.1 289.1 289.1", - "input_type": "phoneme", - "offset": 214.207 + "f0_timestep": "0.005" }, { + "offset": 216.966, "text": "AP 未 曾 想 过 人 生 的 辛 酸 SP", "ph_seq": "AP w ei c eng x iang g uo r en sh eng d e x 
in s uan SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 G4 G4 G#4 G#4 G4 G4 G4 G4 rest", - "note_dur_seq": "0.275 0.173 0.173 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1720001 0.1720001 0.518 0.518 0.1719999 0.1719999 0.517 0.517 0.068", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.230002 0.044998 0.113002 0.059998 0.112002 0.059998 0.128002 0.044998 0.127002 0.044998 0.217162 0.127838 0.127002 0.044998 0.352991 0.165009 0.095691 0.076309 0.517 0.068", - "f0_timestep": "0.005", + "ph_dur": "0.23 0.045 0.113 0.06 0.112 0.06 0.128 0.045 0.127 0.045 0.2172 0.1278 0.127 0.045 0.353 0.165 0.0957 0.0763 0.517 0.068", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 G4 G#4 G4 G4 rest", + "note_dur": "0.275 0.173 0.172 0.173 0.172 0.345 0.172 0.518 0.172 0.517 0.068", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 269.3 235.3 235.2 234.9 234.5 233.9 233.4 232.9 232.3 231.8 231.5 231.2 230.9 231.0 231.1 231.7 232.5 233.4 234.3 235.3 236.2 236.8 237.2 238.8 238.3 237.8 237.0 235.3 233.4 231.2 228.5 225.1 221.9 218.3 215.7 215.5 218.0 225.4 233.0 240.8 248.9 257.3 266.0 274.9 284.2 293.8 303.7 307.4 306.2 306.0 307.4 309.3 310.6 311.3 309.2 309.3 309.7 310.2 310.8 311.3 311.8 312.0 312.2 312.0 311.9 311.6 311.3 311.0 310.6 310.1 309.7 309.2 309.2 309.9 312.4 315.8 320.2 325.2 330.7 336.8 341.8 345.8 348.8 350.6 350.9 350.7 350.3 349.8 349.2 348.9 348.7 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 347.8 345.6 342.1 337.9 333.0 327.9 322.3 318.1 314.9 312.7 310.9 315.7 313.7 313.3 313.2 312.7 312.3 312.0 311.7 311.5 311.2 311.1 310.9 310.9 311.1 311.1 311.1 310.9 310.3 309.5 308.6 307.0 305.8 304.5 304.0 304.2 305.7 307.8 311.2 314.3 317.9 322.8 327.8 332.8 338.5 343.9 347.0 348.9 350.4 351.7 352.1 351.9 351.5 350.9 350.4 350.0 349.6 349.0 348.3 347.5 346.6 345.7 345.1 344.4 343.5 342.6 342.1 341.6 341.3 341.3 341.4 341.9 342.6 343.3 344.6 346.0 347.0 348.1 349.2 350.5 351.0 351.7 351.0 350.0 348.4 346.4 343.8 341.0 336.7 331.2 324.5 321.5 324.8 328.1 331.4 334.7 338.1 341.5 344.9 348.4 351.9 355.5 359.1 362.7 366.3 370.0 373.8 377.5 381.3 385.2 389.1 393.0 397.0 401.0 405.0 396.2 392.2 392.1 392.4 393.0 393.7 394.2 394.3 394.5 394.4 394.2 394.0 393.7 393.4 392.9 392.5 392.0 391.2 390.5 389.7 389.2 388.6 387.4 385.0 382.2 379.5 376.9 373.2 370.6 368.1 362.9 361.5 375.2 389.5 404.2 409.9 414.1 418.0 421.4 423.5 424.4 424.5 424.5 424.4 423.8 422.8 421.8 419.9 418.3 416.2 414.1 412.2 410.1 408.3 407.2 406.5 406.0 405.9 406.2 406.9 407.7 408.6 409.6 410.8 412.4 413.6 414.7 415.8 416.8 417.5 417.9 418.2 418.3 418.4 418.4 418.1 417.9 417.7 417.1 416.5 416.3 415.7 415.2 414.6 413.9 413.2 412.3 411.6 411.1 410.7 410.4 410.6 410.8 411.7 412.7 413.9 414.5 415.3 416.1 416.9 418.1 419.1 419.8 420.0 419.7 418.8 417.9 416.0 413.2 409.0 405.1 400.1 398.8 398.6 398.3 398.0 397.7 397.4 397.1 396.8 396.6 396.3 396.0 395.7 395.4 395.1 394.8 394.6 394.3 394.0 393.7 393.4 393.1 392.8 392.6 392.3 392.0 391.7 391.7 392.2 393.4 394.0 394.5 394.7 394.7 394.3 393.9 393.4 391.8 390.3 388.6 386.4 383.7 380.2 375.7 369.9 363.8 358.2 354.9 353.9 357.0 361.2 365.4 369.6 373.9 378.3 382.7 387.2 391.7 396.3 400.9 405.6 410.3 415.1 414.5 412.9 411.1 410.4 409.4 408.2 
407.0 405.8 404.4 402.9 401.5 400.1 398.2 396.7 395.2 393.4 392.0 390.9 390.5 390.4 390.4 390.6 391.1 392.0 393.0 394.0 395.1 396.1 397.1 397.9 398.6 399.4 399.8 400.2 400.5 400.3 400.1 399.6 399.0 398.4 397.8 397.1 396.3 395.5 394.5 393.6 393.0 392.2 391.5 390.9 390.5 390.2 390.0 389.8 389.9 390.1 390.4 391.1 391.8 392.4 393.4 394.2 395.1 395.9 396.8 397.9 398.7 399.5 400.1 400.5 400.7 400.7 400.6 400.3 399.8 399.2 398.2 397.0 395.9 394.6 393.8 393.2 392.4 392.1 391.8 391.6 391.3 391.0 390.8 390.4 390.0 389.5 388.8 387.5 385.9 385.9 386.8 388.2 390.6 391.4 392.1 389.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2 388.2", - "input_type": "phoneme", - "offset": 216.966 + "f0_timestep": "0.005" }, { + "offset": 219.724, "text": "AP 曾 经 有 过 多 少 的 遗 憾 SP", "ph_seq": "AP c eng j ing y ou g uo d uo sh ao d e y i h an SP", - "note_seq": "rest C4 C4 D#4 D#4 F4 F4 D#4 D#4 A#4 A#4 D#4 D#4 D4 D4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.173 0.173 0.3439999 0.3439999 0.1730001 0.1730001 0.517 0.517 0.1730001 0.1730001 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.141005 0.134995 0.112002 0.059998 0.141613 0.031387 0.105563 0.066437 0.113002 0.059998 0.224005 0.119995 0.127987 0.045013 0.472002 0.044998 0.113003 0.059998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.141 0.135 0.112 0.06 0.1416 0.0314 0.1056 0.0664 0.113 0.06 0.224 0.12 0.128 0.045 0.472 0.045 0.113 0.06 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 D#4 A#4 D#4 D4 D#4 F4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.173 0.344 0.173 0.517 0.173 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 277.3 261.2 261.2 261.2 261.3 261.3 261.4 261.6 261.7 261.8 261.9 262.1 262.2 262.3 262.4 262.4 262.5 262.3 261.6 260.7 259.4 258.0 256.4 254.4 252.9 251.3 249.9 248.6 247.7 247.1 273.8 275.9 278.0 280.1 282.2 284.3 286.4 288.6 290.7 292.9 295.1 296.5 303.2 307.0 308.1 309.3 310.2 310.5 310.7 310.9 310.8 311.2 311.3 311.5 311.8 312.4 312.8 313.4 314.5 315.2 315.9 317.0 318.7 320.4 322.2 324.2 326.6 328.4 330.0 331.4 334.8 336.4 338.6 340.7 342.6 345.1 347.5 349.7 350.9 351.3 349.8 349.8 349.8 349.9 350.0 350.0 350.2 350.2 350.2 350.4 350.0 348.9 346.9 344.1 341.2 337.7 334.0 329.9 326.3 323.2 320.6 318.4 316.8 316.2 319.8 314.2 314.0 313.7 313.0 312.4 311.7 311.0 310.4 309.9 309.7 309.7 309.9 310.3 310.9 311.5 312.4 313.0 313.6 313.9 314.2 313.7 312.8 311.1 308.9 306.5 304.0 301.4 298.5 296.3 294.5 293.2 292.5 356.6 372.6 389.2 406.6 424.8 443.8 463.6 474.1 470.2 469.0 468.2 467.6 467.8 467.8 468.1 467.8 468.0 468.1 467.9 467.5 467.0 466.2 465.4 464.9 464.4 464.1 464.4 465.1 466.4 468.3 470.1 471.4 472.7 474.6 474.4 474.3 474.1 473.8 473.4 473.1 472.6 472.1 471.9 471.5 471.1 470.9 470.8 470.8 470.4 469.2 466.7 461.9 456.8 451.1 444.9 437.9 430.6 424.4 418.7 413.9 409.3 406.4 404.8 367.0 363.7 360.5 357.3 354.1 350.9 347.8 344.7 341.6 338.6 314.2 314.0 313.7 313.2 312.6 311.8 310.8 310.1 309.3 308.6 307.9 307.5 307.3 307.2 307.4 307.8 308.5 309.3 310.1 310.7 311.0 310.9 310.2 309.2 307.9 306.1 303.7 301.5 299.2 296.8 294.3 291.9 290.0 288.3 287.0 285.9 285.4 300.4 296.2 296.6 
297.8 298.6 299.9 300.8 300.8 300.4 300.1 299.4 298.8 298.0 297.6 297.2 296.6 295.7 294.7 294.2 293.5 292.9 292.3 291.7 291.2 290.9 290.8 290.6 290.5 290.6 290.6 291.0 291.5 291.6 292.5 292.6 293.1 293.7 294.2 294.6 294.9 294.9 295.0 294.8 294.7 294.9 294.5 294.3 294.3 294.0 293.9 293.7 293.6 293.5 293.4 293.3 293.2 293.2 293.2 293.3 293.3 293.6 293.8 294.0 294.2 294.5 294.9 295.3 295.8 296.1 294.2 294.2 294.2 294.2 294.2 294.3 294.3 294.3 294.4 294.5 294.5 294.5 294.5 294.5 294.7 294.5 294.5 294.5 294.4 294.3 294.3 294.2 294.1 294.0 293.9 293.8 293.7 293.7 293.7 293.7 293.7 294.1 295.3 297.2 299.5 302.4 305.8 308.8 311.3 313.4 315.0 315.7 315.5 315.5 315.4 315.2 315.1 315.1 315.1 315.0 314.7 314.2 313.5 312.7 311.6 310.6 309.7 308.8 308.0 307.5 307.2 307.4 308.3 310.2 314.0 318.1 322.8 328.6 334.4 339.7 344.7 349.2 353.5 355.7 356.8 357.1 356.7 356.1 355.3 354.3 353.3 351.8 350.4 348.9 347.5 346.1 344.8 343.6 342.6 341.9 341.3 341.1 341.1 341.4 341.8 342.8 344.0 345.3 346.6 348.1 349.8 351.4 352.9 354.1 355.1 355.9 356.3 355.1 355.0 354.6 354.1 353.4 352.3 351.0 349.7 348.6 347.3 345.8 344.8 343.8 342.7 342.0 341.4 341.1 340.9 340.7 340.9 341.2 341.6 342.3 343.2 344.1 345.2 346.2 347.2 348.0 349.0 349.8 350.3 351.0 351.4 351.6 351.8 351.7 351.4 350.9 350.6 350.1 349.4 348.7 347.8 346.8 345.8 345.1 344.4 343.9 343.6 343.7 343.8 344.2 344.9 345.9 346.9 347.8 348.8 349.6 350.0 350.4 350.3 350.2 350.2 349.9 349.6 349.4 349.1 348.7 348.5 348.2 348.0 347.8 347.6 347.6 298.5 298.5 298.5 298.5 298.5", - "input_type": "phoneme", - "offset": 219.724 + "f0_timestep": "0.005" }, { + "offset": 222.483, "text": "AP 最 后 全 都 微 笑 着 释 然 SP", "ph_seq": "AP z ui h ou q van d ou w ei x iao zh e sh ir r an SP", - "note_seq": "rest A#3 A#3 D#4 D#4 F4 F4 D#4 D#4 F4 F4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 0.1730001 0.1730001 0.517 0.517 0.1719999 0.1719999 0.517 0.517 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.171004 0.104996 0.112002 0.059998 0.092776 0.079224 0.128002 0.044998 0.127002 0.044998 0.22499 0.12001 0.098247 0.074753 0.352007 0.164993 0.127002 0.044998 0.517 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.171 0.105 0.112 0.06 0.0928 0.0792 0.128 0.045 0.127 0.045 0.225 0.12 0.0982 0.0748 0.352 0.165 0.127 0.045 0.517 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 D#4 F4 D#4 F4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.172 0.172 0.173 0.172 0.345 0.173 0.517 0.172 0.517 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 244.7 242.4 237.7 236.5 235.0 234.3 234.0 233.8 234.1 234.2 234.3 234.4 233.9 233.0 231.9 230.2 228.2 226.5 224.9 221.8 220.6 217.3 221.7 228.2 234.9 241.8 248.9 256.2 263.7 271.4 279.4 287.6 296.0 304.7 311.3 308.8 307.7 309.2 309.2 309.5 309.7 309.9 310.4 310.6 310.8 310.9 311.1 310.7 309.8 308.4 306.5 304.2 302.0 300.0 298.3 296.7 295.8 306.1 310.3 314.5 318.8 323.2 327.6 332.0 336.5 341.1 345.8 350.5 355.2 360.1 356.5 354.1 353.0 352.2 351.6 351.2 350.7 350.3 349.9 349.5 349.2 348.9 348.5 348.1 347.6 347.1 346.2 344.6 343.4 341.4 339.4 337.2 333.9 329.4 324.4 319.4 315.2 309.6 305.9 301.9 296.3 293.8 
307.7 320.4 317.5 316.6 316.6 316.2 315.4 314.3 313.2 311.9 310.8 310.1 309.3 309.0 308.8 308.4 308.4 308.3 308.3 308.1 308.2 308.4 308.6 309.1 309.7 310.5 311.9 313.8 315.9 318.9 321.4 324.0 328.3 331.5 334.3 337.7 340.3 342.9 345.2 347.1 349.0 350.5 351.1 351.3 351.1 350.2 349.6 348.6 347.8 347.0 346.3 345.6 344.9 344.3 343.7 343.2 343.0 342.9 343.1 343.3 343.8 344.8 346.0 347.0 348.2 349.8 350.5 350.9 350.9 350.4 349.3 347.7 345.2 341.7 337.6 333.5 327.7 320.4 314.0 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 313.7 312.2 312.2 312.2 312.2 312.2 312.2 312.2 312.2 312.0 311.6 311.1 310.6 310.3 309.5 307.9 305.7 302.4 299.5 296.9 294.6 292.9 291.8 286.9 287.9 288.9 289.9 290.9 292.0 293.0 294.0 295.0 296.1 297.1 298.1 298.5 296.4 296.7 296.4 296.7 297.6 297.9 297.9 297.8 297.5 296.9 296.2 295.7 295.0 294.2 293.5 292.7 292.0 291.4 291.1 290.9 290.5 290.3 290.2 290.2 290.5 290.6 290.8 291.3 291.7 292.4 292.8 293.0 293.7 294.0 294.3 294.9 294.9 294.9 294.9 294.8 294.6 294.4 294.2 293.8 293.6 293.4 293.2 293.1 293.0 292.8 292.8 292.5 292.3 292.1 291.7 291.5 291.6 291.9 292.5 293.2 294.0 294.8 295.5 296.2 296.9 297.2 297.2 297.0 296.0 294.6 292.4 289.7 284.9 280.6 277.4 278.7 285.1 285.8 286.5 287.2 287.9 288.7 289.4 290.1 290.8 291.5 292.3 293.0 293.7 294.4 295.2 295.9 296.6 297.4 298.1 298.8 299.6 300.3 301.1 301.8 302.5 303.3 304.0 308.1 310.4 310.8 310.9 311.2 311.5 311.6 311.5 311.2 310.9 310.3 309.5 308.9 308.0 306.4 304.9 303.5 302.0 301.0 300.8 301.1 301.8 303.0 304.0 305.6 307.8 309.0 310.2 311.3 311.9 312.5 313.2 313.7 314.2 314.5 314.6 314.7 314.5 314.4 314.3 314.2 314.0 313.7 313.3 312.6 312.0 311.5 310.6 309.8 309.0 307.9 306.8 306.0 305.1 304.3 303.4 302.9 302.3 302.3 302.6 302.9 303.4 304.6 305.7 307.2 308.8 310.1 311.3 312.4 313.6 314.8 315.8 316.6 317.1 317.3 317.1 317.1 316.5 315.8 315.0 313.7 312.2 311.0 310.1 309.0 307.4 306.1 305.0 303.8 303.1 302.6 301.9 301.7 301.6 301.9 302.6 303.5 304.5 305.9 307.3 308.5 309.6 310.7 312.0 312.9 313.8 314.0 314.0 313.9 313.3 312.7 312.2 311.7 311.1 310.2 309.4 308.6 307.7 306.8 306.5 306.3 306.6 307.0 307.4 307.7 307.4 306.7 305.0 303.0 300.3 296.2 289.1 279.2 272.9 270.0 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5 271.5", - "input_type": "phoneme", - "offset": 222.483 + "f0_timestep": "0.005" }, { + "offset": 225.241, "text": "AP 谢 谢 你 让 我 成 为 最 幸 福 的 小 孩 SP", "ph_seq": "AP x ie x ie n i r ang w o ch eng w ei z ui x ing f u d e x iao h ai SP", - "note_seq": "rest F4 F4 D#4 D#4 A#3 A#3 G4 G4 G4 G4 G#4 G#4 G4 G4 F4 F4 D#4 D#4 D#4 D#4 D4 D4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.276 0.345 0.345 0.69 0.69 0.345 0.345 0.344 0.344 0.345 0.345 0.345 0.345 0.1719999 0.1719999 0.5180001 0.5180001 0.3449998 0.3449998 0.6890001 0.6890001 0.3449998 0.3449998 0.1720004 0.1720004 0.3449998 0.3449998 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.126006 0.149994 0.225005 0.119995 0.540006 0.149994 0.300002 0.044998 0.299002 0.044998 0.22499 0.12001 0.300002 0.044998 0.098056 0.073944 0.352992 0.165009 0.224989 0.12001 0.643987 0.045013 0.225005 0.119995 0.10329 0.06871 0.345 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.126 0.15 0.225 0.12 0.54 0.15 0.3 0.045 0.299 0.045 0.225 0.12 0.3 0.045 0.0981 0.0739 0.353 0.165 0.225 0.12 0.644 0.045 0.225 0.12 0.1033 0.0687 0.345 0.069", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": 
"rest F4 D#4 A#3 G4 G4 G#4 G4 F4 D#4 D#4 D4 D#4 D#4 rest", + "note_dur": "0.276 0.345 0.69 0.345 0.344 0.345 0.345 0.172 0.518 0.345 0.689 0.345 0.172 0.345 0.069", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 356.6 322.5 323.1 324.5 327.4 330.4 333.8 337.3 341.1 344.8 347.7 350.0 351.4 352.1 351.4 350.2 348.4 346.9 346.1 346.0 346.1 346.3 346.6 347.0 347.4 347.7 348.0 348.3 348.4 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.6 348.4 347.7 346.2 344.2 341.7 339.0 335.6 332.2 329.2 326.2 323.4 320.8 319.3 318.2 317.7 317.6 317.3 316.9 316.6 316.3 315.9 315.6 315.3 314.9 314.6 314.3 314.0 313.6 313.3 313.0 312.6 312.3 312.0 311.7 312.6 310.9 311.5 311.9 312.7 313.4 313.6 313.7 313.5 313.5 313.7 313.8 313.8 313.5 313.0 312.4 311.6 311.3 310.6 310.2 309.8 309.5 309.4 309.5 309.6 309.7 309.9 310.3 310.7 311.0 311.3 311.3 311.5 311.6 311.8 312.1 312.2 312.2 312.3 312.3 312.3 312.5 312.6 312.6 312.5 312.4 312.2 312.0 311.7 311.5 311.5 311.3 311.3 311.2 311.0 311.7 311.4 310.7 309.9 308.7 307.3 305.5 303.1 300.9 298.3 295.6 292.4 289.1 286.0 282.7 279.4 275.4 272.1 268.8 265.5 262.1 258.6 255.7 252.7 249.5 247.0 244.7 242.6 240.5 238.6 237.0 235.8 234.8 233.9 233.4 233.1 232.7 233.1 233.6 234.0 234.4 234.6 234.8 234.7 234.5 234.2 233.8 233.4 232.9 232.3 231.9 231.5 231.2 230.9 230.8 230.8 230.8 230.8 230.8 230.9 230.9 230.9 231.0 231.1 231.1 231.2 231.2 231.2 231.3 231.3 231.3 231.3 231.3 231.3 231.3 231.3 231.5 231.5 231.5 231.5 231.6 231.6 231.7 231.7 231.8 231.9 232.0 232.0 232.1 232.1 232.1 232.1 232.2 232.2 232.2 232.2 232.2 232.0 231.7 231.4 231.3 231.2 231.3 231.4 231.6 231.7 231.7 232.0 232.1 232.4 232.5 232.7 233.1 233.4 233.7 234.1 234.6 234.9 235.0 235.0 234.7 234.4 234.0 233.8 232.9 231.7 230.5 228.9 227.5 225.9 224.0 221.9 220.1 219.2 219.1 220.1 221.3 223.1 225.7 228.0 230.9 233.7 238.4 242.8 246.3 250.8 257.8 266.9 276.6 286.0 299.8 309.3 317.5 326.6 336.0 339.5 344.1 347.6 351.0 353.6 355.8 358.6 361.6 364.2 366.6 368.4 369.9 371.3 372.8 374.5 376.5 378.1 380.2 381.3 382.5 384.4 385.9 387.3 388.8 390.3 391.5 392.7 393.7 394.3 394.5 394.7 394.5 394.5 394.3 394.0 393.8 393.7 393.4 393.1 392.7 392.3 391.9 391.8 391.7 391.5 391.1 390.3 389.3 388.2 387.2 386.7 386.4 386.5 386.8 387.7 389.1 390.3 391.9 393.5 394.4 395.2 396.3 397.2 397.0 397.0 396.6 396.2 395.9 395.5 395.0 394.3 393.5 392.8 391.9 391.0 390.3 389.7 388.7 387.9 387.6 386.8 386.4 386.4 386.5 386.9 387.4 388.4 389.6 390.8 391.6 392.4 392.9 392.9 393.1 392.4 391.5 390.0 387.8 384.6 380.9 377.3 373.2 368.0 365.3 367.9 371.9 366.0 361.2 363.4 365.6 367.9 370.2 372.5 374.8 377.1 379.4 381.8 384.1 386.5 388.9 391.3 393.7 396.2 398.6 401.1 403.6 406.1 408.6 411.1 413.7 416.2 418.8 421.4 424.0 426.6 427.1 426.1 426.7 427.6 427.0 426.4 426.0 424.8 423.8 422.1 420.9 419.6 418.3 417.0 415.8 414.6 413.7 413.2 412.7 412.7 413.0 413.5 413.9 414.7 415.1 415.9 416.5 416.9 417.1 417.3 417.5 417.5 417.5 417.5 417.5 417.5 417.4 417.2 417.0 417.0 416.8 417.0 416.8 416.6 416.2 415.6 414.9 414.1 413.2 412.2 411.4 410.6 409.8 408.4 407.0 405.6 405.2 404.5 403.5 402.1 400.5 398.7 396.8 395.1 393.1 391.6 390.5 389.8 389.5 389.4 389.3 389.2 389.0 388.8 388.4 388.2 388.0 387.8 387.7 387.7 387.4 386.6 385.3 383.3 380.2 
377.1 373.6 370.0 366.0 361.8 358.4 354.9 352.0 349.2 347.1 345.8 345.1 344.5 345.0 345.5 346.0 346.5 347.0 350.4 350.2 351.0 351.6 352.9 354.7 355.7 356.4 356.9 357.0 356.7 356.1 355.3 353.8 352.2 350.8 349.4 348.4 347.8 347.4 347.2 347.4 347.5 347.9 348.3 348.6 349.0 349.4 350.0 350.3 350.6 350.9 351.0 351.0 351.0 351.2 351.3 351.0 350.7 350.5 350.3 350.2 350.2 350.0 349.8 349.3 348.8 348.6 348.4 348.1 347.8 347.8 348.0 348.3 348.8 349.4 350.4 351.9 353.2 354.4 355.5 356.3 356.5 355.9 354.2 352.7 350.6 348.4 344.3 338.1 335.7 336.0 335.7 335.1 334.4 333.7 333.0 332.3 331.6 330.9 330.2 329.6 328.9 328.2 327.5 326.8 326.2 325.5 324.8 324.1 323.5 322.8 322.1 321.5 320.8 320.2 319.5 318.8 318.2 317.5 316.9 316.2 315.5 316.5 317.7 317.6 317.3 316.9 316.7 315.8 315.4 314.9 314.2 313.8 313.4 312.7 312.3 311.8 311.2 310.8 310.4 310.0 309.5 309.1 308.7 308.6 308.6 309.1 309.5 309.9 310.5 311.3 311.9 312.4 312.7 312.6 312.6 312.5 312.3 312.1 311.9 311.0 309.3 307.4 304.7 301.8 298.5 296.3 293.9 292.0 292.9 294.5 296.1 297.7 299.4 301.0 302.6 304.3 305.9 307.6 309.3 311.0 312.7 314.4 316.1 317.8 319.6 321.3 323.0 324.8 326.6 328.4 327.4 323.0 321.0 320.4 319.9 319.3 319.3 319.1 318.7 318.0 317.3 316.6 315.7 314.7 313.8 312.6 311.2 310.0 308.9 308.0 307.7 307.7 307.7 307.9 308.0 308.2 308.5 309.0 309.7 310.4 311.3 312.1 312.9 313.7 314.1 314.7 313.6 313.4 313.2 312.9 312.5 312.0 311.5 310.9 310.5 310.2 309.8 309.7 309.8 311.7 311.8 312.7 314.0 315.4 317.2 319.5 322.2 324.7 327.4 330.6 333.1 335.6 337.9 340.1 342.0 343.2 344.2 344.6 344.6 344.0 342.9 341.0 338.9 336.5 333.8 330.6 327.4 324.5 321.8 319.1 316.4 314.4 312.9 311.8 311.2 311.1 311.1 311.1 311.3 311.5 311.7 311.9 312.1 312.2 312.4 312.6 312.6 311.8 311.6 311.4 311.2 311.1 312.2 312.4 312.7 312.9 312.9 312.8 312.4 311.7 310.8 309.9 309.3 308.7 307.9 306.9 305.6 304.1 301.7 300.4 300.1 299.9 300.0 301.2 294.7 283.7 279.4 281.7 287.2 288.2 289.2 290.3 291.3 292.4 293.4 294.5 295.5 296.6 297.7 297.6 296.9 296.4 296.0 295.8 295.6 294.9 294.3 293.8 293.2 293.2 293.2 292.8 292.7 292.4 292.1 291.9 291.6 291.5 291.5 291.5 291.5 291.5 291.8 292.5 292.8 293.5 294.3 294.9 295.4 295.5 295.1 294.2 292.7 290.5 287.7 284.7 281.1 275.6 271.7 271.8 273.0 274.3 275.5 276.8 278.1 279.3 280.6 281.9 283.2 284.5 285.8 287.1 288.4 289.7 291.1 292.4 293.7 295.1 296.4 297.8 299.2 300.9 301.2 301.9 303.1 304.7 306.7 308.6 310.4 312.4 313.6 314.3 314.7 314.6 314.4 314.3 314.0 313.7 313.4 313.3 313.1 313.0 312.7 312.0 311.0 309.6 308.0 306.3 304.5 302.8 300.9 299.4 298.2 297.3 296.8 301.6 303.6 305.7 307.8 309.9 312.0 314.3 316.9 316.6 318.0 317.8 318.0 318.0 317.8 317.5 317.3 316.7 315.7 314.7 313.8 312.6 311.1 310.2 309.3 308.3 307.7 307.6 307.7 307.7 307.9 308.1 308.7 309.2 309.6 310.2 310.9 311.6 312.3 312.7 313.2 313.8 314.2 314.8 315.3 315.7 315.8 315.8 315.7 315.2 314.4 313.5 312.6 311.5 310.2 309.5 309.1 308.6 308.6 308.4 309.0 309.4 310.2 311.0 311.7 312.0 311.8 311.3 310.4 309.2 309.0 309.0 309.2 311.9 310.2 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3 308.3", - "input_type": "phoneme", - "offset": 225.241 + "f0_timestep": "0.005" }, { + "offset": 230.586, "text": "AP 感 谢 你 的 爱 SP", "ph_seq": "AP g an x ie n i d e ai SP", - "note_seq": "rest D#4 D#4 F4 F4 D#4 D#4 D4 D4 A#4 rest", - "note_dur_seq": "0.276 0.172 0.172 0.173 0.173 0.172 0.172 0.345 0.345 2.759 0.069", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.216002 0.059998 0.103168 0.068832 0.128002 0.044998 0.127002 0.044998 0.285002 2.818998 0.069", - 
"f0_timestep": "0.005", + "ph_dur": "0.216 0.06 0.1032 0.0688 0.128 0.045 0.127 0.045 0.285 2.819 0.069", + "ph_num": "2 2 2 2 1 1 1", + "note_seq": "rest D#4 F4 D#4 D4 A#4 rest", + "note_dur": "0.276 0.172 0.173 0.172 0.345 2.759 0.069", + "note_slur": "0 0 0 0 0 0 0", "f0_seq": "309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.9 309.7 309.4 309.3 309.1 309.5 309.8 310.2 310.8 311.3 311.2 310.6 309.5 307.5 304.5 301.2 294.9 290.5 291.9 295.9 300.0 304.1 308.3 312.5 316.8 321.1 325.5 330.0 334.6 339.2 343.8 348.5 353.3 358.2 359.1 355.6 354.3 353.0 351.9 351.1 350.4 349.9 349.6 349.5 349.2 348.9 348.4 347.5 346.4 345.3 344.0 342.5 340.9 338.4 336.3 333.8 331.6 329.6 327.3 324.5 321.7 319.1 317.1 315.7 314.0 312.6 311.8 311.5 312.2 312.6 312.9 313.1 312.4 312.2 312.1 311.8 311.7 311.5 311.5 311.2 310.6 309.8 308.9 308.2 307.4 306.3 304.4 301.3 298.5 294.6 287.6 281.1 274.2 267.8 261.9 256.3 250.2 249.2 256.4 263.9 271.5 279.4 287.5 295.8 304.4 298.6 298.5 298.2 298.2 298.8 299.0 298.8 298.4 297.9 297.3 296.6 295.8 295.2 294.2 293.5 293.0 292.3 292.0 291.5 291.2 291.1 291.0 291.0 291.2 291.5 291.9 292.4 293.2 293.9 294.3 294.9 295.0 295.0 295.0 295.0 295.1 295.2 295.4 295.7 295.9 295.9 295.7 296.3 295.6 294.7 293.6 292.5 291.5 291.0 292.9 300.8 315.0 336.0 364.4 403.9 443.5 482.7 520.1 553.1 577.2 585.0 583.6 580.1 574.0 566.0 557.3 547.4 536.7 523.3 511.8 501.0 490.9 481.1 472.4 465.9 460.8 457.7 456.3 456.2 456.8 457.8 459.1 460.8 462.1 463.3 463.8 464.2 464.0 464.0 463.8 463.4 463.1 462.8 462.3 461.9 461.6 461.2 460.9 460.7 460.5 460.5 460.4 460.3 460.3 461.2 462.0 462.8 463.7 464.8 466.1 467.0 468.7 469.4 470.1 470.8 471.0 471.0 471.2 471.3 471.2 471.0 470.8 470.1 469.5 469.0 468.3 467.6 467.0 466.3 465.4 464.7 464.3 464.3 464.3 464.3 464.3 464.1 463.9 463.8 464.1 464.3 464.6 464.6 464.6 464.8 465.4 465.8 466.4 467.1 467.8 468.3 468.9 469.4 469.7 469.8 469.9 469.9 469.9 469.7 469.6 469.1 469.1 469.0 468.9 469.0 469.0 468.9 468.5 468.1 467.7 467.3 466.6 466.3 465.6 465.3 464.8 464.6 464.3 464.3 464.1 464.2 464.5 464.4 464.3 464.3 464.3 464.7 465.1 465.1 465.4 465.6 465.6 465.8 466.2 466.4 466.6 466.7 466.9 467.0 467.0 467.1 467.1 467.1 467.3 467.5 467.6 468.3 468.9 468.9 469.1 469.3 469.4 469.6 469.8 469.8 469.5 469.0 468.4 468.0 467.8 467.7 467.5 467.1 466.4 465.6 465.0 464.2 463.4 462.7 462.4 462.2 462.3 462.6 463.0 463.5 464.1 464.7 465.2 465.8 466.4 466.9 467.8 468.5 469.3 470.1 470.4 470.6 471.0 471.4 471.7 471.9 471.9 471.4 471.2 470.8 470.1 469.4 468.7 467.7 466.6 465.6 464.6 463.5 462.5 461.9 461.2 460.5 460.1 460.0 460.0 460.1 460.3 460.8 461.5 462.1 462.8 463.6 464.6 465.6 466.7 467.6 468.6 469.4 470.1 470.7 471.1 471.6 471.6 471.8 471.5 471.3 471.0 470.6 470.1 469.5 468.8 468.0 467.1 466.4 465.4 464.7 464.0 463.3 462.6 462.0 461.5 461.1 460.6 460.5 460.5 460.5 460.9 461.3 462.0 462.9 464.0 465.4 466.5 467.8 469.0 470.3 471.8 472.9 473.8 474.6 475.3 475.6 475.9 475.8 475.4 475.0 474.4 473.3 472.4 471.0 469.9 468.4 466.8 465.4 464.0 462.4 460.8 459.7 458.6 457.6 456.8 456.1 455.7 455.5 455.6 456.0 456.5 457.6 459.2 461.0 462.9 465.0 466.9 468.8 471.1 472.7 474.3 475.5 476.3 476.8 477.0 476.6 476.0 474.9 473.7 472.4 470.8 468.8 466.8 465.0 463.2 461.4 459.4 457.9 456.7 455.7 454.8 454.2 454.2 
454.4 455.0 456.0 457.5 459.1 460.9 462.9 465.6 467.6 469.5 471.2 472.8 474.1 474.9 475.1 475.2 474.9 474.3 473.3 472.2 471.0 469.7 467.9 466.3 464.7 463.1 461.6 460.0 458.7 457.6 456.8 456.1 455.6 455.5 455.5 455.9 456.7 457.9 459.3 460.8 462.7 464.5 466.4 468.3 470.3 472.1 473.5 474.9 475.8 476.6 477.0 476.9 476.5 475.7 474.6 473.5 472.1 470.6 468.6 466.7 464.8 462.9 461.1 459.2 457.9 456.7 455.8 454.9 454.5 454.5 454.5 455.1 455.9 457.1 458.4 459.8 461.5 463.5 465.1 466.9 468.7 470.5 472.1 473.4 474.4 475.1 475.7 476.0 475.7 475.2 474.6 473.8 472.6 471.0 469.7 468.3 466.7 464.9 463.3 461.9 460.5 459.2 458.0 457.1 456.4 456.1 456.0 456.0 456.4 457.0 457.7 458.6 459.7 461.3 462.7 464.2 465.6 467.2 468.9 470.2 471.3 472.4 473.5 474.1 474.4 474.8 474.7 474.3 473.8 473.2 472.4 471.3 469.9 468.8 467.7 466.4 465.0 463.6 462.5 461.3 460.0 459.3 458.7 458.2 457.9 457.9 457.9 458.3 458.9 459.7 460.7 461.9 463.2 464.6 465.9 467.1 468.5 469.7 470.6 471.4 471.9 472.1 472.3 472.1 471.9 471.6 471.1 470.5 469.9 469.2 468.3 467.6 466.7 466.0 465.0 464.3 463.6 462.9 462.3 461.8 461.3 461.1 461.1 461.1 461.1 461.3 461.5 461.9 462.5 463.1 463.6 464.3 465.0 465.7 466.4 467.1 467.7 468.2 468.7 469.1 469.1 469.4 469.2 469.0 468.7 468.5 468.1 467.4 467.0 466.4 465.8 465.1 464.6 464.0 463.3 462.9 462.4 462.2 461.9 461.9 461.9 461.9 461.9 462.1 462.4 462.6 462.8 463.1 463.5 463.8 464.3 464.5 464.9 465.2 465.5 465.7 465.9 466.2 466.2 466.2 467.8 467.8 467.8 467.8 467.8 467.8 467.8 467.8 467.8", - "input_type": "phoneme", - "offset": 230.586 + "f0_timestep": "0.005" }, { + "offset": 238.71, "text": "SP AP 啦 啦 啦 啦 啦 SP", "ph_seq": "SP AP l a l a l a l a l a SP", - "note_seq": "rest rest A#4 A#4 D#5 D#5 F5 F5 D5 D5 D#5 D#5 rest", - "note_dur_seq": "0.2 0.4 0.345 0.345 0.345 0.345 0.5170001 0.5170001 0.5169998 0.5169998 0.6900001 0.6900001 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.140002 0.4 0.059998 0.255004 0.089996 0.270003 0.074997 0.441988 0.075012 0.427004 0.089996 0.69 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.14 0.4 0.06 0.255 0.09 0.27 0.075 0.442 0.075 0.427 0.09 0.69 0.4", + "ph_num": "1 2 2 2 2 2 1 1", + "note_seq": "rest rest A#4 D#5 F5 D5 D#5 rest", + "note_dur": "0.2 0.4 0.345 0.345 0.517 0.517 0.69 0.4", + "note_slur": "0 0 0 0 0 0 0 0", "f0_seq": "490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.2 490.4 492.1 494.6 496.9 498.8 500.3 500.8 499.1 494.5 490.9 487.4 480.9 475.0 470.6 466.6 463.3 462.1 462.4 463.3 465.0 466.1 467.0 467.2 467.2 467.2 467.2 467.0 466.8 466.7 466.4 466.2 466.0 465.9 466.0 466.3 466.5 467.0 467.6 468.1 468.9 469.1 469.4 469.4 469.6 469.9 470.1 470.2 470.2 470.2 469.9 469.9 469.7 469.6 469.1 468.9 468.6 468.3 468.3 468.3 468.5 469.0 469.5 470.4 471.9 474.4 478.2 484.0 492.6 499.6 506.9 516.5 531.2 541.8 553.5 564.3 574.8 585.2 596.0 615.8 631.1 631.5 627.9 625.1 622.7 621.5 621.2 620.8 620.8 620.2 619.0 618.4 617.1 615.7 615.0 614.4 614.0 614.1 614.6 
615.4 616.8 618.2 619.6 620.8 621.7 623.0 623.6 623.9 624.6 625.0 625.4 625.9 626.2 626.5 626.9 626.9 627.1 627.2 626.9 626.6 626.2 625.9 625.6 625.2 624.8 624.1 623.2 622.1 620.8 619.6 618.2 616.3 614.9 613.7 613.7 614.4 617.6 621.5 626.2 631.1 635.9 641.0 647.0 653.8 661.4 670.8 680.3 689.2 697.1 706.6 716.0 715.6 715.6 715.2 713.8 713.5 713.5 713.3 712.4 710.9 708.8 706.8 705.1 703.3 701.4 699.7 698.1 696.7 694.4 693.1 691.9 691.0 690.2 689.8 689.2 689.0 689.6 690.6 692.0 693.9 695.6 697.2 698.9 700.9 702.1 703.5 704.1 704.5 704.5 704.3 703.7 702.9 702.0 700.8 699.7 699.0 698.3 697.2 695.8 694.8 694.2 693.6 693.3 693.0 692.6 692.4 692.4 692.6 693.0 693.6 694.3 695.0 696.0 697.4 698.5 699.3 700.1 700.4 700.5 700.5 700.5 700.2 699.9 699.7 699.3 698.7 698.2 696.7 695.2 692.8 690.4 686.5 682.5 677.9 670.4 663.6 656.2 649.8 642.9 635.5 627.9 619.5 612.5 605.7 600.1 595.5 590.0 586.7 583.6 580.9 582.9 586.0 592.1 596.1 597.7 598.8 598.8 598.5 598.7 598.9 598.6 598.1 596.9 595.2 593.8 592.1 591.0 589.8 588.1 586.8 585.8 584.8 583.9 583.5 583.2 582.9 582.5 582.3 582.3 582.4 582.9 583.2 583.9 585.0 586.0 586.9 587.5 587.8 588.1 588.5 588.8 589.1 589.4 589.7 589.4 589.4 588.8 588.7 588.7 588.7 588.7 588.7 588.7 588.5 588.2 587.8 587.6 587.3 587.0 587.0 587.0 587.3 587.3 587.7 588.0 588.2 588.6 589.1 589.5 589.7 589.9 590.0 590.0 589.6 589.2 588.2 587.3 585.7 584.6 583.2 581.8 580.2 579.2 578.7 578.4 578.4 578.7 579.4 580.6 582.3 584.0 586.0 588.7 590.7 592.7 595.9 600.2 605.2 609.8 615.0 619.3 622.6 625.7 628.0 630.2 632.8 635.0 637.0 637.9 637.9 637.5 637.5 636.9 635.6 633.9 632.5 631.3 629.8 628.4 626.8 624.9 623.3 621.5 619.4 618.0 616.3 615.1 613.9 613.0 612.4 612.3 612.7 613.8 615.2 616.9 618.7 620.5 622.7 624.8 626.6 629.5 631.7 633.5 635.0 636.2 637.3 637.5 637.3 636.9 636.4 635.7 634.6 632.8 630.9 629.0 626.5 624.4 622.6 620.8 618.3 616.5 615.0 613.1 612.2 611.9 612.1 612.6 613.4 614.6 616.4 618.7 620.5 622.3 624.7 626.2 627.4 628.8 630.3 630.8 631.2 631.3 630.9 630.2 629.6 628.8 627.5 626.3 625.1 623.8 622.6 621.5 620.6 619.9 618.9 618.2 617.7 617.4 617.1 616.9 616.9 617.0 617.4 618.1 618.8 619.5 620.1 621.0 622.6 623.5 624.7 625.9 626.7 627.1 627.6 627.9 627.5 627.2 626.8 626.3 626.6 626.9 627.5 629.1 630.0 630.5 631.2 631.0 630.5 629.7 627.5 622.3 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7 618.7", - "input_type": "phoneme", - "offset": 238.71 + "f0_timestep": "0.005" }, { + "offset": 241.793, "text": "AP 啦 啦 SP", "ph_seq": "AP l a l a SP", - "note_seq": "rest F5 F5 D#5 D#5 rest", - "note_dur_seq": "0.276 0.6900001 0.6900001 2.413 2.413 0.069", - "is_slur_seq": "0 0 0 0 0 0", - "ph_dur": "0.216002 0.059998 0.614988 0.075012 2.413 0.069", - "f0_timestep": "0.005", + "ph_dur": "0.216 0.06 0.615 0.075 2.413 0.069", + "ph_num": "2 2 1 1", + "note_seq": "rest F5 D#5 rest", + "note_dur": "0.276 0.69 2.413 0.069", + "note_slur": "0 0 0 0", "f0_seq": "694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 
694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 694.8 700.3 714.5 728.3 739.8 748.7 753.4 751.4 746.0 736.4 721.0 709.0 699.8 694.2 690.8 689.4 690.2 693.1 696.8 700.6 703.7 705.8 708.0 709.0 709.0 708.6 707.6 706.1 704.7 702.9 701.6 700.7 699.5 698.5 697.7 696.7 696.0 694.8 694.4 694.0 693.6 693.6 693.6 693.9 694.0 694.3 694.6 694.8 695.0 695.2 695.3 695.7 696.1 696.4 696.4 696.9 697.2 697.6 698.2 698.5 698.7 699.1 699.5 699.9 700.1 700.1 700.0 699.7 699.7 699.7 699.7 699.7 699.0 698.5 698.1 697.7 697.7 697.3 697.0 696.8 696.6 696.0 695.5 695.1 694.4 694.0 693.6 693.2 693.2 693.2 693.2 693.2 693.5 693.9 694.2 694.6 695.2 696.1 697.1 697.7 698.1 698.1 698.5 699.4 700.1 700.4 700.7 700.9 701.2 701.5 701.7 701.4 700.9 700.3 700.0 699.4 698.7 697.7 695.2 693.2 690.9 688.2 684.8 681.2 677.9 674.0 670.0 665.8 662.1 659.2 655.5 652.6 649.8 647.9 645.7 643.8 641.6 639.4 637.8 636.8 636.2 636.8 637.5 638.3 638.8 639.5 640.0 639.3 637.4 636.1 634.2 632.3 631.7 630.8 629.8 628.9 628.3 627.8 627.2 626.3 625.5 624.9 624.1 623.1 622.5 621.9 620.6 619.7 619.4 618.8 618.4 618.3 618.3 618.6 618.9 619.6 620.5 621.4 622.4 623.5 624.6 625.2 625.6 625.9 626.2 626.2 626.2 626.2 626.5 626.3 626.2 626.1 625.7 625.4 625.0 624.7 624.3 624.0 623.7 623.3 623.0 623.0 622.7 622.6 622.4 622.0 621.5 621.0 620.8 620.7 620.5 620.3 620.1 620.1 619.8 620.1 619.7 620.0 620.1 620.7 621.1 621.7 622.4 622.8 623.0 623.1 623.3 623.3 623.3 623.4 623.7 623.7 624.0 624.4 624.4 624.7 624.8 625.0 625.1 625.1 625.0 624.6 624.2 623.6 623.1 622.6 622.3 622.3 621.9 621.3 620.9 620.8 620.5 620.2 620.1 620.1 620.1 620.1 620.2 620.6 620.9 621.5 621.9 622.6 622.6 622.9 623.3 623.6 623.9 623.8 623.5 623.3 623.2 622.8 622.4 621.8 621.5 621.1 620.8 620.8 620.8 620.8 620.8 620.8 620.8 620.8 621.0 621.4 621.8 622.4 622.7 623.2 623.8 624.1 624.4 624.7 624.8 625.1 624.8 623.9 622.8 622.1 621.5 620.8 620.1 619.5 618.8 617.8 617.6 617.6 617.2 617.6 617.6 617.6 617.6 617.9 618.4 619.0 619.7 620.5 621.0 621.3 621.7 622.0 622.4 623.3 624.5 625.1 625.7 626.6 626.6 626.8 626.7 626.4 625.8 625.0 624.1 623.3 622.7 622.0 620.9 620.3 619.7 619.1 618.7 618.1 617.7 617.6 617.6 617.8 618.0 618.1 618.7 619.3 620.0 621.2 622.1 623.1 623.7 624.4 625.1 625.4 626.2 626.5 626.3 626.2 625.9 625.3 624.8 624.3 623.8 623.2 622.9 622.6 621.8 621.2 620.3 619.4 619.1 618.8 618.5 618.3 618.5 618.7 618.8 619.2 619.4 619.5 620.1 620.6 621.2 621.9 622.3 622.3 622.3 622.5 622.9 623.2 623.5 623.9 624.1 624.1 624.1 623.9 623.6 623.3 623.3 623.3 623.3 622.7 622.3 622.0 621.7 621.5 621.4 621.0 620.4 619.7 619.7 619.7 619.7 619.4 619.4 619.4 619.4 619.4 619.7 619.7 620.0 620.3 620.5 620.8 621.5 622.1 622.4 622.7 623.3 623.7 623.7 624.1 624.4 624.8 624.8 625.0 624.9 625.0 625.1 625.1 625.0 624.7 624.3 623.6 623.2 622.5 621.9 621.4 620.8 620.5 620.2 619.8 619.5 619.4 619.2 619.2 619.4 619.5 619.8 620.3 620.9 621.5 622.4 623.3 624.2 625.1 625.4 626.1 626.5 626.9 627.2 627.3 627.1 626.9 626.8 626.5 625.9 625.4 624.8 623.7 623.0 622.3 621.7 621.3 620.9 620.6 620.3 620.1 619.9 619.9 620.1 620.2 620.5 620.9 621.2 621.9 621.9 622.5 622.6 622.9 623.2 623.5 623.9 624.3 624.6 624.9 625.2 625.5 625.4 625.1 624.7 624.1 623.8 623.1 622.4 622.0 621.4 621.2 620.7 620.1 619.5 618.8 618.3 618.3 618.3 618.4 618.7 619.0 619.7 620.1 620.7 621.1 621.4 621.8 622.1 622.6 623.3 623.8 624.2 624.5 624.8 624.7 624.1 624.1 624.1 623.8 623.9 623.8 623.5 623.3 623.3 623.3 623.2 623.0 622.9 622.4 621.7 620.8 619.6 618.7 617.8 617.0 
616.9 617.2 617.7 618.8 619.7 620.8 621.9 622.9 623.9 624.6 625.2 625.5 625.6 625.9 625.9 625.5 625.5 624.8 624.1 623.8 623.3 622.8 622.3 621.8 621.3 620.7 620.0 618.8 618.0 617.2 617.2 617.2 617.5 618.2 619.3 620.8 622.6 624.6 626.4 627.8 629.2 630.9 631.9 632.8 633.9 633.9 633.5 632.3 630.6 629.0 628.3 628.6 632.6 647.0 652.7 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0 639.0", - "input_type": "phoneme", - "offset": 241.793 + "f0_timestep": "0.005" }, { + "offset": 246.297, "text": "SP AP 啦 啦 啦 啦 啦 SP", "ph_seq": "SP AP l a l a l a l a l a SP", - "note_seq": "rest rest A#5 A#5 G#5 G#5 G5 G5 F5 F5 D#5 D#5 rest", - "note_dur_seq": "0.2 0.4 0.344 0.344 0.345 0.345 0.3449999 0.3449999 0.1720001 0.1720001 1.897 1.897 0.345", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.154987 0.4 0.045013 0.283987 0.060013 0.285002 0.059998 0.285002 0.059998 0.112002 0.059998 1.897 0.345", - "f0_timestep": "0.005", + "ph_dur": "0.155 0.4 0.045 0.284 0.06 0.285 0.06 0.285 0.06 0.112 0.06 1.897 0.345", + "ph_num": "1 2 2 2 2 2 1 1", + "note_seq": "rest rest A#5 G#5 G5 F5 D#5 rest", + "note_dur": "0.2 0.4 0.344 0.345 0.345 0.172 1.897 0.345", + "note_slur": "0 0 0 0 0 0 0 0", "f0_seq": "791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 791.3 808.5 833.0 862.9 890.6 905.3 910.8 910.5 908.6 906.1 902.7 896.9 894.3 893.3 894.9 902.1 909.1 916.5 924.7 929.3 932.1 934.2 935.6 935.9 935.3 934.2 932.5 931.1 930.2 929.5 929.1 928.7 928.6 929.0 929.5 930.0 930.8 931.8 932.5 933.3 934.2 934.8 935.7 936.1 937.4 938.3 938.3 938.8 939.7 939.5 938.6 937.0 933.8 930.4 926.4 921.6 916.8 911.5 905.8 897.3 891.3 882.0 871.9 860.9 852.2 845.0 836.4 829.7 824.6 820.4 817.8 817.1 818.3 821.3 825.0 830.4 836.9 842.0 843.2 844.0 843.5 842.4 841.6 840.9 839.9 838.4 836.9 835.3 833.3 830.6 829.3 827.7 825.7 824.9 823.7 823.1 822.6 822.9 823.3 823.8 824.6 825.1 825.8 826.8 827.7 828.9 830.0 831.8 833.0 834.5 835.9 836.4 837.2 837.7 838.2 838.0 837.1 835.7 833.7 831.4 828.7 826.3 823.7 820.6 818.2 814.1 810.2 809.1 807.6 806.3 805.4 804.8 804.9 805.4 806.0 806.7 807.2 808.2 809.1 810.2 812.0 813.5 814.3 814.5 814.5 814.1 813.4 811.6 809.6 807.0 804.2 801.6 798.6 796.2 793.7 791.8 790.1 788.5 786.6 784.9 783.8 782.9 782.0 781.1 780.4 779.9 779.7 779.5 779.5 779.5 779.6 780.0 780.4 781.0 781.7 782.9 783.7 785.0 786.0 786.5 787.3 788.2 789.0 789.6 790.0 790.2 789.9 789.9 789.3 788.5 787.6 786.5 784.8 782.3 779.6 775.9 772.3 768.7 764.9 760.5 755.0 750.2 745.5 740.9 737.3 734.5 730.9 726.4 723.3 720.2 716.7 713.5 711.1 709.7 708.9 708.0 707.7 707.2 706.3 705.6 704.9 704.3 702.9 701.6 700.3 699.1 697.8 696.5 695.4 694.2 692.6 691.1 689.5 687.3 685.3 683.3 679.6 677.0 673.1 670.5 666.1 662.9 658.7 652.2 647.1 642.7 639.0 636.4 634.8 633.8 633.4 632.7 632.3 632.0 632.0 632.4 632.4 632.4 632.1 631.8 631.5 630.9 630.3 629.4 628.3 627.1 626.1 624.9 623.7 622.5 
620.8 619.6 618.5 616.9 615.4 614.6 614.2 613.9 613.9 614.4 615.3 616.3 617.4 618.3 619.7 621.2 622.6 623.5 625.1 626.0 626.7 627.5 628.4 629.1 629.6 630.0 630.0 629.4 628.9 628.0 627.1 626.2 625.6 624.4 623.5 622.6 621.4 620.2 619.1 618.3 617.4 616.7 616.4 616.2 616.4 617.0 617.6 618.2 619.4 620.3 621.3 622.4 623.2 623.8 624.4 625.0 625.5 625.9 625.9 626.1 625.8 625.4 625.1 624.4 624.1 623.5 623.0 622.7 622.1 621.9 621.7 621.4 621.2 621.0 620.8 620.9 621.2 621.2 621.5 621.5 621.5 621.5 621.8 622.4 622.8 623.4 623.9 624.2 624.6 624.8 624.8 624.8 624.8 625.1 624.8 625.1 625.1 624.8 624.5 624.1 623.8 623.2 622.6 621.7 620.8 620.3 620.1 620.1 620.0 619.7 619.4 619.0 619.0 619.3 619.4 619.7 620.0 620.3 620.6 621.2 622.0 623.0 624.0 624.8 625.6 626.2 627.1 627.7 628.0 628.4 628.4 628.2 627.6 626.7 625.8 625.1 624.4 623.7 622.9 621.7 620.8 619.2 618.3 617.6 616.5 615.8 615.5 615.5 615.7 616.5 617.4 618.8 620.5 621.5 622.5 623.5 624.5 624.8 625.1 625.6 625.9 626.5 626.9 627.2 627.5 627.7 627.5 626.9 626.4 625.9 625.2 624.5 623.9 623.0 621.7 620.5 619.6 618.3 618.0 617.4 617.2 617.5 618.0 618.8 619.9 620.8 621.3 621.7 622.3 623.0 623.3 624.1 624.4 624.4 624.7 624.8 624.5 624.2 623.9 623.5 623.3 623.2 623.0 623.0 623.0 622.9 622.6 622.2 621.9 621.6 621.3 620.6 620.2 619.9 620.0 620.3 620.6 620.9 621.2 621.4 622.0 622.3 623.0 623.3 623.6 623.7 623.7 624.0 623.8 623.7 623.7 623.5 623.2 622.8 622.5 622.1 621.5 621.1 620.8 620.8 620.5 620.5 620.2 620.1 619.8 619.7 619.7 619.7 619.7 619.7 619.9 620.2 620.6 621.3 621.9 622.4 623.0 623.6 624.2 625.0 625.6 625.9 625.9 625.4 625.0 624.4 623.4 622.3 621.4 620.5 619.2 618.3 617.7 617.0 616.3 615.9 615.6 615.7 615.8 616.2 616.8 617.6 619.1 620.2 620.9 621.2 621.7 622.3 622.6 623.0 623.3 623.3 623.3 623.1 622.8 622.3 621.8 621.4 620.9 620.4 619.9 619.2 618.3 617.8 617.2 616.9 616.6 616.8 617.1 617.7 618.2 618.7 619.4 620.1 621.1 622.3 623.1 623.7 624.9 625.5 626.6 627.2 628.2 629.0 629.1 628.3 627.3 626.4 625.4 624.1 622.9 621.7 620.6 619.5 618.0 616.5 614.5 609.8 605.1 598.6 590.0 582.4 571.6 552.0 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9 534.9", - "input_type": "phoneme", - "offset": 246.297 + "f0_timestep": "0.005" }, { + "offset": 250.069, "text": "AP 啦 啦 啦 啦 SP", "ph_seq": "AP l a l a l a l a SP", - "note_seq": "rest A#4 A#4 D#5 D#5 D5 D5 D#5 D#5 rest", - "note_dur_seq": "0.276 0.345 0.345 0.344 0.344 0.5180001 0.5180001 2.241 2.241 0.5", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.201003 0.074997 0.239989 0.105011 0.268988 0.075012 0.413004 0.104996 2.241 0.5", - "f0_timestep": "0.005", + "ph_dur": "0.201 0.075 0.24 0.105 0.269 0.075 0.413 0.105 2.241 0.5", + "ph_num": "2 2 2 2 1 1", + "note_seq": "rest A#4 D#5 D5 D#5 rest", + "note_dur": "0.276 0.345 0.344 0.518 2.241 0.5", + "note_slur": "0 0 0 0 0 0", "f0_seq": "493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 496.5 502.3 504.1 505.5 506.9 509.5 
511.8 514.3 516.7 518.7 518.7 517.3 513.4 507.2 499.5 493.2 485.1 476.9 471.8 467.0 464.6 463.9 463.5 463.7 463.7 464.0 464.2 464.3 464.3 464.5 464.8 465.0 465.2 465.4 465.6 465.6 465.6 465.6 465.6 465.7 465.9 466.3 466.7 467.2 467.6 468.6 469.2 469.7 469.9 469.9 469.8 469.6 469.2 468.4 468.0 467.5 467.5 467.5 467.3 466.8 466.1 465.7 465.4 465.5 465.8 466.2 466.8 467.8 469.9 473.4 478.2 484.8 494.5 504.6 512.1 524.2 536.9 550.4 562.9 577.1 590.6 604.1 619.1 624.8 623.8 622.3 621.0 620.0 619.7 619.7 619.0 618.7 618.0 616.8 615.8 614.8 613.7 612.3 611.0 610.3 609.7 609.7 610.3 610.9 612.3 614.4 615.9 617.3 618.7 620.7 622.1 623.3 624.6 625.7 626.5 627.0 627.4 627.7 627.7 627.3 627.3 626.6 626.0 625.5 624.6 623.9 623.3 622.4 621.2 619.9 618.5 616.6 614.4 612.6 610.5 608.2 607.0 606.1 604.9 604.3 604.0 604.1 604.6 605.1 605.8 606.5 606.6 606.8 607.0 607.0 607.0 607.0 606.3 604.9 604.0 603.6 602.9 602.5 602.0 601.1 599.7 598.3 597.1 595.8 594.0 592.8 591.2 590.4 589.0 588.0 587.2 585.6 585.1 584.3 583.6 583.1 582.8 582.5 582.3 582.5 583.2 583.8 584.6 586.5 587.7 588.5 590.0 590.6 591.2 591.6 592.0 592.3 592.4 592.3 591.9 591.6 591.3 590.7 589.9 589.4 589.0 588.3 588.0 588.0 588.0 587.8 587.7 587.7 587.7 587.7 587.8 588.1 588.8 589.2 589.8 590.4 590.8 591.1 591.1 591.1 590.8 590.7 590.5 590.4 590.2 589.9 589.6 589.1 588.5 587.9 587.3 586.5 585.6 585.4 584.6 584.6 584.4 584.5 585.0 585.9 586.7 587.5 588.5 589.5 590.7 592.1 593.4 594.8 596.6 598.6 601.4 604.0 606.6 610.3 614.0 618.7 624.1 627.7 629.7 631.0 631.3 631.3 631.1 630.2 629.3 628.4 627.2 626.0 624.7 623.7 622.5 621.0 619.4 617.6 616.1 614.8 613.8 613.2 612.6 612.7 613.0 613.9 614.6 615.6 617.0 618.2 619.7 621.3 622.8 624.2 625.5 626.7 627.9 628.8 629.9 630.2 630.2 629.8 629.5 628.9 628.3 627.2 626.5 625.7 624.6 623.7 622.9 621.9 621.0 620.2 618.8 618.0 616.7 615.8 615.5 615.2 615.4 615.7 616.2 617.2 618.5 619.7 620.8 621.8 623.0 623.9 624.8 625.6 626.2 626.5 627.2 627.3 627.3 627.1 626.7 626.1 625.7 625.1 624.3 623.8 623.2 622.5 621.8 621.2 620.8 620.5 620.1 619.8 619.5 619.1 618.8 618.7 618.7 618.8 619.0 619.1 619.4 619.6 620.2 620.8 621.7 622.3 622.6 623.1 623.6 623.9 624.1 624.1 623.8 623.7 623.5 622.9 622.6 622.6 622.3 621.8 621.2 620.8 620.5 620.7 621.4 621.8 622.2 622.3 622.4 622.6 622.6 622.6 622.7 623.0 623.3 623.0 623.0 623.0 622.6 622.6 622.4 622.0 621.9 621.9 622.0 622.1 621.8 621.4 621.1 620.7 620.3 619.7 619.7 619.4 619.7 619.7 619.7 620.2 620.7 621.2 622.1 623.2 623.9 624.6 625.4 625.9 626.2 626.3 626.6 626.6 626.6 626.6 626.3 626.0 625.3 624.7 623.9 622.9 621.5 620.1 619.0 617.3 616.0 615.4 615.1 615.4 615.8 616.4 617.2 618.7 620.0 621.1 622.3 623.4 624.3 625.1 626.0 626.7 627.0 627.3 627.6 628.0 627.7 627.7 627.4 626.7 626.1 625.5 624.8 624.1 623.3 622.3 621.2 620.2 619.0 618.3 617.6 616.9 616.3 616.2 616.5 616.8 617.4 618.5 619.4 620.2 621.2 622.1 622.8 623.4 623.8 624.1 624.1 624.1 624.1 623.8 623.9 623.8 623.7 623.5 623.3 623.0 622.4 622.0 621.4 621.2 621.1 620.8 620.5 620.5 620.2 620.4 621.0 621.5 621.8 621.7 621.7 622.1 622.5 623.1 623.5 623.8 624.1 624.4 625.1 625.1 625.1 625.1 624.2 623.8 623.3 622.4 621.4 620.4 619.5 618.9 618.7 618.7 618.7 619.1 619.4 619.7 619.7 620.0 620.6 621.2 622.0 622.5 622.6 622.8 623.2 623.5 623.8 624.3 624.8 624.8 624.8 624.8 624.8 625.1 625.1 625.4 625.5 625.3 625.1 625.0 624.5 624.0 623.3 622.5 622.1 621.2 620.8 620.1 619.8 619.0 618.7 618.7 618.7 618.8 619.2 619.5 619.9 620.2 620.6 620.8 620.8 620.8 620.8 621.1 621.9 622.5 622.9 623.4 624.1 625.1 625.9 626.6 626.9 626.9 626.9 626.6 
625.8 625.5 624.8 623.7 623.0 622.4 621.5 620.4 619.6 619.0 618.3 617.8 617.6 617.6 617.7 618.5 619.1 619.7 620.6 621.2 621.5 621.9 622.5 622.9 623.4 623.7 623.9 624.2 624.4 624.4 624.4 624.4 624.1 623.3 623.0 622.1 620.8 619.6 618.5 617.8 617.0 616.3 616.2 616.2 616.7 617.5 618.1 619.0 619.9 620.5 621.5 622.3 623.3 623.9 624.5 625.3 625.5 625.5 625.1 624.4 623.6 622.3 620.9 619.5 618.1 616.9 615.2 614.0 613.2 612.7 612.9 613.2 614.0 615.0 616.7 619.0 621.3 623.5 625.9 627.7 629.5 631.7 633.5 634.4 635.2 635.6 636.0 635.8 636.8 637.2 632.9 628.0 626.8 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2 626.2", - "input_type": "phoneme", - "offset": 250.069 + "f0_timestep": "0.005" } ] \ No newline at end of file diff --git "a/samples/\344\270\215\350\260\223\344\276\240.ds" "b/samples/\344\270\215\350\260\223\344\276\240.ds" new file mode 100644 index 000000000..f8f4290c8 --- /dev/null +++ "b/samples/\344\270\215\350\260\223\344\276\240.ds" @@ -0,0 +1,418 @@ +[ + { + "offset": 34.162, + "text": "SP AP 衣 襟 上 别 好 了 晚 霞 SP", + "ph_seq": "SP AP y i j in sh ang b ie h ao l e w an x ia SP", + "ph_dur": "0.175 0.4 0.025 0.178 0.06 0.163 0.075 0.431 0.045 0.163 0.075 0.178 0.06 0.372 0.105 0.326 0.15 0.476 0.4", + "ph_num": "1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 G3 E3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.238 0.238 0.477 0.476 0.476 0.4", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "164.8 165.2 165.1 165.0 164.4 164.5 164.7 164.7 164.9 165.0 165.2 165.0 165.3 165.3 165.0 165.1 165.4 165.2 165.2 165.4 165.4 165.1 165.2 165.4 165.5 165.7 165.9 165.9 165.6 165.8 165.9 165.6 165.8 165.9 166.2 165.9 165.9 165.9 165.8 166.0 166.0 165.7 165.6 165.3 165.9 165.7 165.8 165.7 166.0 166.6 166.9 167.6 168.5 175.9 185.7 196.0 208.6 222.5 234.5 241.7 246.7 248.4 247.7 245.5 244.0 242.2 235.0 217.2 212.1 216.8 219.9 223.4 226.9 221.8 219.3 218.8 219.3 220.4 221.0 221.9 222.5 223.8 223.9 223.2 220.6 214.2 206.6 212.7 220.6 227.3 235.4 243.9 252.9 256.1 255.8 256.3 256.0 255.1 251.2 244.5 234.5 229.3 226.5 224.9 227.5 238.1 248.9 258.2 263.8 266.6 261.8 252.4 240.2 230.2 221.6 218.6 220.9 226.8 241.2 254.7 263.4 270.6 273.5 270.9 261.7 243.8 240.8 247.7 254.3 259.0 265.5 271.8 279.1 261.9 253.8 249.3 248.2 248.4 248.1 247.7 247.0 246.6 246.5 247.2 246.8 244.4 240.6 231.0 229.9 228.6 226.0 224.1 221.3 220.1 218.5 217.3 217.4 217.4 217.9 219.5 221.1 222.2 222.6 221.8 221.1 220.2 219.8 219.0 219.3 218.8 219.7 219.6 219.5 222.9 223.4 222.3 220.7 220.4 221.2 221.9 222.3 222.3 221.3 221.0 219.1 218.3 219.1 224.7 234.1 242.0 248.1 252.4 253.4 252.8 248.1 244.0 242.6 242.7 242.3 243.2 244.7 244.9 246.0 246.0 244.2 238.1 228.6 216.9 208.2 198.7 191.6 188.0 189.1 191.0 193.6 195.0 197.2 198.1 197.2 197.1 196.2 195.3 193.8 193.7 193.6 194.5 195.3 196.4 196.6 196.3 194.2 192.4 193.6 195.9 198.1 198.8 198.9 198.5 198.6 197.6 195.8 194.1 191.7 187.7 185.1 183.1 181.5 179.6 
178.0 176.3 174.4 173.0 171.1 169.4 167.9 166.0 164.3 161.7 162.1 165.0 167.5 168.5 169.6 169.2 167.8 163.0 159.2 156.4 155.5 156.7 160.3 165.1 170.1 174.4 178.6 180.4 179.7 177.1 171.2 165.0 159.5 154.5 153.4 153.4 154.6 157.9 164.3 170.5 176.4 180.1 181.1 177.5 173.1 165.3 157.5 151.0 147.8 146.6 147.8 149.3 149.4 149.5 149.6 149.5 149.9 150.8 151.3 152.2 152.9 153.6 154.4 155.4 156.0 157.1 157.6 158.0 158.4 158.8 158.7 158.5 158.5 158.2 158.6 158.8 158.7 158.4 158.5 158.8 158.8 158.7 159.2 158.7 159.4", + "f0_timestep": "0.011609977324263039", + "energy": "0.0002 0.0 0.0001 0.0 0.0002 0.0001 0.0003 0.0004 0.0001 0.0 0.0004 0.0006 0.0004 0.0004 0.001 0.0004 0.0008 0.0006 0.0006 0.0006 0.001 0.0009 0.0013 0.0023 0.0014 0.0024 0.0028 0.0025 0.0031 0.0031 0.003 0.0026 0.003 0.0023 0.0025 0.0027 0.0021 0.0022 0.0021 0.0022 0.0017 0.0017 0.0014 0.0013 0.0011 0.0018 0.0018 0.0018 0.0026 0.0044 0.0244 0.0389 0.0547 0.067 0.0723 0.075 0.0728 0.071 0.0707 0.0697 0.0719 0.0749 0.0792 0.0814 0.0816 0.0755 0.0639 0.0503 0.0333 0.0193 0.0151 0.0259 0.0467 0.064 0.0744 0.0821 0.0828 0.0794 0.0824 0.0822 0.0825 0.0848 0.0824 0.0811 0.0743 0.063 0.0494 0.0336 0.0236 0.0215 0.0227 0.0222 0.0414 0.0625 0.0777 0.0921 0.098 0.0959 0.0914 0.0858 0.0785 0.071 0.0634 0.057 0.0525 0.0524 0.0539 0.0581 0.0621 0.0635 0.0664 0.067 0.0672 0.064 0.0582 0.0515 0.0432 0.0379 0.0353 0.036 0.0423 0.0504 0.0559 0.0532 0.0461 0.0345 0.0182 0.0102 0.0037 0.0012 0.0049 0.0102 0.0415 0.0589 0.0718 0.0832 0.0838 0.0842 0.0848 0.0825 0.085 0.086 0.0861 0.0879 0.0843 0.0776 0.0678 0.0506 0.0355 0.0197 0.0125 0.0127 0.0133 0.0151 0.0357 0.0538 0.0676 0.08 0.0834 0.0843 0.0864 0.0866 0.0866 0.0844 0.0815 0.0799 0.0783 0.0791 0.0783 0.0745 0.0721 0.0694 0.0672 0.0699 0.0767 0.082 0.0868 0.0888 0.0884 0.0875 0.0863 0.0838 0.0781 0.0723 0.0642 0.0585 0.0546 0.0528 0.0551 0.057 0.06 0.0626 0.0642 0.0645 0.063 0.0614 0.0588 0.0568 0.0574 0.0572 0.0574 0.058 0.059 0.0596 0.0606 0.0598 0.061 0.0606 0.06 0.0581 0.0574 0.0569 0.0567 0.0627 0.0685 0.0727 0.0792 0.0821 0.0823 0.0827 0.0797 0.0756 0.0732 0.0722 0.0731 0.0726 0.0737 0.0735 0.0718 0.0713 0.0684 0.0649 0.0634 0.0631 0.0639 0.0647 0.0651 0.0656 0.066 0.0658 0.0644 0.0585 0.0498 0.039 0.0276 0.0185 0.0156 0.0169 0.0165 0.016 0.0155 0.014 0.0133 0.0117 0.0089 0.0224 0.0382 0.051 0.0614 0.0678 0.0688 0.0703 0.0697 0.069 0.0672 0.0636 0.0602 0.0558 0.052 0.0494 0.0494 0.0474 0.048 0.0481 0.0466 0.0466 0.0451 0.0444 0.0425 0.0403 0.0371 0.0337 0.0312 0.0283 0.0261 0.0248 0.0231 0.0227 0.0215 0.0214 0.0208 0.0204 0.0195 0.0186 0.0183 0.0175 0.0154 0.0132 0.0104 0.0084 0.005 0.0027 0.0011 0.0008 0.0011 0.001 0.0006 0.0005 0.0008 0.0011 0.0014 0.0009 0.0009 0.0004 0.0 0.0005 0.0003 0.0004 0.0004 0.0004 0.0001 0.0004 0.0002 0.0 0.0 0.0001 0.0001 0.0003 0.0002 0.0 0.0 0.0003 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0 0.0001 0.0 0.0001 0.0001 0.0002 0.0001 0.0001 0.0001 0.0 0.0002 0.0002 0.0002 0.0002 0.0004 0.0004 0.0004 0.0008 0.001 0.0009 0.0011 0.0012 0.0016 0.0019 0.0021 0.0022 0.0027 0.0025 0.0027 0.0027 0.0025 0.0024 0.0024 0.0024 0.0022 0.0024 0.0022 0.0023 0.002 0.0015 0.0014 0.0013 0.0011 0.0012 0.001 0.0011 0.0011 0.0011 0.0019 0.003 0.0028 0.003 0.0027 0.0026 0.0028 0.0032 0.0037 0.0037 0.0039 0.003 0.0025 0.0023 0.0022 0.0023 0.0026 0.0029 0.0034 0.0068 0.0101 0.0131 0.0146 0.0145 0.0124 0.0101 0.006 0.0027 0.0021 0.0015 0.0015 0.0012 0.0012 0.001 0.0007 0.001 0.0014 0.0031 0.0081 0.0152 0.0194 0.0223 0.0245 0.0223 0.0191 0.0149 0.0092 0.0046 
0.0034 0.003 0.0032 0.003 0.0029 0.0027 0.0025 0.0027 0.0023 0.0022 0.002 0.002 0.0018 0.0014 0.0012 0.0007 0.0009 0.0007 0.0006 0.0007 0.0004 0.0002 0.0003 0.0003 0.0002 0.0002 0.0 0.0004 0.0003 0.0006 0.0018 0.0021 0.002 0.0016 0.0009 0.0013 0.002 0.0026 0.0026 0.0026 0.0023 0.0022 0.0021 0.0023 0.0019 0.0017 0.0015 0.0016 0.0017 0.0019 0.0023 0.0031 0.0043 0.0069 0.0109 0.0131 0.0142 0.0131 0.0107 0.0075 0.0038 0.0025 0.002 0.002 0.0019 0.0016 0.0014 0.0015 0.0012 0.0012 0.001 0.0011 0.001 0.001 0.0009 0.0009 0.0012 0.0013 0.0016 0.0018 0.0019 0.0019 0.0023 0.0017 0.0016 0.0018 0.0018 0.0018 0.0019 0.0017 0.002 0.0017 0.0016 0.0015 0.0016 0.0014 0.0013 0.0013 0.0011 0.0009 0.0008 0.0009 0.0005 0.0008 0.0007 0.0006 0.0005 0.0006 0.0005 0.0006 0.0003 0.0006 0.0004 0.0003 0.0002 0.0003 0.0005 0.0006 0.0006 0.0007 0.001 0.0009 0.0011 0.0013 0.0017 0.0015 0.0013 0.0014 0.0012 0.0012 0.0013 0.0015 0.0013 0.0013 0.001 0.0009 0.0008 0.0007 0.0007 0.0007 0.0008 0.0009 0.0007 0.0008 0.0009 0.0015 0.0038 0.0085 0.0124 0.0149 0.0171 0.0172 0.0174 0.0174 0.0165 0.0161 0.0144 0.0121 0.0099 0.007 0.0038 0.0028 0.002 0.0019 0.002 0.002 0.002 0.0021 0.0022 0.0023 0.0021 0.002 0.0021 0.002 0.0017 0.0019 0.0019 0.0019 0.0018 0.0017 0.0016 0.0013 0.0016 0.0014 0.0013 0.0011 0.0011 0.001 0.0008 0.0007 0.0004 0.0003 0.0004 0.0007 0.0006 0.0004 0.0006 0.0006 0.0005 0.0004 0.0005 0.0002 0.0007 0.0013 0.0015 0.0016 0.0015 0.001 0.0008 0.0006 0.0004 0.0002 0.0003 0.0002 0.0002 0.0004 0.0002 0.0001 0.0001 0.0002 0.0002 0.0001 0.0 0.0002 0.0 0.0 0.0 0.0 0.0001 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 37.715, + "text": "AP 余 晖 送 我 牵 匹 老 马 SP", + "ph_seq": "AP y v h ui s ong w o q ian p i l ao m a SP", + "ph_dur": "0.32 0.06 0.163 0.075 0.163 0.075 0.179 0.06 0.163 0.075 0.178 0.06 0.163 0.075 0.371 0.105 1.429 0.095", + "ph_num": "2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest E3 E3 F#3 G3 F#3 E3 D3 E3 rest", + "note_dur": "0.38 0.238 0.238 0.239 0.238 0.238 0.238 0.476 1.429 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0", + "f0_seq": "130.1 130.2 130.1 129.9 130.0 129.9 130.0 129.9 130.0 129.8 129.8 129.9 129.8 130.1 130.1 130.1 130.2 130.2 130.0 129.8 129.9 129.9 130.0 129.9 130.0 129.8 129.8 129.5 129.5 129.0 128.8 127.8 126.2 125.3 129.0 133.5 138.9 144.8 152.1 158.8 163.4 166.0 166.7 165.9 163.2 160.7 157.4 158.9 160.6 162.5 163.8 165.8 166.6 167.7 166.0 165.6 165.5 165.0 164.9 165.3 165.0 164.8 166.1 164.9 162.7 159.6 158.1 163.0 167.3 171.8 176.1 179.9 185.3 190.2 189.4 186.4 185.4 185.3 185.1 185.5 184.8 184.3 185.1 186.2 186.7 186.9 185.6 183.8 181.8 180.8 183.8 188.3 194.5 198.1 199.9 200.5 199.2 198.6 197.3 196.9 195.9 195.7 195.3 195.3 195.4 194.4 189.5 183.6 180.1 176.8 173.1 169.4 165.6 163.7 160.2 157.5 157.3 159.1 159.7 163.7 171.6 179.3 183.7 186.3 187.4 186.6 183.3 179.9 177.7 239.1 312.9 370.0 370.0 370.0 366.6 309.2 189.3 184.6 180.0 174.6 170.5 168.1 165.7 164.7 163.9 164.2 164.7 165.9 165.9 165.0 163.2 160.3 158.2 155.4 152.5 149.4 146.2 143.2 142.2 142.3 143.2 144.7 146.8 148.7 148.8 148.4 147.8 147.6 147.7 147.3 147.0 147.0 146.6 146.6 146.1 145.5 145.1 144.3 144.8 145.7 146.3 146.2 146.2 146.5 146.5 145.8 145.0 144.3 144.3 144.6 144.9 145.4 145.7 146.9 150.0 156.2 162.4 165.1 167.5 169.0 169.2 168.3 166.4 162.6 157.8 154.9 152.3 152.0 153.6 157.8 162.7 166.5 170.1 170.5 170.2 168.7 166.8 164.3 161.9 160.2 159.4 159.9 160.8 162.8 165.1 166.5 166.6 167.7 168.0 167.9 165.6 163.1 161.0 160.0 159.8 159.5 160.4 161.9 164.5 168.0 169.9 170.5 
170.8 170.2 168.6 165.2 161.6 160.0 158.4 157.3 157.2 159.6 163.3 167.8 171.4 173.9 174.8 174.6 173.1 170.8 165.7 160.7 158.4 157.3 155.9 155.3 157.0 159.8 163.1 166.6 169.8 172.5 172.5 172.3 169.4 165.3 161.7 159.1 157.8 156.0 155.2 157.0 160.6 163.5 167.3 171.1 173.7 173.9 172.5 169.6 164.6 157.4 152.2 150.6 150.9 151.9 155.1 159.9 164.6 168.3 171.6 174.4 174.5 172.5 167.2 160.1 152.7 148.3 147.5 148.9 151.3 155.1 160.7 167.6 171.7 174.5 176.5 174.3 170.8 166.7 157.9 158.7 157.1 157.2 156.8 157.3 156.4 155.8 156.1", + "f0_timestep": "0.011609977324263039", + "energy": "0.001 0.0011 0.0014 0.0015 0.0014 0.0022 0.0025 0.0023 0.0029 0.0031 0.0031 0.0036 0.0042 0.004 0.0041 0.0041 0.0039 0.0038 0.0032 0.0028 0.003 0.0023 0.0025 0.0023 0.0021 0.002 0.0019 0.0024 0.0016 0.0016 0.0106 0.0255 0.0354 0.0459 0.055 0.0619 0.0651 0.0654 0.0638 0.0597 0.0584 0.0569 0.0557 0.0528 0.0484 0.0426 0.0332 0.025 0.0174 0.0125 0.0101 0.0083 0.0219 0.0389 0.0527 0.0651 0.0698 0.0722 0.072 0.0688 0.0688 0.0662 0.0637 0.0614 0.0524 0.0433 0.0324 0.0202 0.0157 0.0148 0.0153 0.0128 0.0274 0.0473 0.0585 0.0687 0.0716 0.0679 0.0663 0.0633 0.0616 0.0609 0.0605 0.0607 0.0632 0.0643 0.0644 0.0644 0.0627 0.0608 0.0602 0.0619 0.0642 0.0669 0.0707 0.0718 0.0712 0.0702 0.0688 0.0669 0.0677 0.0667 0.0675 0.0673 0.0644 0.0584 0.0487 0.0365 0.0234 0.0134 0.0135 0.0143 0.0139 0.0189 0.0411 0.0533 0.0602 0.0647 0.058 0.0508 0.0479 0.0466 0.0489 0.0521 0.0531 0.0514 0.0436 0.0345 0.0238 0.011 0.0047 0.0056 0.0108 0.0137 0.0181 0.0362 0.0478 0.0565 0.0635 0.0636 0.0627 0.0619 0.063 0.0626 0.0634 0.0656 0.0659 0.0679 0.0668 0.0645 0.0614 0.0601 0.0612 0.0645 0.0702 0.0719 0.0711 0.069 0.0629 0.0605 0.0614 0.0613 0.0646 0.0643 0.065 0.0641 0.064 0.065 0.0639 0.0637 0.0615 0.06 0.0616 0.0622 0.0621 0.0623 0.0612 0.0616 0.0609 0.0615 0.0611 0.0605 0.0598 0.0595 0.0577 0.0576 0.0578 0.0541 0.0522 0.0514 0.0516 0.0526 0.0554 0.0563 0.0575 0.0597 0.059 0.0588 0.0584 0.0594 0.0592 0.0601 0.058 0.0574 0.0562 0.0524 0.051 0.0464 0.0434 0.0418 0.0421 0.0422 0.0432 0.0435 0.0449 0.0446 0.0441 0.0433 0.0419 0.0414 0.0407 0.0411 0.0413 0.0411 0.0417 0.0414 0.0411 0.0419 0.0421 0.0429 0.0434 0.043 0.0416 0.041 0.0394 0.0383 0.0376 0.0374 0.0376 0.0375 0.0378 0.0383 0.0383 0.0389 0.0386 0.0378 0.0376 0.0374 0.0361 0.0351 0.0341 0.0334 0.0328 0.0332 0.0333 0.0331 0.0333 0.0339 0.0338 0.0354 0.0341 0.0335 0.0318 0.031 0.0305 0.029 0.0292 0.0285 0.0292 0.0298 0.0308 0.031 0.0312 0.0312 0.0309 0.0302 0.0303 0.0297 0.0284 0.0278 0.0268 0.0259 0.026 0.0258 0.0266 0.0268 0.0267 0.0268 0.0266 0.0262 0.0253 0.0242 0.0222 0.021 0.0205 0.0195 0.0191 0.0185 0.0175 0.0182 0.0189 0.0199 0.0213 0.0212 0.0203 0.0188 0.0169 0.0154 0.0136 0.0122 0.0116 0.0108 0.0109 0.011 0.0111 0.0119 0.0122 0.0128 0.0124 0.0115 0.0087 0.0061 0.0041 0.0027 0.0011 0.0014 0.0014 0.0012 0.0005", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0002 0.0006 0.0006 0.0008 0.0015 0.0019 0.0027 0.0031 0.0031 0.0032 0.0033 0.0034 0.0039 0.0041 0.0044 0.0044 0.0041 0.004 0.0038 0.0037 0.0032 0.0026 0.0026 0.0024 0.0022 0.0018 0.0016 0.0011 0.0005 0.0013 0.002 0.0025 0.0024 0.0021 0.0019 0.0017 0.0019 0.0018 0.0018 0.0014 0.0014 0.0012 0.001 0.0011 0.0013 0.0018 0.0035 0.0059 0.0083 0.0099 0.0108 0.0108 0.0096 0.0075 0.006 0.0034 0.0017 0.0014 0.0013 0.0012 0.0015 0.0013 0.0013 0.0011 0.0015 0.0037 0.0059 0.0102 0.0142 0.0163 0.0171 0.015 0.012 0.008 0.0038 0.0015 0.0013 0.0013 0.001 0.0009 0.0008 0.0006 0.0006 0.0004 0.0005 0.0004 0.0004 0.0004 0.0003 0.0005 0.0004 0.0006 
0.0008 0.0011 0.001 0.0013 0.0013 0.0013 0.0014 0.0013 0.0013 0.0013 0.0014 0.0012 0.0013 0.0017 0.004 0.0073 0.0104 0.0117 0.0155 0.0161 0.0147 0.0136 0.0089 0.0045 0.0022 0.0015 0.001 0.0009 0.0008 0.0006 0.0006 0.0003 0.0006 0.0003 0.0004 0.002 0.0027 0.0033 0.0035 0.0034 0.0036 0.0041 0.0041 0.0036 0.0031 0.0025 0.0019 0.0019 0.0019 0.0016 0.0012 0.0015 0.0011 0.0012 0.0012 0.0015 0.0014 0.0016 0.0014 0.0016 0.0015 0.0015 0.0016 0.0017 0.0017 0.0019 0.0019 0.002 0.0023 0.0024 0.0022 0.0022 0.0019 0.0019 0.0021 0.0018 0.0018 0.0019 0.0014 0.0018 0.0016 0.0017 0.0017 0.0016 0.0016 0.0014 0.0014 0.0013 0.0009 0.001 0.0008 0.0008 0.0008 0.0007 0.0007 0.0004 0.0003 0.0002 0.0004 0.0004 0.0003 0.0002 0.0002 0.0003 0.0007 0.0012 0.0015 0.0017 0.002 0.0022 0.0024 0.0024 0.0021 0.0019 0.0016 0.0015 0.0013 0.0015 0.0019 0.0019 0.0019 0.0019 0.0016 0.0014 0.0012 0.0014 0.0016 0.0012 0.001 0.0012 0.0011 0.0013 0.0013 0.0016 0.0016 0.0016 0.0013 0.0011 0.0012 0.0012 0.0012 0.0011 0.001 0.001 0.0009 0.001 0.0009 0.0009 0.0012 0.001 0.001 0.0011 0.0009 0.0009 0.001 0.0012 0.0011 0.0009 0.001 0.0009 0.001 0.0009 0.0007 0.0011 0.0011 0.001 0.0008 0.001 0.0008 0.0008 0.0008 0.0007 0.0007 0.0008 0.0005 0.0006 0.0007 0.0008 0.0009 0.0008 0.0008 0.0006 0.0007 0.0005 0.0007 0.0005 0.0005 0.0004 0.0005 0.0005 0.0005 0.0006 0.0005 0.0005 0.0008 0.0006 0.0008 0.0008 0.0007 0.0006 0.0004 0.0005 0.0005 0.0004 0.0003 0.0002 0.0003 0.0003 0.0002 0.0005 0.0001 0.0002 0.0002 0.0003 0.0002 0.0002 0.0003 0.0001 0.0 0.0 0.0 0.0 0.0001 0.0 0.0 0.0 0.0002 0.0002 0.0011 0.0018 0.0021 0.002 0.0014 0.0007 0.0005 0.0002 0.0002", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 41.781, + "text": "SP AP 正 路 过 烟 村 里 人 家 AP 恰 似 当 年 故 里 正 飞 花 SP", + "ph_seq": "SP AP zh eng l u g uo y En c un l i r en j ia AP q ia s i0 d ang n ian g u l i zh eng f ei h ua SP", + "ph_dur": "0.14 0.4 0.06 0.178 0.06 0.193 0.045 0.401 0.075 0.163 0.075 0.194 0.045 0.401 0.075 0.401 0.075 0.476 0.148 0.09 0.163 0.075 0.193 0.045 0.163 0.075 0.194 0.045 0.178 0.06 0.178 0.06 0.193 0.045 0.163 0.075 0.952 0.19", + "ph_num": "1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 D4 B3 B3 rest B3 A3 G3 G3 E3 G3 A3 G3 B3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.238 0.239 0.476 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.239 0.238 0.238 0.238 0.238 0.952 0.19", + "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "154.7 154.9 154.6 154.5 154.5 154.6 154.6 154.4 154.9 154.8 155.3 155.3 155.0 155.1 155.4 155.3 155.2 155.5 155.4 155.6 155.9 155.5 155.9 155.5 155.7 155.6 155.4 155.5 155.2 155.5 156.0 155.4 155.6 155.5 155.8 155.6 155.8 155.6 155.6 155.7 155.5 155.4 155.5 155.3 155.5 155.5 155.6 154.9 155.2 154.8 154.6 154.1 154.3 159.0 167.1 178.1 192.1 203.8 220.8 235.0 242.0 246.9 248.8 249.5 247.4 244.0 241.4 237.7 234.6 233.4 228.2 224.2 223.7 221.7 219.1 218.0 217.9 218.0 218.8 220.0 220.7 220.8 220.1 218.4 213.0 204.4 203.8 205.2 219.9 231.8 247.0 259.3 252.9 251.1 253.4 254.0 253.5 252.3 250.3 245.5 239.9 234.2 230.2 226.6 226.0 227.3 232.2 239.9 250.5 258.5 262.2 261.0 257.0 249.3 240.3 233.8 230.7 229.0 229.8 234.5 241.2 246.4 252.1 255.7 259.4 262.3 262.6 262.1 260.2 257.3 253.6 252.5 252.9 251.7 249.6 249.2 250.6 250.3 250.2 248.8 248.1 247.9 248.0 248.8 248.1 246.8 243.1 236.8 235.5 235.7 235.0 234.7 233.9 233.8 234.2 222.8 217.8 215.5 216.3 217.5 218.1 218.4 219.8 222.1 224.0 224.3 223.5 222.9 222.8 222.9 221.5 220.5 220.1 220.1 218.3 218.2 220.0 220.3 220.1 220.2 
219.9 218.5 217.6 218.1 219.6 220.8 221.0 220.0 219.8 223.1 231.2 243.1 246.7 248.9 249.1 249.0 247.6 245.5 244.6 243.6 243.7 244.1 245.9 247.7 248.0 248.1 248.8 248.6 248.2 248.5 249.8 251.0 255.2 260.0 272.7 284.4 294.2 302.6 304.5 304.3 301.8 299.9 296.3 293.7 292.5 292.0 292.6 293.9 295.3 297.2 298.8 299.2 296.3 288.3 275.2 259.3 249.3 244.5 243.6 245.7 246.4 247.7 250.7 251.2 252.6 251.5 251.3 248.9 245.5 242.6 239.8 235.6 235.3 240.5 244.6 249.5 253.8 255.7 257.0 257.7 257.4 257.5 254.1 251.9 246.3 242.0 236.3 232.2 229.9 232.1 236.5 241.7 248.6 254.2 257.6 259.4 260.6 259.8 258.6 254.4 244.5 234.7 227.7 225.2 226.1 227.9 228.4 229.1 231.2 235.7 242.3 250.3 253.9 256.0 255.1 253.5 255.2 257.5 257.1 252.2 253.8 254.1 254.9 256.4 256.6 258.1 258.7 258.1 257.3 258.4 259.5 261.4 263.6 266.0 268.0 271.2 274.5 278.2 277.3 272.2 262.1 253.2 247.1 244.2 243.2 243.9 244.4 246.2 246.4 244.6 240.1 234.9 226.8 226.3 224.9 223.9 223.9 222.5 224.0 220.8 218.9 219.3 219.4 220.1 220.1 219.3 219.7 220.1 219.7 219.8 219.2 218.6 214.7 205.9 190.8 191.4 193.9 196.4 197.4 198.0 197.8 195.0 194.1 194.5 195.8 196.8 196.4 196.3 196.0 195.6 195.5 194.9 194.7 194.6 194.0 195.2 195.2 195.1 195.7 197.0 196.6 196.2 196.4 196.3 195.9 195.0 194.3 194.5 195.0 196.5 198.1 198.6 196.3 190.9 182.0 169.6 165.2 166.9 167.9 168.9 171.3 164.5 163.3 163.0 162.5 162.3 163.0 164.2 164.9 166.2 167.0 167.0 164.3 163.5 161.5 158.7 154.7 149.9 140.9 141.7 145.0 146.1 148.7 152.2 158.1 166.3 175.2 184.1 191.0 196.0 198.4 199.0 197.1 194.5 190.4 179.9 180.3 189.5 197.8 207.1 216.7 227.8 225.5 223.3 223.4 223.7 223.3 221.1 219.4 218.7 219.4 220.1 220.2 218.5 213.8 207.4 199.9 200.3 199.5 200.2 199.4 201.0 198.8 194.0 191.0 191.6 193.7 196.2 197.9 196.8 197.8 196.3 191.3 180.0 176.2 180.0 183.6 187.7 191.5 195.5 199.2 202.2 205.9 206.1 208.2 209.8 212.7 214.3 214.2 214.6 215.6 216.6 217.1 217.6 218.5 218.9 218.5 218.1 218.2 218.9 219.0 218.5 218.7 220.4 221.6 222.2 223.1 226.3 231.3 235.6 240.7 246.5 249.2 250.2 250.4 251.1 250.9 250.1 247.8 245.2 242.3 241.3 241.4 242.5 244.6 250.0 256.5 261.0 262.4 261.9 259.1 254.7 248.9 243.2 236.7 233.7 232.8 233.9 236.4 241.1 246.8 253.5 260.2 263.6 263.8 261.1 255.1 248.4 241.4 231.9 224.9 223.1 231.0 244.0 247.8 243.7 244.6 246.9 248.8 248.5 248.4 248.7 249.4 250.0 250.1 247.5 243.7 243.5 243.5 244.5 245.5 244.6 244.9 245.4 245.0 245.1 246.2 246.0 245.1 246.4 246.2", + "f0_timestep": "0.011609977324263039", + "energy": "0.0004 0.0007 0.0001 0.0002 0.0003 0.0006 0.0008 0.001 0.0006 0.0008 0.0005 0.0007 0.0007 0.001 0.0008 0.0012 0.0015 0.0017 0.0024 0.0018 0.0023 0.0019 0.0024 0.0022 0.0027 0.002 0.0021 0.003 0.0022 0.0022 0.0022 0.0026 0.0028 0.0027 0.0028 0.0025 0.0022 0.0025 0.0019 0.0018 0.0017 0.0011 0.0016 0.0013 0.0011 0.001 0.0025 0.0079 0.0099 0.0103 0.0231 0.0418 0.0533 0.062 0.0667 0.0643 0.0635 0.0632 0.063 0.0631 0.067 0.0702 0.0723 0.0763 0.078 0.0792 0.0804 0.0786 0.0775 0.0761 0.0751 0.0757 0.0769 0.0785 0.0793 0.0815 0.0824 0.0813 0.081 0.0797 0.0764 0.0731 0.0663 0.0558 0.0411 0.0253 0.0136 0.0051 0.0036 0.0055 0.0176 0.0413 0.0591 0.0728 0.0845 0.088 0.0909 0.0926 0.09 0.0881 0.0834 0.078 0.0727 0.0665 0.0608 0.0574 0.0579 0.0608 0.0636 0.0641 0.0655 0.0635 0.0623 0.0612 0.0581 0.0562 0.0557 0.0541 0.0537 0.0548 0.0549 0.0567 0.0588 0.0621 0.066 0.0692 0.072 0.0746 0.0757 0.0763 0.077 0.077 0.0797 0.0815 0.0848 0.0879 0.0898 0.0915 0.0943 0.0922 0.0924 0.0899 0.0866 0.0873 0.0854 0.0814 0.0706 0.057 0.0402 0.0252 0.022 0.0192 0.0143 0.0299 0.0524 0.0685 0.0807 0.0874 0.0875 
0.0877 0.0879 0.0872 0.0861 0.0858 0.0871 0.0886 0.09 0.0907 0.0916 0.092 0.0902 0.0869 0.08 0.075 0.077 0.0795 0.0842 0.0884 0.087 0.0862 0.0855 0.0831 0.0811 0.0811 0.0784 0.0769 0.0752 0.0699 0.0656 0.0636 0.0637 0.0684 0.0725 0.0757 0.0777 0.0789 0.0809 0.0821 0.0836 0.0839 0.084 0.0832 0.0816 0.082 0.0814 0.0823 0.0822 0.0811 0.0785 0.0757 0.0705 0.0639 0.0586 0.0566 0.0613 0.0732 0.0837 0.0938 0.1007 0.1043 0.1053 0.1029 0.1006 0.0969 0.0938 0.0925 0.0901 0.0891 0.0865 0.0847 0.0855 0.0858 0.089 0.0915 0.0929 0.093 0.0919 0.0905 0.0871 0.0865 0.0849 0.0828 0.0837 0.0845 0.0873 0.0903 0.0914 0.0926 0.0895 0.0797 0.0644 0.0478 0.0286 0.0235 0.0248 0.0348 0.0526 0.069 0.0827 0.091 0.0904 0.0845 0.0797 0.0778 0.0761 0.0739 0.0697 0.0655 0.0626 0.0625 0.0637 0.0634 0.0639 0.0627 0.0599 0.0586 0.0557 0.0538 0.0531 0.0518 0.0506 0.0494 0.049 0.0493 0.0488 0.0479 0.0452 0.0427 0.0397 0.0378 0.0355 0.034 0.0329 0.0334 0.034 0.0349 0.0292 0.0252 0.0185 0.0108 0.0043 0.0038 0.0028 0.0034 0.0036 0.003 0.0025 0.0017 0.0017 0.0008 0.0012 0.002 0.0055 0.0093 0.0111 0.0138 0.0153 0.0207 0.0452 0.0622 0.077 0.0871 0.0866 0.0854 0.082 0.0786 0.077 0.0754 0.0741 0.0728 0.0681 0.0584 0.046 0.033 0.0258 0.0267 0.028 0.0319 0.0469 0.0575 0.0673 0.0761 0.0795 0.0829 0.084 0.0853 0.0855 0.0849 0.086 0.085 0.0818 0.0758 0.0635 0.0482 0.0322 0.0153 0.0086 0.0093 0.0322 0.0504 0.0617 0.0701 0.0714 0.0691 0.0676 0.0666 0.0645 0.0629 0.0611 0.0632 0.066 0.0693 0.0709 0.0721 0.0722 0.0711 0.0718 0.0704 0.0684 0.0683 0.0691 0.069 0.0691 0.0688 0.0658 0.0629 0.0613 0.0594 0.0591 0.0587 0.0584 0.0558 0.0532 0.046 0.0349 0.0231 0.0117 0.0122 0.0318 0.0485 0.0633 0.0712 0.0761 0.0764 0.0779 0.079 0.0791 0.0795 0.0779 0.0814 0.0808 0.0798 0.0819 0.0794 0.0775 0.0733 0.0674 0.0596 0.0558 0.0604 0.0665 0.0736 0.0765 0.075 0.0705 0.0667 0.0645 0.0621 0.0608 0.058 0.0555 0.0551 0.0532 0.0473 0.0389 0.0276 0.0115 0.0089 0.0109 0.0116 0.0185 0.042 0.0549 0.066 0.0725 0.0683 0.0656 0.0623 0.0606 0.0604 0.0599 0.0604 0.0603 0.059 0.0536 0.0448 0.0338 0.021 0.0122 0.0055 0.0164 0.0438 0.0598 0.0747 0.0851 0.0881 0.09 0.0881 0.0865 0.0832 0.0774 0.0685 0.0551 0.0384 0.0211 0.0099 0.0089 0.0085 0.0095 0.0095 0.0092 0.0218 0.0382 0.0551 0.0708 0.0832 0.09 0.0926 0.0943 0.0955 0.096 0.0958 0.0951 0.0941 0.0934 0.0945 0.0941 0.093 0.0906 0.088 0.0843 0.0797 0.0745 0.0671 0.0637 0.0611 0.0589 0.0589 0.0593 0.0608 0.0625 0.0664 0.0695 0.0722 0.0747 0.0745 0.0744 0.0732 0.0704 0.0669 0.0617 0.0569 0.0516 0.0491 0.0473 0.0491 0.0515 0.0545 0.058 0.0592 0.0597 0.0585 0.0548 0.0489 0.0422 0.0361 0.031 0.0282 0.0271 0.0268 0.0284 0.0301 0.0324 0.035 0.0353 0.0368 0.0359 0.0335 0.0306 0.0253 0.0202 0.0166 0.0144 0.0151 0.0165 0.0161 0.0175 0.0184 0.019 0.0208 0.0219 0.0219 0.0208 0.019 0.0159 0.0133 0.0075 0.0037 0.0022 0.0013 0.0009 0.0007 0.0007 0.0002 0.0002 0.0 0.0 0.0001 0.0 0.0 0.0003", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0003 0.0003 0.0003 0.0005 0.0003 0.0006 0.0005 0.0005 0.0006 0.0007 0.0007 0.001 0.0009 0.001 0.001 0.0013 0.0016 0.0018 0.0018 0.0021 0.0022 0.0022 0.0026 0.0027 0.003 0.0031 0.0029 0.0029 0.0028 0.0027 0.0026 0.0028 0.0027 0.0028 0.0025 0.0023 0.0022 0.0021 0.0019 0.0015 0.0012 0.0007 0.0005 0.0007 0.0008 0.004 0.0073 0.0104 0.0115 0.0114 0.0099 0.0068 0.004 0.0022 0.0016 0.0017 0.0017 0.0013 0.0011 0.0008 0.0005 0.0003 0.0006 0.0003 0.0005 0.0006 0.0005 0.0006 0.0007 0.0006 0.0009 0.0012 0.0013 0.0013 0.0013 0.0012 0.0011 0.001 0.0008 0.0005 0.0005 0.0003 0.0005 0.0003 0.0003 
0.0011 0.0013 0.0023 0.0035 0.0043 0.0042 0.0038 0.0022 0.0008 0.0007 0.0007 0.0007 0.0009 0.0011 0.0011 0.0012 0.001 0.0011 0.001 0.0011 0.0011 0.0011 0.0011 0.0012 0.0013 0.0011 0.0012 0.0012 0.0012 0.0013 0.0011 0.001 0.0009 0.001 0.0009 0.001 0.001 0.001 0.0009 0.0008 0.0011 0.0012 0.0014 0.0013 0.0013 0.0013 0.0012 0.0016 0.0016 0.0015 0.0017 0.0017 0.0015 0.0015 0.0013 0.0012 0.0009 0.0006 0.0007 0.0006 0.0054 0.0127 0.0186 0.0228 0.0234 0.0209 0.0155 0.0092 0.0055 0.0028 0.0018 0.0014 0.0012 0.0012 0.0014 0.0013 0.0011 0.0012 0.001 0.001 0.0008 0.001 0.0012 0.001 0.001 0.0009 0.0012 0.0014 0.0018 0.0022 0.002 0.002 0.0019 0.0017 0.0018 0.0018 0.0019 0.002 0.0023 0.0026 0.0026 0.0024 0.0024 0.002 0.0023 0.0026 0.0026 0.0024 0.0024 0.0018 0.0014 0.0013 0.0013 0.0013 0.0014 0.0009 0.0011 0.0013 0.0015 0.0017 0.0016 0.0013 0.0014 0.0016 0.0024 0.0025 0.0031 0.0037 0.004 0.0037 0.0028 0.002 0.0016 0.0013 0.0017 0.0021 0.0022 0.0024 0.0023 0.0023 0.0024 0.0022 0.0015 0.0009 0.0007 0.0005 0.001 0.0013 0.0015 0.0014 0.0015 0.0013 0.0009 0.0009 0.0009 0.0007 0.0008 0.0009 0.0007 0.0008 0.0008 0.0006 0.0006 0.0015 0.0058 0.0126 0.019 0.024 0.0268 0.0252 0.0208 0.015 0.0076 0.0038 0.003 0.0025 0.0023 0.0023 0.0025 0.0024 0.0027 0.0028 0.0029 0.0029 0.0029 0.0027 0.0028 0.0023 0.0021 0.0018 0.0014 0.0015 0.0016 0.0016 0.0018 0.0018 0.002 0.0019 0.0019 0.0019 0.0018 0.0018 0.0016 0.0013 0.0013 0.0012 0.001 0.001 0.0012 0.0013 0.0014 0.0019 0.0021 0.0023 0.0024 0.0026 0.003 0.0032 0.0034 0.0033 0.0028 0.0022 0.0014 0.0007 0.0007 0.002 0.0054 0.0082 0.0113 0.0144 0.0153 0.0141 0.0118 0.0081 0.0047 0.0037 0.0028 0.0023 0.0019 0.002 0.0021 0.0023 0.0024 0.0025 0.0037 0.0114 0.0175 0.0243 0.0268 0.0296 0.033 0.0303 0.0281 0.0217 0.0103 0.0026 0.0015 0.0017 0.0016 0.0014 0.0014 0.0016 0.0014 0.0013 0.0013 0.0011 0.0012 0.0011 0.001 0.0018 0.0028 0.0042 0.0051 0.0051 0.005 0.0042 0.0032 0.0025 0.0022 0.0021 0.0019 0.0016 0.0016 0.0011 0.0009 0.0008 0.0007 0.0007 0.0006 0.0005 0.0008 0.0007 0.0011 0.0014 0.0014 0.0016 0.0017 0.0014 0.0015 0.0013 0.0013 0.001 0.0007 0.0007 0.0004 0.0004 0.0003 0.0002 0.0007 0.0015 0.0034 0.0063 0.0076 0.0068 0.0064 0.004 0.0007 0.0003 0.0 0.0001 0.0002 0.0002 0.0002 0.0003 0.0003 0.0006 0.001 0.0012 0.0024 0.0038 0.0038 0.0041 0.0043 0.0034 0.0034 0.003 0.0028 0.0023 0.0021 0.0022 0.0019 0.0014 0.0013 0.0013 0.001 0.001 0.0012 0.0011 0.0012 0.0013 0.0027 0.0067 0.0116 0.0134 0.0133 0.0124 0.0078 0.0032 0.0022 0.0018 0.0017 0.0014 0.0012 0.001 0.0008 0.0007 0.0007 0.0008 0.0005 0.0003 0.0005 0.001 0.002 0.003 0.0034 0.0034 0.0029 0.0016 0.0011 0.0011 0.0014 0.0015 0.0015 0.0013 0.0014 0.0013 0.0014 0.0018 0.0023 0.0029 0.0041 0.0055 0.0065 0.0069 0.0071 0.0064 0.0052 0.0038 0.0021 0.0017 0.0015 0.0014 0.0013 0.0014 0.0014 0.0015 0.0014 0.0014 0.0013 0.0011 0.0012 0.0012 0.0014 0.0014 0.0015 0.0018 0.0018 0.0017 0.0015 0.0016 0.0014 0.0015 0.0016 0.0017 0.0018 0.002 0.002 0.0017 0.0018 0.0017 0.0014 0.0017 0.0019 0.002 0.0019 0.0016 0.0016 0.0014 0.0012 0.0013 0.0012 0.0011 0.001 0.0012 0.0013 0.0014 0.0013 0.0014 0.0012 0.0012 0.0009 0.001 0.0009 0.0008 0.0008 0.0008 0.0006 0.001 0.0008 0.0009 0.001 0.0008 0.0008 0.0008 0.0005 0.0004 0.0004 0.0005 0.0007 0.0016 0.0003 0.0005 0.0005 0.0004 0.0006 0.0004 0.0003 0.0003 0.0005 0.0006 0.0005 0.0007 0.0006 0.0005 0.0006 0.0004 0.0004 0.0005 0.0006 0.0005 0.0003 0.0003 0.0002 0.0002 0.0004", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 48.476, + "text": "SP AP 醉 过 风 喝 过 茶 SP", + "ph_seq": "SP AP z ui g 
uo f eng h e g uo ch a SP", + "ph_dur": "0.097 0.4 0.075 0.193 0.045 0.163 0.075 1.353 0.075 0.193 0.045 0.164 0.075 0.714 0.143", + "ph_num": "1 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 D4 E4 D4 E4 A3 rest", + "note_dur": "0.172 0.4 0.238 0.238 1.428 0.238 0.239 0.714 0.143", + "note_slur": "0 0 0 0 0 0 0 0 0", + "f0_seq": "264.3 264.5 264.9 264.7 264.0 263.9 263.8 263.7 263.6 263.5 263.3 263.1 263.0 262.2 262.1 262.2 262.4 262.1 261.7 262.1 262.0 262.3 262.0 261.7 261.7 261.9 261.1 261.5 262.0 261.6 261.1 260.9 260.5 261.4 260.8 261.2 260.6 260.8 260.6 260.1 260.0 259.3 259.5 259.0 259.2 258.4 258.0 257.5 257.0 256.2 256.3 249.7 247.1 246.4 246.6 246.3 245.2 245.1 245.4 245.6 246.0 247.3 243.9 239.3 231.1 245.7 260.2 275.7 291.2 304.0 297.8 297.4 298.9 298.5 298.4 297.2 296.5 295.2 295.6 295.6 294.3 289.0 279.3 267.0 274.0 282.6 292.0 301.6 312.2 324.2 339.4 336.1 333.4 334.2 334.2 332.0 328.0 324.5 319.7 315.9 314.4 314.7 317.2 321.6 325.4 327.3 330.2 331.7 330.8 329.7 329.1 328.4 328.3 329.2 329.6 328.7 330.1 331.6 332.2 333.0 333.6 334.5 334.3 332.1 330.4 327.9 324.5 322.1 320.6 322.3 326.4 331.0 337.2 342.4 346.1 347.6 347.5 347.1 343.7 338.2 331.5 325.8 321.6 320.6 321.8 324.2 328.4 332.9 339.7 345.6 349.6 351.9 351.6 349.0 344.6 340.2 334.1 327.5 321.6 318.3 317.5 320.0 325.5 332.6 339.8 346.2 351.4 354.0 354.5 351.7 347.2 341.6 335.7 328.4 324.3 320.1 318.3 319.1 321.8 326.3 331.3 337.1 341.9 344.8 345.4 344.6 341.9 337.4 334.4 329.5 322.5 320.2 321.1 322.5 328.0 329.3 329.4 331.9 335.2 338.4 341.4 339.9 334.5 324.0 313.3 305.5 300.9 298.5 296.5 294.6 292.8 291.2 289.4 290.6 290.1 290.6 291.0 292.3 293.2 293.5 292.9 292.0 291.1 291.2 292.3 293.4 291.2 285.6 272.9 285.6 301.5 316.1 331.3 344.5 338.7 336.1 335.5 336.0 334.4 334.7 334.3 333.5 331.3 327.3 321.6 311.6 291.5 277.8 265.5 253.1 242.4 231.8 221.1 210.4 204.5 204.3 210.1 215.8 221.1 225.5 225.7 224.9 223.2 218.2 212.5 207.4 203.3 202.3 206.8 214.3 222.9 229.8 233.1 235.4 234.9 233.3 226.2 220.1 211.8 206.6 205.5 207.7 212.8 216.2 220.5 223.8 226.4 225.8 223.8 220.7 216.1 212.6 214.2 218.0 223.0 228.1 231.9 232.4 232.2 230.3 226.4 221.5 215.1 210.1 207.2 206.3 209.0 213.2 218.4 225.3 231.9 236.5 235.8 233.6 228.6 219.3 211.6 204.8 201.5 199.3 198.5 198.7 199.5 200.1 200.0 200.6 201.4 201.6 203.4", + "f0_timestep": "0.011609977324263039", + "energy": "0.0002 0.0 0.0 0.0 0.0 0.0 0.0001 0.0002 0.0 0.0003 0.0005 0.0006 0.0012 0.0012 0.0017 0.0019 0.0024 0.003 0.003 0.0031 0.0035 0.0038 0.0042 0.0042 0.0048 0.0052 0.005 0.005 0.0051 0.0056 0.0046 0.0046 0.0038 0.0038 0.0029 0.0024 0.0021 0.0018 0.0012 0.0003 0.0008 0.0007 0.0023 0.0044 0.0093 0.0141 0.0177 0.0204 0.0214 0.0455 0.0661 0.0823 0.0984 0.1039 0.1042 0.1048 0.1029 0.1014 0.0998 0.0973 0.0938 0.0867 0.075 0.0586 0.0409 0.0244 0.0117 0.0133 0.0443 0.0686 0.0881 0.1047 0.1122 0.1148 0.1168 0.1183 0.1189 0.1187 0.1191 0.1165 0.1112 0.0977 0.0781 0.0547 0.0227 0.0087 0.0059 0.0057 0.0041 0.0282 0.06 0.0898 0.1127 0.13 0.1353 0.1354 0.1338 0.1275 0.1247 0.121 0.1196 0.1196 0.1215 0.1219 0.1213 0.1234 0.1225 0.1243 0.1259 0.126 0.1264 0.1257 0.125 0.1279 0.1294 0.1302 0.1309 0.1322 0.1329 0.1358 0.1379 0.1367 0.1358 0.132 0.1281 0.1236 0.1192 0.1149 0.1131 0.1105 0.1103 0.111 0.1104 0.11 0.1082 0.1063 0.1042 0.1023 0.1005 0.1009 0.1017 0.1017 0.1031 0.1018 0.0986 0.0972 0.094 0.0922 0.091 0.0883 0.0874 0.0865 0.0871 0.088 0.0883 0.0903 0.091 0.092 0.0931 0.0926 0.0919 0.0912 0.0899 0.0877 0.0863 0.0837 0.0819 0.0811 0.082 0.0834 0.0845 0.0861 0.0859 0.0848 0.0828 0.0799 
0.0797 0.078 0.0767 0.0758 0.0738 0.0721 0.071 0.0704 0.0716 0.0707 0.0704 0.0695 0.0674 0.0652 0.0624 0.0608 0.0599 0.0602 0.0619 0.0624 0.0637 0.064 0.0625 0.0619 0.0617 0.0597 0.0591 0.055 0.0454 0.0357 0.0185 0.0051 0.004 0.0061 0.0079 0.0073 0.0186 0.049 0.0664 0.0828 0.0953 0.1006 0.1035 0.1022 0.1007 0.0964 0.0933 0.0897 0.0845 0.0751 0.0644 0.0502 0.0329 0.0195 0.0097 0.01 0.032 0.0516 0.0668 0.0818 0.0916 0.097 0.101 0.0998 0.0976 0.0945 0.0907 0.0894 0.0833 0.0706 0.0556 0.0345 0.0185 0.0136 0.0164 0.0165 0.0158 0.0242 0.0366 0.0476 0.0606 0.071 0.0767 0.0817 0.0849 0.0856 0.0868 0.0849 0.0786 0.0718 0.063 0.0557 0.0538 0.0548 0.0582 0.062 0.0644 0.0647 0.0651 0.0631 0.0625 0.0598 0.0554 0.0502 0.0453 0.0417 0.0417 0.0423 0.0431 0.0458 0.0469 0.048 0.0475 0.0453 0.0426 0.0401 0.0358 0.0335 0.0324 0.0317 0.0322 0.0318 0.033 0.0321 0.0328 0.0323 0.0304 0.0278 0.0254 0.0227 0.0211 0.0207 0.0205 0.021 0.0208 0.0213 0.0206 0.02 0.0193 0.0179 0.0157 0.0131 0.0094 0.0054 0.0027 0.0005 0.0003 0.0001 0.0 0.0002 0.0004 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0002 0.0002 0.0 0.0001 0.0004 0.0002 0.0004 0.0004 0.0008 0.0012 0.0014 0.0014 0.0016 0.0019 0.0023 0.0026 0.0029 0.0034 0.004 0.0041 0.0045 0.0047 0.0048 0.0051 0.0055 0.0057 0.0058 0.0058 0.0055 0.0056 0.0053 0.0049 0.0037 0.0031 0.0028 0.0023 0.0017 0.0014 0.001 0.0009 0.0013 0.0027 0.0059 0.0095 0.0149 0.0188 0.0204 0.0202 0.0168 0.0124 0.0072 0.0028 0.0024 0.0022 0.002 0.0022 0.0021 0.002 0.0021 0.0019 0.0016 0.0011 0.0014 0.003 0.0052 0.0075 0.0078 0.0074 0.0058 0.0023 0.0012 0.0011 0.0014 0.0013 0.0015 0.0015 0.0014 0.0015 0.0017 0.0021 0.0022 0.0029 0.0039 0.0048 0.0058 0.0062 0.0061 0.0054 0.0044 0.003 0.0024 0.0019 0.0022 0.0022 0.0019 0.0019 0.0021 0.002 0.0019 0.0019 0.0019 0.0019 0.0019 0.0021 0.0021 0.0021 0.0022 0.0018 0.0018 0.0018 0.0018 0.0021 0.0024 0.0021 0.0021 0.0023 0.0025 0.0025 0.0025 0.0025 0.0023 0.0024 0.0025 0.0027 0.0028 0.0028 0.0028 0.003 0.0033 0.0034 0.003 0.003 0.003 0.0027 0.0025 0.0022 0.0019 0.0021 0.0026 0.003 0.0031 0.0031 0.003 0.0027 0.0021 0.0016 0.0015 0.0015 0.0017 0.002 0.0019 0.002 0.0017 0.0018 0.0017 0.002 0.0024 0.0023 0.0023 0.0018 0.0018 0.0016 0.0015 0.0015 0.0016 0.0016 0.0015 0.0016 0.0016 0.0017 0.0018 0.0018 0.0019 0.0018 0.0018 0.0014 0.0013 0.0011 0.0012 0.0012 0.0011 0.0011 0.001 0.001 0.001 0.001 0.0009 0.0009 0.0009 0.0008 0.0008 0.0008 0.0008 0.0007 0.0006 0.0007 0.0006 0.0006 0.0008 0.0007 0.0007 0.001 0.0011 0.0018 0.0028 0.0029 0.0042 0.0055 0.0067 0.0085 0.0083 0.0074 0.0062 0.0031 0.0018 0.0015 0.0017 0.0016 0.0018 0.0015 0.0014 0.0011 0.0011 0.001 0.0007 0.0007 0.0007 0.0013 0.0022 0.0032 0.0033 0.0029 0.0021 0.001 0.0005 0.0009 0.0009 0.001 0.001 0.001 0.0008 0.001 0.0012 0.0014 0.0024 0.0033 0.0051 0.0078 0.0117 0.0162 0.0171 0.0166 0.0139 0.0088 0.0038 0.0027 0.0026 0.0026 0.0025 0.0024 0.0021 0.0022 0.0023 0.0021 0.0022 0.0021 0.0019 0.0017 0.0016 0.0015 0.0016 0.0015 0.0014 0.0015 0.0017 0.0016 0.0017 0.0017 0.0016 0.0012 0.0011 0.0011 0.001 0.0011 0.0009 0.0009 0.0009 0.001 0.0008 0.0011 0.0009 0.0008 0.0008 0.0006 0.0009 0.0008 0.0005 0.0006 0.0007 0.0006 0.0005 0.0007 0.0005 0.0005 0.0004 0.0004 0.0002 0.0002 0.0004 0.0002 0.0003 0.0006 0.0004 0.0003 0.0004 0.0008 0.0019 0.0016 0.0018 0.0015 0.0004 0.0003 0.0002 0.0 0.0 0.0001 0.0 0.0003", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 52.239, + "text": "AP 寻 常 巷 口 寻 个 酒 家 SP", + "ph_seq": "AP x vn ch ang x iang k ou x vn g e j iu j ia SP", + 
"ph_dur": "0.275 0.105 0.178 0.06 0.163 0.075 0.193 0.045 0.595 0.12 0.193 0.045 0.401 0.075 0.163 0.075 0.952 0.095", + "ph_num": "2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest B3 A3 B3 D4 B3 A3 B3 G3 rest", + "note_dur": "0.38 0.238 0.238 0.238 0.715 0.238 0.476 0.238 0.952 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0", + "f0_seq": "196.8 197.5 197.4 197.9 197.2 197.3 197.2 197.1 196.7 196.6 196.4 196.4 196.2 195.9 195.9 196.1 196.1 196.3 196.6 197.0 196.6 196.5 196.2 196.6 196.9 196.8 196.8 196.6 196.4 196.5 195.9 196.1 195.7 195.8 193.3 192.3 194.9 201.7 211.9 224.0 234.0 242.5 248.6 249.7 247.7 244.4 240.8 234.4 225.1 219.5 220.8 220.5 222.6 226.9 223.0 219.4 218.5 217.8 218.9 219.1 219.3 219.2 219.4 219.8 218.7 217.6 218.8 216.2 212.5 221.4 228.1 236.0 243.7 251.0 254.8 249.3 246.1 245.1 245.5 245.2 244.4 244.7 245.7 246.5 248.0 250.2 250.5 249.4 244.7 238.0 235.8 244.1 255.4 267.4 281.9 288.7 291.3 294.3 295.2 296.7 293.9 291.1 288.0 285.2 280.5 277.1 275.0 274.7 275.6 278.2 280.8 286.4 290.1 293.8 295.0 295.3 294.4 292.7 289.5 286.5 285.2 284.3 284.9 285.9 290.1 294.2 298.3 302.8 305.9 304.7 301.5 295.9 290.5 284.0 280.7 279.5 282.8 287.0 292.8 298.7 304.8 309.0 308.2 305.8 300.7 296.4 296.7 293.7 292.3 293.1 293.9 294.2 294.5 294.6 293.1 283.6 267.8 255.0 251.9 248.4 248.1 247.3 246.6 246.8 246.0 246.3 245.4 246.1 245.2 242.8 235.7 228.6 222.6 217.2 213.8 212.0 216.5 215.4 218.4 220.6 222.4 222.8 222.5 221.3 220.9 219.7 220.3 221.6 221.3 220.7 220.6 220.6 221.3 221.0 219.9 219.5 219.7 218.6 219.4 220.4 220.1 220.2 220.7 220.5 219.6 218.4 218.5 220.1 221.4 219.1 205.4 208.1 216.8 224.2 231.9 240.8 250.2 261.7 253.3 252.8 251.5 252.4 250.5 248.1 246.7 247.6 247.5 247.3 246.3 241.6 230.4 227.6 222.7 218.1 213.1 207.9 204.1 198.3 192.7 192.4 196.2 198.7 200.5 200.9 200.3 198.4 194.9 191.0 188.0 186.3 185.5 188.3 192.7 199.1 204.3 207.5 207.3 207.1 205.4 200.7 196.0 190.5 185.6 184.8 185.6 190.4 195.2 200.1 205.7 209.3 210.2 210.3 206.6 200.7 192.1 185.3 182.7 181.6 183.6 187.7 193.7 198.7 204.1 207.8 208.3 207.1 203.6 196.8 188.5 182.1 179.0 178.6 181.5 186.2 190.6 196.6 203.1 207.3 209.9 211.0 208.6 202.6 194.8 187.0 182.9 180.9 181.3 182.7 186.9 191.8 198.1 203.3 205.0 205.7 206.6 206.6 205.1 201.3 198.8 196.9 195.1 192.7 189.3 187.0 186.3 183.9 182.6 182.1", + "f0_timestep": "0.011609977324263039", + "energy": "0.0009 0.0011 0.001 0.0017 0.0018 0.0023 0.0034 0.0039 0.0042 0.0046 0.0053 0.0058 0.0061 0.0061 0.0061 0.0062 0.0052 0.0048 0.0046 0.0037 0.003 0.0023 0.0017 0.0016 0.0016 0.0037 0.0057 0.0089 0.0117 0.0136 0.0133 0.0287 0.0482 0.0615 0.0731 0.0773 0.0764 0.0732 0.0703 0.0697 0.0711 0.0754 0.0786 0.0812 0.0823 0.0818 0.0793 0.0721 0.06 0.0458 0.028 0.0193 0.0344 0.0487 0.0624 0.0731 0.0756 0.0746 0.072 0.0703 0.0708 0.0708 0.0713 0.0729 0.0738 0.0751 0.0706 0.0609 0.0491 0.0345 0.0215 0.0193 0.0207 0.0412 0.0641 0.0788 0.0868 0.0895 0.0856 0.0821 0.0818 0.0809 0.0782 0.0773 0.0768 0.0774 0.0777 0.0738 0.0649 0.0506 0.0359 0.0206 0.0138 0.0122 0.0329 0.0534 0.0722 0.0856 0.0912 0.0954 0.0948 0.0947 0.0956 0.0932 0.0923 0.09 0.0862 0.0838 0.0823 0.0834 0.0858 0.0867 0.0884 0.0907 0.0915 0.0923 0.0929 0.0916 0.0915 0.0902 0.0886 0.0882 0.0894 0.0893 0.0897 0.089 0.0877 0.0881 0.0896 0.0897 0.0925 0.0928 0.0907 0.0893 0.0841 0.0798 0.0771 0.0754 0.0772 0.0797 0.0828 0.0847 0.0861 0.0864 0.0829 0.0747 0.0606 0.0444 0.0259 0.0143 0.0152 0.0171 0.0174 0.0151 0.0192 0.0521 0.0677 0.0795 0.0874 0.0838 0.0829 0.0808 0.0789 0.0786 0.0789 0.08 0.0817 0.083 0.0849 0.0842 0.08 0.0699 0.055 
0.0367 0.0288 0.0416 0.0585 0.0739 0.0833 0.0885 0.0894 0.0881 0.0887 0.0875 0.0885 0.0893 0.0894 0.0884 0.0881 0.0896 0.0884 0.0916 0.0918 0.0909 0.0918 0.0899 0.0903 0.0902 0.0902 0.0925 0.0922 0.0937 0.094 0.0958 0.0944 0.0933 0.0951 0.0931 0.0875 0.0764 0.0583 0.0374 0.0198 0.0171 0.0194 0.0178 0.0339 0.053 0.0684 0.0832 0.0895 0.0919 0.0917 0.0917 0.0912 0.0892 0.0896 0.0882 0.0813 0.0708 0.0546 0.0371 0.0213 0.0183 0.0176 0.0161 0.0329 0.0481 0.0622 0.0728 0.0773 0.0776 0.0787 0.0807 0.0795 0.0798 0.0768 0.0719 0.0664 0.0624 0.0581 0.0574 0.0585 0.0613 0.064 0.0683 0.0723 0.0744 0.0754 0.0742 0.0696 0.0651 0.0589 0.0526 0.0486 0.0472 0.0468 0.0477 0.05 0.0539 0.0578 0.0594 0.0598 0.0579 0.0529 0.048 0.0432 0.0392 0.0353 0.0342 0.0338 0.035 0.0377 0.0413 0.0438 0.0457 0.0452 0.0423 0.039 0.035 0.0308 0.0279 0.026 0.0241 0.023 0.0215 0.0214 0.0214 0.0233 0.0244 0.0256 0.026 0.0241 0.0217 0.0181 0.0155 0.0132 0.0114 0.011 0.0112 0.0115 0.0124 0.0128 0.0129 0.0136 0.013 0.0119 0.0101 0.0074 0.0039 0.0012 0.0008 0.0004 0.0 0.0005 0.0002 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0003 0.0006 0.001 0.0017 0.0022 0.0028 0.0034 0.0043 0.0051 0.0061 0.0066 0.0072 0.0076 0.0077 0.007 0.0064 0.006 0.0055 0.0053 0.0049 0.0043 0.0029 0.0026 0.0025 0.0028 0.0046 0.0066 0.0099 0.0127 0.0137 0.0138 0.0113 0.0076 0.0046 0.0025 0.002 0.0017 0.0016 0.0011 0.0013 0.0012 0.0011 0.0012 0.001 0.0009 0.0007 0.0009 0.0015 0.0056 0.0121 0.0148 0.0161 0.015 0.0104 0.0067 0.0034 0.0031 0.0027 0.0024 0.0021 0.002 0.002 0.002 0.0018 0.0017 0.002 0.0028 0.0061 0.0132 0.0166 0.0199 0.0216 0.0195 0.0167 0.0129 0.0091 0.0042 0.0033 0.0025 0.0024 0.002 0.002 0.0018 0.0014 0.0012 0.0008 0.0008 0.0009 0.0009 0.001 0.0043 0.0089 0.0126 0.013 0.0128 0.0107 0.0044 0.0034 0.0026 0.002 0.0018 0.0015 0.0013 0.0011 0.0012 0.0012 0.0014 0.0012 0.0013 0.0013 0.0009 0.0011 0.0009 0.0009 0.0011 0.0011 0.0012 0.0014 0.0013 0.0012 0.001 0.0009 0.0009 0.0008 0.0009 0.0009 0.0008 0.0007 0.0007 0.0008 0.001 0.001 0.0009 0.0012 0.0011 0.001 0.0008 0.0007 0.0008 0.0007 0.0006 0.0004 0.0005 0.0008 0.001 0.0013 0.0052 0.0083 0.0108 0.014 0.0157 0.0167 0.0173 0.0155 0.0134 0.0108 0.0073 0.0038 0.0016 0.0014 0.0011 0.0009 0.0007 0.0008 0.0006 0.0003 0.0003 0.0005 0.0 0.0 0.0001 0.0002 0.0006 0.0011 0.0017 0.0022 0.0022 0.0019 0.0016 0.0014 0.0013 0.0012 0.0012 0.0011 0.0014 0.0018 0.0014 0.0015 0.0015 0.0013 0.0014 0.0015 0.0015 0.0016 0.0018 0.0018 0.0016 0.0017 0.0015 0.0016 0.0018 0.0017 0.0019 0.0019 0.002 0.0021 0.0023 0.0021 0.0021 0.0023 0.0043 0.01 0.0158 0.0206 0.0233 0.0233 0.0206 0.016 0.0109 0.0059 0.002 0.0015 0.0013 0.0016 0.0013 0.0011 0.0011 0.0013 0.0018 0.004 0.0076 0.0116 0.0144 0.0174 0.0177 0.0162 0.0135 0.0096 0.0061 0.0026 0.0023 0.0023 0.0024 0.0026 0.0026 0.0025 0.0027 0.003 0.0031 0.0031 0.003 0.0027 0.0028 0.0028 0.0024 0.0025 0.0023 0.0025 0.0025 0.0026 0.0027 0.0024 0.0023 0.0021 0.0019 0.0019 0.0019 0.0019 0.0019 0.0018 0.0017 0.0018 0.0017 0.0019 0.0018 0.002 0.0019 0.0017 0.0015 0.0014 0.0014 0.0011 0.001 0.001 0.0009 0.0012 0.0015 0.0011 0.0011 0.0012 0.0011 0.0011 0.001 0.0008 0.0007 0.0007 0.0008 0.0006 0.0006 0.0004 0.0006 0.0007 0.0005 0.0007 0.0005 0.0005 0.0003 0.0003 0.0004 0.0002 0.0002 0.0002 0.0001 0.0003 0.0003 0.0005 0.0014 0.0016 0.002 0.0018 0.0013 0.0008 0.0005 0.0004 0.0003 0.0001 0.0001 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 56.048, + "text": "AP 在 座 皆 算 老 友 SP", + "ph_seq": "AP z ai z uo j ie s uan l ao y ou SP", + "ph_dur": 
"0.321 0.06 0.178 0.06 0.178 0.06 0.163 0.075 0.416 0.06 0.163 0.075 1.429 0.095", + "ph_num": "2 2 2 2 2 2 1 1", + "note_seq": "rest G3 E3 G3 A3 B3 A3 rest", + "note_dur": "0.381 0.238 0.238 0.238 0.476 0.238 1.429 0.095", + "note_slur": "0 0 0 0 0 0 0 0", + "f0_seq": "178.2 177.9 178.0 178.1 177.7 178.1 178.2 178.5 178.3 178.1 178.0 178.1 177.8 178.0 178.0 177.9 178.0 178.2 178.4 178.4 178.5 178.7 178.4 178.9 179.4 179.6 180.0 180.8 181.8 182.7 184.8 186.9 190.3 190.7 189.2 187.4 186.0 184.6 183.0 185.7 188.4 192.3 197.1 199.1 198.8 195.7 190.7 180.3 171.2 168.3 166.3 165.9 164.6 164.9 164.9 165.0 165.0 165.0 165.8 165.7 166.3 166.6 166.7 166.7 166.1 163.9 158.9 153.6 157.1 163.4 169.3 176.4 183.4 190.8 199.3 197.2 196.1 196.0 196.4 196.2 196.1 195.9 195.8 196.2 196.5 195.4 193.9 194.0 197.1 203.7 209.2 214.6 220.1 226.4 231.8 229.9 226.2 227.6 226.9 225.1 221.8 216.0 210.3 207.0 205.5 206.0 211.0 217.9 224.5 229.5 228.7 226.0 220.9 211.8 205.3 201.4 199.8 202.0 206.0 210.7 214.6 221.5 225.7 231.0 231.4 231.9 230.2 227.4 224.2 224.9 224.8 227.0 232.1 238.1 242.2 247.1 250.0 248.7 247.7 247.5 246.6 245.9 246.2 246.3 247.2 248.2 248.2 248.9 246.1 241.7 234.7 225.6 216.1 208.6 208.3 210.6 213.7 213.7 216.7 218.9 219.3 218.4 217.0 214.2 211.7 208.5 206.4 205.4 207.2 211.9 218.6 224.8 228.6 229.6 228.1 224.6 220.6 213.9 209.7 209.2 210.8 214.8 221.5 225.3 226.9 227.5 225.7 223.1 219.2 215.9 212.6 211.1 211.3 211.5 215.2 221.4 226.5 229.4 230.9 231.1 229.0 225.3 221.7 217.1 213.7 211.1 210.3 211.9 215.4 219.6 223.9 227.3 229.3 231.5 231.0 229.3 225.6 221.0 216.6 214.2 211.5 210.8 212.2 215.0 218.5 220.8 224.4 227.3 230.1 230.8 230.0 227.6 223.4 219.6 216.2 213.1 211.3 211.3 213.2 216.2 220.5 224.9 228.1 230.5 231.5 230.0 227.5 223.4 219.1 214.2 211.1 209.6 210.3 214.9 219.2 223.0 227.7 229.4 229.8 229.4 226.3 222.8 219.6 213.6 208.7 206.2 205.8 210.5 216.9 224.1 226.0 229.7 229.2 228.4 228.5 228.2 227.2 225.9 223.7 220.2 216.9 216.8 217.0 215.8 215.1 215.4 213.8 213.8", + "f0_timestep": "0.011609977324263039", + "energy": "0.0013 0.0021 0.0027 0.0033 0.0039 0.0049 0.0057 0.0068 0.007 0.0074 0.0068 0.0071 0.0079 0.0083 0.0085 0.0088 0.0084 0.0082 0.0075 0.0071 0.0058 0.005 0.0034 0.0017 0.0014 0.0021 0.0022 0.0041 0.008 0.0123 0.0158 0.0193 0.0428 0.0578 0.0693 0.0802 0.0775 0.0744 0.0716 0.0664 0.065 0.0677 0.0693 0.0729 0.0759 0.0711 0.0635 0.05 0.0312 0.0184 0.0118 0.0202 0.041 0.0545 0.0683 0.0797 0.0834 0.0858 0.0873 0.0864 0.0864 0.0851 0.0816 0.0767 0.0675 0.0565 0.0401 0.0248 0.0106 0.008 0.0095 0.0117 0.0159 0.0414 0.0614 0.079 0.0912 0.0925 0.0925 0.0882 0.085 0.0848 0.0812 0.0785 0.0749 0.0678 0.0576 0.0444 0.0333 0.0221 0.0219 0.0241 0.0215 0.0411 0.0604 0.0722 0.0854 0.0873 0.0839 0.0817 0.0771 0.0737 0.0698 0.0663 0.0616 0.059 0.0569 0.0547 0.0546 0.0531 0.0516 0.0513 0.0513 0.0508 0.0495 0.0468 0.0442 0.0425 0.0413 0.0392 0.0373 0.0356 0.0342 0.0338 0.0347 0.034 0.0331 0.0307 0.0289 0.0251 0.023 0.0294 0.0367 0.0481 0.0606 0.0729 0.0829 0.0887 0.0924 0.0939 0.0943 0.0943 0.0944 0.0927 0.0906 0.0899 0.0876 0.0859 0.0823 0.0786 0.0752 0.0697 0.0671 0.0658 0.0649 0.0681 0.0707 0.0767 0.0807 0.0833 0.0873 0.0871 0.0874 0.0853 0.0811 0.0763 0.0702 0.0667 0.0643 0.0632 0.0646 0.0686 0.0707 0.0754 0.0778 0.0785 0.0776 0.0743 0.068 0.061 0.0581 0.0563 0.0582 0.0612 0.0646 0.0665 0.0692 0.0707 0.0722 0.0734 0.0707 0.0671 0.0612 0.0558 0.0531 0.0537 0.0564 0.0591 0.0629 0.0661 0.0682 0.0694 0.0699 0.0667 0.0641 0.0599 0.0551 0.0522 0.0496 0.0488 0.0505 0.0521 0.0552 0.0593 0.0602 0.0622 
0.0626 0.0617 0.0611 0.0588 0.0573 0.0535 0.0502 0.0476 0.0466 0.0475 0.0481 0.05 0.0517 0.0537 0.0544 0.055 0.0551 0.0545 0.0529 0.0503 0.046 0.0427 0.0399 0.0372 0.038 0.038 0.0395 0.0415 0.0437 0.0453 0.0468 0.0469 0.0451 0.042 0.0379 0.0339 0.0304 0.0275 0.0265 0.0263 0.0262 0.0265 0.0279 0.0293 0.0311 0.0311 0.0306 0.0281 0.0248 0.023 0.0212 0.0188 0.0179 0.0176 0.0176 0.0187 0.0191 0.0192 0.0193 0.0183 0.0159 0.0144 0.0127 0.0112 0.0081 0.0055 0.003 0.0009 0.0007 0.0003 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0008 0.0017 0.0025 0.0034 0.0043 0.005 0.0058 0.0069 0.0072 0.0078 0.0084 0.008 0.0086 0.0088 0.0087 0.0093 0.0092 0.0092 0.009 0.0081 0.007 0.0055 0.0038 0.0021 0.0012 0.001 0.0017 0.0043 0.0087 0.0137 0.0165 0.0162 0.0148 0.0105 0.0046 0.0032 0.0026 0.0022 0.0021 0.0021 0.0022 0.002 0.002 0.0019 0.002 0.0023 0.0017 0.0016 0.0042 0.0083 0.0132 0.0279 0.0453 0.0468 0.0473 0.0426 0.0133 0.0028 0.002 0.0016 0.0016 0.0017 0.002 0.002 0.0017 0.0016 0.0023 0.003 0.0044 0.0077 0.0114 0.0133 0.0133 0.0119 0.0085 0.0044 0.0025 0.0023 0.002 0.0021 0.0025 0.0022 0.002 0.0018 0.0019 0.0027 0.0076 0.0137 0.0191 0.0233 0.0265 0.0282 0.027 0.0228 0.0172 0.0096 0.0036 0.0026 0.0024 0.0026 0.0027 0.003 0.0031 0.003 0.0025 0.0023 0.0018 0.0016 0.0015 0.0014 0.0014 0.0011 0.001 0.0009 0.0009 0.0009 0.0008 0.0006 0.0004 0.0006 0.0005 0.0004 0.0004 0.0005 0.0006 0.0007 0.0006 0.0006 0.0005 0.0006 0.0006 0.0006 0.0009 0.0014 0.0019 0.0021 0.0024 0.0024 0.0021 0.0021 0.0021 0.0018 0.0019 0.0017 0.0018 0.0016 0.0016 0.0016 0.0015 0.0015 0.0015 0.0015 0.0016 0.0015 0.0015 0.0017 0.0019 0.0018 0.0018 0.0018 0.0018 0.002 0.0021 0.0022 0.0019 0.0018 0.0017 0.0016 0.0016 0.0015 0.0015 0.0017 0.0015 0.0014 0.0014 0.0014 0.0016 0.0015 0.0015 0.0013 0.001 0.001 0.001 0.0012 0.0014 0.0012 0.0012 0.0012 0.0011 0.0013 0.0012 0.0013 0.0012 0.001 0.0012 0.0013 0.0013 0.001 0.0012 0.0011 0.0013 0.001 0.0012 0.0012 0.0011 0.0012 0.0009 0.0009 0.0008 0.0008 0.0008 0.0008 0.001 0.0009 0.0011 0.0008 0.0009 0.0007 0.001 0.0009 0.0005 0.0008 0.0007 0.0007 0.0005 0.0006 0.0007 0.0007 0.0007 0.0007 0.0007 0.0006 0.0008 0.0006 0.0009 0.0009 0.0007 0.0006 0.0005 0.0005 0.0006 0.0004 0.0004 0.0004 0.0005 0.0005 0.0005 0.0007 0.0006 0.0008 0.0007 0.0005 0.0005 0.0004 0.0003 0.0003 0.0003 0.0002 0.0004 0.0004 0.0002 0.0004 0.0003 0.0004 0.0004 0.0003 0.0003 0.0002 0.0002 0.0002 0.0003 0.0 0.0001 0.0001 0.0003 0.0001 0.0004 0.0004 0.0005 0.0014 0.0015 0.0013 0.0014 0.0008 0.0004 0.0002 0.0001 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 59.638, + "text": "SP AP 碗 底 便 是 天 涯 SP", + "ph_seq": "SP AP w an d i b ian sh ir t ian y a SP", + "ph_dur": "0.095 0.4 0.105 0.193 0.045 0.193 0.045 0.163 0.075 0.417 0.06 0.178 0.06 2.143 0.19", + "ph_num": "1 2 2 2 2 2 2 1 1", + "note_seq": "rest rest G3 E3 G3 A3 B3 B3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.238 0.477 0.238 2.143 0.19", + "note_slur": "0 0 0 0 0 0 0 0 0", + "f0_seq": "125.8 126.1 126.0 125.8 125.6 125.7 125.7 125.8 125.4 125.5 125.4 125.5 125.5 125.4 125.5 125.7 125.5 125.5 125.5 125.5 125.3 125.4 125.3 125.4 125.3 125.2 125.2 125.6 125.6 125.3 125.3 125.4 125.6 125.2 125.2 125.5 124.9 124.9 124.9 124.9 124.8 125.1 124.9 125.0 125.1 125.1 125.6 125.9 125.2 125.7 128.7 133.6 140.0 146.5 153.3 158.2 164.0 175.1 184.5 188.8 192.2 195.3 197.1 197.1 195.1 190.0 184.2 177.8 169.6 170.2 172.9 176.6 176.9 168.0 162.5 162.3 162.3 163.1 164.5 165.5 166.0 165.9 166.8 166.2 164.8 163.0 157.8 150.9 157.6 166.7 176.3 
185.4 190.8 195.0 196.0 197.0 197.3 197.5 196.2 195.7 195.6 194.7 194.7 195.4 193.6 191.0 186.6 191.4 198.4 205.6 213.7 221.6 230.0 235.5 238.4 233.2 232.8 231.4 226.6 220.4 213.5 207.4 204.8 203.7 206.7 213.9 223.3 230.8 235.9 236.7 233.9 226.9 217.5 211.4 208.5 207.2 211.4 219.3 228.3 234.5 237.3 244.2 250.6 256.0 260.3 260.1 261.7 264.7 267.9 270.0 272.7 275.9 280.3 285.1 287.1 277.9 270.4 261.1 256.4 254.4 252.9 251.5 251.2 250.5 249.4 249.8 248.5 248.3 247.7 246.2 244.8 241.9 237.6 233.1 227.4 225.8 224.6 224.5 224.4 224.7 225.6 225.7 225.3 226.7 227.1 228.2 227.9 228.1 228.7 228.7 228.8 228.9 228.5 228.6 227.2 226.4 226.5 225.6 225.3 229.1 237.9 247.7 252.9 253.4 252.8 251.2 248.2 245.5 243.5 243.4 243.7 245.6 247.0 248.9 249.8 249.5 249.7 250.5 250.0 248.7 247.3 245.8 245.0 245.1 246.1 247.3 249.3 249.4 249.2 248.3 247.6 247.5 247.6 247.6 246.7 246.9 247.0 247.1 248.1 248.7 248.5 248.2 248.4 248.3 248.2 248.0 248.6 248.9 248.0 248.3 248.0 247.6 248.3 248.1 247.9 247.7 248.0 248.4 248.0 247.0 247.3 246.3 246.8 246.1 246.5 246.2 245.7 246.2 246.8 247.5 247.7 247.9 247.3 247.2 247.1 247.4 247.7 247.6 247.3 247.1 246.7 247.2 247.2 246.4 247.1 247.4 248.0 247.4 248.1 247.5 247.8 247.9 248.1 248.0 247.4 247.4 246.7 246.4 246.2 245.7 246.0 246.8 247.8 247.7 247.4 246.5 247.1 247.0 246.4 246.1 246.0 246.0 246.0 246.6 246.3 246.7 246.3 246.1 247.2 247.1 246.9 247.4 247.4 246.7 246.9 246.2 246.5 247.2 248.6 251.4 253.2 255.6 254.8 252.7 251.9 250.9 248.7 246.3 247.0 247.6 248.4 249.4 251.0 252.1 252.9 254.2 253.5 253.4 252.1 251.4 249.1 246.8 245.2 246.1 247.9 248.4 248.6 249.9 250.1 250.0 252.6 252.5 252.1 251.8 251.2 250.0 248.8 248.3 247.9 246.5 245.8 244.2 243.8 242.7 243.4 241.1", + "f0_timestep": "0.011609977324263039", + "energy": "0.0 0.0004 0.0008 0.0004 0.0001 0.0001 0.0006 0.0007 0.0005 0.0008 0.0008 0.0005 0.0007 0.0007 0.001 0.0009 0.0012 0.0014 0.0011 0.0017 0.0014 0.0016 0.0016 0.0012 0.0017 0.0022 0.0017 0.0019 0.002 0.0015 0.0019 0.0025 0.0019 0.002 0.002 0.002 0.0017 0.0014 0.0015 0.001 0.001 0.0004 0.0001 0.0001 0.0 0.0013 0.002 0.0103 0.0212 0.0289 0.0386 0.0468 0.0496 0.0532 0.0542 0.0529 0.0501 0.0473 0.0445 0.0456 0.0495 0.0526 0.0557 0.0578 0.0571 0.0525 0.0443 0.0329 0.0181 0.01 0.0258 0.0434 0.0521 0.0591 0.0611 0.0591 0.0596 0.0591 0.061 0.0611 0.0617 0.0635 0.0633 0.062 0.0571 0.049 0.0374 0.0241 0.0105 0.0089 0.0216 0.0421 0.0558 0.0663 0.0714 0.0701 0.0695 0.0687 0.0675 0.0672 0.0633 0.0608 0.0586 0.0544 0.0513 0.0449 0.035 0.0265 0.0197 0.0195 0.0226 0.022 0.0239 0.0464 0.061 0.0749 0.0841 0.0831 0.0805 0.0753 0.0711 0.0652 0.0602 0.0546 0.0527 0.0509 0.049 0.049 0.0464 0.0447 0.0421 0.0408 0.0373 0.0347 0.0312 0.0303 0.0297 0.0292 0.0297 0.0305 0.0337 0.0366 0.0361 0.0317 0.0238 0.0143 0.0045 0.0009 0.0011 0.0048 0.0071 0.0087 0.009 0.0141 0.0459 0.067 0.0795 0.089 0.0885 0.0858 0.0842 0.0842 0.0838 0.0819 0.0819 0.0825 0.0854 0.0871 0.09 0.0913 0.0924 0.0957 0.0982 0.0999 0.0981 0.0948 0.0922 0.0926 0.0976 0.1021 0.1035 0.1043 0.1058 0.1061 0.1088 0.1101 0.111 0.1092 0.1111 0.1108 0.1097 0.1124 0.1108 0.109 0.1024 0.0918 0.0784 0.067 0.0591 0.056 0.0591 0.066 0.0724 0.0797 0.0818 0.0818 0.0807 0.0781 0.0781 0.0771 0.0777 0.0783 0.0801 0.0815 0.0827 0.0845 0.0834 0.0845 0.0833 0.0835 0.0828 0.0816 0.0814 0.0787 0.0798 0.0795 0.0805 0.0816 0.0821 0.0799 0.0797 0.0794 0.079 0.0797 0.0809 0.0816 0.0822 0.0835 0.0823 0.0832 0.0822 0.0822 0.0824 0.0818 0.082 0.0812 0.082 0.0822 0.083 0.0839 0.0834 0.0834 0.085 0.0855 0.0855 0.0862 0.0861 0.086 0.0868 0.0863 0.0853 
0.0851 0.085 0.0849 0.0855 0.0854 0.0852 0.0852 0.0852 0.0845 0.085 0.0844 0.0837 0.0838 0.0837 0.0844 0.0838 0.0837 0.0853 0.085 0.0847 0.0855 0.0859 0.0857 0.0864 0.0863 0.0855 0.0867 0.0867 0.0873 0.0872 0.086 0.0861 0.0846 0.084 0.0835 0.0835 0.0834 0.0825 0.0835 0.0825 0.0834 0.0831 0.083 0.083 0.0817 0.0816 0.0806 0.0805 0.0793 0.0789 0.0777 0.0775 0.0769 0.0771 0.0763 0.0753 0.074 0.0724 0.0717 0.0708 0.0701 0.0686 0.0675 0.066 0.0646 0.0642 0.0624 0.0617 0.0622 0.0612 0.0618 0.0607 0.0606 0.0594 0.058 0.0557 0.0522 0.0491 0.0465 0.0443 0.0429 0.0424 0.0415 0.0403 0.0392 0.0383 0.0384 0.0379 0.0374 0.0362 0.0345 0.0334 0.0327 0.0308 0.0297 0.0278 0.0264 0.0241 0.0212 0.0173 0.0129 0.009 0.005 0.0026 0.0019 0.0016 0.0012 0.0012 0.001 0.0004 0.0004 0.0004 0.0 0.0001 0.0003", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0004 0.0 0.0002 0.0 0.0001 0.0002 0.0001 0.0002 0.0002 0.0005 0.0007 0.0009 0.0009 0.001 0.001 0.0012 0.0014 0.0013 0.0014 0.0014 0.0018 0.0018 0.0016 0.0018 0.0019 0.0019 0.002 0.0018 0.0018 0.0018 0.002 0.0021 0.0022 0.0023 0.0025 0.0023 0.0021 0.0019 0.0018 0.0014 0.0013 0.0011 0.0009 0.0008 0.0008 0.0009 0.0012 0.0015 0.0018 0.0017 0.0011 0.001 0.0015 0.0018 0.002 0.0016 0.0014 0.0013 0.001 0.0004 0.0002 0.0003 0.0003 0.0005 0.0005 0.0003 0.0004 0.0009 0.0035 0.0055 0.0064 0.0064 0.0059 0.0037 0.002 0.0017 0.0014 0.0012 0.0014 0.0016 0.0014 0.0015 0.0014 0.0013 0.0012 0.001 0.0007 0.0005 0.0007 0.0007 0.0009 0.0011 0.0011 0.0012 0.0013 0.0015 0.0013 0.001 0.0009 0.0009 0.0009 0.0009 0.0008 0.0008 0.0011 0.0031 0.0081 0.012 0.0169 0.0232 0.0276 0.0295 0.0288 0.0235 0.016 0.0084 0.0024 0.0018 0.0016 0.0017 0.0015 0.0014 0.0013 0.0012 0.001 0.001 0.001 0.0008 0.0007 0.0006 0.0007 0.0006 0.0006 0.0006 0.0006 0.0005 0.0002 0.0002 0.0002 0.0 0.0001 0.0002 0.0003 0.0008 0.0015 0.0017 0.0018 0.0014 0.0019 0.0051 0.0079 0.0084 0.0086 0.0066 0.0037 0.0027 0.0022 0.0024 0.0022 0.0021 0.0021 0.0014 0.0012 0.0014 0.001 0.0009 0.0008 0.0009 0.0009 0.001 0.0014 0.0016 0.0017 0.0021 0.0025 0.0026 0.0027 0.0025 0.0023 0.0022 0.0024 0.0021 0.0021 0.0023 0.0022 0.002 0.0018 0.002 0.002 0.0021 0.0024 0.0024 0.0021 0.0022 0.002 0.0023 0.0022 0.0025 0.0024 0.0024 0.0024 0.0024 0.0023 0.002 0.0021 0.0019 0.0017 0.0015 0.0015 0.0014 0.0014 0.0017 0.0019 0.0019 0.0019 0.0019 0.0018 0.002 0.0021 0.0022 0.0024 0.0022 0.0022 0.0022 0.0022 0.0021 0.0021 0.002 0.0019 0.0019 0.0019 0.002 0.0019 0.0022 0.0024 0.0023 0.0024 0.0022 0.0023 0.0021 0.0021 0.0018 0.002 0.0021 0.0022 0.0024 0.0024 0.0026 0.0023 0.0022 0.0023 0.0023 0.0023 0.0022 0.0023 0.0021 0.0024 0.0024 0.0023 0.0023 0.002 0.0023 0.0024 0.0023 0.0023 0.0023 0.0019 0.002 0.0023 0.0021 0.0023 0.0022 0.0023 0.0021 0.0024 0.0022 0.0024 0.0022 0.0023 0.0022 0.0023 0.0024 0.0024 0.0025 0.0024 0.0025 0.0023 0.0025 0.0026 0.0027 0.0026 0.0026 0.0022 0.0024 0.0023 0.0022 0.0022 0.0022 0.0023 0.0024 0.0023 0.0021 0.0022 0.0023 0.0023 0.0021 0.0023 0.0023 0.0022 0.0019 0.002 0.0021 0.0025 0.0024 0.0023 0.0022 0.0019 0.002 0.002 0.0017 0.0017 0.0016 0.0015 0.0015 0.0014 0.0017 0.0015 0.0018 0.0017 0.0016 0.0016 0.0014 0.0012 0.0008 0.0011 0.0011 0.001 0.001 0.0009 0.0009 0.001 0.001 0.001 0.0008 0.0007 0.0006 0.0005 0.0003 0.0003 0.0004 0.0003 0.0003 0.0003 0.0004 0.0004 0.0002 0.0002 0.0002 0.0004 0.0003 0.0005 0.0012 0.0014 0.0017 0.0015 0.0009 0.001 0.001 0.0009 0.0008 0.0006 0.0004 0.0002 0.0004 0.0006 0.0003", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 64.638, + "text": "SP AP 天 涯 远 无 处 不 为 家 
SP", + "ph_seq": "SP AP t ian y a y van w u ch u b u w ei j ia SP", + "ph_dur": "0.125 0.4 0.075 0.178 0.06 0.163 0.075 0.311 0.165 0.164 0.075 0.193 0.045 0.416 0.06 0.386 0.09 0.476 0.285", + "ph_num": "1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 G3 E3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.239 0.238 0.476 0.476 0.476 0.285", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "147.2 147.0 147.2 147.4 147.3 147.2 147.0 147.8 147.6 147.6 147.4 147.6 148.1 147.9 147.9 147.9 147.9 148.0 147.8 147.8 147.9 148.1 148.1 148.2 147.9 148.0 148.1 148.2 148.1 148.4 148.2 148.1 148.6 148.2 148.3 148.5 148.4 148.5 148.2 148.3 148.5 148.6 148.6 148.3 148.7 148.5 148.6 148.7 148.6 148.6 148.7 148.6 147.2 146.8 151.4 160.4 173.3 188.0 206.0 220.7 235.0 244.4 248.3 247.6 243.2 238.0 232.6 228.7 225.2 222.3 220.2 219.1 217.7 216.7 218.1 219.7 220.7 220.7 220.9 221.6 220.6 219.0 219.1 218.8 218.7 219.8 219.8 219.5 218.6 217.6 218.8 226.0 242.6 251.3 255.9 260.3 261.5 259.9 255.6 248.3 242.7 238.4 237.0 237.8 238.8 242.2 246.5 250.5 253.5 253.7 252.5 250.9 249.0 246.4 244.4 242.4 241.7 242.7 245.9 248.1 248.9 248.4 248.8 249.2 249.5 250.0 248.8 247.0 245.0 245.0 245.6 246.4 246.3 245.7 244.7 244.3 245.8 246.4 246.5 245.5 246.0 246.4 246.8 247.0 245.6 242.6 237.7 232.9 232.3 232.9 233.4 233.2 233.3 233.2 234.5 227.9 223.4 222.5 220.6 220.3 221.1 220.7 220.8 220.1 219.7 215.5 203.3 203.4 203.8 207.2 210.6 217.5 221.5 227.3 221.9 223.2 224.8 224.7 224.8 224.8 223.5 223.8 224.8 224.0 222.1 221.1 220.1 219.7 219.4 225.7 234.5 240.5 245.0 247.6 249.8 250.2 248.2 246.4 244.6 243.3 243.7 244.8 245.9 246.5 246.7 246.5 246.2 243.4 238.8 232.1 223.5 211.6 203.5 196.7 193.9 194.0 194.4 196.5 196.6 196.9 197.9 198.8 198.2 198.0 195.9 193.9 194.2 193.9 193.1 194.3 195.5 196.7 198.0 199.1 199.0 199.2 199.4 200.0 199.7 199.8 199.4 199.8 201.2 202.2 202.5 202.7 201.1 195.0 182.0 179.7 176.9 174.7 172.3 169.5 167.2 165.0 163.2 161.7 163.7 166.6 168.7 170.3 170.9 170.5 167.2 161.7 156.5 151.8 150.0 152.6 157.0 163.5 171.8 179.0 182.7 183.8 182.7 179.3 173.9 167.4 160.0 153.1 150.6 151.8 153.7 158.2 162.8 167.8 173.0 176.0 177.9 178.3 176.2 170.8 164.1 160.0 161.0 163.9 167.4 171.6 172.2 172.2 172.5 172.4 171.9 171.5 171.0 171.6 171.5 170.1 169.8 170.2 170.7 170.8 170.7 171.0 170.8 170.1 169.8 169.6 169.9 169.0 170.4", + "f0_timestep": "0.011609977324263039", + "energy": "0.0 0.0 0.0001 0.0002 0.0002 0.0 0.0001 0.0003 0.0001 0.0005 0.0009 0.0005 0.0008 0.0006 0.0012 0.001 0.0017 0.0018 0.0018 0.002 0.0025 0.0026 0.0027 0.0032 0.0031 0.0031 0.0036 0.0033 0.0032 0.0037 0.0033 0.0032 0.0029 0.0027 0.002 0.0028 0.0026 0.0024 0.0018 0.0015 0.0013 0.0014 0.0012 0.0013 0.0009 0.0014 0.0027 0.0056 0.0083 0.0122 0.0238 0.0382 0.0494 0.0588 0.0659 0.0692 0.0696 0.0689 0.0659 0.0655 0.0681 0.0723 0.0788 0.0809 0.0817 0.0815 0.0803 0.0793 0.0791 0.0778 0.0766 0.0767 0.0749 0.075 0.0763 0.0772 0.0782 0.0811 0.0811 0.0804 0.082 0.079 0.0785 0.0769 0.0721 0.07 0.0662 0.0641 0.0624 0.0586 0.055 0.0581 0.073 0.0831 0.0924 0.0963 0.0935 0.09 0.0835 0.0742 0.0655 0.0594 0.0574 0.0566 0.0568 0.058 0.06 0.0624 0.0655 0.0674 0.0694 0.0715 0.0715 0.0723 0.0714 0.0717 0.0729 0.0731 0.0768 0.078 0.0789 0.0836 0.0835 0.0863 0.0907 0.0925 0.0938 0.0953 0.0947 0.0917 0.0898 0.0863 0.0861 0.0845 0.0843 0.0826 0.079 0.0766 0.0744 0.0723 0.0731 0.0739 0.0757 0.077 0.0726 0.0643 0.0506 0.0331 0.0191 0.0153 0.0192 0.0206 0.0205 0.0228 0.0431 0.0571 0.0685 0.0758 0.0761 0.075 0.0747 0.0755 0.0737 0.0706 0.0615 0.0496 0.036 
0.0206 0.0119 0.0078 0.0067 0.0077 0.0196 0.0409 0.0574 0.0727 0.0812 0.0848 0.0846 0.0818 0.083 0.0813 0.0792 0.0798 0.0768 0.0749 0.0702 0.0627 0.0574 0.0543 0.0561 0.058 0.0609 0.0616 0.0627 0.0656 0.0676 0.069 0.0685 0.0672 0.0656 0.065 0.0643 0.0645 0.0632 0.0616 0.0607 0.0611 0.0631 0.0632 0.0637 0.0651 0.0639 0.0662 0.069 0.0717 0.0754 0.0772 0.0789 0.0789 0.0767 0.0771 0.0752 0.073 0.071 0.0673 0.0669 0.0669 0.0664 0.0682 0.0686 0.0696 0.0709 0.0723 0.0718 0.0736 0.0758 0.0741 0.0755 0.0742 0.0717 0.072 0.0684 0.0667 0.0658 0.062 0.0546 0.0436 0.0285 0.0139 0.0085 0.0093 0.0101 0.0102 0.009 0.0193 0.0393 0.0536 0.063 0.0695 0.0687 0.0661 0.0651 0.0629 0.0605 0.0595 0.0554 0.0509 0.0486 0.0447 0.0438 0.0439 0.0439 0.045 0.0445 0.0445 0.0435 0.0426 0.0414 0.0386 0.0361 0.0329 0.0293 0.027 0.0246 0.0237 0.0219 0.0216 0.0204 0.0196 0.0191 0.0178 0.0177 0.0171 0.0161 0.015 0.0137 0.0125 0.0107 0.0079 0.0057 0.0035 0.0016 0.0005 0.0006 0.0006 0.0005 0.0004 0.0009 0.0004 0.0008 0.0005 0.0002 0.0004 0.0002 0.0001 0.0005 0.0 0.0 0.0003 0.0002 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0 0.0003 0.0001 0.0 0.0002 0.0 0.0 0.0001 0.0002 0.0007 0.0004 0.0006 0.0008 0.0009 0.0012 0.0017 0.0015 0.0015 0.0017 0.0025 0.0028 0.0029 0.0032 0.0034 0.0036 0.0037 0.004 0.004 0.0039 0.0036 0.0032 0.0031 0.0028 0.0026 0.0026 0.0024 0.0021 0.0017 0.0017 0.0012 0.001 0.0008 0.0004 0.0006 0.0006 0.0014 0.0028 0.0056 0.0079 0.0092 0.0099 0.0089 0.0068 0.0045 0.0028 0.002 0.002 0.002 0.0021 0.0019 0.0012 0.0009 0.0007 0.0004 0.0008 0.0005 0.0006 0.0006 0.0007 0.0008 0.0009 0.001 0.0013 0.0016 0.0019 0.0019 0.0022 0.0022 0.0024 0.0024 0.0024 0.0024 0.0025 0.002 0.002 0.0018 0.0017 0.0018 0.0018 0.002 0.0023 0.0024 0.0026 0.0028 0.0022 0.0024 0.0022 0.0023 0.0024 0.0023 0.0016 0.0018 0.0017 0.0015 0.0012 0.0008 0.0007 0.0003 0.0005 0.0005 0.0003 0.0004 0.0005 0.0006 0.0006 0.0005 0.0006 0.0005 0.0006 0.0008 0.0008 0.0007 0.0008 0.0006 0.0006 0.0006 0.0005 0.0006 0.0006 0.0004 0.0002 0.0005 0.0003 0.0005 0.0004 0.0005 0.0004 0.0004 0.0008 0.0007 0.0006 0.0004 0.0004 0.0005 0.0005 0.0009 0.002 0.004 0.0075 0.0136 0.0214 0.0234 0.0231 0.0207 0.0123 0.006 0.002 0.0013 0.001 0.0009 0.0008 0.0009 0.0005 0.0004 0.0004 0.0004 0.0004 0.0007 0.0004 0.0004 0.0003 0.0003 0.0004 0.0009 0.001 0.0008 0.0007 0.0007 0.0006 0.0007 0.0007 0.0006 0.0007 0.0008 0.0007 0.0008 0.0006 0.0007 0.0007 0.0005 0.0008 0.0006 0.0007 0.0008 0.0007 0.0007 0.0008 0.0013 0.0007 0.0007 0.0006 0.0006 0.0005 0.0005 0.0004 0.0004 0.0003 0.0001 0.0004 0.0004 0.0005 0.0005 0.0004 0.0007 0.0006 0.0006 0.0007 0.0006 0.0007 0.0007 0.0008 0.0011 0.0011 0.001 0.0012 0.001 0.0012 0.0011 0.001 0.0009 0.001 0.0008 0.001 0.001 0.0009 0.0013 0.0015 0.0014 0.0014 0.0012 0.0014 0.0014 0.0012 0.0012 0.0011 0.0011 0.0013 0.0029 0.0041 0.0058 0.0075 0.0094 0.0109 0.011 0.0106 0.0093 0.0068 0.0052 0.0043 0.0029 0.0025 0.0024 0.0025 0.0027 0.0026 0.0028 0.0026 0.0026 0.0024 0.0023 0.0022 0.0024 0.0022 0.0021 0.0018 0.002 0.002 0.0021 0.002 0.002 0.0019 0.0013 0.0011 0.0009 0.0008 0.0008 0.0008 0.0008 0.0007 0.0005 0.0004 0.0006 0.0007 0.0006 0.0005 0.0005 0.0005 0.0003 0.0005 0.0011 0.0014 0.0013 0.0013 0.0007 0.0006 0.0005 0.0003 0.0003 0.0001 0.0001 0.0003 0.0 0.0001 0.0002 0.0001 0.0001 0.0 0.0001 0.0002 0.0001 0.0002 0.0001 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 68.191, + "text": "AP 蓬 门 自 我 也 像 广 厦 SP", + "ph_seq": "AP p eng m en z i0 w o y E x iang g uang sh a SP", + "ph_dur": "0.32 0.06 0.179 
0.06 0.178 0.06 0.163 0.075 0.163 0.075 0.163 0.075 0.178 0.06 0.341 0.135 1.429 0.095", + "ph_num": "2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest E3 E3 F#3 G3 F#3 E3 D3 E3 rest", + "note_dur": "0.38 0.239 0.238 0.238 0.238 0.238 0.238 0.476 1.429 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0", + "f0_seq": "133.9 134.2 134.3 134.3 134.3 134.2 134.1 134.2 134.3 134.4 134.3 134.2 133.9 134.2 134.1 134.4 134.0 133.7 133.7 133.7 133.6 133.4 133.7 133.7 133.5 133.4 134.0 134.0 133.8 133.9 133.9 134.4 134.4 133.2 132.5 133.1 134.4 137.5 141.5 147.2 151.3 155.2 157.7 160.1 163.0 164.7 165.7 165.1 164.1 162.3 164.0 163.4 162.4 161.3 160.5 160.4 160.1 161.3 162.1 162.7 163.5 163.6 164.0 165.5 165.5 165.1 161.8 156.5 151.4 158.1 165.5 172.1 180.6 189.5 187.6 186.3 185.4 185.6 185.6 185.5 185.3 184.9 185.3 184.9 185.7 185.3 184.7 183.8 182.1 182.1 182.9 184.8 187.9 188.9 188.8 191.2 194.7 196.0 196.5 196.4 195.8 194.3 193.4 193.5 194.2 194.1 194.2 194.5 194.5 195.3 195.9 194.8 194.3 193.1 189.4 185.3 182.1 181.1 181.0 182.3 183.1 183.5 184.8 186.1 186.8 186.0 184.1 179.3 173.1 169.2 169.3 169.3 170.1 170.0 170.5 171.6 168.0 165.8 163.6 163.2 163.7 163.3 163.3 164.1 165.5 168.1 169.1 169.1 167.2 163.9 160.8 155.3 150.1 145.4 145.9 148.2 146.3 145.7 144.5 142.5 142.8 143.0 144.1 145.3 145.5 145.8 146.1 146.3 146.0 145.4 145.1 145.0 145.3 145.7 146.2 146.2 146.5 146.2 146.8 147.1 147.7 148.2 148.6 148.5 146.9 144.9 145.2 147.0 149.5 151.8 154.3 156.5 159.2 161.1 164.3 167.2 170.6 172.2 168.1 166.7 165.9 164.8 163.4 162.1 161.7 161.6 161.8 161.5 161.9 161.4 163.6 164.5 164.4 164.8 166.3 166.9 166.3 165.8 165.1 165.2 165.2 164.7 163.4 161.9 161.0 161.0 160.7 159.1 159.5 160.7 162.8 166.2 168.9 171.4 172.9 173.1 172.7 169.8 167.4 164.7 160.8 157.9 156.5 157.1 159.9 164.1 168.1 170.9 173.1 174.6 173.8 171.4 168.2 164.9 159.5 156.2 155.6 155.9 159.3 162.9 167.1 171.2 173.7 175.5 175.2 171.2 167.5 163.8 159.1 156.2 155.3 156.8 159.8 164.5 168.7 171.7 174.1 174.5 172.8 169.0 165.4 160.5 156.4 154.6 155.1 158.1 163.0 166.5 169.9 171.8 173.4 171.7 167.8 162.8 159.3 156.8 155.5 154.7 156.6 161.1 165.7 169.7 172.6 174.6 173.0 169.1 164.0 156.9 154.1 150.3 149.1 151.7 154.6 160.1 169.4 173.7 177.1 178.3 174.9 168.6 160.7 153.1 151.8 151.6 152.4 153.0 153.5 152.9 154.1", + "f0_timestep": "0.011609977324263039", + "energy": "0.0014 0.0017 0.0027 0.003 0.0037 0.0043 0.0048 0.0066 0.0073 0.0068 0.0071 0.0073 0.0081 0.0079 0.0073 0.0075 0.0078 0.0073 0.0069 0.006 0.0049 0.0039 0.0034 0.004 0.0035 0.0023 0.0029 0.0057 0.0147 0.0198 0.0218 0.0331 0.0453 0.056 0.0651 0.0677 0.0672 0.0668 0.0643 0.0629 0.0624 0.0616 0.0633 0.0638 0.0642 0.0656 0.0664 0.0658 0.0652 0.0645 0.0635 0.0643 0.0664 0.0693 0.0717 0.0732 0.0724 0.0706 0.0691 0.0678 0.0676 0.0671 0.0676 0.0691 0.0694 0.0659 0.0567 0.0477 0.0329 0.0266 0.027 0.0259 0.0375 0.0483 0.061 0.0719 0.0769 0.0778 0.077 0.0766 0.0752 0.0751 0.0751 0.0746 0.0739 0.074 0.0715 0.0692 0.0682 0.067 0.069 0.0701 0.071 0.0714 0.0711 0.071 0.0717 0.0713 0.072 0.0722 0.0718 0.0721 0.0715 0.0728 0.0721 0.0721 0.0727 0.071 0.0703 0.0695 0.0668 0.0673 0.0645 0.0616 0.0634 0.063 0.0671 0.0711 0.0734 0.0752 0.0752 0.074 0.0717 0.0697 0.0679 0.0653 0.0598 0.0493 0.039 0.0262 0.0153 0.0123 0.0125 0.0107 0.0219 0.0402 0.0492 0.0586 0.0615 0.0601 0.0593 0.0564 0.056 0.053 0.053 0.0541 0.0536 0.054 0.0546 0.0561 0.0551 0.0527 0.0491 0.0434 0.0488 0.0547 0.0635 0.071 0.0696 0.0716 0.0682 0.067 0.0706 0.0692 0.0704 0.0706 0.0697 0.0685 0.0674 0.0662 0.0634 0.0637 0.0611 0.0614 0.0604 0.0596 0.0616 
0.0618 0.0626 0.0635 0.0646 0.0644 0.064 0.0604 0.0531 0.0427 0.0306 0.0222 0.0189 0.021 0.0222 0.021 0.0181 0.0148 0.0089 0.0224 0.039 0.0498 0.06 0.0645 0.0643 0.0652 0.0657 0.0664 0.0676 0.0679 0.0666 0.0668 0.0641 0.0637 0.0645 0.0643 0.0663 0.068 0.0698 0.0701 0.0704 0.0697 0.069 0.0702 0.0715 0.0719 0.0726 0.0722 0.0707 0.0705 0.0698 0.0689 0.0689 0.0677 0.0667 0.0667 0.0667 0.0674 0.0675 0.0665 0.0664 0.0657 0.0647 0.0636 0.0602 0.0566 0.0527 0.0496 0.0472 0.0478 0.0482 0.0486 0.0497 0.05 0.0511 0.0522 0.0508 0.0481 0.0454 0.0426 0.0398 0.0376 0.0362 0.0371 0.0378 0.039 0.0412 0.0433 0.0448 0.0453 0.0447 0.0419 0.0382 0.035 0.0321 0.0307 0.0296 0.031 0.0315 0.0343 0.0365 0.0381 0.0399 0.0401 0.0373 0.0347 0.0309 0.0272 0.0253 0.0255 0.0265 0.0274 0.0294 0.0316 0.0332 0.0344 0.0343 0.0322 0.0292 0.0261 0.0228 0.02 0.0187 0.0183 0.0186 0.0195 0.0217 0.0226 0.0236 0.0229 0.0213 0.0193 0.0171 0.0153 0.0134 0.0114 0.0113 0.0114 0.0121 0.0132 0.0136 0.014 0.0138 0.0129 0.0101 0.0073 0.0044 0.0015 0.0006 0.0003 0.0006 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0007 0.0019 0.0027 0.0032 0.0037 0.0042 0.0052 0.0065 0.0073 0.0077 0.0075 0.0074 0.0073 0.0071 0.0075 0.0074 0.0074 0.0076 0.007 0.0062 0.0049 0.0036 0.0029 0.0023 0.0021 0.0019 0.0021 0.0043 0.0059 0.0073 0.0076 0.007 0.006 0.0046 0.0034 0.0026 0.0023 0.0019 0.0016 0.0011 0.0008 0.0008 0.0009 0.0007 0.0007 0.0005 0.0005 0.0003 0.0004 0.0006 0.0003 0.0005 0.0005 0.0011 0.0012 0.0014 0.0015 0.0013 0.0014 0.0013 0.0011 0.001 0.0006 0.0005 0.0002 0.0004 0.0005 0.004 0.0102 0.0167 0.0234 0.0247 0.0231 0.019 0.0082 0.0026 0.0015 0.0013 0.0013 0.0012 0.0013 0.0014 0.0013 0.0013 0.0013 0.0012 0.0012 0.0012 0.0011 0.0011 0.0011 0.0008 0.0009 0.0009 0.0012 0.0007 0.001 0.001 0.0012 0.0013 0.0013 0.0014 0.0013 0.0013 0.0013 0.0015 0.0013 0.0014 0.0014 0.0015 0.0016 0.0017 0.0016 0.0018 0.0018 0.0019 0.0018 0.0019 0.0018 0.002 0.002 0.0022 0.0025 0.0027 0.0025 0.0022 0.0023 0.0044 0.0088 0.011 0.0123 0.0131 0.0123 0.0116 0.0101 0.0076 0.0051 0.0028 0.0022 0.0017 0.0017 0.0014 0.0012 0.0011 0.0007 0.0005 0.0005 0.0006 0.0005 0.0004 0.0004 0.0004 0.0003 0.0002 0.0003 0.0005 0.0006 0.0006 0.0004 0.0008 0.0008 0.0009 0.0009 0.0009 0.0007 0.0008 0.0005 0.0004 0.0004 0.0006 0.0003 0.0004 0.0005 0.0006 0.0007 0.0006 0.0005 0.0003 0.0004 0.0004 0.0003 0.0005 0.0005 0.0007 0.0017 0.0066 0.0113 0.0143 0.0175 0.0202 0.0205 0.021 0.0194 0.0161 0.0115 0.0082 0.0044 0.0022 0.0015 0.0016 0.0019 0.0018 0.0019 0.0019 0.0018 0.0017 0.0019 0.0019 0.002 0.002 0.0018 0.0018 0.0017 0.002 0.0018 0.002 0.0022 0.002 0.0019 0.0018 0.0022 0.0022 0.0024 0.0023 0.002 0.0019 0.0018 0.0018 0.0017 0.0018 0.0019 0.002 0.0022 0.0023 0.0024 0.0025 0.0022 0.0023 0.0023 0.0023 0.0021 0.0019 0.0017 0.0017 0.0017 0.0017 0.0015 0.0018 0.0016 0.0012 0.0015 0.0015 0.0015 0.0017 0.0018 0.0016 0.0017 0.0013 0.0013 0.0014 0.0012 0.0012 0.0012 0.0013 0.0012 0.0011 0.0012 0.0014 0.0013 0.0012 0.0009 0.0011 0.0011 0.0008 0.001 0.0007 0.0009 0.0009 0.0009 0.0007 0.001 0.0009 0.001 0.0007 0.0005 0.0004 0.0005 0.0006 0.0006 0.0006 0.0007 0.0009 0.0009 0.0011 0.0009 0.0008 0.0005 0.0006 0.0005 0.0004 0.0004 0.0002 0.0005 0.0006 0.0007 0.0008 0.0006 0.0006 0.0007 0.0005 0.0002 0.0002 0.0002 0.0002 0.0001 0.0001 0.0003 0.0003 0.0002 0.0002 0.0002 0.0004 0.0005 0.0003 0.0004 0.0001 0.0 0.0 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 72.257, + "text": "SP AP 论 意 气 不 计 多 或 寡 AP 占 三 分 便 敢 自 称 为 侠 SP", + "ph_seq": "SP AP l un y i q i b u 
j i d uo h uo g ua AP zh an s an f en b ian g an z i0 ch eng w ei x ia SP", + "ph_dur": "0.11 0.4 0.09 0.178 0.06 0.163 0.075 0.432 0.045 0.178 0.06 0.193 0.045 0.386 0.09 0.416 0.06 0.476 0.163 0.075 0.178 0.06 0.194 0.045 0.178 0.06 0.193 0.045 0.193 0.045 0.163 0.075 0.178 0.06 0.163 0.075 1.191 0.19", + "ph_num": "1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 D4 B3 B3 rest B3 A3 G3 G3 E3 G3 A3 G3 B3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.477 0.238 0.238 0.476 0.238 0.238 0.476 0.238 0.238 0.239 0.238 0.238 0.238 0.238 0.238 0.238 1.191 0.19", + "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "134.4 134.5 133.9 133.8 133.9 133.9 134.1 133.8 134.0 133.8 133.8 134.0 134.0 134.3 134.2 134.1 133.5 133.8 133.9 133.8 133.9 134.1 133.9 134.0 133.9 133.7 133.7 133.8 133.8 133.6 133.8 133.6 133.6 133.6 133.6 133.5 133.7 133.8 133.8 133.9 133.4 133.5 133.5 133.5 133.4 133.3 132.6 132.2 131.6 133.3 140.1 147.8 156.9 166.8 177.7 188.1 199.9 215.4 230.2 240.4 247.0 250.3 250.6 249.5 245.4 240.4 236.1 229.7 226.4 225.7 226.0 226.4 225.5 223.5 220.5 221.2 220.6 219.9 219.9 220.0 221.4 221.7 222.3 221.0 217.4 210.0 201.2 208.7 216.8 224.7 232.8 238.9 239.4 231.2 224.7 224.9 225.0 225.7 225.9 225.7 223.7 222.0 222.2 223.5 228.0 236.0 244.3 249.7 253.1 253.7 253.0 250.0 246.4 243.8 242.4 244.2 247.8 250.3 250.9 250.6 250.3 249.8 248.9 246.5 241.6 234.9 227.6 229.1 237.7 246.7 256.5 267.6 276.0 267.2 261.8 262.4 259.4 256.0 251.3 250.2 247.3 248.1 245.8 245.9 246.1 244.2 244.4 243.8 237.8 226.7 218.2 219.9 221.1 223.3 226.4 220.9 219.9 219.7 219.0 217.5 216.6 216.8 218.1 220.4 221.7 222.1 220.9 213.1 199.6 193.0 200.2 207.5 214.1 223.3 225.3 222.4 221.8 222.5 223.3 223.4 222.8 222.7 221.4 220.1 218.7 217.5 216.5 218.3 224.1 231.8 240.7 246.5 247.7 247.4 246.7 245.0 243.9 242.8 242.2 239.3 240.4 242.6 245.5 248.1 250.5 250.7 250.0 249.4 246.6 247.2 244.9 262.4 282.8 303.4 324.6 335.8 324.5 316.7 308.8 304.4 299.0 296.6 294.0 294.1 293.3 291.8 290.8 289.8 291.1 292.2 294.5 295.0 292.0 286.9 277.2 262.6 255.4 249.1 248.5 248.8 249.4 251.3 252.3 251.9 250.6 250.8 247.8 244.5 241.7 242.2 241.6 243.5 245.1 249.1 252.3 255.3 258.0 257.0 257.1 260.3 262.6 262.4 262.3 259.6 254.1 247.5 239.0 233.0 231.6 230.8 232.0 236.3 244.1 253.0 260.0 264.3 265.9 264.5 263.3 258.7 249.6 238.3 230.4 223.5 220.6 221.3 228.9 239.1 250.2 257.5 262.0 262.8 260.5 255.4 251.2 249.3 245.5 241.5 235.8 232.1 227.4 222.8 217.0 212.5 207.5 202.8 198.5 193.9 188.7 184.3 180.3 176.4 173.1 169.3 166.2 162.9 159.9 158.0 154.8 156.1 161.6 172.9 184.4 198.5 209.8 222.2 234.2 244.1 250.4 250.7 248.4 243.7 238.5 233.9 236.6 238.7 242.8 245.6 251.3 249.5 243.5 234.4 229.2 222.8 220.4 218.8 218.6 218.2 218.0 220.7 223.2 222.4 218.6 212.3 204.3 195.0 194.3 195.3 196.6 199.7 201.4 196.6 196.4 196.4 196.5 196.9 196.7 197.0 198.4 198.2 197.8 197.8 197.1 195.1 189.5 180.2 180.1 179.8 205.3 206.0 203.5 200.5 198.5 197.1 196.4 196.4 195.7 194.9 195.4 194.9 195.8 197.0 198.4 198.6 196.8 191.6 181.1 179.8 182.9 189.4 197.0 190.1 187.4 181.0 175.4 171.0 168.4 166.5 164.7 164.2 164.2 164.6 164.4 163.0 160.8 154.8 161.0 167.7 175.4 184.4 194.8 202.8 197.6 196.8 196.8 196.6 196.5 196.9 196.8 196.8 196.0 195.6 194.5 193.1 189.8 183.1 189.6 196.8 205.0 212.6 221.2 229.5 235.8 227.7 224.8 222.0 220.5 218.6 218.3 217.7 217.9 218.5 220.0 221.4 222.2 222.3 218.1 213.0 204.6 200.9 195.7 193.7 191.5 191.4 192.0 193.8 195.5 197.1 197.3 197.2 198.9 199.8 200.9 198.9 195.1 187.8 196.6 205.7 215.4 226.2 
237.2 248.7 260.8 252.7 250.2 250.8 249.7 247.9 246.5 245.5 243.7 243.0 242.0 241.4 241.2 241.8 242.4 244.1 245.0 245.7 245.8 246.7 247.6 248.0 247.8 248.3 248.0 247.0 245.6 244.2 244.4 244.4 242.2 242.0 243.2 243.8 244.8 245.9 247.5 247.5 248.4 249.6 249.7 249.3 248.7 246.3 243.6 242.2 242.5 243.5 244.4 244.4 244.7 245.5 245.7 245.5 244.7 245.8 245.3 244.4 244.6 244.9 246.0 247.2 249.0 250.6 251.0 251.2 250.4 249.2 247.4 245.1 243.0 241.0 239.8 239.2 240.3 242.6 245.7 250.5 255.3 257.9 258.8 258.4 255.6 252.2 246.9 243.4 242.7 242.0 241.8 243.8 245.8 247.7 248.4 249.8 250.0 249.6 250.2 248.7 247.6 246.9 247.9 247.9 250.2 253.6 259.9 258.4 256.2 254.8 254.3 253.9 250.4 248.5 248.5 247.8 247.6 247.5 247.7 248.5 249.7 247.9", + "f0_timestep": "0.011609977324263039", + "energy": "0.0003 0.0007 0.0006 0.0008 0.0006 0.0004 0.0001 0.0002 0.0001 0.0004 0.0006 0.0011 0.0009 0.0013 0.0021 0.0019 0.0017 0.0023 0.0019 0.0021 0.0024 0.0029 0.0032 0.0038 0.0032 0.0038 0.0031 0.0038 0.0038 0.0042 0.0037 0.0028 0.0028 0.0026 0.0027 0.0021 0.0021 0.002 0.002 0.0009 0.0007 0.0009 0.0008 0.0009 0.0039 0.0199 0.0334 0.044 0.0522 0.0598 0.0662 0.0721 0.0766 0.0771 0.0734 0.0685 0.063 0.0596 0.061 0.0637 0.0676 0.0733 0.0776 0.0808 0.0824 0.0837 0.0831 0.0834 0.0835 0.0814 0.0807 0.0795 0.078 0.0767 0.0763 0.0746 0.076 0.0765 0.077 0.0779 0.0788 0.0783 0.0767 0.073 0.0633 0.0509 0.0356 0.0217 0.0166 0.0182 0.0182 0.0301 0.0523 0.0677 0.0802 0.0902 0.0898 0.0893 0.088 0.083 0.0792 0.0758 0.0703 0.0679 0.0652 0.0655 0.0672 0.0696 0.0741 0.0759 0.0767 0.0766 0.0738 0.0715 0.0712 0.0714 0.0724 0.0731 0.0733 0.074 0.0754 0.0765 0.074 0.0671 0.056 0.0421 0.0264 0.0134 0.0066 0.004 0.0079 0.0265 0.0508 0.0658 0.0788 0.0858 0.0857 0.0844 0.0822 0.0786 0.0765 0.0751 0.075 0.0742 0.0729 0.0747 0.0752 0.0726 0.0637 0.051 0.0321 0.0161 0.0127 0.0247 0.0446 0.058 0.0684 0.0762 0.0771 0.0775 0.0784 0.0782 0.0772 0.0765 0.0764 0.0732 0.0667 0.0558 0.0425 0.0259 0.0124 0.0073 0.0089 0.0352 0.0593 0.0743 0.0859 0.091 0.0928 0.0927 0.0933 0.0925 0.0895 0.0855 0.0784 0.0719 0.0623 0.0553 0.0517 0.051 0.0552 0.0612 0.0662 0.0711 0.072 0.0728 0.0719 0.0691 0.0679 0.0668 0.0663 0.0675 0.0666 0.0663 0.0654 0.0633 0.0594 0.0537 0.044 0.0319 0.0194 0.0081 0.0064 0.0054 0.0136 0.0424 0.0584 0.0697 0.0795 0.0787 0.0773 0.0788 0.0796 0.0805 0.0828 0.0822 0.0821 0.0819 0.0813 0.0814 0.0823 0.0823 0.0818 0.0813 0.0825 0.085 0.0867 0.0874 0.0866 0.0867 0.0866 0.0857 0.0853 0.0825 0.0773 0.0742 0.0688 0.0603 0.0487 0.0341 0.0159 0.0093 0.0047 0.0052 0.0074 0.028 0.0498 0.0678 0.0808 0.0874 0.0896 0.0894 0.087 0.0841 0.0822 0.0764 0.0737 0.0718 0.069 0.069 0.0663 0.0579 1.0 0.0583 0.0677 0.0652 0.0609 0.057 0.055 0.0523 0.0486 0.0451 0.0391 0.0326 0.0292 0.0267 0.0257 0.0254 0.0252 0.0251 0.0256 0.027 0.0262 0.0234 0.0195 0.0135 0.0065 0.0036 0.0028 0.0035 0.0042 0.0046 0.0046 0.0045 0.0037 0.0031 0.0025 0.0017 0.0012 0.0014 0.0021 0.0017 0.0021 0.0048 0.0069 0.0102 0.019 0.0426 0.0608 0.0716 0.0798 0.0782 0.0726 0.0682 0.0626 0.0589 0.0612 0.0632 0.0665 0.0698 0.0687 0.0685 0.0609 0.0512 0.0403 0.027 0.0211 0.0205 0.0433 0.0595 0.0725 0.0796 0.0772 0.0747 0.0709 0.0696 0.0667 0.0653 0.0651 0.066 0.0706 0.0724 0.0691 0.064 0.0512 0.0359 0.023 0.0077 0.0108 0.0371 0.0516 0.0612 0.0695 0.0673 0.0652 0.064 0.063 0.0637 0.0641 0.0669 0.0688 0.0696 0.0667 0.0578 0.0465 0.0334 0.0199 0.0191 0.0389 0.0588 0.0693 0.0747 0.0733 0.0669 0.0623 0.0621 0.0599 0.0586 0.0566 0.0568 0.0586 0.0615 0.0624 0.058 0.0498 0.0376 0.0234 0.0102 0.011 0.0337 0.0465 
0.0544 0.0612 0.0588 0.0567 0.057 0.0562 0.058 0.0592 0.0609 0.0627 0.062 0.0596 0.0526 0.041 0.0295 0.0146 0.0129 0.0117 0.0214 0.0465 0.063 0.0756 0.0855 0.0858 0.085 0.0853 0.0829 0.0818 0.08 0.0779 0.0738 0.0684 0.0586 0.0467 0.0351 0.0237 0.0222 0.0221 0.0223 0.0229 0.0437 0.056 0.0653 0.0717 0.0673 0.0652 0.0631 0.0632 0.0639 0.0656 0.0683 0.0681 0.0711 0.0719 0.0718 0.0715 0.0702 0.0708 0.0722 0.0719 0.0718 0.0741 0.0734 0.0765 0.0791 0.0792 0.0811 0.0819 0.0798 0.0799 0.0767 0.0711 0.0649 0.0538 0.0413 0.0293 0.02 0.0179 0.0176 0.0162 0.0284 0.0467 0.0584 0.0706 0.0764 0.077 0.0758 0.0741 0.0722 0.0703 0.0691 0.0685 0.0679 0.0691 0.0703 0.0735 0.0725 0.0748 0.0755 0.0763 0.0794 0.0798 0.0821 0.0816 0.0817 0.082 0.0816 0.0809 0.0803 0.0794 0.0779 0.0776 0.0774 0.0771 0.078 0.0794 0.0797 0.0791 0.0787 0.0787 0.0784 0.0781 0.0785 0.0788 0.078 0.0779 0.0769 0.0772 0.0769 0.0782 0.0791 0.0788 0.0789 0.0781 0.0779 0.0779 0.0775 0.0769 0.0764 0.0763 0.0773 0.0789 0.0797 0.0802 0.0803 0.0808 0.0815 0.0826 0.0826 0.081 0.0796 0.0764 0.0737 0.0719 0.0699 0.0676 0.0669 0.0649 0.0654 0.064 0.0624 0.0619 0.0593 0.0572 0.0541 0.05 0.0469 0.0438 0.0413 0.041 0.0399 0.0408 0.04 0.0401 0.0395 0.0383 0.0387 0.039 0.0382 0.0377 0.0345 0.0313 0.0285 0.0246 0.0228 0.0224 0.0072 0.4223 0.0089 0.006 0.0027 0.0013 0.002 0.0001 0.0003 0.0 0.0 0.0 0.0 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0003 0.0002 0.0001 0.0002 0.0003 0.0002 0.0002 0.0001 0.0003 0.0004 0.0004 0.0005 0.0005 0.0006 0.0006 0.0008 0.001 0.0013 0.0016 0.0021 0.0027 0.003 0.0033 0.0033 0.0038 0.004 0.0042 0.0042 0.0041 0.0037 0.0034 0.0031 0.0025 0.0022 0.0024 0.0022 0.0019 0.0016 0.0012 0.0008 0.0004 0.0005 0.0011 0.0048 0.009 0.0102 0.0106 0.0096 0.006 0.0027 0.0022 0.0025 0.0023 0.0019 0.0014 0.001 0.0006 0.0006 0.0003 0.0004 0.0003 0.0002 0.0001 0.0004 0.0004 0.0004 0.0006 0.0006 0.0008 0.0012 0.0015 0.0015 0.0014 0.0014 0.0016 0.0017 0.0018 0.0016 0.0015 0.0015 0.0016 0.0021 0.0023 0.0026 0.0056 0.0084 0.0123 0.0163 0.0188 0.0197 0.0177 0.014 0.0087 0.0036 0.0025 0.0024 0.0022 0.0018 0.0018 0.0017 0.0019 0.0015 0.0019 0.002 0.002 0.0019 0.0019 0.0015 0.0014 0.0014 0.0013 0.0013 0.0014 0.0013 0.0011 0.0011 0.0013 0.0013 0.0011 0.0012 0.001 0.0012 0.0014 0.001 0.0006 0.001 0.002 0.0017 0.0016 0.0014 0.0009 0.001 0.0009 0.0007 0.0006 0.0005 0.0006 0.0006 0.0005 0.0005 0.0006 0.0005 0.0006 0.0003 0.0007 0.0012 0.0015 0.0019 0.0039 0.008 0.0119 0.0142 0.0144 0.0127 0.0099 0.0054 0.0027 0.0021 0.0016 0.0016 0.0013 0.0019 0.0018 0.0016 0.0017 0.0021 0.0021 0.0026 0.0025 0.0021 0.004 0.0056 0.0059 0.0057 0.0049 0.0022 0.0011 0.0009 0.0012 0.001 0.0009 0.0011 0.001 0.0011 0.0012 0.0013 0.0012 0.001 0.0009 0.001 0.001 0.001 0.001 0.0009 0.001 0.0009 0.0008 0.0008 0.0008 0.0007 0.0007 0.0007 0.0007 0.0007 0.0009 0.0009 0.0007 0.0009 0.0022 0.0033 0.0039 0.0046 0.0045 0.0038 0.0028 0.0014 0.0009 0.0009 0.001 0.0009 0.0009 0.0008 0.0009 0.0008 0.0005 0.0007 0.0007 0.0007 0.0007 0.0007 0.0008 0.0005 0.0012 0.0016 0.0017 0.0017 0.0017 0.0013 0.0011 0.001 0.0008 0.0008 0.0006 0.0006 0.0008 0.0007 0.0008 0.0009 0.0017 0.0022 0.0032 0.0051 0.0061 0.0059 0.0048 0.0031 0.0017 0.0012 0.0016 0.0018 0.0017 0.0018 0.0021 0.0021 0.0023 0.0026 0.0026 0.0031 0.0034 0.0037 0.0051 0.2838 0.006 0.0042 0.0027 0.0023 0.0018 0.0016 0.0016 0.0014 0.0013 0.0015 0.0012 0.001 0.0009 0.0008 0.0007 0.0008 0.0008 0.001 0.001 0.001 0.0007 0.0011 0.0015 0.0016 0.002 0.0019 0.0027 0.0036 0.0044 0.005 0.005 0.0045 0.0034 0.0025 0.0018 
0.0012 0.0005 0.0006 0.0006 0.0017 0.0041 0.0068 0.0084 0.0094 0.0087 0.0074 0.0058 0.0038 0.0035 0.0033 0.003 0.0029 0.0023 0.0017 0.0011 0.001 0.0012 0.0013 0.0012 0.0032 0.0072 0.0121 0.0167 0.0179 0.0173 0.0133 0.0061 0.0041 0.0032 0.0031 0.0025 0.0022 0.0017 0.0014 0.0013 0.001 0.0007 0.0007 0.0006 0.0006 0.0007 0.0006 0.0009 0.0017 0.0036 0.0055 0.006 0.0059 0.0047 0.0021 0.0014 0.0013 0.0011 0.0008 0.0008 0.0006 0.0005 0.0004 0.0003 0.0004 0.0003 0.0004 0.0005 0.0002 0.0005 0.001 0.0015 0.0018 0.0017 0.0016 0.0016 0.0015 0.0017 0.0013 0.0014 0.0014 0.0013 0.0011 0.0009 0.0009 0.0008 0.0005 0.0005 0.0024 0.0051 0.0054 0.0053 0.0048 0.0032 0.0019 0.002 0.002 0.0017 0.0016 0.0015 0.0014 0.0014 0.0012 0.0008 0.0007 0.0005 0.0021 0.0047 0.0078 0.0108 0.0134 0.0135 0.0115 0.0089 0.0038 0.0014 0.0011 0.001 0.0011 0.0009 0.0009 0.0008 0.0006 0.0008 0.0009 0.0027 0.0049 0.0078 0.0144 0.0185 0.021 0.0207 0.0164 0.0118 0.0058 0.0028 0.0022 0.0019 0.0017 0.0016 0.002 0.0019 0.0018 0.0013 0.0012 0.001 0.001 0.0009 0.0008 0.0008 0.0008 0.0007 0.0009 0.0009 0.0007 0.001 0.0011 0.0013 0.0013 0.0016 0.0014 0.0015 0.0016 0.0017 0.0016 0.0019 0.0035 0.0079 0.0118 0.0147 0.0168 0.0172 0.0154 0.0135 0.0101 0.0061 0.0031 0.0021 0.0017 0.0017 0.0017 0.0017 0.0017 0.0019 0.0019 0.0015 0.0015 0.0014 0.0014 0.0015 0.0015 0.0015 0.0017 0.0016 0.0017 0.0018 0.0017 0.0016 0.0015 0.0015 0.0015 0.0016 0.0017 0.0016 0.0017 0.0016 0.0017 0.0016 0.0018 0.0017 0.0018 0.0017 0.0017 0.0017 0.0017 0.0017 0.0021 0.0019 0.0017 0.0016 0.0013 0.0014 0.0015 0.0017 0.0017 0.0017 0.0017 0.0017 0.0016 0.0016 0.0017 0.0016 0.0015 0.0016 0.0016 0.0015 0.0015 0.0016 0.0016 0.0019 0.0018 0.0019 0.0019 0.0017 0.0018 0.0018 0.0016 0.0015 0.0014 0.0013 0.0014 0.0017 0.0015 0.0013 0.0012 0.001 0.0008 0.001 0.0008 0.0009 0.0008 0.001 0.0007 0.0007 0.0006 0.0008 0.0007 0.0006 0.0007 0.0008 0.0006 0.0009 0.0007 0.0007 0.0006 0.0005 0.0006 0.0008 0.0011 0.0017 0.0577 0.001 0.0022 0.0017 0.0008 0.0008 0.0006 0.0002 0.0002 0.0002 0.0001 0.0003 0.0002 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 79.144, + "text": "AP 刀 可 捉 拳 也 耍 AP 偶 尔 闲 来 问 个 生 杀 SP", + "ph_seq": "AP d ao k e zh uo q van y E sh ua AP ou er x ian l ai w en g e sh eng sh a SP", + "ph_dur": "0.335 0.045 0.193 0.045 0.163 0.075 1.324 0.105 0.178 0.06 0.163 0.075 0.952 0.163 0.253 0.223 0.075 0.179 0.06 0.564 0.15 0.193 0.045 0.371 0.105 0.163 0.075 0.953 0.095", + "ph_num": "2 2 2 2 2 2 1 1 1 2 2 2 2 2 2 1 1", + "note_seq": "rest B3 D4 E4 D4 E4 A3 rest B3 A3 B3 D4 B3 A3 B3 G3 rest", + "note_dur": "0.38 0.238 0.238 1.429 0.238 0.238 0.952 0.238 0.238 0.238 0.239 0.714 0.238 0.476 0.238 0.953 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "149.7 149.7 149.5 149.5 149.3 149.5 149.2 149.4 149.6 149.8 149.7 149.7 149.7 150.0 149.7 149.6 149.4 149.6 149.1 149.7 149.6 149.3 149.3 149.0 149.0 148.6 148.5 148.9 148.4 148.3 148.4 148.6 148.2 150.8 161.2 173.4 189.4 203.1 219.8 234.6 247.3 251.5 254.5 253.8 250.6 247.5 244.9 238.4 226.7 237.7 249.2 260.3 270.6 283.2 288.6 293.7 294.9 295.2 295.5 294.2 292.7 292.2 291.7 290.5 286.4 279.7 270.5 273.4 278.7 283.0 287.7 289.5 294.8 300.4 297.7 294.7 296.0 297.5 297.6 297.8 297.7 297.1 297.2 297.1 297.6 298.2 298.9 298.7 298.2 296.5 296.7 299.4 304.6 312.7 322.6 330.9 335.4 337.1 337.4 336.3 333.8 331.3 329.1 328.1 327.9 327.1 327.6 328.2 328.9 329.5 330.4 330.8 331.2 332.4 332.7 332.3 332.2 330.6 331.0 331.6 333.3 331.6 330.8 330.3 328.5 328.0 328.7 329.7 331.1 331.3 332.3 333.1 334.6 
334.9 335.6 335.1 334.9 333.8 331.3 328.9 327.3 326.6 326.9 326.8 327.4 328.9 331.5 335.1 338.7 341.1 341.7 343.3 344.5 342.8 340.0 335.3 328.6 323.2 320.3 320.2 321.6 325.0 328.4 333.8 338.1 343.3 346.2 348.4 346.6 341.6 333.8 327.1 322.2 320.1 321.8 324.4 328.3 334.2 337.6 339.2 340.1 341.5 340.4 337.5 332.8 327.3 319.3 306.4 301.7 302.0 300.4 299.9 298.7 297.6 295.8 296.0 295.0 291.4 290.2 291.5 290.9 293.1 294.5 296.1 295.6 295.4 293.1 292.3 291.3 292.2 291.5 290.0 289.3 287.0 290.3 296.6 303.2 313.4 324.1 328.3 329.7 329.8 328.9 329.1 330.2 329.0 328.1 326.3 316.4 300.9 279.4 271.9 265.3 259.5 253.3 246.2 239.4 230.4 219.7 215.2 217.0 218.1 220.5 221.3 220.1 219.3 217.4 213.4 209.3 206.1 204.1 206.1 210.3 216.8 222.7 226.7 229.9 231.7 232.9 231.0 226.8 221.5 216.7 212.8 210.2 210.6 212.9 216.5 220.5 225.0 227.9 229.2 229.2 228.5 225.2 221.7 217.5 214.4 211.3 210.6 211.1 212.8 215.9 219.5 224.9 229.2 231.2 231.7 232.2 229.1 223.7 216.5 209.4 205.9 203.1 203.6 206.6 213.1 218.6 224.9 230.9 235.5 238.2 237.6 233.4 227.5 219.3 209.4 202.8 201.1 203.6 208.7 214.1 220.4 227.6 229.6 230.2 229.5 225.2 222.8 222.0 224.5 226.8 229.0 230.5 231.8 233.3 234.6 237.0 240.1 246.7 254.1 258.5 264.3 260.5 257.1 255.4 253.0 251.0 248.4 246.6 246.4 245.4 244.6 244.1 243.8 244.7 245.1 245.6 246.4 245.5 244.6 243.3 242.1 242.4 243.7 243.0 239.0 233.2 229.0 226.6 221.9 217.6 217.4 218.0 218.9 220.5 221.5 222.2 223.6 223.7 223.3 222.3 218.7 210.6 216.1 222.4 228.8 234.9 241.4 248.6 258.0 250.4 244.6 243.6 243.7 244.2 244.4 245.1 245.3 245.6 245.9 247.2 247.2 247.4 247.5 245.8 246.5 246.9 249.0 251.6 256.2 259.3 259.4 261.3 262.2 263.3 262.7 262.0 261.1 260.3 259.8 260.5 261.2 262.1 265.4 269.5 277.0 285.1 297.0 303.0 305.0 304.3 302.0 297.0 292.4 290.9 291.5 292.2 293.4 295.0 297.2 299.4 299.6 298.4 296.5 295.4 292.4 290.9 289.6 289.0 289.5 290.4 290.1 290.0 290.8 291.4 293.0 293.1 293.8 294.1 294.8 294.5 296.6 298.7 298.5 298.3 296.9 293.3 287.4 282.1 276.6 270.0 263.1 257.0 249.2 246.6 245.7 246.3 245.8 247.0 248.9 249.7 249.1 248.9 246.3 244.6 239.2 230.7 221.2 212.3 215.9 216.8 217.4 216.6 216.3 218.7 220.0 220.1 220.3 220.5 220.2 219.9 219.9 219.3 219.6 219.5 219.6 220.3 220.1 219.0 218.8 218.8 219.4 219.2 220.6 220.4 220.1 219.0 218.7 218.9 218.8 219.3 219.1 218.5 213.9 207.2 214.6 221.1 227.6 233.7 241.3 247.3 254.9 259.9 250.4 249.0 247.7 247.2 246.8 246.5 247.1 246.9 245.5 244.6 244.3 240.3 234.4 228.6 224.4 219.8 214.7 210.3 205.9 201.7 198.3 193.0 193.4 194.3 197.0 198.9 198.7 197.6 197.5 194.7 191.0 187.6 185.7 185.2 185.6 188.3 193.9 199.9 203.9 206.7 207.5 206.7 203.7 198.4 192.6 186.8 182.4 181.6 182.8 186.0 191.0 198.0 204.1 207.7 209.3 209.7 208.0 203.9 198.3 190.7 184.1 181.4 181.5 183.3 187.4 193.2 200.4 205.4 207.9 209.4 208.7 205.7 199.7 192.5 183.6 178.8 177.7 179.2 183.4 187.1 192.9 198.8 203.9 207.2 207.9 207.5 204.0 198.3 190.5 181.8 177.8 177.5 178.8 182.8 185.7 190.5 198.8 203.6 205.0 205.4 202.3 198.0 193.0 190.1 188.5 186.2 185.3 185.7 184.8 184.7 185.2", + "f0_timestep": "0.011609977324263039", + "energy": "0.0009 0.0009 0.0012 0.002 0.0021 0.0025 0.0029 0.0037 0.0032 0.0038 0.0044 0.0046 0.0044 0.0048 0.0052 0.0039 0.0035 0.0032 0.0026 0.0028 0.0021 0.0025 0.002 0.0015 0.0005 0.0009 0.0008 0.0012 0.0004 0.0023 0.0044 0.0206 0.037 0.0484 0.061 0.069 0.0744 0.0779 0.0783 0.0809 0.0821 0.0846 0.0859 0.0867 0.087 0.0814 0.0699 0.0552 0.0371 0.0215 0.0142 0.0117 0.02 0.051 0.0703 0.0862 0.0989 0.1006 0.1029 0.103 0.1016 0.101 0.0985 0.0925 0.0799 0.0624 0.0409 0.0171 0.0082 0.0121 
0.022 0.023 0.0294 0.0564 0.0715 0.0842 0.0966 0.099 0.1016 0.1034 0.103 0.1036 0.1031 0.1057 0.1043 0.1055 0.1038 0.0984 0.0935 0.0836 0.077 0.0721 0.0709 0.074 0.0776 0.0801 0.0817 0.0821 0.0815 0.0833 0.0852 0.0867 0.0869 0.0866 0.0853 0.0841 0.0839 0.0837 0.083 0.0817 0.0818 0.082 0.0846 0.0858 0.0879 0.0902 0.0911 0.0916 0.091 0.0911 0.0897 0.0894 0.089 0.0885 0.0892 0.0896 0.09 0.0904 0.091 0.092 0.0924 0.0925 0.0925 0.0927 0.0941 0.095 0.0962 0.0959 0.0944 0.093 0.0921 0.0923 0.0937 0.0954 0.0956 0.0944 0.0931 0.0908 0.0878 0.0859 0.0827 0.0805 0.0797 0.0798 0.0818 0.0839 0.0861 0.0864 0.0842 0.0803 0.0769 0.0735 0.0717 0.0702 0.0687 0.068 0.0696 0.0704 0.0739 0.0763 0.0763 0.0761 0.073 0.0687 0.0647 0.0597 0.058 0.0579 0.0575 0.0566 0.0555 0.0542 0.0533 0.0535 0.0553 0.0532 0.0478 0.0383 0.0247 0.0108 0.0097 0.0131 0.0171 0.0188 0.0185 0.0307 0.0541 0.0694 0.0821 0.089 0.089 0.0881 0.0879 0.086 0.0831 0.0839 0.0849 0.0866 0.09 0.0895 0.0892 0.0884 0.0854 0.0851 0.0831 0.0826 0.0827 0.0849 0.0878 0.091 0.0942 0.0954 0.095 0.0919 0.0893 0.0858 0.0852 0.0832 0.0784 0.0692 0.054 0.0368 0.0258 0.0221 0.0245 0.0219 0.025 0.0414 0.0578 0.0728 0.0841 0.0907 0.093 0.0936 0.0939 0.0903 0.0878 0.0845 0.0801 0.0782 0.0753 0.0756 0.0755 0.0772 0.0787 0.0795 0.0808 0.0815 0.0821 0.0836 0.0846 0.0842 0.0821 0.0783 0.0747 0.0716 0.0695 0.0696 0.0694 0.0711 0.0749 0.0768 0.0784 0.078 0.0764 0.0754 0.0742 0.0731 0.0714 0.0691 0.0658 0.0639 0.0624 0.0617 0.0633 0.0655 0.0677 0.0694 0.0694 0.0684 0.0656 0.0613 0.0566 0.0511 0.0474 0.0437 0.0418 0.0412 0.0426 0.0467 0.0512 0.0549 0.0567 0.057 0.0562 0.0535 0.0497 0.0454 0.041 0.0375 0.0345 0.0327 0.0337 0.0356 0.0383 0.039 0.0391 0.0375 0.0322 0.0266 0.02 0.0121 0.0066 0.0056 0.0053 0.0063 0.0061 0.0061 0.0062 0.0058 0.0056 0.0044 0.0068 0.0303 0.0481 0.0695 0.0831 0.0932 0.0978 0.0969 0.0975 0.0953 0.0932 0.0913 0.0903 0.0902 0.0913 0.0902 0.0899 0.089 0.0857 0.0857 0.0843 0.0842 0.085 0.0842 0.0847 0.0842 0.0842 0.0838 0.0863 0.0897 0.0898 0.0931 0.0921 0.0908 0.0915 0.0918 0.0916 0.0904 0.0874 0.0821 0.0756 0.0647 0.0503 0.0347 0.0197 0.015 0.0152 0.0138 0.0191 0.045 0.0644 0.0803 0.0909 0.0923 0.0908 0.0874 0.0861 0.0818 0.0789 0.0771 0.0753 0.0776 0.0794 0.0819 0.0835 0.084 0.0827 0.0809 0.0812 0.0871 0.0942 0.1007 0.1059 0.1074 0.1051 0.1059 0.1048 0.1006 0.1002 0.0954 0.0897 0.0859 0.0772 0.0722 0.0691 0.0687 0.0723 0.0753 0.0775 0.0795 0.0812 0.0814 0.0821 0.0822 0.0808 0.0815 0.0811 0.0804 0.081 0.0799 0.0801 0.0799 0.0805 0.0812 0.0814 0.0846 0.0851 0.0899 0.093 0.0857 0.9791 0.0945 0.1011 0.0955 0.0895 0.0886 0.0884 0.0894 0.0906 0.0932 0.0922 0.094 0.0956 0.0964 0.0995 0.0987 0.0988 0.1003 0.0966 0.0979 0.0979 0.0962 0.0958 0.0921 0.0883 0.0858 0.0859 0.0853 0.0855 0.0862 0.0881 0.0894 0.0904 0.0909 0.0851 0.0755 0.0619 0.0437 0.0246 0.018 0.0436 0.0622 0.0794 0.0941 0.098 0.1002 0.0976 0.0953 0.0944 0.0917 0.0895 0.0885 0.087 0.0876 0.0888 0.0884 0.0872 0.0869 0.0858 0.0849 0.0839 0.0852 0.0856 0.087 0.0877 0.087 0.087 0.0865 0.0872 0.0868 0.086 0.0825 0.0742 0.0631 0.0484 0.0318 0.02 0.0206 0.0231 0.0241 0.023 0.0366 0.0549 0.0686 0.0806 0.0864 0.0867 0.0862 0.0866 0.0856 0.0827 0.0806 0.0773 0.0739 0.0712 0.0615 0.05 0.0402 0.0297 0.0272 0.0286 0.0258 0.0296 0.0445 0.058 0.0674 0.077 0.0781 0.0788 0.0801 0.0793 0.0788 0.0767 0.074 0.0694 0.0658 0.0619 0.0597 0.0599 0.0604 0.0636 0.0663 0.0688 0.0714 0.0722 0.0724 0.0699 0.0649 0.0591 0.0511 0.0462 0.0423 0.0416 0.0413 0.0433 0.0473 0.0515 0.0544 0.0572 0.0574 0.056 0.0525 0.0476 0.0429 
0.038 0.0347 0.0314 0.0306 0.0308 0.0334 0.037 0.0399 0.0431 0.045 0.0449 0.0433 0.0396 0.0354 0.0304 0.0277 0.026 0.0245 0.0238 0.0234 0.0231 0.0257 0.0278 0.0288 0.0297 0.0293 0.0269 0.0237 0.0201 0.0171 0.0142 0.0138 0.0141 0.0143 0.0143 0.0145 0.0144 0.0139 0.0138 0.0137 0.0129 0.0121 0.0093 0.0067 0.0034 0.0012 0.0 0.0 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0004 0.0008 0.0011 0.0016 0.0026 0.0034 0.0039 0.0042 0.0043 0.0044 0.0045 0.0047 0.0051 0.0052 0.0054 0.0049 0.0046 0.0043 0.0037 0.0034 0.0027 0.0022 0.0018 0.0016 0.0012 0.0012 0.0013 0.001 0.0009 0.0018 0.0047 0.018 0.0268 0.0313 0.0312 0.0272 0.0212 0.009 0.0031 0.0031 0.0028 0.0023 0.0016 0.0016 0.0015 0.0016 0.0018 0.0029 0.0043 0.006 0.0078 0.0084 0.0082 0.0071 0.0054 0.0028 0.0026 0.0022 0.0021 0.0018 0.0017 0.0016 0.0019 0.002 0.0021 0.0023 0.0024 0.0024 0.0063 0.0164 0.0265 0.0295 0.0292 0.0239 0.014 0.0045 0.0023 0.0019 0.0017 0.0016 0.0014 0.0014 0.0011 0.0013 0.0012 0.0016 0.0014 0.0014 0.0015 0.0015 0.0013 0.0014 0.0011 0.0013 0.0012 0.0013 0.0013 0.0012 0.0014 0.0012 0.0011 0.0011 0.0011 0.001 0.001 0.001 0.0011 0.0011 0.0013 0.0013 0.0011 0.0012 0.0012 0.0013 0.0011 0.0013 0.0011 0.001 0.0009 0.001 0.0008 0.0009 0.0007 0.001 0.0009 0.001 0.0009 0.001 0.001 0.0014 0.0015 0.0016 0.0017 0.0015 0.0013 0.0013 0.0011 0.0013 0.0012 0.0013 0.0014 0.0014 0.0014 0.0015 0.0015 0.0016 0.0018 0.0019 0.0018 0.0016 0.0014 0.0013 0.0013 0.0009 0.0012 0.0011 0.0011 0.0009 0.0012 0.0012 0.0014 0.0015 0.0011 0.001 0.0012 0.0009 0.0011 0.0012 0.0012 0.0014 0.0016 0.0015 0.0013 0.0011 0.0008 0.0009 0.0008 0.0008 0.0008 0.001 0.0007 0.0009 0.0006 0.0008 0.001 0.0012 0.0014 0.0027 0.0038 0.006 0.0092 0.0123 0.0147 0.0165 0.0153 0.013 0.0101 0.0046 0.0025 0.0019 0.0017 0.0019 0.0019 0.0017 0.0014 0.0013 0.0012 0.0009 0.0009 0.0008 0.0008 0.0006 0.0006 0.0005 0.0007 0.0012 0.0016 0.0018 0.0019 0.0022 0.002 0.0019 0.0017 0.0017 0.0018 0.0016 0.0014 0.0012 0.0022 0.006 0.0141 0.0185 0.0224 0.0259 0.0262 0.0246 0.0213 0.0156 0.0105 0.0047 0.0035 0.0027 0.0022 0.0022 0.002 0.0019 0.0022 0.0024 0.0025 0.0023 0.0022 0.0021 0.002 0.0023 0.0022 0.0021 0.0019 0.0019 0.002 0.0021 0.0022 0.0021 0.002 0.0019 0.0017 0.0014 0.0016 0.0016 0.0018 0.0019 0.0019 0.0018 0.002 0.0021 0.0021 0.0021 0.002 0.0018 0.002 0.0016 0.0016 0.0015 0.0018 0.0016 0.0017 0.0016 0.0016 0.0014 0.0014 0.0014 0.0018 0.0018 0.0019 0.0015 0.0012 0.0012 0.0009 0.001 0.001 0.0011 0.0013 0.0013 0.0012 0.0014 0.0013 0.0014 0.0015 0.0014 0.0012 0.0011 0.0009 0.0006 0.0004 0.0006 0.0007 0.0007 0.0006 0.0007 0.001 0.0011 0.0015 0.0024 0.0034 0.0047 0.0055 0.0065 0.0069 0.0069 0.0064 0.0058 0.0052 0.0039 0.0034 0.0026 0.0012 0.0011 0.0015 0.0018 0.0018 0.0018 0.0018 0.0015 0.0016 0.0012 0.0012 0.0011 0.001 0.0011 0.0009 0.001 0.0008 0.001 0.0008 0.0007 0.0007 0.0009 0.001 0.0012 0.0013 0.0014 0.0014 0.0015 0.0015 0.0015 0.0013 0.0011 0.0013 0.0012 0.0013 0.0014 0.0016 0.0017 0.002 0.0045 0.0078 0.0103 0.0121 0.013 0.0128 0.0118 0.0112 0.0096 0.007 0.0045 0.0021 0.0014 0.0015 0.0012 0.0011 0.0013 0.0009 0.0009 0.0007 0.0008 0.0007 0.0008 0.0007 0.0007 0.0008 0.001 0.001 0.0015 0.002 0.0023 0.0022 0.0022 0.002 0.002 0.0019 0.0022 0.002 0.0018 0.002 0.0021 0.0021 0.0021 0.002 0.0021 0.0023 0.0023 0.0024 0.0024 0.002 0.0019 0.0016 0.0016 0.0016 0.0015 0.0013 0.0012 0.0013 0.0011 0.0009 0.0008 0.0007 0.0008 0.0007 0.001 0.0011 0.0017 0.0025 0.0033 0.1469 0.001 0.0027 0.0016 0.0009 0.001 0.0009 0.0009 0.0008 0.0011 0.0011 0.001 0.0008 0.0007 0.0006 0.0007 
0.0006 0.0005 0.0005 0.0005 0.0008 0.0009 0.0009 0.0011 0.0013 0.0012 0.001 0.001 0.0007 0.0004 0.0008 0.0005 0.0002 0.0005 0.0005 0.0006 0.0006 0.0018 0.0033 0.0043 0.0044 0.0043 0.0034 0.0016 0.0015 0.0013 0.0013 0.0011 0.0011 0.0012 0.0013 0.0014 0.0015 0.0011 0.0013 0.0013 0.0014 0.0013 0.0011 0.0011 0.0012 0.0012 0.0011 0.0016 0.0013 0.0014 0.0013 0.0013 0.0012 0.0011 0.0011 0.0017 0.0037 0.0082 0.0132 0.0159 0.0196 0.0232 0.0261 0.0282 0.0275 0.0227 0.0174 0.0089 0.0026 0.0018 0.0017 0.0018 0.0014 0.0011 0.0012 0.0013 0.0013 0.0013 0.0019 0.0042 0.0108 0.0173 0.0225 0.0261 0.0274 0.0251 0.0212 0.0162 0.0092 0.0038 0.0025 0.0022 0.0024 0.0022 0.0021 0.0022 0.0022 0.0022 0.0024 0.0022 0.002 0.0021 0.002 0.0022 0.0024 0.0021 0.002 0.0022 0.0021 0.0022 0.0021 0.0023 0.0018 0.0018 0.0016 0.0012 0.0011 0.0014 0.0015 0.0015 0.0015 0.0014 0.0013 0.0014 0.0014 0.0016 0.0015 0.0014 0.0014 0.0012 0.0011 0.0009 0.0008 0.0009 0.0008 0.0009 0.0009 0.001 0.0009 0.0013 0.0012 0.001 0.001 0.0009 0.0005 0.0004 0.0005 0.0006 0.0006 0.0007 0.0007 0.0006 0.0006 0.0006 0.0005 0.0005 0.0002 0.0003 0.0002 0.0003 0.0002 0.0002 0.0003 0.0002 0.0005 0.0004 0.0003 0.0003 0.0005 0.0008 0.0016 0.0017 0.0015 0.0014 0.0007 0.0006 0.0004 0.0003", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 86.525, + "text": "AP 没 得 英 雄 名 讳 AP 掂 量 些 旧 事 抵 酒 价 SP", + "ph_seq": "AP m ei d e y ing x iong m ing h ui AP d ian l iang x ie j iu sh ir d i j iu j ia SP", + "ph_dur": "0.305 0.075 0.193 0.045 0.163 0.075 0.163 0.075 0.356 0.12 0.163 0.075 0.238 0.179 0.06 0.178 0.06 0.163 0.075 0.163 0.075 0.356 0.12 0.193 0.045 0.594 0.12 0.387 0.09 1.428 0.095", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest G3 E3 G3 A3 B3 A3 rest G3 E3 G3 A3 B3 D4 D3 E3 rest", + "note_dur": "0.38 0.238 0.238 0.238 0.476 0.238 0.238 0.239 0.238 0.238 0.238 0.476 0.238 0.714 0.477 1.428 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "186.0 185.8 185.9 185.8 185.8 185.8 186.2 185.9 186.0 186.1 186.1 186.3 186.4 186.7 186.6 186.3 186.5 186.8 187.0 187.0 187.1 187.3 187.8 187.7 187.8 187.9 187.9 187.2 186.7 186.3 184.3 181.0 178.8 175.4 176.3 177.9 179.1 182.2 184.3 188.6 191.5 193.7 195.7 196.4 195.9 194.5 190.4 185.4 175.1 166.3 167.2 167.0 166.4 169.3 166.4 163.4 162.6 162.6 163.4 164.5 165.5 166.6 166.7 166.2 165.5 164.6 163.1 158.0 149.2 142.0 145.9 151.2 166.1 179.1 188.4 194.3 195.3 195.7 196.0 196.5 196.8 196.5 196.8 197.1 197.1 196.3 193.5 188.0 189.5 194.9 200.9 207.0 214.9 223.3 228.9 233.9 228.5 228.0 224.7 220.6 215.6 210.8 207.9 208.9 210.1 212.7 218.4 224.8 227.4 229.1 227.6 221.8 214.7 206.4 201.9 200.6 201.0 203.0 206.4 213.0 217.7 221.6 223.3 223.1 222.1 218.9 217.3 218.9 222.6 225.9 229.3 234.3 237.3 241.9 246.0 246.2 246.8 247.6 247.3 247.4 247.6 246.9 247.4 248.2 248.3 247.6 244.8 238.7 229.3 219.0 219.3 221.4 224.0 225.8 227.0 227.8 224.6 226.9 227.1 228.1 226.3 223.0 216.7 208.6 201.0 195.4 194.4 196.1 201.9 210.1 218.7 227.0 232.9 235.0 230.2 225.2 218.0 211.1 205.5 200.8 197.6 194.8 191.3 188.4 185.8 183.4 181.0 177.5 175.1 172.7 171.0 168.2 165.9 164.2 161.7 159.4 157.3 154.9 154.3 157.6 160.2 163.3 168.3 176.6 186.2 192.7 197.4 199.1 198.8 196.3 191.6 186.6 181.6 176.4 172.3 168.7 166.7 166.6 164.4 163.5 163.1 162.9 165.1 166.9 168.0 167.5 166.5 165.6 165.6 163.9 160.9 156.1 152.4 162.3 172.1 182.0 192.7 201.1 200.8 195.9 194.8 194.4 194.0 194.9 196.1 196.5 196.9 197.0 196.2 194.3 187.8 181.1 184.0 187.1 190.3 193.0 195.4 197.4 199.2 200.3 197.7 197.8 198.2 198.3 
198.5 198.1 196.9 196.3 195.2 195.0 196.5 200.7 207.9 214.7 218.6 221.0 221.6 220.9 218.2 216.1 214.3 213.2 213.4 215.1 217.8 220.4 221.8 222.1 220.7 216.7 221.1 225.1 230.1 234.0 239.4 244.1 248.8 253.5 258.9 261.9 253.6 249.2 248.7 248.2 247.8 246.7 245.7 245.6 246.8 246.8 247.1 245.6 242.8 227.0 230.8 242.2 254.1 265.7 280.4 266.2 262.4 261.7 262.0 262.3 261.6 261.5 261.2 262.2 262.9 262.8 263.2 263.1 263.6 262.7 263.0 269.1 276.1 286.5 292.8 296.2 297.0 296.9 295.1 293.3 290.9 290.5 289.5 289.4 291.4 293.1 295.8 296.1 296.2 294.9 294.7 292.5 289.6 286.1 283.9 283.5 284.3 286.6 288.8 290.7 293.7 295.2 294.4 290.6 286.9 279.4 266.7 252.5 241.9 230.8 219.7 210.0 200.8 193.1 185.4 178.2 171.4 158.1 147.7 143.0 143.2 143.4 143.8 143.7 144.0 143.8 143.8 142.3 141.6 141.2 142.3 143.5 144.1 145.4 146.6 147.9 148.6 148.8 148.5 148.5 148.0 147.6 146.7 147.2 148.1 149.0 149.0 148.7 146.5 140.5 140.7 142.4 144.6 146.4 148.0 149.7 151.8 153.8 153.8 152.9 147.3 146.5 146.8 147.3 148.0 148.3 148.4 147.9 147.5 147.0 147.0 146.7 146.7 147.0 147.8 149.4 151.9 155.2 159.0 162.6 164.0 165.2 166.4 167.8 167.4 166.6 166.2 164.8 162.8 160.8 159.2 158.6 158.3 158.1 159.4 161.6 164.9 167.6 169.9 172.0 173.4 173.0 172.2 169.8 166.3 162.0 160.2 158.5 157.6 158.7 161.1 164.8 168.3 170.9 173.0 174.4 174.4 172.7 169.9 165.7 161.6 158.2 156.1 155.8 156.7 160.9 165.3 169.9 173.2 175.8 178.1 177.9 176.8 172.4 166.6 160.9 156.7 154.4 154.1 155.7 158.8 163.5 167.9 171.8 174.8 177.1 176.6 174.8 170.7 165.7 157.9 152.1 150.0 150.6 152.9 156.5 161.5 166.9 172.8 177.3 179.5 178.5 175.4 170.7 165.0 156.5 152.2 150.3 149.0 148.9 151.1 154.9 160.2 163.3 166.1 170.5 172.3 172.7 171.8 169.1 164.7 162.4 161.7 161.3 159.8 158.3 158.1 157.5 156.8 158.0", + "f0_timestep": "0.011609977324263039", + "energy": "0.0016 0.0026 0.0025 0.0034 0.004 0.0044 0.006 0.007 0.0077 0.0084 0.0091 0.0097 0.0101 0.0095 0.0095 0.0085 0.0079 0.0079 0.0066 0.0056 0.0058 0.004 0.0034 0.0032 0.0022 0.0025 0.0017 0.0126 0.0417 0.0613 0.076 0.0887 0.0924 0.0916 0.0918 0.0866 0.0814 0.0753 0.0694 0.0655 0.065 0.0649 0.0676 0.069 0.0688 0.0675 0.0597 0.0507 0.0374 0.0226 0.0164 0.0172 0.0428 0.065 0.078 0.0897 0.0901 0.0902 0.0907 0.091 0.093 0.0916 0.0907 0.0864 0.083 0.0774 0.0693 0.06 0.0467 0.0324 0.022 0.0203 0.0324 0.0505 0.063 0.0728 0.0783 0.0797 0.08 0.0806 0.0814 0.078 0.0788 0.079 0.0753 0.0733 0.0659 0.0532 0.0431 0.0316 0.0274 0.0298 0.0303 0.0363 0.0614 0.0764 0.0872 0.0946 0.0893 0.0865 0.0813 0.0773 0.0741 0.0703 0.0687 0.067 0.0653 0.0632 0.0636 0.0639 0.0643 0.0669 0.0692 0.0726 0.0714 0.0694 0.0661 0.0637 0.0664 0.067 0.0676 0.0673 0.0662 0.0666 0.0694 0.072 0.0728 0.0771 0.0779 0.0788 0.0824 0.0821 0.0844 0.0868 0.0892 0.0904 0.09 0.0913 0.0898 0.0885 0.0875 0.0858 0.0842 0.0826 0.0823 0.0792 0.0759 0.0668 0.052 0.0376 0.0193 0.011 0.0099 0.0083 0.0252 0.0498 0.0652 0.0795 0.0877 0.0886 0.0857 0.082 0.0789 0.0731 0.0692 0.0635 0.0565 0.0533 0.0497 0.0498 0.0502 0.0501 0.0511 0.0472 0.0418 0.0334 0.0222 0.0105 0.0048 0.0037 0.0048 0.0042 0.0038 0.004 0.0033 0.0024 0.0022 0.002 0.0018 0.0021 0.0022 0.001 0.0018 0.0018 0.0105 0.0293 0.0445 0.0559 0.0614 0.0631 0.061 0.0581 0.0562 0.054 0.0535 0.0555 0.0584 0.0629 0.0639 0.0634 0.0659 0.064 0.0658 0.0655 0.0635 0.0637 0.0618 0.0639 0.0653 0.0676 0.0668 0.0665 0.0662 0.0651 0.0638 0.0624 0.0603 0.059 0.058 0.0533 0.0467 0.0377 0.0261 0.0189 0.0143 0.0137 0.0277 0.045 0.0577 0.0692 0.0743 0.0738 0.074 0.073 0.073 0.0713 0.0705 0.0691 0.064 0.055 0.0434 0.0269 0.0136 0.0109 0.0133 0.0145 0.0155 
0.0336 0.0511 0.0655 0.0743 0.0764 0.0762 0.0739 0.0753 0.0741 0.0724 0.0681 0.0618 0.0576 0.0545 0.0548 0.0577 0.0615 0.0642 0.0649 0.0643 0.064 0.0637 0.0651 0.0658 0.067 0.0675 0.0666 0.0672 0.0653 0.0614 0.0551 0.0444 0.034 0.0243 0.0164 0.0151 0.0186 0.0195 0.0208 0.0219 0.0192 0.027 0.0451 0.0579 0.0691 0.0758 0.0766 0.0773 0.0773 0.0778 0.0777 0.0775 0.0776 0.0737 0.0656 0.054 0.0394 0.0224 0.0106 0.0085 0.0158 0.0441 0.0612 0.0737 0.0855 0.0881 0.0908 0.0958 0.0979 0.0991 0.1002 0.097 0.0943 0.093 0.0887 0.0832 0.0773 0.0699 0.0676 0.0688 0.0718 0.0746 0.0778 0.08 0.0813 0.0826 0.0839 0.0839 0.0846 0.0859 0.0854 0.0868 0.0869 0.0872 0.0891 0.0873 0.0868 0.0857 0.0855 0.0852 0.0856 0.0852 0.0834 0.0819 0.0798 0.0799 0.077 0.0773 0.078 0.0796 0.0788 0.0707 0.0587 0.0418 0.0223 0.0076 0.0089 0.0107 0.0126 0.0139 0.0141 0.0124 0.0109 0.0255 0.0404 0.0517 0.0604 0.0642 0.0635 0.064 0.0642 0.0618 0.0628 0.0622 0.0613 0.0608 0.0589 0.0571 0.0557 0.0567 0.0555 0.0577 0.0571 0.058 0.0581 0.0572 0.0587 0.058 0.0592 0.0595 0.0588 0.06 0.0574 0.0563 0.0502 0.0415 0.0315 0.0176 0.0107 0.0077 0.0087 0.01 0.0089 0.0093 0.0254 0.0435 0.053 0.0629 0.0666 0.0663 0.066 0.0664 0.067 0.0665 0.0671 0.0659 0.0639 0.0627 0.0613 0.0595 0.0595 0.058 0.0574 0.0593 0.0605 0.0618 0.0628 0.0624 0.0627 0.0629 0.0633 0.0652 0.0645 0.0646 0.0644 0.063 0.0635 0.0624 0.0598 0.0582 0.056 0.0546 0.0539 0.0554 0.0551 0.0558 0.0571 0.0586 0.0601 0.0611 0.0609 0.0587 0.0558 0.0524 0.05 0.0478 0.0466 0.0464 0.0476 0.0483 0.0504 0.0543 0.0567 0.0584 0.0578 0.0543 0.051 0.0471 0.0452 0.0425 0.0406 0.0394 0.0379 0.0382 0.0411 0.0445 0.0491 0.0533 0.0544 0.0539 0.0501 0.0459 0.0415 0.0373 0.0353 0.0329 0.0322 0.031 0.032 0.0338 0.0365 0.0398 0.042 0.0424 0.041 0.0377 0.0333 0.0298 0.0273 0.0251 0.0238 0.0233 0.022 0.0233 0.0233 0.0237 0.0241 0.0244 0.0236 0.0229 0.0219 0.0201 0.0183 0.0169 0.0153 0.0141 0.0135 0.0134 0.0141 0.0147 0.0144 0.014 0.0137 0.0133 0.0131 0.0128 0.0112 0.0093 0.0061 0.0031 0.0017 0.001 0.0006 0.0008 0.0009 0.0005 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0006 0.0012 0.002 0.0025 0.0033 0.0044 0.0056 0.0063 0.0076 0.0085 0.0087 0.0094 0.0092 0.0085 0.0085 0.0083 0.008 0.0076 0.007 0.0058 0.005 0.004 0.0032 0.0024 0.0014 0.0011 0.001 0.001 0.0014 0.0017 0.0016 0.0015 0.0017 0.0022 0.0025 0.0024 0.0024 0.002 0.0017 0.0016 0.0015 0.0015 0.0014 0.0015 0.0012 0.0009 0.0009 0.0008 0.0007 0.0006 0.0013 0.0026 0.0033 0.0038 0.0034 0.0025 0.0016 0.0015 0.0015 0.0017 0.0019 0.0018 0.0018 0.0016 0.0016 0.0018 0.0023 0.0041 0.0124 0.0222 0.0266 0.0301 0.0286 0.0227 0.0174 0.0068 0.0024 0.0016 0.0014 0.0014 0.0013 0.0012 0.0013 0.0014 0.0019 0.0028 0.0059 0.0107 0.0162 0.0209 0.0253 0.0282 0.0301 0.029 0.0251 0.0191 0.0111 0.0035 0.0021 0.0024 0.0021 0.0019 0.0016 0.0014 0.0011 0.0011 0.0011 0.0011 0.0011 0.0012 0.001 0.001 0.0009 0.0011 0.001 0.0009 0.0008 0.0006 0.0005 0.0005 0.0005 0.0004 0.0006 0.0004 0.0005 0.0005 0.0005 0.0006 0.0005 0.0006 0.0008 0.0007 0.0009 0.0012 0.0015 0.0017 0.0016 0.0017 0.0016 0.0011 0.0012 0.001 0.0009 0.0008 0.0007 0.0007 0.0008 0.0008 0.0038 0.0096 0.0124 0.0135 0.0135 0.0098 0.0062 0.004 0.0023 0.0016 0.0015 0.0018 0.002 0.0025 0.0027 0.0027 0.0024 0.0024 0.0019 0.0016 0.0015 0.0016 0.0015 0.0014 0.0011 0.001 0.001 0.0012 0.0016 0.0019 0.0025 0.0028 0.0029 0.0035 0.0038 0.0037 0.0037 0.003 0.0027 0.0022 0.0019 0.0018 0.0016 0.001 0.0007 0.002 0.0099 0.0157 0.0161 0.0157 0.0117 0.0034 0.0019 0.0015 0.0012 0.0011 0.001 0.0008 0.0007 0.0007 0.0006 
0.0007 0.0007 0.0007 0.0006 0.0006 0.0007 0.001 0.0012 0.0014 0.0016 0.0019 0.0018 0.002 0.0017 0.0015 0.0017 0.0017 0.0015 0.0014 0.0012 0.0013 0.0027 0.0079 0.0128 0.0156 0.0173 0.0162 0.0123 0.0093 0.0058 0.0037 0.0023 0.0016 0.0016 0.0016 0.002 0.0023 0.0026 0.0024 0.0021 0.0017 0.0015 0.002 0.0033 0.0087 0.0136 0.0162 0.0166 0.0158 0.0119 0.0076 0.0062 0.0023 0.0019 0.0017 0.0016 0.0018 0.0018 0.0017 0.0017 0.0013 0.0011 0.0012 0.0012 0.0014 0.0012 0.0013 0.0012 0.0011 0.0011 0.0009 0.001 0.001 0.0009 0.001 0.0009 0.0009 0.0006 0.0009 0.003 0.0064 0.0104 0.013 0.0155 0.0182 0.0195 0.0218 0.0233 0.0216 0.0192 0.0149 0.0091 0.0054 0.0027 0.0018 0.0018 0.0015 0.0017 0.0017 0.0017 0.0016 0.0014 0.0015 0.0015 0.0011 0.0006 0.0015 0.0039 0.005 0.0047 0.0048 0.0034 0.0022 0.0025 0.0025 0.0023 0.0019 0.0019 0.0019 0.0019 0.0021 0.0025 0.0026 0.0022 0.0019 0.0017 0.0021 0.0021 0.0023 0.0024 0.0021 0.002 0.0019 0.0016 0.0018 0.0018 0.0018 0.0016 0.0016 0.0017 0.0018 0.0019 0.002 0.002 0.0015 0.0014 0.0012 0.0014 0.0014 0.0016 0.0016 0.0015 0.0017 0.0019 0.002 0.0019 0.0018 0.0017 0.0016 0.0017 0.0022 0.0027 0.0033 0.0055 0.0084 0.011 0.0137 0.0153 0.0152 0.0144 0.0122 0.0097 0.0074 0.0059 0.0034 0.0018 0.0011 0.0012 0.001 0.001 0.001 0.0011 0.0012 0.0012 0.0011 0.0011 0.0008 0.0009 0.0012 0.001 0.001 0.0011 0.001 0.0009 0.0011 0.001 0.0009 0.001 0.0007 0.0007 0.0011 0.0007 0.0007 0.0008 0.0019 0.0035 0.0058 0.0072 0.0083 0.0096 0.0089 0.0082 0.0068 0.0043 0.0034 0.0025 0.0021 0.0018 0.0016 0.0014 0.0015 0.0013 0.0016 0.0016 0.0016 0.0014 0.0013 0.0015 0.0013 0.0015 0.0016 0.0018 0.0019 0.002 0.0021 0.0021 0.0019 0.002 0.0016 0.0014 0.0012 0.0013 0.0014 0.0015 0.0015 0.0014 0.0014 0.0014 0.0013 0.0014 0.0016 0.0018 0.002 0.0021 0.0021 0.0021 0.002 0.002 0.0019 0.002 0.0018 0.0017 0.0016 0.0015 0.0014 0.0014 0.0016 0.0016 0.0018 0.0019 0.0018 0.0018 0.002 0.002 0.0022 0.002 0.0021 0.0016 0.0014 0.0012 0.0011 0.0015 0.0014 0.0016 0.0018 0.0016 0.0014 0.0014 0.0016 0.0017 0.0017 0.0015 0.0014 0.0012 0.0012 0.0012 0.0018 0.0015 0.0017 0.0014 0.0015 0.0013 0.0012 0.0013 0.0013 0.0011 0.0013 0.0012 0.0005 0.0007 0.0006 0.0008 0.0008 0.0009 0.0008 0.001 0.0012 0.001 0.0009 0.0007 0.0009 0.0007 0.0006 0.0004 0.0004 0.0003 0.0003 0.0004 0.0005 0.0004 0.0004 0.0004 0.0004 0.0005 0.0014 0.0014 0.0016 0.0016 0.0009 0.0008 0.0005 0.0004 0.0003 0.0002 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 93.448, + "text": "SP AP 向 江 南 折 过 花 AP 对 春 风 与 红 蜡 AP 多 情 总 似 我 风 流 爱 天 下 AP 人 世 肯 相 逢 知 己 幸 有 七 八 AP 邀 我 拍 坛 去 醉 眼 万 斗 烟 霞 AP 向 江 北 饮 过 马 AP 对 西 风 与 黄 沙 AP 无 情 也 似 我 向 剑 底 斩 桃 花 AP 人 世 难 相 逢 AP 谢 青 山 催 白 发 AP 慷 慨 唯 霜 雪 相 赠 眉 间 一 道 疤 SP", + "ph_seq": "SP AP x iang j iang n an zh e g uo h ua AP d ui ch un f eng y v h ong l a AP d uo q ing z ong s i0 w o f eng l iu ai t ian x ia AP r en sh ir k en x iang f eng zh ir j i x ing y ou q i b a AP y ao w o p ai t an q v z ui y En w an d ou y En x ia AP x iang j iang b ei y in g uo m a AP d ui x i f eng y v h uang sh a AP w u q ing y E s i0 w o x iang j ian d i zh an t ao h ua AP r en sh ir n an x iang f eng AP x ie q ing sh an c ui b ai f a AP k ang k ai w ei sh uang x ve x iang z eng m ei j ian y i d ao b a SP", + "ph_dur": "0.08 0.4 0.12 0.193 0.045 0.193 0.045 0.178 0.06 0.193 0.045 0.163 0.075 0.476 0.193 0.045 0.163 0.075 0.194 0.045 0.193 0.045 0.178 0.06 0.193 0.045 0.476 0.193 0.045 0.163 0.075 0.193 0.045 0.163 0.075 0.164 0.075 0.163 0.075 0.386 0.09 0.416 0.461 0.075 0.356 0.12 0.477 0.163 0.075 0.163 0.075 0.178 0.06 0.163 0.075 
0.401 0.075 0.639 0.075 0.179 0.06 0.163 0.075 0.163 0.075 0.163 0.075 0.193 0.045 0.476 0.148 0.09 0.178 0.06 0.178 0.06 0.178 0.06 0.164 0.075 0.178 0.06 0.416 0.06 0.163 0.075 0.193 0.045 0.326 0.15 0.356 0.12 0.477 0.133 0.105 0.193 0.045 0.193 0.045 0.193 0.045 0.193 0.045 0.178 0.06 0.476 0.193 0.045 0.164 0.075 0.178 0.06 0.178 0.06 0.163 0.075 0.163 0.075 0.476 0.178 0.06 0.163 0.075 0.178 0.06 0.164 0.075 0.178 0.06 0.163 0.075 0.416 0.06 0.193 0.045 0.178 0.06 0.416 0.06 0.401 0.075 0.477 0.148 0.09 0.163 0.075 0.178 0.06 0.163 0.075 0.416 0.06 0.476 0.119 0.12 0.163 0.075 0.163 0.075 0.178 0.06 0.193 0.045 0.178 0.06 0.476 0.163 0.075 0.193 0.045 0.178 0.06 0.164 0.075 0.163 0.075 0.163 0.075 0.431 0.045 0.193 0.045 0.178 0.06 0.193 0.045 0.193 0.045 0.417 0.06 2.38 0.238", + "ph_num": "1 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 B3 A3 G3 A3 B3 rest B3 B3 A3 B3 E4 D4 rest D4 D4 B3 D4 E4 G4 E4 D4 B3 A3 B3 rest B3 B3 A3 G3 A3 A3 A3 G3 A3 B3 E3 rest E3 E3 D3 E3 G3 A3 A3 G3 A3 D4 B3 rest B3 B3 A3 G3 A3 B3 rest B3 B3 A3 B3 E4 D4 rest D4 D4 B3 D4 E4 G4 E4 D4 B3 A3 B3 rest B3 B3 A3 G3 A3 rest G3 A3 B3 A3 B3 D4 rest D4 D4 B3 A3 B3 E4 D4 E4 F#4 E4 D4 E4 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.238 0.238 0.238 0.476 0.238 0.238 0.239 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.239 0.238 0.476 0.238 0.238 0.476 0.476 0.477 0.238 0.238 0.238 0.238 0.476 0.714 0.239 0.238 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.239 0.238 0.476 0.238 0.238 0.476 0.476 0.477 0.238 0.238 0.238 0.238 0.238 0.238 0.476 0.238 0.239 0.238 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.239 0.238 0.238 0.476 0.238 0.238 0.476 0.476 0.477 0.238 0.238 0.238 0.238 0.476 0.476 0.239 0.238 0.238 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.239 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.477 2.38 0.238", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "147.5 147.5 147.5 147.3 147.4 147.1 147.0 147.1 147.0 146.8 147.0 146.9 147.5 147.5 147.7 147.8 147.3 147.3 147.4 147.4 147.0 147.2 147.2 147.2 147.4 147.5 147.3 147.3 147.3 147.5 147.6 147.3 147.6 147.6 147.6 147.8 147.7 147.6 147.9 147.5 147.4 147.5 147.4 147.6 147.0 147.0 147.0 147.1 146.6 146.6 147.2 147.1 147.6 148.1 152.6 161.5 177.9 190.9 209.0 224.5 237.8 249.8 251.0 249.3 245.1 240.2 233.1 221.7 212.2 222.6 233.0 244.8 254.0 255.2 253.3 253.0 251.5 247.1 244.5 242.7 243.1 244.1 244.9 246.4 251.8 256.2 256.4 255.3 252.7 248.2 242.1 236.1 228.0 220.5 217.6 216.5 216.7 219.0 221.1 221.9 222.1 222.1 221.6 219.3 217.4 214.4 210.2 204.2 196.4 194.7 194.8 195.2 195.6 196.7 192.9 193.1 193.2 194.4 196.2 197.0 197.8 197.5 196.9 196.1 194.7 192.8 189.5 186.3 181.0 187.7 195.2 203.2 212.9 221.3 221.9 222.4 222.5 221.5 221.6 221.2 220.3 220.0 219.7 218.4 218.1 216.4 212.2 203.5 207.2 213.1 218.8 226.0 233.5 241.0 251.2 252.7 255.3 257.6 258.5 257.6 253.4 247.8 242.0 237.9 233.8 232.5 234.4 239.2 244.5 251.1 255.9 259.0 260.1 258.6 255.3 249.3 242.2 233.4 227.2 223.3 222.6 225.8 232.1 240.6 251.3 259.9 264.5 264.3 261.0 252.6 244.4 235.7 231.7 227.2 223.9 219.3 215.9 211.8 208.7 205.3 201.7 197.9 194.7 191.4 188.0 184.8 181.3 178.0 174.7 171.1 168.6 165.4 162.4 159.3 157.3 154.0 152.4 
159.2 173.5 188.0 205.4 221.7 237.7 248.4 253.6 256.0 253.1 247.2 239.6 223.6 226.6 231.3 236.0 241.9 247.9 253.4 258.1 254.7 251.3 250.3 247.8 247.8 246.7 246.7 246.7 246.8 247.3 243.9 236.9 228.0 212.6 203.8 210.1 214.1 218.3 222.5 221.9 217.6 219.5 219.8 219.8 218.9 219.4 220.8 220.5 220.0 220.1 220.7 221.6 221.9 223.1 223.1 225.9 225.4 226.3 229.7 237.4 243.0 244.4 244.9 245.5 245.4 246.1 246.8 247.0 248.0 249.5 249.4 249.6 249.2 245.4 239.3 230.4 229.2 227.3 225.3 223.2 219.1 218.8 223.3 241.4 256.6 277.4 298.4 312.0 322.5 328.7 331.5 331.8 332.1 330.0 329.8 329.1 327.6 324.7 319.6 312.8 310.9 306.9 301.3 294.1 290.2 290.4 291.9 293.3 293.4 293.3 292.3 289.6 288.3 287.9 288.1 288.4 289.3 290.6 292.8 296.6 300.5 304.5 305.4 304.0 300.1 295.3 290.3 286.6 283.9 285.1 287.4 292.0 298.0 302.6 306.7 309.5 310.6 309.7 306.2 301.3 295.8 290.9 289.7 278.5 269.2 262.8 256.6 250.8 244.8 240.0 234.9 230.2 225.4 219.2 214.2 209.2 204.3 199.3 194.3 189.9 184.8 180.2 177.6 178.0 197.6 220.6 246.3 274.0 292.1 303.1 308.9 308.0 303.5 297.3 291.3 284.8 277.3 271.4 278.0 282.1 287.3 292.6 298.0 298.8 296.2 295.3 294.6 294.9 294.5 295.5 294.8 294.8 293.5 293.2 292.6 290.3 286.2 277.9 267.9 264.2 273.9 285.2 295.0 308.5 305.1 302.4 300.7 293.4 283.6 272.0 264.4 258.8 254.8 251.9 250.0 247.4 242.1 248.5 256.2 264.3 271.3 280.0 287.4 296.5 305.9 297.7 292.0 292.4 292.5 293.3 294.9 295.6 295.8 294.9 294.9 294.2 293.7 294.4 292.0 288.6 286.9 292.9 301.5 313.5 323.4 329.8 331.6 333.7 332.6 330.9 329.9 328.9 328.7 329.4 329.8 329.7 329.0 326.7 320.9 302.4 314.9 327.7 345.3 362.4 382.0 390.8 392.9 397.0 397.4 395.4 393.3 389.4 383.9 380.5 377.8 378.2 380.7 386.8 390.9 392.4 394.2 394.2 392.9 389.9 390.4 392.0 392.4 391.4 389.0 387.8 388.1 386.9 387.5 389.7 390.1 390.9 391.5 392.4 392.9 393.0 391.5 390.0 387.6 383.1 379.0 370.5 363.9 358.2 353.9 348.8 345.5 341.3 338.5 335.5 334.1 333.4 331.1 329.5 328.0 328.0 328.5 330.7 332.2 330.6 327.0 318.4 306.3 299.3 292.0 292.1 291.1 291.2 293.1 294.8 294.7 293.5 291.0 287.4 283.3 279.9 277.1 273.6 269.9 265.0 259.5 254.3 249.3 246.6 245.8 245.3 245.9 245.5 245.0 244.1 244.5 245.6 247.1 249.1 250.0 249.1 249.2 248.4 248.2 246.5 245.0 244.1 244.9 245.0 245.9 247.2 249.1 251.3 250.2 249.0 248.5 247.8 247.3 245.4 238.1 229.8 223.2 221.8 220.3 219.1 218.4 217.1 215.9 215.9 214.8 217.1 218.4 219.2 218.7 218.4 217.8 217.5 218.1 218.4 217.8 218.6 218.5 219.1 219.4 220.4 220.7 220.4 219.4 219.1 218.8 219.0 220.3 220.8 220.5 222.0 222.6 223.2 223.4 223.0 221.3 217.8 212.0 207.1 212.1 217.9 223.2 229.1 234.5 239.4 245.9 252.1 258.9 257.3 258.9 258.0 255.1 252.2 247.8 242.8 240.6 238.6 237.8 240.3 242.7 247.2 252.0 257.1 259.7 260.6 259.9 257.1 252.2 243.8 232.5 226.0 223.8 225.6 231.5 239.9 249.2 257.4 263.4 267.1 267.9 265.5 258.8 248.3 233.5 224.2 216.3 210.3 203.5 199.1 194.1 190.1 186.0 182.4 178.9 175.4 171.6 168.4 165.3 161.7 158.7 155.7 153.1 150.3 147.6 144.6 142.1 146.8 153.3 162.0 169.0 181.1 194.5 215.3 232.6 249.6 258.6 257.0 251.6 240.8 234.3 231.2 230.4 234.6 237.7 241.5 245.1 248.8 252.9 257.6 259.4 256.6 257.4 256.5 254.1 251.9 250.0 248.3 248.7 249.7 250.1 249.8 247.0 239.3 218.4 220.3 222.5 224.9 227.7 229.2 222.0 219.5 218.2 219.8 221.4 221.9 221.0 219.7 217.1 215.9 214.3 212.8 207.1 196.9 193.8 192.4 190.9 189.7 188.9 186.9 186.1 183.6 184.8 186.8 189.1 191.5 193.3 195.0 196.3 196.1 195.3 194.3 193.8 193.4 193.6 194.3 195.2 195.4 195.1 194.9 195.4 196.1 195.3 195.1 195.4 195.8 196.5 197.4 198.1 197.4 196.7 195.6 193.4 186.9 176.6 170.6 179.5 190.0 200.4 212.2 
224.8 236.0 230.9 228.5 226.7 223.9 221.4 219.0 216.7 215.8 216.5 217.2 219.6 222.6 226.0 227.7 226.7 227.3 227.2 225.5 224.5 221.4 219.0 217.2 215.7 215.9 216.8 218.2 220.5 222.7 224.4 225.3 223.4 220.3 215.8 213.1 212.7 214.5 217.7 221.4 226.0 228.3 231.1 230.5 228.9 226.5 222.7 221.0 220.5 220.3 218.8 218.9 220.5 222.0 220.3 216.9 213.4 215.8 217.9 221.1 224.6 228.1 233.2 227.3 221.8 222.7 222.5 222.0 221.3 220.8 220.7 220.9 220.8 220.8 221.1 220.4 216.9 209.1 200.4 206.6 212.9 218.8 226.1 229.6 224.0 222.8 222.4 222.3 221.8 221.0 220.9 220.5 219.8 221.2 221.2 219.3 210.6 202.5 201.5 201.9 200.9 200.3 199.1 198.6 196.1 193.4 193.9 194.1 195.4 196.4 197.7 198.1 198.0 197.8 196.8 195.8 195.0 195.9 195.9 196.1 197.4 199.5 203.2 211.3 216.2 219.4 220.2 221.8 222.7 222.1 221.7 220.8 220.4 219.5 219.5 219.3 218.7 212.9 206.2 213.0 220.7 229.2 236.5 245.5 254.4 265.2 258.0 249.7 247.0 247.7 247.6 247.8 248.7 249.5 249.9 248.3 245.1 237.0 216.1 211.8 205.8 194.5 184.8 175.8 168.1 158.5 156.9 158.4 163.4 165.3 167.4 168.6 167.4 162.9 157.2 152.6 149.1 148.7 150.1 154.3 160.5 168.2 175.0 180.5 182.6 181.1 176.0 167.7 159.9 151.9 148.9 148.6 151.4 155.1 161.3 167.6 172.5 175.0 177.3 178.5 177.4 173.7 168.6 169.4 169.0 169.1 168.8 168.5 168.3 167.7 166.5 165.5 164.4 163.0 160.6 158.6 156.7 155.2 154.5 153.9 153.6 153.2 153.2 152.9 153.1 152.8 152.2 150.0 149.7 150.2 150.2 150.1 149.7 149.5 151.9 156.3 161.7 165.8 167.4 167.5 167.0 166.9 167.1 167.1 166.4 167.2 166.2 165.4 163.7 162.6 162.5 163.3 165.0 166.9 166.8 166.1 165.2 164.6 163.3 163.0 158.3 153.9 148.7 146.9 144.0 142.3 140.6 140.6 141.5 143.4 144.9 145.2 146.1 146.4 146.7 147.2 147.8 148.4 147.8 147.0 146.2 144.7 143.6 141.2 141.0 142.8 144.7 147.1 152.6 159.0 160.3 162.1 162.4 162.9 163.5 163.8 164.6 164.5 164.0 164.2 163.9 162.8 161.7 157.0 163.5 169.4 176.2 182.7 189.5 196.8 197.7 197.7 197.3 196.3 195.9 195.3 194.4 195.4 196.7 197.4 197.7 197.5 195.1 182.6 186.0 189.2 195.4 200.1 205.1 210.4 204.5 201.2 202.3 202.1 201.5 201.2 200.8 199.8 198.0 196.0 194.6 193.6 193.7 198.4 205.4 214.9 220.9 225.4 225.4 223.1 219.6 215.9 213.4 212.4 213.5 215.4 218.5 220.8 221.3 221.7 221.7 222.7 223.1 222.5 221.0 219.4 218.2 217.8 218.3 219.2 221.0 218.9 220.5 223.1 222.7 223.8 222.5 220.8 218.4 218.6 219.4 219.5 219.9 219.6 218.1 213.5 207.9 201.9 196.5 193.9 192.0 192.4 193.4 194.1 195.3 196.2 196.5 196.5 196.4 196.4 196.3 194.9 195.1 195.7 196.4 195.5 195.2 195.3 193.8 208.8 221.2 226.0 225.6 225.2 225.3 225.3 224.6 222.6 221.1 219.6 219.2 218.7 217.9 217.0 217.6 218.1 217.5 218.4 218.1 218.8 219.7 220.3 220.4 220.6 220.1 220.6 221.1 221.8 221.9 221.5 221.0 220.4 220.6 221.2 221.1 218.9 217.8 215.4 211.8 211.2 220.5 232.3 241.4 247.3 251.5 254.4 256.3 257.4 258.8 257.6 255.4 254.5 254.8 255.0 256.1 258.8 262.5 265.7 277.0 286.3 294.4 297.6 300.2 298.5 297.4 294.7 292.7 291.7 293.3 294.3 294.9 292.3 286.4 277.9 270.5 268.0 265.2 263.2 260.9 258.9 256.1 253.5 251.8 248.1 242.9 242.8 247.3 250.8 253.0 254.1 253.3 249.4 243.9 237.9 233.8 233.3 236.2 241.7 247.9 254.4 259.9 263.8 264.4 263.3 259.9 253.0 242.7 232.3 226.0 224.3 225.6 227.3 234.2 242.7 251.5 259.5 265.0 265.9 264.3 259.2 251.0 243.0 234.1 223.9 219.6 216.4 214.1 211.7 209.4 207.1 204.7 202.8 201.0 198.7 196.5 194.4 192.2 190.1 188.1 185.3 183.0 180.7 177.7 174.6 171.1 168.6 166.5 168.8 177.1 188.6 197.6 208.2 219.5 235.1 245.9 251.0 250.6 247.7 243.1 237.7 230.0 223.8 223.5 231.7 241.5 251.0 253.4 250.4 249.6 248.0 246.3 245.2 244.8 244.3 244.2 245.0 246.1 246.9 248.6 246.9 243.4 235.8 
228.3 228.6 230.9 245.0 233.0 226.5 221.3 220.3 219.5 218.0 218.5 219.2 220.1 220.4 219.5 219.6 218.8 219.3 216.1 212.4 206.5 201.1 196.5 191.2 190.4 189.9 190.1 191.1 193.5 195.8 196.9 197.0 196.7 196.9 196.0 196.4 196.5 195.1 193.4 186.3 186.4 196.8 204.7 214.4 224.1 226.9 223.2 221.2 220.8 220.7 220.7 221.3 222.0 221.5 221.3 221.4 221.2 220.5 220.3 221.0 219.7 221.4 222.2 223.8 223.2 224.1 222.1 222.6 223.7 224.9 225.0 224.5 223.3 223.0 223.1 223.8 224.7 227.2 229.7 234.8 241.3 247.6 253.1 256.4 257.0 256.4 256.0 253.9 248.6 242.4 237.6 234.9 234.9 236.3 242.0 248.9 254.6 258.7 261.0 262.0 262.2 257.2 249.7 238.7 233.3 228.6 222.8 218.2 213.7 209.3 205.6 202.0 198.5 195.0 192.1 188.9 185.6 182.4 179.2 176.0 172.9 169.8 166.9 163.9 161.3 157.7 155.1 151.7 152.4 160.8 177.5 191.1 208.6 225.5 240.5 251.1 254.8 255.2 250.9 245.6 238.2 225.4 228.0 232.8 237.5 243.2 248.2 254.9 251.5 248.5 249.7 248.7 247.9 247.2 246.1 246.8 247.0 247.0 246.0 243.7 238.7 229.6 211.8 213.0 215.1 217.4 219.9 221.7 218.6 218.1 219.0 219.4 220.1 220.1 220.8 221.9 221.0 219.9 219.5 220.1 220.2 221.0 221.2 223.4 224.6 226.6 229.1 239.2 244.6 247.6 247.0 248.7 248.9 248.0 247.8 247.9 248.1 248.7 249.7 249.8 248.3 242.7 235.9 227.9 241.1 252.9 266.4 282.2 298.8 322.1 323.5 327.7 330.9 332.1 330.0 330.0 330.4 329.8 330.1 329.7 326.9 321.6 311.5 301.2 303.0 305.3 307.8 310.5 312.2 309.7 301.5 297.7 298.5 300.1 302.2 302.0 300.4 297.1 293.7 290.0 283.4 279.6 278.3 279.8 283.4 289.3 295.8 302.2 307.0 310.4 311.0 308.8 303.0 293.7 286.3 279.0 274.6 276.0 281.0 288.8 295.6 302.9 309.9 315.1 316.5 311.5 298.7 283.1 277.7 271.7 266.8 260.8 254.0 248.9 244.7 240.4 236.6 232.5 229.1 224.6 220.6 216.3 212.2 208.7 205.0 201.9 198.7 195.6 193.1 189.7 191.4 200.8 210.5 222.1 238.0 258.2 277.1 293.3 299.1 301.7 299.8 293.1 288.8 282.3 274.6 268.6 275.7 282.3 290.0 296.6 303.9 307.7 304.7 300.3 300.3 297.1 296.2 294.7 294.8 293.7 293.2 293.9 293.8 292.7 291.7 288.7 286.2 281.1 274.1 270.6 269.9 276.6 280.3 282.2 282.2 282.6 276.6 270.3 264.4 261.0 256.3 252.9 251.7 249.0 244.8 239.1 246.9 257.2 267.4 276.0 285.3 295.4 304.0 297.3 292.4 292.5 292.2 293.1 293.5 294.2 295.2 294.8 294.9 294.7 292.6 291.0 291.4 289.8 288.7 292.0 298.4 308.2 317.9 323.4 328.9 330.6 331.1 330.8 330.8 330.2 330.3 330.9 330.6 329.8 330.6 326.7 320.0 310.3 320.4 333.5 346.2 358.3 373.6 389.6 395.8 393.9 398.3 400.5 402.0 401.7 399.8 397.5 392.6 389.3 386.8 386.3 389.1 389.9 391.7 393.7 396.0 394.0 394.7 393.7 392.8 389.6 387.1 385.7 387.1 388.1 390.9 392.2 390.9 390.8 390.2 390.0 388.1 382.6 375.3 364.8 336.3 327.7 331.1 334.9 338.1 336.4 332.5 333.9 334.0 334.9 334.0 333.8 330.4 328.2 326.6 327.7 327.7 327.4 324.7 317.1 303.3 288.6 278.9 285.4 300.2 301.3 297.1 295.3 294.1 292.5 293.4 294.4 296.5 297.6 297.6 296.1 293.8 294.0 278.0 253.3 232.9 235.5 236.9 236.3 236.5 235.8 237.1 239.6 243.1 246.7 248.1 248.1 247.8 248.2 247.7 244.0 242.9 241.8 241.3 238.7 238.9 239.0 241.9 242.1 244.0 244.9 245.4 245.2 246.9 248.8 249.9 250.2 249.9 249.6 249.3 247.5 243.6 236.4 231.3 222.8 219.2 216.9 214.3 211.8 209.3 205.9 205.9 209.2 215.3 219.6 224.5 225.8 223.7 220.9 218.0 215.4 214.3 213.9 214.9 217.4 220.0 220.9 223.1 224.4 224.6 223.9 222.3 221.1 219.2 218.3 217.9 217.2 218.1 219.0 220.1 221.8 221.4 220.8 220.2 217.0 210.9 202.1 192.3 199.2 206.8 215.8 225.2 233.7 240.4 249.1 254.3 256.9 257.4 255.5 251.4 247.3 243.7 238.2 235.4 235.7 238.9 243.3 250.0 254.9 260.5 262.7 263.3 261.9 257.6 251.3 242.8 232.6 227.2 225.8 226.9 232.2 241.9 251.9 259.3 263.7 266.5 265.9 
262.2 252.3 240.0 228.1 218.9 213.3 207.6 203.3 199.8 196.3 192.9 190.3 186.8 183.8 180.8 178.1 175.1 172.4 169.6 166.9 164.4 161.1 158.3 154.2 150.4 149.4 152.0 158.6 166.2 171.0 181.2 193.9 206.1 223.4 236.6 249.2 254.0 252.7 248.1 241.8 233.4 227.3 232.0 237.0 242.0 246.8 251.9 256.6 257.6 252.2 251.8 251.0 250.0 249.1 247.7 246.9 247.3 248.2 248.8 249.2 249.1 247.6 246.3 244.9 240.8 234.6 233.0 225.5 224.7 219.2 217.6 216.1 217.5 219.4 221.2 221.8 220.8 220.2 219.8 219.4 215.5 208.5 196.9 194.4 193.3 192.3 192.3 191.2 189.9 192.2 189.8 190.1 190.2 190.7 192.4 193.5 194.7 195.1 194.9 194.1 193.7 193.7 193.4 193.0 193.6 194.9 194.6 193.8 194.5 195.3 195.5 195.2 195.3 195.6 197.2 198.1 198.7 198.5 197.6 196.2 195.9 192.9 187.8 178.9 171.0 180.5 190.8 201.9 214.8 228.8 231.5 232.5 232.5 230.2 226.7 220.6 213.8 210.8 208.9 209.7 213.2 217.8 224.9 230.7 235.5 238.3 238.5 235.5 228.1 219.6 210.2 206.1 204.1 203.9 206.2 211.1 218.7 224.7 230.0 234.6 235.3 232.6 231.2 224.9 222.1 218.7 214.5 210.7 206.2 203.2 200.4 198.0 195.6 193.1 190.5 187.9 186.2 184.2 181.9 180.7 179.2 175.3 170.8 166.6 164.2 162.3 160.6 159.2 157.9 155.6 153.4 151.6 149.4 150.2 155.6 161.6 167.6 174.3 181.8 189.3 195.7 198.5 199.0 197.5 195.8 191.5 184.1 187.4 191.5 196.5 201.2 206.8 214.2 217.4 218.9 220.0 220.1 220.3 219.3 219.5 219.6 221.0 221.3 220.8 219.3 217.9 213.7 207.9 218.2 227.3 237.3 247.8 260.7 253.5 249.9 249.2 246.9 246.7 246.6 246.5 245.3 246.0 247.4 246.9 245.1 242.7 237.8 228.6 229.0 230.6 230.5 232.2 230.4 224.6 218.8 218.4 218.5 220.1 221.2 221.4 221.7 221.8 221.6 220.7 218.5 216.0 207.9 211.0 216.0 223.6 231.9 241.2 251.0 250.0 249.6 250.8 250.1 249.7 248.7 246.5 245.7 246.9 248.3 248.8 250.4 247.8 242.4 235.6 228.5 238.8 250.1 262.8 275.7 293.4 297.8 296.5 300.2 304.1 307.1 306.7 305.0 300.5 296.0 291.4 286.7 284.8 285.6 289.5 294.8 298.5 302.2 305.2 304.3 301.1 300.0 297.7 292.3 285.6 280.2 276.4 275.7 278.0 283.6 289.2 294.9 299.0 304.5 309.0 310.9 308.0 303.5 292.3 284.1 277.9 270.6 263.4 257.1 252.0 247.0 243.1 239.8 235.8 232.4 228.5 225.3 221.0 218.5 215.7 212.8 210.0 208.4 205.7 202.9 200.4 197.4 194.5 197.8 208.4 224.8 238.5 259.2 279.4 292.6 298.5 300.1 293.7 291.3 291.7 291.9 291.2 288.1 287.1 284.4 286.0 288.5 289.5 291.3 295.1 296.4 293.6 292.4 291.3 291.8 293.6 294.0 292.9 292.1 290.4 287.7 287.2 278.4 270.8 257.5 252.3 250.4 249.7 248.4 248.6 247.5 246.3 245.7 247.0 248.3 249.7 249.3 247.2 245.8 243.8 241.9 238.8 232.9 229.9 229.1 227.7 226.6 226.1 224.5 224.6 223.1 220.1 218.4 216.7 217.4 218.2 219.3 221.3 222.1 222.0 221.4 220.8 218.0 211.4 216.8 223.0 229.6 236.3 245.3 249.2 248.2 247.3 247.5 247.4 247.4 247.5 248.6 248.4 249.6 249.8 251.2 250.4 244.0 231.6 242.7 253.8 265.8 280.7 294.7 311.3 328.1 339.1 334.1 335.4 335.4 334.2 331.8 328.4 326.1 321.0 321.8 319.3 323.5 326.6 329.5 332.0 336.8 339.0 337.7 335.0 334.2 332.3 330.0 326.7 323.5 322.3 324.3 326.9 330.2 330.8 329.5 330.2 328.6 328.0 322.7 314.3 294.8 281.7 284.6 286.6 288.2 290.2 289.7 289.2 289.8 291.2 292.2 293.6 294.9 296.0 295.4 294.5 293.6 294.8 295.6 294.8 293.8 295.1 296.3 304.3 313.4 323.1 329.6 332.8 332.1 332.2 332.4 330.0 328.9 327.4 326.5 329.3 331.1 330.2 327.1 325.0 321.5 312.5 317.8 337.3 352.7 370.7 388.8 378.4 371.5 371.5 370.9 369.9 367.1 365.6 365.4 366.0 366.8 367.3 367.5 365.9 364.4 360.8 355.6 352.0 350.2 345.6 341.1 336.6 330.8 329.1 329.0 328.3 330.8 331.9 332.8 334.9 333.6 331.3 328.0 322.2 306.7 286.9 267.9 267.4 270.2 274.9 283.1 283.8 284.8 287.0 291.5 297.5 300.0 302.3 300.2 298.2 293.4 290.8 
289.3 288.7 289.9 292.1 292.3 292.5 292.3 290.9 291.4 292.2 292.9 292.7 292.5 293.3 294.4 295.1 294.7 295.4 295.9 294.0 293.8 291.7 286.4 274.1 278.0 284.9 287.0 294.7 302.8 309.9 304.1 296.3 295.5 296.8 298.1 299.0 296.5 294.7 294.0 293.4 294.4 294.3 295.2 295.3 296.7 298.8 300.8 302.6 308.0 313.6 322.8 330.2 337.3 339.5 339.3 337.7 334.0 330.5 326.7 323.6 322.1 322.0 324.1 325.3 326.8 328.1 329.3 329.9 330.3 330.3 329.3 328.7 327.9 328.6 329.1 329.0 329.2 329.7 329.7 329.0 328.2 328.3 329.0 328.3 328.2 327.9 327.1 325.8 326.9 327.1 330.4 334.2 336.6 338.8 338.6 337.6 334.5 329.4 325.9 321.8 317.8 315.1 314.5 315.0 317.0 321.0 325.4 332.0 337.2 341.5 343.1 342.3 340.3 339.2 335.8 332.2 328.7 324.2 319.1 317.8 317.8 318.6 322.4 326.3 331.5 336.3 338.8 339.1 337.6 336.8 334.9 333.0 331.3 328.7 328.5 328.5 327.5 326.6 327.4 327.2 327.4 330.3 333.9 337.3 339.4 341.1 341.4 340.9 337.7 332.8 326.0 318.6 314.0 312.6 315.3 320.3 326.6 333.7 341.0 346.1 349.9 351.7 349.4 344.4 338.7 331.3 326.0 321.4 318.9 319.4 321.5 326.0 334.5 339.9 343.5 345.9 346.8 345.0 342.4 338.1 333.2 327.5 323.1 320.3 318.8 321.4 325.2 330.8 335.2 337.9 341.9 344.1 343.5 341.8 339.7 336.8 333.7 330.1 327.0 326.6 326.0 327.8 330.8 334.3 337.2 339.0 339.2 338.5 337.1 335.2 333.9 332.4 330.0 327.0 325.0 323.4 321.2 318.0 315.8 314.3 311.6 308.7 305.0 300.0 294.2 287.7 284.3 282.5 280.0 277.5 276.6 274.9 273.2 270.2 266.9 262.5 256.0 247.4 247.9 247.6 248.0 248.4 249.1 249.1 249.4 249.1 250.1 250.8 250.8 251.0 251.8 251.3 252.3 252.2 251.7 250.3", + "f0_timestep": "0.011609977324263039", + "energy": "0.0004 0.0002 0.0006 0.0007 0.0004 0.0004 0.0005 0.0005 0.0008 0.0014 0.0011 0.0008 0.0014 0.0017 0.0021 0.002 0.0023 0.0022 0.0025 0.0031 0.003 0.0032 0.0034 0.0034 0.0031 0.0037 0.0031 0.0031 0.0026 0.0028 0.0025 0.0026 0.0025 0.0014 0.0018 0.0019 0.0011 0.0008 0.001 0.0014 0.0021 0.0022 0.0042 0.006 0.008 0.0103 0.011 0.0121 0.0126 0.0118 0.0139 0.0281 0.039 0.0517 0.063 0.0713 0.0769 0.077 0.0734 0.0683 0.0667 0.0672 0.0707 0.0744 0.0751 0.0712 0.0622 0.0486 0.0333 0.0221 0.0186 0.0259 0.0453 0.0639 0.0838 0.0927 0.0944 0.0909 0.0799 0.0738 0.0713 0.069 0.0683 0.0702 0.0711 0.0714 0.0718 0.0711 0.0711 0.0728 0.0735 0.0741 0.0729 0.0715 0.0722 0.0702 0.0699 0.0664 0.0618 0.0602 0.0559 0.0579 0.0596 0.0613 0.0666 0.067 0.0671 0.0607 0.0487 0.0355 0.0204 0.0147 0.0254 0.0481 0.0665 0.0801 0.0906 0.092 0.0903 0.0875 0.0849 0.0814 0.0786 0.0764 0.0699 0.061 0.0512 0.0365 0.0211 0.0128 0.0088 0.0124 0.0431 0.0564 0.0726 0.0848 0.0922 0.0972 0.0995 0.1003 0.0977 0.0953 0.0915 0.0853 0.0776 0.0657 0.0516 0.0373 0.0242 0.0143 0.0125 0.0124 0.0129 0.0286 0.0503 0.0738 0.0935 0.1078 0.114 0.1131 0.1077 0.1021 0.0943 0.0878 0.0813 0.0774 0.0763 0.0762 0.0779 0.0785 0.082 0.082 0.0805 0.0784 0.0752 0.0729 0.0712 0.0692 0.069 0.0652 0.0616 0.0573 0.0528 0.0526 0.0559 0.0579 0.0589 0.0599 0.0592 0.059 0.0562 0.0492 0.0389 0.0275 0.0143 0.0048 0.0028 0.0033 0.0045 0.0049 0.005 0.0051 0.0054 0.0045 0.0035 0.0029 0.0029 0.0021 0.0021 0.0021 0.0011 0.0007 0.004 0.0102 0.0427 0.0624 0.0787 0.0906 0.0911 0.0894 0.0864 0.0825 0.0807 0.0822 0.082 0.0854 0.0832 0.0768 0.068 0.0522 0.0386 0.0297 0.0292 0.032 0.0324 0.0355 0.055 0.0723 0.0862 0.0958 0.0957 0.0917 0.089 0.0847 0.0807 0.0802 0.0792 0.0766 0.07 0.0593 0.0445 0.0288 0.0157 0.01 0.0134 0.0386 0.0591 0.074 0.0855 0.0876 0.0851 0.0822 0.0789 0.0774 0.0768 0.0782 0.0795 0.0813 0.0827 0.0817 0.0817 0.0804 0.0773 0.0728 0.0663 0.0619 0.0614 0.0679 0.0764 0.0835 0.0896 0.0923 0.0929 0.0925 
0.0915 0.0894 0.0869 0.0852 0.0847 0.0873 0.0875 0.0801 0.0672 0.048 0.0262 0.0153 0.0308 0.0512 0.0691 0.0838 0.0934 0.0957 0.0934 0.092 0.0881 0.0843 0.0834 0.0828 0.0843 0.0866 0.089 0.0914 0.0941 0.0954 0.0946 0.0923 0.0887 0.0891 0.0983 0.1055 0.1154 0.1211 0.1198 0.1205 0.1173 0.1156 0.1139 0.1094 0.1051 0.1004 0.0948 0.0915 0.0905 0.092 0.0947 0.0992 0.1017 0.1035 0.1043 0.1023 0.1005 0.0987 0.0935 0.0886 0.0812 0.0731 0.0677 0.0648 0.0644 0.0646 0.0644 0.064 0.0623 0.0611 0.0599 0.056 0.0492 0.0393 0.0258 0.0125 0.0044 0.0038 0.0041 0.0058 0.0057 0.0055 0.0054 0.0046 0.0041 0.0022 0.0016 0.0004 0.0007 0.0 0.0 0.0002 0.0086 0.0257 0.0509 0.0682 0.0843 0.0939 0.1017 0.1016 0.1001 0.1004 0.0953 0.097 0.0993 0.1023 0.1017 0.0959 0.0833 0.0634 0.0443 0.0208 0.0158 0.0185 0.026 0.0566 0.0812 0.099 0.1126 0.1131 0.1105 0.1082 0.1068 0.1064 0.1056 0.1052 0.105 0.1062 0.1065 0.1055 0.0961 0.0801 0.0594 0.0356 0.0201 0.0398 0.0621 0.0774 0.0896 0.0961 0.0943 0.0928 0.0915 0.0883 0.0879 0.0866 0.0849 0.0788 0.0691 0.0569 0.0434 0.0362 0.0374 0.0409 0.0434 0.0447 0.0445 0.0603 0.0766 0.0909 0.1002 0.1033 0.1025 0.1005 0.1015 0.0992 0.0986 0.0995 0.0988 0.1 0.1006 0.0991 0.0971 0.0945 0.0934 0.0965 0.1015 0.109 0.1135 0.116 0.1193 0.117 0.1166 0.1137 0.1102 0.1097 0.1093 0.1098 0.1106 0.1044 0.0894 0.0705 0.0437 0.0188 0.01 0.0074 0.0044 0.0324 0.0651 0.0911 0.1111 0.1241 0.1267 0.1254 0.1237 0.1212 0.1182 0.1171 0.1154 0.1153 0.1142 0.1122 0.1107 0.1102 0.1091 0.1073 0.1048 0.099 0.0952 0.0933 0.0925 0.0932 0.0942 0.0955 0.0959 0.0961 0.0956 0.0955 0.0956 0.0954 0.0965 0.0962 0.0972 0.0979 0.0983 0.0989 0.1 0.1007 0.0999 0.0987 0.0981 0.0992 0.1006 0.1019 0.1039 0.1048 0.1049 0.1071 0.1094 0.1105 0.1129 0.1113 0.1089 0.1064 0.1034 0.102 0.1005 0.1 0.1009 0.1012 0.1014 0.1012 0.1 0.1 0.0995 0.0999 0.1011 0.1016 0.1029 0.1027 0.1019 0.102 0.0998 0.0988 0.0993 0.0982 0.099 0.099 0.0993 0.0991 0.0992 0.0991 0.0994 0.0983 0.0966 0.0958 0.0945 0.0946 0.0943 0.0947 0.096 0.0965 0.0978 0.1003 0.1004 0.1011 0.1017 0.1002 0.1013 0.1017 0.1025 0.103 0.1021 0.1026 0.1018 0.1008 0.1001 0.0989 0.0974 0.0932 0.0863 0.071 0.0553 0.039 0.0213 0.0121 0.0122 0.013 0.0147 0.0154 0.039 0.061 0.078 0.0919 0.097 0.0984 0.0971 0.0954 0.0942 0.0929 0.0911 0.0922 0.092 0.0935 0.0943 0.0933 0.0941 0.0939 0.0922 0.0946 0.0948 0.0949 0.0959 0.0931 0.0922 0.0917 0.0897 0.09 0.089 0.088 0.087 0.0824 0.0717 0.0586 0.0424 0.0305 0.029 0.0314 0.0354 0.0373 0.0362 0.0398 0.0607 0.0809 0.1023 0.1121 0.1126 0.1093 0.1038 0.1016 0.0981 0.0948 0.0912 0.086 0.084 0.0806 0.0784 0.0771 0.0765 0.0759 0.0763 0.0758 0.0749 0.0734 0.0723 0.0715 0.0683 0.0645 0.0625 0.0591 0.0589 0.0588 0.0582 0.0585 0.0567 0.0561 0.055 0.0516 0.0476 0.0412 0.0327 0.0235 0.0149 0.0081 0.0059 0.0066 0.0072 0.0078 0.008 0.0089 0.0087 0.0081 0.0072 0.0071 0.007 0.0062 0.006 0.005 0.0043 0.014 0.0253 0.0438 0.0657 0.0831 0.0986 0.1086 0.104 0.0952 0.0809 0.0673 0.0631 0.0674 0.0725 0.0769 0.0781 0.0733 0.0681 0.0644 0.0667 0.0731 0.0806 0.0833 0.0744 0.0656 0.0592 0.0653 0.0785 0.0914 0.0979 0.0996 0.099 0.0975 0.0966 0.0955 0.0946 0.0933 0.0889 0.0775 0.0634 0.0443 0.0243 0.0166 0.0176 0.0399 0.0596 0.0721 0.0781 0.0762 0.0704 0.0668 0.0697 0.071 0.0718 0.0732 0.0742 0.0749 0.0717 0.0637 0.0514 0.037 0.0279 0.0279 0.0302 0.0287 0.041 0.0574 0.0686 0.0816 0.0893 0.0901 0.0934 0.091 0.0899 0.0887 0.0879 0.0895 0.0882 0.0884 0.0869 0.0859 0.0867 0.0854 0.0864 0.0868 0.0862 0.0859 0.0816 0.0771 0.0737 0.0705 0.0723 0.0725 0.0758 0.0765 0.0751 0.0757 0.0738 0.0688 
0.0592 0.0455 0.0297 0.0133 0.0066 0.0055 0.0048 0.0304 0.0552 0.0762 0.0945 0.1034 0.1048 0.1036 0.0996 0.097 0.0965 0.0953 0.0944 0.0955 0.0957 0.0973 0.0976 0.0993 0.1004 0.0997 0.1001 0.0973 0.0946 0.0925 0.09 0.0881 0.0886 0.0878 0.0858 0.0853 0.0836 0.0834 0.0836 0.0844 0.0824 0.0818 0.0785 0.0748 0.0738 0.0714 0.0708 0.0719 0.0729 0.0747 0.0765 0.0761 0.0756 0.0743 0.0725 0.071 0.0711 0.0697 0.0684 0.0677 0.0617 0.0533 0.0419 0.0276 0.0117 0.0106 0.0129 0.0154 0.0202 0.0448 0.0645 0.0793 0.0926 0.096 0.0974 0.0961 0.0937 0.0933 0.0907 0.0906 0.0902 0.0861 0.0814 0.0698 0.0556 0.039 0.0238 0.0171 0.0175 0.0229 0.0442 0.0598 0.0739 0.0845 0.0868 0.0871 0.0874 0.0862 0.0836 0.0835 0.08 0.0749 0.069 0.0574 0.0451 0.0331 0.0236 0.0215 0.0226 0.0227 0.036 0.0534 0.0669 0.0787 0.0832 0.0828 0.0824 0.0799 0.0808 0.0803 0.0791 0.0805 0.0789 0.0788 0.0781 0.0787 0.078 0.0778 0.0793 0.0806 0.0852 0.0889 0.0885 0.0877 0.0838 0.0807 0.0813 0.0807 0.0809 0.0807 0.08 0.0795 0.0764 0.0686 0.0568 0.0419 0.026 0.0213 0.0234 0.0261 0.0248 0.0281 0.0501 0.0665 0.0794 0.09 0.0884 0.087 0.0859 0.0839 0.0817 0.0825 0.0811 0.0753 0.0647 0.0499 0.0307 0.0117 0.0074 0.003 0.0179 0.0408 0.0565 0.0687 0.0762 0.0772 0.0764 0.0757 0.075 0.0715 0.0704 0.0688 0.0654 0.0628 0.0615 0.0578 0.0565 0.0566 0.0552 0.0558 0.0553 0.0542 0.054 0.0521 0.0492 0.0463 0.0432 0.0387 0.035 0.0312 0.0294 0.0283 0.0279 0.0269 0.0263 0.0255 0.0256 0.0241 0.0219 0.0191 0.0154 0.0115 0.0076 0.0039 0.0028 0.0028 0.003 0.0031 0.0039 0.0038 0.0036 0.0031 0.0018 0.0019 0.0012 0.0005 0.0002 0.0001 0.0001 0.0002 0.002 0.01 0.0287 0.0424 0.055 0.0631 0.0661 0.0659 0.064 0.0616 0.0584 0.0558 0.0539 0.0534 0.0533 0.0541 0.0553 0.0558 0.0565 0.0568 0.0575 0.0587 0.0572 0.0565 0.0579 0.0592 0.0627 0.0656 0.0683 0.0693 0.0708 0.072 0.0706 0.0712 0.0679 0.0647 0.0584 0.0471 0.0365 0.0232 0.0173 0.0169 0.0189 0.0184 0.0269 0.0381 0.0477 0.0566 0.0614 0.0635 0.0631 0.0634 0.0633 0.0623 0.0643 0.0626 0.0613 0.059 0.0532 0.0468 0.0383 0.0275 0.0167 0.0123 0.026 0.0394 0.05 0.0593 0.0627 0.0631 0.0634 0.0614 0.0613 0.0589 0.057 0.0559 0.0545 0.0523 0.0474 0.0405 0.0297 0.0197 0.0125 0.0117 0.0124 0.0252 0.0428 0.0564 0.0672 0.0736 0.0736 0.0742 0.0735 0.0729 0.0724 0.0721 0.0726 0.0712 0.0648 0.0558 0.0429 0.0279 0.0164 0.0166 0.0166 0.0248 0.047 0.0619 0.0748 0.0847 0.0868 0.0858 0.083 0.0815 0.0769 0.0745 0.0708 0.0662 0.063 0.0604 0.0626 0.064 0.0695 0.0733 0.0746 0.0744 0.0729 0.0712 0.0715 0.0721 0.0715 0.0707 0.0711 0.0722 0.0724 0.0737 0.0737 0.0742 0.0751 0.0755 0.074 0.0748 0.074 0.0736 0.0739 0.0745 0.0761 0.0778 0.0789 0.0798 0.0816 0.0826 0.0838 0.0836 0.0825 0.0807 0.0784 0.0769 0.0751 0.0734 0.0721 0.0711 0.0708 0.0719 0.0717 0.0702 0.0713 0.0761 0.082 0.087 0.0905 0.0899 0.0883 0.0876 0.085 0.0816 0.0792 0.0743 0.0711 0.0698 0.0685 0.0682 0.065 0.059 0.051 0.0409 0.0378 0.0556 0.0697 0.084 0.0931 0.0944 0.0921 0.0898 0.0885 0.0867 0.0863 0.0869 0.0863 0.0863 0.088 0.0877 0.0889 0.0906 0.0901 0.0905 0.0911 0.0906 0.0898 0.0896 0.089 0.0884 0.0887 0.0894 0.0891 0.0877 0.0863 0.0847 0.0827 0.0787 0.0783 0.0746 0.0705 0.0668 0.0632 0.0591 0.0595 0.0667 0.0756 0.0864 0.0963 0.1033 0.1087 0.1094 0.109 0.107 0.1032 0.0997 0.095 0.0886 0.0828 0.0761 0.0715 0.0687 0.0703 0.0737 0.0783 0.0829 0.087 0.0888 0.0899 0.0905 0.09 0.0888 0.0856 0.0829 0.0806 0.0791 0.0765 0.0679 0.0567 0.0413 0.025 0.019 0.0195 0.0198 0.0204 0.0205 0.0182 0.0264 0.044 0.0588 0.0745 0.0874 0.0924 0.0951 0.0949 0.0921 0.0896 0.0864 0.0831 0.0792 0.0766 0.0742 0.0725 0.0724 
0.0717 0.0735 0.0747 0.0739 0.0743 0.0721 0.0715 0.0701 0.068 0.0652 0.0612 0.0582 0.0558 0.0539 0.0519 0.0513 0.0508 0.05 0.0505 0.0496 0.0499 0.0489 0.0445 0.0367 0.0271 0.0172 0.0067 0.0048 0.0048 0.0048 0.0045 0.005 0.0043 0.0035 0.0031 0.0024 0.0021 0.0022 0.0036 0.0052 0.0079 0.0113 0.0138 0.0159 0.0155 0.0272 0.0459 0.0615 0.0741 0.0817 0.0815 0.0784 0.0749 0.0695 0.0678 0.0685 0.0703 0.0737 0.0745 0.0739 0.0702 0.0603 0.0474 0.0335 0.018 0.0229 0.0459 0.0626 0.0778 0.0883 0.09 0.09 0.0862 0.0831 0.0798 0.0779 0.0775 0.0767 0.0775 0.0779 0.0741 0.0683 0.0579 0.0432 0.0477 0.0388 0.0614 0.0734 0.0828 0.088 0.0865 0.0865 0.0843 0.0834 0.0821 0.0815 0.0806 0.0807 0.0807 0.0803 0.0802 0.0809 0.0776 0.0734 0.0697 0.0643 0.0657 0.0684 0.0714 0.0761 0.0751 0.0755 0.0762 0.0741 0.0766 0.0748 0.073 0.0723 0.0697 0.0686 0.0635 0.0545 0.0424 0.0274 0.0147 0.0098 0.0108 0.0314 0.0533 0.0704 0.0836 0.09 0.092 0.0918 0.092 0.0917 0.0914 0.0897 0.0888 0.0885 0.086 0.0849 0.0825 0.0809 0.0825 0.0819 0.0837 0.0879 0.091 0.0994 0.1065 0.1102 0.1143 0.1128 0.1122 0.11 0.1055 0.1012 0.0944 0.0889 0.0834 0.0796 0.0792 0.0798 0.0833 0.0872 0.0894 0.0901 0.0883 0.0841 0.0814 0.0773 0.0742 0.0702 0.0638 0.059 0.0549 0.0533 0.0526 0.0555 0.0573 0.0562 0.0555 0.0535 0.0501 0.0459 0.0378 0.0271 0.0155 0.0053 0.0027 0.0032 0.0044 0.0044 0.0051 0.0049 0.0049 0.0045 0.0041 0.0041 0.003 0.0027 0.0026 0.002 0.0015 0.0007 0.0012 0.0038 0.0086 0.0289 0.0536 0.069 0.0822 0.0876 0.0867 0.084 0.0812 0.0781 0.0804 0.0825 0.083 0.0804 0.0753 0.0637 0.051 0.0358 0.0247 0.0219 0.023 0.0297 0.0494 0.0673 0.0803 0.0904 0.0913 0.0905 0.0896 0.0864 0.0846 0.0823 0.081 0.0819 0.0796 0.0713 0.0589 0.0415 0.0223 0.0109 0.0065 0.0128 0.0394 0.0566 0.0716 0.0831 0.0836 0.0835 0.0797 0.0759 0.0744 0.0737 0.0738 0.074 0.074 0.0732 0.0723 0.0697 0.0671 0.064 0.0623 0.065 0.0693 0.0742 0.0803 0.0836 0.0874 0.089 0.0883 0.0896 0.0871 0.0852 0.0843 0.0822 0.0807 0.0762 0.0661 0.0525 0.0356 0.0183 0.0123 0.0112 0.0119 0.0267 0.0493 0.0691 0.0846 0.0949 0.0984 0.0971 0.0952 0.0935 0.0881 0.0856 0.0833 0.081 0.0799 0.0734 0.0616 0.0479 0.0305 0.0185 0.0178 0.0203 0.0389 0.0601 0.0813 0.098 0.1072 0.1126 0.1135 0.1144 0.1169 0.1154 0.1138 0.1076 0.1011 0.0981 0.0954 0.0958 0.0964 0.0986 0.0992 0.1015 0.101 0.0999 0.0998 0.0978 0.0948 0.0902 0.0842 0.0763 0.0732 0.069 0.0681 0.0684 0.0677 0.0669 0.065 0.0621 0.0588 0.0543 0.046 0.0361 0.0232 0.0097 0.0041 0.0037 0.005 0.0057 0.0053 0.0055 0.0057 0.006 0.0049 0.0045 0.0039 0.0035 0.0026 0.0021 0.0014 0.0008 0.0034 0.0211 0.0414 0.0607 0.0773 0.0875 0.093 0.0929 0.0904 0.0898 0.0873 0.0859 0.0879 0.0898 0.0916 0.0886 0.0775 0.0621 0.0427 0.0222 0.0168 0.0215 0.0221 0.04 0.0649 0.0816 0.0945 0.0986 0.0955 0.0918 0.0889 0.0896 0.0884 0.0883 0.0895 0.0898 0.0906 0.0907 0.0911 0.0905 0.0884 0.0866 0.0843 0.0849 0.0889 0.0921 0.0953 0.0988 0.099 0.098 0.0982 0.096 0.0962 0.095 0.0941 0.0915 0.0844 0.0734 0.0596 0.0437 0.0329 0.031 0.0321 0.0324 0.0312 0.0473 0.0646 0.0793 0.0919 0.0967 0.0969 0.0969 0.0952 0.0951 0.0941 0.0947 0.0964 0.0957 0.0961 0.0954 0.0938 0.0934 0.0909 0.0909 0.0942 0.0974 0.105 0.108 0.1106 0.113 0.1128 0.111 0.1125 0.1116 0.1095 0.1084 0.1042 0.1005 0.0929 0.0789 0.0621 0.0421 0.0268 0.0253 0.025 0.0235 0.0428 0.0668 0.0873 0.1053 0.1135 0.1155 0.115 0.1168 0.1168 0.1176 0.1186 0.1196 0.1198 0.1199 0.1182 0.1154 0.1111 0.1066 0.1037 0.1006 0.0977 0.0959 0.0912 0.0895 0.0875 0.0851 0.0839 0.0824 0.0826 0.0811 0.0808 0.0812 0.0826 0.0847 0.0818 0.0723 0.0569 0.0377 0.0197 
0.0172 0.0224 0.0475 0.0639 0.078 0.0911 0.0952 0.097 0.0953 0.0907 0.0833 0.079 0.0758 0.0746 0.0758 0.0768 0.0759 0.0703 0.0614 0.0491 0.0336 0.0264 0.0459 0.0611 0.0747 0.0843 0.0867 0.0856 0.0835 0.0821 0.0808 0.0802 0.0792 0.078 0.0765 0.0703 0.0595 0.0464 0.0283 0.0146 0.0134 0.0145 0.0249 0.0448 0.0616 0.0782 0.0895 0.095 0.098 0.098 0.098 0.096 0.0943 0.092 0.0884 0.0856 0.0828 0.0788 0.0755 0.072 0.0705 0.0686 0.0674 0.0674 0.0667 0.067 0.0682 0.0683 0.0677 0.0671 0.0658 0.0651 0.0644 0.0639 0.0615 0.055 0.0447 0.0327 0.0182 0.0089 0.0082 0.0102 0.0139 0.0266 0.0407 0.0558 0.0698 0.078 0.0848 0.0886 0.09 0.09 0.0887 0.086 0.084 0.0825 0.082 0.0815 0.0831 0.0853 0.0861 0.0904 0.092 0.0898 0.0898 0.0849 0.0822 0.083 0.0818 0.0818 0.082 0.0797 0.0797 0.0787 0.0769 0.0748 0.0716 0.0643 0.0529 0.0401 0.0249 0.0145 0.0102 0.0096 0.0221 0.0436 0.065 0.0843 0.1001 0.1077 0.1123 0.1119 0.1081 0.1023 0.0939 0.0858 0.08 0.0762 0.0749 0.0745 0.0763 0.0794 0.0817 0.082 0.0809 0.0776 0.0738 0.0721 0.0684 0.0669 0.0659 0.0623 0.0594 0.0565 0.0537 0.0549 0.0552 0.0548 0.0558 0.0553 0.0548 0.0529 0.0471 0.0392 0.0289 0.017 0.0065 0.0046 0.0053 0.0053 0.0056 0.0048 0.0051 0.0049 0.0042 0.004 0.0037 0.0032 0.0025 0.0025 0.0026 0.0022 0.0031 0.012 0.0241 0.041 0.0601 0.0758 0.0861 0.0907 0.0868 0.0791 0.071 0.0645 0.0598 0.0628 0.0665 0.0705 0.0716 0.0686 0.061 0.0505 0.0413 0.0349 0.0349 0.0364 0.0353 0.0448 0.058 0.0723 0.0851 0.092 0.095 0.0932 0.0915 0.0898 0.0877 0.0893 0.0898 0.0896 0.0888 0.0886 0.0873 0.087 0.0882 0.0857 0.0871 0.0887 0.0911 0.0927 0.0924 0.0874 0.0853 0.0827 0.08 0.0803 0.0772 0.0749 0.0739 0.0741 0.0699 0.0639 0.0541 0.0398 0.0268 0.0228 0.0199 0.0256 0.0468 0.0626 0.0744 0.0848 0.0864 0.0876 0.0869 0.084 0.0828 0.0823 0.0819 0.0825 0.0823 0.0813 0.0807 0.0815 0.0799 0.0785 0.078 0.0769 0.0757 0.0761 0.071 0.0695 0.0665 0.0655 0.0669 0.0662 0.0687 0.0697 0.0705 0.0724 0.0716 0.0678 0.0591 0.0472 0.0314 0.0161 0.0058 0.005 0.029 0.0546 0.0758 0.0936 0.1029 0.104 0.1016 0.0959 0.0898 0.0847 0.0793 0.0762 0.0715 0.0712 0.0716 0.071 0.0745 0.0762 0.0773 0.0773 0.0745 0.0708 0.0647 0.0599 0.0553 0.0512 0.0503 0.0491 0.0507 0.0503 0.0513 0.0511 0.0505 0.0501 0.048 0.0455 0.0439 0.0406 0.0366 0.0298 0.0217 0.013 0.0063 0.0036 0.0036 0.004 0.0044 0.0042 0.0041 0.0036 0.0025 0.0021 0.0015 0.0018 0.002 0.0033 0.0054 0.0077 0.0101 0.0119 0.0136 0.0122 0.0221 0.0398 0.0525 0.0642 0.0714 0.0723 0.0727 0.0713 0.0695 0.0695 0.0713 0.0733 0.0729 0.0679 0.058 0.0447 0.0288 0.0161 0.0144 0.0146 0.016 0.0302 0.0538 0.0689 0.0824 0.0879 0.0876 0.0862 0.0856 0.084 0.0827 0.082 0.0809 0.0797 0.0763 0.0678 0.0555 0.042 0.0289 0.0209 0.0199 0.0288 0.0525 0.071 0.0866 0.0968 0.0978 0.096 0.095 0.0902 0.0849 0.0821 0.0778 0.0764 0.0757 0.0719 0.0616 0.0506 0.0348 0.0214 0.0186 0.0274 0.0506 0.0674 0.083 0.0936 0.094 0.0936 0.0913 0.0896 0.0881 0.0868 0.0864 0.0849 0.0766 0.0639 0.0462 0.0247 0.0079 0.004 0.0059 0.0295 0.0556 0.0748 0.0915 0.1013 0.1027 0.1017 0.0972 0.095 0.0927 0.0908 0.0899 0.0893 0.0895 0.0839 0.0746 0.0604 0.0419 0.0221 0.0086 0.0048 0.0065 0.0415 0.0712 0.0918 0.1101 0.1179 0.1167 0.1153 0.1141 0.1103 0.1067 0.102 0.0937 0.0909 0.0925 0.0962 0.1017 0.107 0.1094 0.1112 0.1111 0.1078 0.1059 0.1048 0.103 0.1002 0.0985 0.0931 0.0883 0.0851 0.082 0.0796 0.0804 0.079 0.076 0.0729 0.0693 0.0671 0.0647 0.0581 0.048 0.0346 0.0194 0.008 0.0047 0.0052 0.0044 0.0036 0.0038 0.0035 0.0036 0.0033 0.0028 0.0029 0.0021 0.0015 0.0016 0.0029 0.0037 0.0082 0.0103 0.011 0.0262 0.0425 0.0516 0.0636 
0.0701 0.0748 0.0788 0.0765 0.0773 0.0733 0.0696 0.0676 0.0656 0.0651 0.0646 0.0619 0.0546 0.0444 0.0299 0.012 0.019 0.0501 0.0762 0.0936 0.1053 0.1059 0.103 0.0983 0.0959 0.0921 0.0889 0.0878 0.0864 0.0866 0.0873 0.0867 0.0844 0.0829 0.0808 0.0788 0.0817 0.0842 0.0897 0.0926 0.0918 0.0917 0.0881 0.0853 0.0846 0.0831 0.081 0.0806 0.0804 0.0789 0.0748 0.0641 0.0496 0.0352 0.0229 0.0212 0.0222 0.0198 0.0261 0.0456 0.0594 0.072 0.0813 0.0817 0.0805 0.078 0.076 0.073 0.0731 0.0723 0.0711 0.0685 0.0603 0.0482 0.0354 0.0239 0.0168 0.0154 0.026 0.047 0.0644 0.0783 0.0873 0.0886 0.0891 0.0904 0.0906 0.0895 0.0885 0.0878 0.0846 0.0809 0.0694 0.0539 0.0378 0.024 0.0212 0.0249 0.0252 0.0253 0.0499 0.0691 0.0856 0.1018 0.1063 0.1094 0.1088 0.1083 0.106 0.1031 0.1036 0.103 0.1048 0.1035 0.102 0.0999 0.0976 0.0967 0.0958 0.0943 0.0911 0.0884 0.0847 0.0809 0.0807 0.0795 0.0785 0.0775 0.0762 0.0761 0.0775 0.0784 0.081 0.0827 0.0789 0.0705 0.0564 0.0384 0.0229 0.0165 0.0293 0.0485 0.066 0.0834 0.0967 0.1022 0.1048 0.1022 0.0966 0.0913 0.0858 0.0844 0.0851 0.0867 0.0879 0.0862 0.085 0.0838 0.0856 0.0874 0.0909 0.0952 0.0991 0.1049 0.1087 0.1116 0.1143 0.1148 0.1138 0.1096 0.1047 0.0981 0.094 0.0915 0.0859 0.0747 0.0601 0.0422 0.0257 0.0193 0.0193 0.0363 0.0586 0.0741 0.088 0.0963 0.0994 0.1019 0.1019 0.1004 0.0971 0.0937 0.0929 0.0917 0.0918 0.0917 0.0898 0.0881 0.0861 0.0839 0.0849 0.0872 0.0892 0.0908 0.0896 0.0891 0.0897 0.0895 0.0895 0.0902 0.0911 0.0934 0.0946 0.096 0.0928 0.0839 0.0707 0.0519 0.0305 0.0154 0.0066 0.008 0.0372 0.0734 0.099 0.1196 0.1313 0.1324 0.1301 0.1266 0.1226 0.1177 0.1137 0.1091 0.1075 0.1064 0.1067 0.1083 0.1058 0.1053 0.1048 0.1012 0.1026 0.1026 0.1037 0.1057 0.1059 0.1067 0.1052 0.105 0.1028 0.1011 0.0998 0.0964 0.0905 0.0784 0.0614 0.0403 0.0188 0.0051 0.0014 0.0009 0.016 0.0465 0.0765 0.0995 0.1202 0.1308 0.1331 0.1339 0.1321 0.1309 0.1308 0.1299 0.1296 0.1257 0.1204 0.1126 0.1039 0.0955 0.0878 0.0833 0.0814 0.0816 0.0824 0.0856 0.0893 0.0924 0.0976 0.0999 0.102 0.1015 0.1003 0.0984 0.0957 0.096 0.0967 0.0983 0.1014 0.1046 0.1068 0.1091 0.1096 0.1106 0.1118 0.1142 0.1166 0.1206 0.1224 0.1246 0.1259 0.1255 0.1245 0.1234 0.1243 0.1259 0.1279 0.1303 0.1327 0.1347 0.1373 0.1387 0.1397 0.1411 0.1429 0.146 0.1495 0.1515 0.1525 0.1525 0.1524 0.1487 0.1461 0.1435 0.1398 0.1394 0.1399 0.1404 0.1409 0.1418 0.1402 0.1393 0.1378 0.1374 0.138 0.14 0.1427 0.1474 0.1513 0.1521 0.1512 0.1482 0.1436 0.1405 0.1377 0.137 0.1375 0.1364 0.1357 0.1335 0.133 0.1347 0.1388 0.1434 0.1516 0.1562 0.1604 0.162 0.1593 0.1587 0.1581 0.1564 0.1579 0.1576 0.156 0.1564 0.1561 0.1583 0.1592 0.1605 0.1622 0.1631 0.1619 0.1594 0.1547 0.1498 0.1455 0.1423 0.1393 0.1383 0.14 0.1452 0.156 0.1658 0.1736 0.1802 0.1802 0.1748 0.1668 0.1556 0.1467 0.1409 0.1372 0.135 0.1319 0.132 0.1359 0.1422 0.1474 0.1512 0.1526 0.1545 0.156 0.1549 0.1495 0.1439 0.138 0.1318 0.1281 0.1238 0.122 0.1233 0.1247 0.1264 0.1279 0.1287 0.1287 0.1288 0.1293 0.1288 0.1276 0.1255 0.1237 0.122 0.1202 0.1183 0.1157 0.117 0.1161 0.117 0.1165 0.1149 0.1156 0.1153 0.1144 0.1125 0.1091 0.1046 0.1027 0.101 0.0994 0.0969 0.0951 0.0914 0.0869 0.0841 0.0772 0.0727 0.0686 0.0642 0.0608 0.0573 0.0529 0.049 0.0461 0.0435 0.0422 0.037 0.0335 0.0281 0.0208 0.0153 0.0098 0.0054 0.003 0.0022 0.0013 0.0014 0.0005 0.0007 0.0006 0.0003 0.0002 0.0006 0.0 0.0 0.0 0.0004 0.0 0.0005", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0002 0.0002 0.0 0.0003 0.0003 0.0004 0.0004 0.0004 0.0007 0.0013 0.0014 0.0013 0.0013 0.0014 0.0018 0.0019 0.002 
0.0021 0.0022 0.0024 0.0027 0.0028 0.003 0.0031 0.0033 0.0035 0.0035 0.0033 0.0028 0.0024 0.002 0.002 0.002 0.002 0.0016 0.0013 0.0012 0.0009 0.0007 0.001 0.0013 0.002 0.0035 0.0057 0.008 0.0098 0.0113 0.0131 0.0137 0.0128 0.0131 0.0162 0.0227 0.0311 0.0537 0.0719 0.0839 0.0817 0.0703 0.0471 0.0107 0.0013 0.0006 0.0004 0.001 0.0008 0.001 0.004 0.0087 0.0135 0.0163 0.0191 0.0179 0.0153 0.0117 0.0039 0.0021 0.0021 0.0021 0.0019 0.0016 0.0016 0.0015 0.0013 0.0008 0.0007 0.0004 0.0005 0.0003 0.0004 0.0006 0.0009 0.0013 0.0017 0.0018 0.0018 0.0021 0.002 0.0018 0.0016 0.0014 0.0011 0.0011 0.0011 0.001 0.001 0.0011 0.0017 0.0042 0.0069 0.0118 0.0157 0.0168 0.0164 0.0132 0.0081 0.0029 0.0018 0.002 0.0022 0.0023 0.0022 0.0019 0.0017 0.0015 0.0016 0.0013 0.0013 0.0016 0.0025 0.0036 0.004 0.0043 0.0031 0.002 0.0011 0.001 0.0011 0.0013 0.0012 0.0013 0.0013 0.0013 0.0014 0.0016 0.0014 0.0015 0.0032 0.0056 0.0082 0.0107 0.0127 0.0131 0.0118 0.0101 0.0063 0.0024 0.0021 0.0022 0.0024 0.0023 0.0026 0.0026 0.0025 0.0026 0.0022 0.002 0.0019 0.0022 0.0021 0.0021 0.0022 0.002 0.0019 0.002 0.0023 0.0022 0.0024 0.0021 0.0022 0.0022 0.002 0.0021 0.0017 0.0016 0.0017 0.0016 0.0017 0.0017 0.0017 0.0013 0.0013 0.0011 0.0014 0.002 0.002 0.0025 0.0031 0.004 0.0047 0.005 0.0057 0.0052 0.0047 0.004 0.0029 0.0028 0.002 0.0016 0.0009 0.0005 0.0006 0.0023 0.0066 0.0127 0.0136 0.0138 0.0117 0.0046 0.0038 0.004 0.0038 0.0033 0.0024 0.0018 0.0015 0.0014 0.0016 0.0046 0.0082 0.0164 0.0238 0.031 0.0357 0.0349 0.0309 0.0228 0.011 0.0038 0.0021 0.0014 0.0012 0.001 0.001 0.001 0.0009 0.0006 0.0008 0.0007 0.0006 0.0009 0.0019 0.004 0.0054 0.0057 0.0053 0.0041 0.0019 0.0012 0.0011 0.0012 0.0011 0.0011 0.0011 0.001 0.001 0.001 0.0009 0.0009 0.001 0.0009 0.0009 0.0008 0.0006 0.0008 0.0012 0.0013 0.0017 0.0019 0.0017 0.0016 0.0017 0.0012 0.0015 0.0017 0.0017 0.0018 0.0018 0.0019 0.0022 0.0022 0.0032 0.0075 0.0112 0.0119 0.0121 0.0105 0.0069 0.0051 0.0039 0.0032 0.0029 0.0026 0.0017 0.0011 0.0012 0.0013 0.0014 0.0013 0.0015 0.0013 0.0016 0.0015 0.0015 0.0016 0.0014 0.0016 0.0019 0.0023 0.0026 0.0024 0.0025 0.0024 0.0023 0.0021 0.0022 0.0021 0.0022 0.0021 0.0019 0.0022 0.0021 0.0018 0.0017 0.0011 0.0013 0.0015 0.0016 0.0014 0.0014 0.0015 0.0012 0.0013 0.0015 0.0014 0.0013 0.0015 0.0014 0.0014 0.0013 0.0013 0.0012 0.0012 0.0013 0.0012 0.0012 0.0013 0.0018 0.0023 0.0027 0.0034 0.0038 0.0044 0.0053 0.0055 0.0056 0.0052 0.0041 0.0033 0.0023 0.0015 0.0009 0.0008 0.0004 0.0005 0.0026 0.0059 0.0132 0.017 0.0167 0.016 0.0113 0.0056 0.0042 0.0033 0.003 0.0024 0.0018 0.0018 0.0017 0.0022 0.0032 0.0043 0.0068 0.0093 0.0116 0.0155 0.0192 0.0202 0.019 0.0155 0.0103 0.0038 0.0024 0.0019 0.0015 0.0014 0.0017 0.0013 0.0014 0.0013 0.0011 0.0008 0.0009 0.0013 0.0032 0.0062 0.0097 0.0126 0.0127 0.0116 0.0086 0.0033 0.0021 0.0015 0.0014 0.0014 0.0014 0.0015 0.0017 0.002 0.0037 0.0117 0.0218 0.0307 0.0384 0.045 0.049 0.0492 0.0477 0.0413 0.0324 0.0233 0.0109 0.0038 0.0025 0.0021 0.0017 0.0016 0.0017 0.0015 0.0015 0.0017 0.0016 0.0016 0.0016 0.0014 0.0014 0.0014 0.0014 0.0013 0.0014 0.0014 0.0013 0.0012 0.0014 0.0014 0.0014 0.0015 0.0016 0.0017 0.0018 0.0018 0.0018 0.0017 0.0038 0.0057 0.0063 0.0069 0.0062 0.0054 0.0044 0.0033 0.0023 0.0018 0.0017 0.002 0.002 0.002 0.0017 0.0017 0.0016 0.0015 0.0015 0.0014 0.0013 0.0011 0.0011 0.001 0.0011 0.0012 0.0014 0.0017 0.0018 0.0019 0.0017 0.0016 0.0016 0.0015 0.0015 0.0016 0.0018 0.0018 0.0019 0.002 0.0018 0.0019 0.0018 0.0018 0.002 0.0021 0.0021 0.002 0.0021 0.0027 0.0025 0.0028 0.0027 0.0024 0.0023 0.0021 0.002 
0.002 0.0018 0.0019 0.0022 0.0021 0.002 0.0016 0.0016 0.0014 0.0015 0.0015 0.0015 0.0016 0.0015 0.0013 0.0011 0.001 0.0009 0.001 0.001 0.001 0.001 0.0011 0.001 0.001 0.001 0.0014 0.0017 0.0023 0.0028 0.0031 0.003 0.0029 0.0027 0.0024 0.0023 0.0021 0.0019 0.0019 0.0022 0.0025 0.0026 0.0028 0.0027 0.0025 0.0024 0.0026 0.0023 0.0023 0.0026 0.0022 0.0025 0.0027 0.0026 0.0028 0.0027 0.0023 0.0022 0.002 0.0023 0.0025 0.0025 0.0023 0.0019 0.0027 0.0043 0.0061 0.008 0.0098 0.0113 0.0119 0.0111 0.0099 0.0075 0.0048 0.0037 0.0027 0.0023 0.0023 0.0025 0.0023 0.0024 0.0024 0.0021 0.0021 0.0023 0.0022 0.0021 0.002 0.0021 0.002 0.0021 0.002 0.0022 0.0022 0.002 0.0017 0.0019 0.0018 0.0016 0.0017 0.0017 0.0019 0.0027 0.0088 0.0147 0.0179 0.0254 0.0301 0.034 0.0398 0.0427 0.0422 0.0384 0.0303 0.0194 0.0092 0.0041 0.0035 0.0034 0.0036 0.0037 0.0037 0.0037 0.0031 0.0026 0.0027 0.0027 0.0027 0.0029 0.0025 0.0022 0.0019 0.0023 0.0026 0.003 0.0031 0.003 0.0029 0.0029 0.003 0.003 0.003 0.0029 0.0029 0.0025 0.0021 0.0017 0.0018 0.0019 0.0022 0.0022 0.0019 0.0023 0.0027 0.0034 0.0056 0.0063 0.0069 0.0074 0.0069 0.008 0.0091 0.0095 0.0094 0.0089 0.0072 0.0061 0.0047 0.0028 0.0021 0.0017 0.0018 0.0019 0.0023 0.0025 0.0035 0.0055 0.0063 0.0067 0.0061 0.0047 0.0019 0.001 0.0009 0.0012 0.0038 0.0149 0.0282 0.0396 0.0541 0.0662 0.0813 0.0877 0.0842 0.0739 0.0517 0.0312 0.0189 0.0125 0.0049 0.0032 0.003 0.0028 0.0025 0.0025 0.0027 0.0031 0.0035 0.0038 0.0045 0.0062 0.0089 0.0118 0.0131 0.0125 0.011 0.0075 0.0038 0.0033 0.0028 0.0022 0.0022 0.0019 0.0019 0.0021 0.0021 0.002 0.0027 0.0053 0.0118 0.0176 0.0226 0.0275 0.0294 0.03 0.0271 0.0217 0.0154 0.0079 0.0042 0.0033 0.0034 0.0032 0.003 0.0025 0.0027 0.0026 0.0026 0.0023 0.0021 0.0018 0.0018 0.002 0.0018 0.0017 0.0015 0.0014 0.0012 0.0012 0.0013 0.0014 0.0018 0.0017 0.0012 0.0009 0.001 0.0012 0.0013 0.0011 0.0011 0.0019 0.0033 0.0042 0.005 0.0055 0.0049 0.0043 0.0033 0.002 0.0016 0.0017 0.0016 0.0017 0.0022 0.0019 0.0015 0.0016 0.0016 0.0017 0.0015 0.0014 0.0013 0.0013 0.0013 0.0011 0.0013 0.0014 0.0016 0.0016 0.0016 0.0015 0.0011 0.0012 0.0009 0.001 0.0008 0.0009 0.0009 0.0012 0.001 0.0011 0.001 0.0009 0.0009 0.0007 0.0007 0.0006 0.0006 0.0007 0.0008 0.0008 0.0006 0.0008 0.0008 0.0008 0.0006 0.0005 0.0006 0.0005 0.0005 0.0008 0.001 0.002 0.0057 0.0102 0.0132 0.0153 0.0161 0.014 0.0111 0.0071 0.0035 0.0022 0.0021 0.002 0.0018 0.0019 0.002 0.0022 0.0023 0.0025 0.0025 0.0025 0.0039 0.0063 0.0093 0.0139 0.0178 0.0186 0.0172 0.0143 0.0086 0.0028 0.0021 0.002 0.0017 0.0017 0.0016 0.0013 0.0016 0.0021 0.0047 0.0104 0.0169 0.0202 0.0227 0.0228 0.0211 0.0201 0.018 0.0148 0.0112 0.0057 0.0029 0.0024 0.0019 0.0018 0.0011 0.001 0.0008 0.0008 0.0005 0.0006 0.0005 0.0008 0.0007 0.0008 0.0012 0.0017 0.0022 0.0024 0.0023 0.0019 0.0018 0.0019 0.0018 0.0018 0.0015 0.0017 0.0016 0.0015 0.0022 0.0038 0.0075 0.0118 0.0168 0.0202 0.0236 0.0263 0.0269 0.0254 0.0216 0.0151 0.0079 0.0028 0.0018 0.0015 0.0013 0.0012 0.0016 0.0013 0.0014 0.0012 0.0011 0.0011 0.0015 0.0015 0.0013 0.0014 0.0021 0.0023 0.0031 0.0035 0.0034 0.0034 0.0036 0.0038 0.0037 0.0037 0.0034 0.0032 0.003 0.0027 0.0026 0.0025 0.0024 0.0023 0.0023 0.0022 0.0021 0.0021 0.0017 0.0016 0.0015 0.0014 0.0013 0.0013 0.001 0.0009 0.0009 0.001 0.0008 0.0008 0.0006 0.0006 0.0007 0.0008 0.0009 0.0011 0.0016 0.0015 0.0018 0.0017 0.0016 0.0018 0.0024 0.0027 0.0034 0.0039 0.0037 0.0035 0.0028 0.002 0.0014 0.0008 0.0008 0.0005 0.0001 0.0004 0.0024 0.0093 0.0129 0.0136 0.0131 0.0096 0.0048 0.0027 0.0023 0.0018 0.0015 0.0011 0.0009 0.0007 0.0007 
0.0006 0.0007 0.0005 0.0007 0.0005 0.0006 0.0006 0.0005 0.0005 0.0007 0.0007 0.0005 0.0009 0.0008 0.0011 0.0011 0.0012 0.0009 0.0011 0.0014 0.0014 0.0016 0.0015 0.0014 0.0018 0.0023 0.0036 0.0051 0.0056 0.0053 0.0049 0.0039 0.0027 0.0029 0.0027 0.0024 0.0019 0.0019 0.002 0.0022 0.0021 0.0022 0.0018 0.0019 0.0017 0.0026 0.0033 0.0042 0.0041 0.0042 0.0036 0.0028 0.0023 0.0024 0.002 0.0016 0.0015 0.0014 0.0013 0.0012 0.0008 0.0008 0.0005 0.0004 0.0026 0.0065 0.0097 0.0119 0.013 0.0121 0.0104 0.0082 0.006 0.0038 0.0017 0.0015 0.0014 0.0012 0.0013 0.0012 0.0014 0.0013 0.0013 0.0015 0.0019 0.0028 0.005 0.0085 0.0125 0.0149 0.0145 0.0128 0.0087 0.0033 0.0019 0.0017 0.002 0.0017 0.0017 0.002 0.0019 0.0018 0.0017 0.0021 0.002 0.0023 0.0018 0.0019 0.0017 0.0019 0.0018 0.0018 0.002 0.0018 0.0016 0.0016 0.0017 0.0014 0.0015 0.0013 0.0013 0.0013 0.0015 0.0014 0.0014 0.0015 0.0015 0.0016 0.0013 0.0015 0.0016 0.0017 0.0018 0.0017 0.0019 0.0016 0.0017 0.0017 0.0018 0.0014 0.0013 0.0014 0.0013 0.0012 0.0013 0.001 0.0007 0.0007 0.0007 0.0008 0.0007 0.0009 0.0011 0.0013 0.0014 0.0017 0.0019 0.0018 0.0019 0.0017 0.0015 0.0014 0.0013 0.0009 0.0008 0.0007 0.0007 0.0008 0.0005 0.0005 0.001 0.0017 0.0024 0.0027 0.0027 0.0023 0.0018 0.0019 0.0016 0.0014 0.0014 0.0013 0.0013 0.0012 0.0011 0.0011 0.0012 0.0014 0.0014 0.001 0.0011 0.0009 0.0011 0.0011 0.001 0.0013 0.0011 0.0011 0.0009 0.0012 0.0011 0.0013 0.0013 0.0015 0.0011 0.0021 0.0011 0.0013 0.0016 0.0017 0.0018 0.0023 0.0024 0.0029 0.003 0.0028 0.0027 0.0029 0.0032 0.0032 0.0032 0.003 0.0027 0.0028 0.0024 0.0021 0.0019 0.0014 0.0013 0.0014 0.0015 0.0016 0.0014 0.0012 0.0011 0.0009 0.0008 0.0009 0.001 0.0008 0.0009 0.0005 0.0006 0.0011 0.0022 0.0062 0.0113 0.0142 0.0172 0.0211 0.0218 0.0231 0.0226 0.0203 0.0173 0.0129 0.0096 0.0042 0.0036 0.0036 0.0039 0.0035 0.0035 0.0038 0.0035 0.0038 0.0034 0.0031 0.0029 0.0028 0.0028 0.0026 0.0026 0.0022 0.0023 0.0023 0.0024 0.0026 0.0028 0.003 0.0032 0.0027 0.0028 0.0024 0.0025 0.0024 0.0021 0.0018 0.0016 0.0014 0.0017 0.0017 0.0017 0.0017 0.0018 0.0022 0.002 0.0021 0.0027 0.0033 0.0042 0.0049 0.0051 0.0048 0.0037 0.0029 0.0024 0.0022 0.0024 0.0042 0.0065 0.0088 0.0123 0.0151 0.0163 0.016 0.0143 0.0111 0.0084 0.0057 0.0044 0.0044 0.0045 0.0045 0.0038 0.0029 0.0017 0.0013 0.0012 0.001 0.001 0.0009 0.002 0.0058 0.0121 0.0157 0.017 0.0162 0.012 0.0074 0.003 0.0025 0.0022 0.0023 0.002 0.0017 0.0016 0.0013 0.0016 0.0013 0.001 0.0007 0.0007 0.0006 0.0011 0.0028 0.0024 0.0026 0.0031 0.003 0.0026 0.0025 0.0025 0.0019 0.002 0.0016 0.0018 0.0016 0.0016 0.0019 0.0019 0.0018 0.002 0.0018 0.0019 0.0019 0.0022 0.0024 0.0021 0.0023 0.002 0.0019 0.0018 0.0014 0.0013 0.0009 0.001 0.0008 0.0005 0.0006 0.0006 0.0004 0.0005 0.0012 0.002 0.0055 0.0078 0.0093 0.0094 0.0082 0.0063 0.002 0.0012 0.0011 0.001 0.0009 0.001 0.0009 0.001 0.001 0.0011 0.0012 0.0011 0.0011 0.0008 0.0008 0.0008 0.001 0.0011 0.0014 0.0024 0.0027 0.003 0.0029 0.0028 0.0028 0.0028 0.0026 0.0025 0.0024 0.0023 0.0025 0.0025 0.0024 0.0025 0.0022 0.002 0.0022 0.0021 0.002 0.0021 0.0019 0.002 0.0018 0.0018 0.0018 0.0018 0.0018 0.0017 0.0016 0.0015 0.0013 0.0014 0.0012 0.0012 0.0012 0.0012 0.0017 0.0021 0.0019 0.002 0.0023 0.0027 0.0034 0.0042 0.0046 0.0051 0.0053 0.0048 0.0045 0.0034 0.0028 0.0023 0.0019 0.0015 0.0009 0.0004 0.0011 0.0024 0.0094 0.0263 0.0128 0.0123 0.0107 0.0046 0.0034 0.0036 0.0039 0.0033 0.0024 0.0015 0.0016 0.0018 0.0026 0.0068 0.0113 0.0165 0.0202 0.0232 0.0258 0.0241 0.0211 0.0165 0.0086 0.0041 0.0031 0.0022 0.0018 0.0017 0.0017 0.0016 0.0015 0.0016 0.0018 
0.0016 0.0013 0.0023 0.0031 0.0046 0.0057 0.0055 0.005 0.004 0.0022 0.0017 0.0017 0.0016 0.0015 0.0013 0.0012 0.0011 0.0009 0.0008 0.0008 0.0006 0.0006 0.0005 0.0007 0.0006 0.0008 0.0009 0.0011 0.0014 0.0016 0.0014 0.0013 0.0012 0.0013 0.0015 0.0017 0.0017 0.0016 0.0014 0.0014 0.0016 0.0018 0.0041 0.0069 0.0085 0.0102 0.0113 0.0109 0.0097 0.0079 0.0049 0.0026 0.002 0.0018 0.0016 0.0017 0.0013 0.0013 0.0013 0.0013 0.0014 0.0016 0.0025 0.0058 0.0104 0.0157 0.0193 0.0211 0.0197 0.0155 0.0118 0.0067 0.0035 0.0037 0.0031 0.0027 0.0027 0.0025 0.0024 0.0024 0.0023 0.0023 0.0024 0.0023 0.0023 0.0027 0.0025 0.0021 0.002 0.0016 0.0016 0.0015 0.0015 0.002 0.0019 0.0019 0.0018 0.0017 0.0016 0.0017 0.0016 0.0015 0.0016 0.0014 0.0014 0.0012 0.0013 0.0014 0.0014 0.0018 0.0017 0.0021 0.0033 0.0041 0.005 0.0054 0.0059 0.006 0.0058 0.0057 0.0049 0.0043 0.0038 0.0032 0.0026 0.0015 0.0011 0.0014 0.0018 0.0022 0.0021 0.0016 0.0019 0.0024 0.0029 0.003 0.0029 0.0023 0.0016 0.0013 0.0013 0.0018 0.002 0.0031 0.005 0.0075 0.0108 0.0152 0.0206 0.0219 0.0208 0.0174 0.0102 0.0045 0.0025 0.0021 0.0019 0.002 0.0017 0.0015 0.0013 0.0009 0.0008 0.001 0.001 0.0012 0.0016 0.0015 0.0016 0.0019 0.0019 0.0022 0.0019 0.002 0.002 0.0021 0.0025 0.0028 0.0032 0.0028 0.0026 0.0025 0.0026 0.0031 0.0096 0.0186 0.025 0.0312 0.0345 0.0347 0.0341 0.0295 0.0237 0.0181 0.0104 0.0038 0.002 0.0015 0.0018 0.0018 0.0018 0.002 0.0019 0.0019 0.0019 0.0017 0.0016 0.0016 0.0017 0.0019 0.0017 0.0017 0.0018 0.0018 0.0015 0.002 0.0019 0.0022 0.0021 0.0022 0.0023 0.0026 0.003 0.0033 0.0036 0.004 0.0089 0.0155 0.0198 0.0235 0.026 0.0256 0.0242 0.0203 0.015 0.009 0.0052 0.0047 0.004 0.0034 0.0034 0.0031 0.0031 0.0027 0.0027 0.0023 0.0019 0.0019 0.0022 0.0018 0.0017 0.0017 0.0014 0.0015 0.0017 0.0014 0.0017 0.0016 0.0017 0.0018 0.0016 0.0014 0.0013 0.0011 0.001 0.0011 0.0012 0.0016 0.0028 0.0063 0.0112 0.0146 0.018 0.0186 0.0165 0.0137 0.0084 0.0052 0.0036 0.0027 0.0022 0.0023 0.0022 0.0022 0.0017 0.0016 0.0011 0.0009 0.0008 0.0007 0.0004 0.001 0.0032 0.0042 0.0044 0.0046 0.004 0.0026 0.0026 0.0021 0.0019 0.0017 0.0018 0.0018 0.0017 0.0017 0.002 0.0025 0.0031 0.0046 0.0059 0.0083 0.0124 0.0147 0.0148 0.0136 0.0104 0.0054 0.0037 0.0034 0.0035 0.003 0.0029 0.0025 0.0023 0.0021 0.0023 0.0023 0.0023 0.0019 0.0017 0.0015 0.0012 0.0015 0.0012 0.0011 0.001 0.0008 0.0008 0.0006 0.0004 0.0005 0.0005 0.0004 0.0002 0.0005 0.0005 0.0006 0.0007 0.0014 0.0022 0.0037 0.0056 0.0068 0.0069 0.0064 0.0052 0.0031 0.0025 0.0025 0.0023 0.0019 0.0016 0.0015 0.0013 0.0013 0.0013 0.0014 0.0016 0.0014 0.0015 0.0016 0.0013 0.0015 0.0015 0.0013 0.0014 0.001 0.0008 0.0012 0.0009 0.0012 0.001 0.001 0.0012 0.0008 0.0009 0.001 0.001 0.0011 0.0016 0.0042 0.0065 0.0073 0.0087 0.009 0.0078 0.0073 0.0056 0.0028 0.0026 0.0025 0.0024 0.0026 0.0027 0.0029 0.0028 0.0028 0.0026 0.0021 0.0019 0.0021 0.0021 0.0022 0.0024 0.0022 0.0021 0.002 0.0022 0.0022 0.0023 0.0024 0.0023 0.0024 0.0023 0.0021 0.0022 0.0019 0.0018 0.0018 0.0017 0.0015 0.0017 0.0018 0.0016 0.0016 0.0018 0.002 0.0022 0.0032 0.0041 0.0048 0.0055 0.0057 0.0055 0.0057 0.0053 0.0046 0.0041 0.0037 0.003 0.0024 0.0018 0.0011 0.0013 0.0014 0.0016 0.0016 0.0018 0.0021 0.0033 0.0043 0.0051 0.0052 0.0055 0.0048 0.0031 0.0014 0.0011 0.0008 0.0012 0.0019 0.0126 0.0219 0.0294 0.0357 0.0401 0.0406 0.0369 0.0311 0.0227 0.0136 0.0084 0.0039 0.0028 0.0023 0.0024 0.0024 0.0025 0.0026 0.0027 0.0026 0.0023 0.0021 0.0019 0.0018 0.0014 0.0011 0.0011 0.0014 0.0016 0.0019 0.0021 0.0021 0.0022 0.0024 0.0021 0.0024 0.0021 0.002 0.0018 0.0018 0.0018 
0.0043 0.011 0.0164 0.0201 0.0229 0.0219 0.019 0.0146 0.0097 0.0053 0.0039 0.003 0.0025 0.0023 0.0019 0.0021 0.0022 0.0021 0.002 0.0017 0.0014 0.0016 0.0015 0.0013 0.0013 0.0011 0.0012 0.0011 0.0008 0.0007 0.0007 0.0009 0.001 0.0009 0.001 0.0009 0.0008 0.0008 0.0006 0.0008 0.0007 0.0008 0.0015 0.0028 0.0035 0.0045 0.0045 0.0039 0.0029 0.0023 0.002 0.002 0.002 0.0022 0.0023 0.0023 0.0021 0.0018 0.0012 0.0012 0.0012 0.0012 0.0013 0.0011 0.0013 0.0011 0.001 0.0011 0.001 0.001 0.0009 0.0008 0.0008 0.0005 0.0008 0.0007 0.0005 0.0006 0.0007 0.0005 0.0006 0.0005 0.0008 0.0006 0.0006 0.0007 0.0018 0.0024 0.0024 0.0024 0.002 0.0024 0.0033 0.0042 0.0045 0.0043 0.0037 0.0028 0.0018 0.0015 0.0012 0.0019 0.0035 0.0055 0.0085 0.0108 0.013 0.0139 0.0129 0.0125 0.0103 0.0081 0.0062 0.0033 0.0025 0.0026 0.0023 0.0019 0.0019 0.0017 0.0017 0.0015 0.0015 0.0018 0.0036 0.0064 0.0103 0.0135 0.0158 0.0164 0.0146 0.0116 0.0065 0.0035 0.0022 0.0019 0.0016 0.0013 0.0014 0.0013 0.0013 0.0013 0.0011 0.0014 0.0021 0.0065 0.0116 0.0153 0.0179 0.018 0.0152 0.0117 0.0073 0.0036 0.0027 0.0026 0.0023 0.002 0.0019 0.0018 0.0015 0.0011 0.0008 0.0006 0.0009 0.0033 0.0075 0.0127 0.0174 0.0191 0.0183 0.0147 0.0092 0.0047 0.0029 0.0028 0.0029 0.0025 0.0023 0.0022 0.002 0.002 0.0019 0.0017 0.0016 0.0026 0.0031 0.0031 0.0023 0.0031 0.0041 0.0046 0.0047 0.0039 0.0026 0.0028 0.0029 0.0025 0.002 0.0017 0.0018 0.0022 0.002 0.0019 0.0016 0.0015 0.0015 0.0026 0.0033 0.0041 0.0045 0.0046 0.004 0.0032 0.0024 0.0024 0.0027 0.0025 0.0025 0.0025 0.0024 0.0025 0.0022 0.0018 0.0016 0.0016 0.0017 0.0019 0.0017 0.0019 0.0015 0.0015 0.0014 0.0015 0.0017 0.0018 0.0017 0.0017 0.0015 0.0016 0.0017 0.0019 0.002 0.0018 0.0017 0.0015 0.0012 0.0013 0.0013 0.0012 0.0017 0.0021 0.0027 0.0024 0.0024 0.0026 0.003 0.0034 0.0035 0.0034 0.0032 0.0029 0.0029 0.0025 0.0019 0.0017 0.0015 0.0013 0.0027 0.0033 0.0055 0.0068 0.0076 0.0096 0.0202 0.0363 0.05 0.0568 0.0548 0.0484 0.0341 0.0107 0.002 0.001 0.0009 0.001 0.0007 0.0007 0.0012 0.0013 0.0025 0.0049 0.007 0.0082 0.0083 0.0075 0.0046 0.0036 0.0032 0.0027 0.0027 0.0022 0.0023 0.002 0.0019 0.002 0.0021 0.002 0.0022 0.0019 0.0015 0.0013 0.001 0.001 0.0012 0.0013 0.0015 0.0017 0.0018 0.0019 0.002 0.0019 0.0019 0.0018 0.0018 0.0019 0.0024 0.0023 0.0045 0.0097 0.0135 0.0179 0.0215 0.0223 0.0212 0.0179 0.0127 0.007 0.0032 0.0023 0.0017 0.0015 0.0014 0.0014 0.0017 0.0019 0.0017 0.0015 0.0014 0.0023 0.0058 0.0104 0.0132 0.0148 0.0151 0.013 0.0102 0.007 0.0037 0.0026 0.0022 0.0019 0.002 0.0021 0.002 0.0023 0.0021 0.0024 0.0028 0.0056 0.0109 0.015 0.0191 0.023 0.0249 0.0256 0.0245 0.0206 0.0158 0.0087 0.0037 0.0033 0.003 0.0035 0.0033 0.0029 0.0027 0.0021 0.0021 0.0019 0.0018 0.0018 0.0019 0.0018 0.0016 0.0015 0.0012 0.0013 0.0011 0.0011 0.0011 0.001 0.0011 0.0011 0.001 0.001 0.0009 0.0007 0.0009 0.001 0.0007 0.0009 0.001 0.0048 0.009 0.0136 0.0161 0.0163 0.0145 0.0104 0.0059 0.0026 0.0024 0.0025 0.0028 0.0024 0.0045 0.0021 0.0018 0.0017 0.0016 0.0014 0.0013 0.0009 0.0009 0.0006 0.0011 0.0015 0.0021 0.0024 0.0025 0.002 0.0023 0.0026 0.0028 0.0028 0.0027 0.0025 0.0025 0.0021 0.0024 0.0025 0.003 0.006 0.0107 0.0156 0.0195 0.0212 0.0204 0.0167 0.0123 0.0079 0.0038 0.0036 0.0035 0.0031 0.0032 0.003 0.0027 0.0023 0.0021 0.0017 0.0015 0.0015 0.0017 0.0017 0.0019 0.002 0.0021 0.0025 0.0027 0.0026 0.0026 0.0023 0.0021 0.0019 0.0018 0.002 0.002 0.0021 0.0021 0.0022 0.0024 0.0023 0.0031 0.003 0.0031 0.0038 0.0053 0.0059 0.0062 0.006 0.0045 0.0041 0.0034 0.0031 0.0026 0.0026 0.0024 0.0022 0.002 0.0019 0.0018 0.0019 0.002 0.0024 
0.0027 0.003 0.0028 0.0025 0.0022 0.0018 0.0018 0.0016 0.0017 0.0015 0.0015 0.0015 0.0016 0.0017 0.0016 0.0015 0.0017 0.0022 0.0025 0.0026 0.0027 0.0026 0.0019 0.0025 0.003 0.0032 0.0031 0.0028 0.0026 0.0022 0.002 0.0022 0.0023 0.0026 0.0026 0.0025 0.0023 0.0021 0.002 0.0021 0.0021 0.0021 0.0019 0.0018 0.0015 0.0018 0.0017 0.0019 0.0019 0.002 0.0021 0.0022 0.0018 0.0017 0.0019 0.0018 0.0019 0.0022 0.0019 0.0019 0.0018 0.0018 0.002 0.0017 0.0017 0.0016 0.0015 0.0017 0.0016 0.0017 0.0019 0.0019 0.0022 0.0022 0.0026 0.0027 0.0027 0.0027 0.0028 0.0028 0.0026 0.0026 0.0028 0.0029 0.0031 0.0033 0.0033 0.0032 0.0032 0.0031 0.0029 0.0029 0.003 0.0032 0.0035 0.0035 0.0032 0.0031 0.0031 0.0034 0.0036 0.0036 0.0032 0.0029 0.0025 0.0023 0.0023 0.0025 0.0029 0.0032 0.0033 0.0032 0.0031 0.0026 0.0026 0.0023 0.0023 0.0027 0.0032 0.0037 0.0039 0.0037 0.0035 0.0029 0.0029 0.0031 0.0031 0.0035 0.0034 0.0033 0.0027 0.0026 0.0025 0.0028 0.0027 0.0029 0.0034 0.0036 0.0038 0.0034 0.0029 0.0028 0.0029 0.0029 0.0036 0.0039 0.004 0.0042 0.0044 0.0045 0.0043 0.0041 0.0035 0.0035 0.0034 0.0032 0.0036 0.0036 0.0035 0.0034 0.0029 0.003 0.0031 0.0031 0.0031 0.0036 0.0035 0.0039 0.0039 0.0035 0.0032 0.0027 0.003 0.0029 0.003 0.0026 0.0024 0.0022 0.0022 0.0024 0.0023 0.0022 0.0022 0.0022 0.0021 0.0018 0.0016 0.0016 0.0018 0.002 0.0021 0.002 0.0018 0.0018 0.0016 0.0015 0.0017 0.0017 0.0019 0.0019 0.0015 0.0015 0.0016 0.0017 0.0017 0.0018 0.0015 0.0015 0.0018 0.0018 0.002 0.0019 0.0017 0.0018 0.0017 0.0016 0.0015 0.0017 0.0017 0.0014 0.0013 0.0012 0.0013 0.0009 0.0011 0.0009 0.0008 0.0008 0.001 0.0009 0.0015 0.0026 0.004 0.0044 0.0043 0.004 0.0023 0.0015 0.0011 0.0009 0.0005 0.0005 0.0006 0.0003 0.0002 0.0002 0.0003 0.0002 0.0003 0.0002 0.0004", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 140.829, + "text": "SP AP 过 三 巡 酒 气 开 月 华 SP", + "ph_seq": "SP AP g uo s an x vn j iu q i k ai y ve h ua SP", + "ph_dur": "0.14 0.4 0.06 0.163 0.075 0.163 0.075 0.416 0.06 0.163 0.075 0.178 0.06 0.356 0.12 0.357 0.12 0.476 0.4", + "ph_num": "1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 G3 E3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.238 0.238 0.476 0.477 0.476 0.4", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "201.5 201.9 202.1 202.2 202.0 202.3 201.9 202.2 202.4 202.5 202.2 202.4 202.4 202.5 202.3 201.9 201.8 201.7 201.3 201.3 200.9 201.2 200.9 200.9 201.1 200.9 201.1 201.0 201.1 200.9 201.0 200.8 200.8 200.6 200.6 200.9 200.6 200.9 200.8 200.4 200.4 199.9 200.5 199.5 199.3 199.2 198.6 197.8 197.9 197.5 197.5 197.4 196.5 197.8 202.4 210.1 219.2 227.2 235.2 242.7 248.6 250.4 248.4 244.1 239.9 235.3 232.7 230.3 228.0 226.1 224.7 222.2 224.3 220.1 218.3 215.8 216.0 217.3 219.6 221.6 221.6 221.4 222.4 222.0 219.6 217.3 207.6 217.0 225.0 233.3 242.5 252.9 262.1 257.0 254.0 251.8 250.2 248.0 245.4 242.3 238.3 235.2 235.4 236.3 239.7 243.7 247.0 249.6 252.7 256.1 257.4 255.5 249.4 244.1 239.7 237.1 236.6 238.8 243.1 247.0 249.3 250.1 249.9 249.6 248.0 245.3 238.8 229.9 226.3 233.8 240.8 247.1 254.3 258.1 252.8 249.8 248.1 247.2 247.1 246.9 247.4 246.6 245.9 246.0 245.4 243.4 238.7 232.9 230.2 228.6 227.0 226.6 225.9 225.6 223.2 218.8 218.1 218.5 219.3 219.8 219.4 219.9 221.5 222.7 222.3 221.3 220.0 212.4 212.3 216.7 218.2 220.9 224.8 228.9 237.1 242.2 246.0 248.4 251.0 252.5 252.7 250.4 247.7 244.9 242.3 240.8 240.3 241.3 243.8 246.6 247.5 248.9 248.8 246.9 246.1 246.1 246.1 246.0 245.4 245.2 244.2 245.1 247.1 247.8 246.7 244.2 239.7 233.1 224.0 210.5 199.1 191.7 187.1 186.3 187.3 
188.2 191.3 192.9 194.6 194.9 195.4 193.3 190.7 188.6 186.8 185.4 184.6 184.3 184.2 185.2 190.3 192.4 196.9 198.3 197.9 197.9 198.1 197.7 195.9 194.1 193.8 193.6 194.2 193.7 192.0 189.1 183.0 178.4 175.8 173.5 171.5 170.0 169.1 167.9 166.6 165.8 164.6 162.7 165.5 167.2 170.6 172.0 172.0 170.7 167.7 164.5 161.0 159.3 159.0 159.9 161.9 164.5 167.3 169.9 171.1 171.3 170.7 170.0 168.6 165.7 162.2 158.7 156.9 156.5 157.7 158.7 160.9 162.6 166.9 171.1 174.1 175.4 175.5 175.3 174.4 172.5 166.3 160.6 153.5 149.9 150.6 150.5 150.7 150.9 150.9 151.0 151.5 151.6 151.8 151.6 151.4 152.0 151.9 152.1 151.8 152.2 152.2 152.2 152.1 152.1 152.2 152.2 151.9 152.1 152.1 152.2 152.1 152.3 152.1 152.6 152.2 152.4 152.3", + "f0_timestep": "0.011609977324263039", + "energy": "0.0009 0.0007 0.0003 0.0 0.0 0.0 0.0 0.0002 0.0003 0.0 0.0004 0.0006 0.0005 0.0007 0.0007 0.001 0.0019 0.002 0.0026 0.0028 0.0024 0.0031 0.0028 0.0034 0.0036 0.0036 0.0033 0.0033 0.0032 0.0034 0.0037 0.0031 0.0032 0.0037 0.0032 0.0036 0.0027 0.0032 0.0027 0.0021 0.0021 0.0018 0.0023 0.0021 0.0011 0.0019 0.0026 0.006 0.0068 0.0135 0.0416 0.0612 0.0793 0.0909 0.0964 0.0997 0.0864 0.8695 0.0903 0.0874 0.0884 0.0879 0.0837 0.0796 0.0683 0.0523 0.0389 0.0243 0.0219 0.0205 0.0171 0.0361 0.0564 0.0711 0.087 0.094 0.0936 0.0925 0.0892 0.0847 0.0819 0.0784 0.0767 0.0756 0.0705 0.0602 0.0476 0.0316 0.0213 0.0188 0.0177 0.0297 0.0528 0.0705 0.0858 0.0959 0.097 0.0973 0.0948 0.0898 0.0853 0.0807 0.0781 0.0771 0.0776 0.0789 0.0793 0.0806 0.0801 0.0808 0.0812 0.0815 0.0823 0.0802 0.0778 0.0764 0.0758 0.0756 0.0766 0.0769 0.0764 0.0761 0.0775 0.0777 0.0782 0.0757 0.0668 0.0549 0.0381 0.0219 0.0128 0.0109 0.0222 0.0468 0.0631 0.0765 0.0854 0.0869 0.0874 0.0869 0.0852 0.0827 0.0798 0.0782 0.0752 0.0695 0.0586 0.0464 0.0305 0.0153 0.0134 0.0126 0.0113 0.0217 0.0379 0.051 0.061 0.0662 0.0687 0.068 0.0678 0.069 0.0678 0.0687 0.0664 0.0594 0.0491 0.036 0.0222 0.0108 0.0098 0.0107 0.0097 0.0263 0.0417 0.0582 0.0722 0.0823 0.0876 0.0894 0.0895 0.0876 0.0848 0.0825 0.08 0.0775 0.0768 0.0769 0.0781 0.0798 0.0811 0.0824 0.0826 0.0829 0.0815 0.0811 0.0779 0.0764 0.0739 0.0724 0.0719 0.0706 0.0712 0.0698 0.0685 0.0686 0.0668 0.0656 0.0641 0.061 0.0591 0.057 0.0575 0.0602 0.0621 0.0643 0.0644 0.065 0.0644 0.0654 0.0674 0.0671 0.0684 0.0673 0.0664 0.0661 0.0651 0.0648 0.0636 0.0629 0.0612 0.0608 0.0619 0.0618 0.0626 0.0649 0.0669 0.068 0.0677 0.0677 0.0649 0.0619 0.0589 0.0536 0.046 0.0369 0.0282 0.0176 0.0093 0.0064 0.0061 0.0059 0.0054 0.004 0.0055 0.0228 0.0368 0.0489 0.0612 0.0674 0.0724 0.0743 0.0728 0.0722 0.0685 0.0625 0.0577 0.0529 0.049 0.0482 0.049 0.0493 0.051 0.0532 0.0535 0.0531 0.0526 0.0499 0.0481 0.0459 0.0441 0.0405 0.0379 0.0353 0.0331 0.0304 0.0294 0.0277 0.0262 0.0252 0.0247 0.0234 0.0233 0.0234 0.0219 0.0208 0.0193 0.0165 0.0137 0.0104 0.0069 0.0039 0.0014 0.0009 0.0005 0.0009 0.0005 0.0004 0.001 0.0007 0.0003 0.0004 0.0004 0.0007 0.0005 0.0009 0.0006 0.0006 0.0006 0.0006 0.0003 0.001 0.0003 0.0003 0.0004 0.0012 0.0006 0.0008 0.0013 0.0011 0.0007 0.001 0.0005", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0002 0.0002 0.0001 0.0001 0.0001 0.0 0.0001 0.0002 0.0001 0.0003 0.0003 0.0003 0.0005 0.0008 0.0012 0.0012 0.0017 0.0018 0.0021 0.0024 0.0026 0.0031 0.0034 0.0037 0.0038 0.004 0.004 0.0035 0.0035 0.0034 0.0034 0.0037 0.0036 0.0037 0.0034 0.0031 0.0031 0.0024 0.002 0.0015 0.0013 0.001 0.001 0.0009 0.0009 0.0016 0.0024 0.0036 0.0052 0.0059 0.006 0.0054 0.0035 0.003 0.0026 0.0032 0.0028 0.1227 0.0016 0.0031 0.0024 0.0018 0.002 
0.0041 0.0088 0.0159 0.0184 0.021 0.0218 0.02 0.0174 0.0135 0.0092 0.0046 0.003 0.0028 0.0024 0.002 0.002 0.0021 0.0019 0.0016 0.0012 0.0009 0.0014 0.0049 0.0102 0.0142 0.0176 0.0189 0.018 0.0149 0.0106 0.0068 0.0035 0.0022 0.0017 0.0016 0.0015 0.0013 0.0011 0.0012 0.0012 0.0013 0.0014 0.0012 0.0009 0.0009 0.0008 0.0008 0.0009 0.0008 0.0008 0.001 0.0011 0.0007 0.0008 0.0007 0.0005 0.0009 0.0006 0.0004 0.0006 0.0006 0.0007 0.0006 0.0005 0.002 0.0051 0.0081 0.0101 0.0107 0.0097 0.0075 0.005 0.0022 0.0016 0.0012 0.0009 0.001 0.001 0.001 0.0009 0.0007 0.0007 0.0013 0.0042 0.0076 0.0098 0.0119 0.0126 0.0128 0.012 0.0098 0.0077 0.0039 0.0016 0.0015 0.0012 0.0011 0.0012 0.0009 0.001 0.001 0.0014 0.0011 0.0014 0.0024 0.0053 0.0075 0.0084 0.0089 0.0082 0.0065 0.0045 0.0033 0.0025 0.0024 0.0022 0.0021 0.0021 0.0023 0.0018 0.0019 0.0019 0.0016 0.002 0.0021 0.0019 0.0018 0.0015 0.0016 0.0015 0.0016 0.0018 0.0017 0.0017 0.0017 0.0019 0.0018 0.0018 0.0017 0.0016 0.0015 0.0015 0.0016 0.0016 0.0016 0.0017 0.0018 0.0016 0.0018 0.0017 0.0016 0.0013 0.001 0.001 0.001 0.001 0.001 0.0009 0.0009 0.0012 0.0013 0.0012 0.0012 0.0013 0.0013 0.0015 0.0015 0.0017 0.0014 0.0016 0.0015 0.0016 0.0014 0.0015 0.0016 0.0014 0.0015 0.0014 0.0013 0.0012 0.0012 0.0009 0.0014 0.0034 0.005 0.0061 0.0066 0.0065 0.0061 0.0053 0.0047 0.0042 0.0032 0.0024 0.0017 0.0015 0.0013 0.0011 0.0013 0.0014 0.0016 0.0019 0.0019 0.0018 0.0012 0.0011 0.001 0.0011 0.001 0.0011 0.0014 0.001 0.0012 0.0011 0.0011 0.001 0.0009 0.0012 0.0009 0.0011 0.0008 0.0007 0.0008 0.0008 0.0005 0.0004 0.0004 0.0005 0.0005 0.0003 0.0004 0.0003 0.0002 0.0003 0.0001 0.0016 0.0025 0.0026 0.0028 0.0021 0.001 0.0004 0.0002 0.0002 0.0003 0.0001 0.0003 0.0003 0.0003 0.0002 0.0001 0.0002 0.0002 0.0001 0.0001 0.0001 0.0 0.0002 0.0001 0.0001 0.0 0.0 0.0001 0.0002 0.0001 0.0002 0.0001 0.0001 0.0003 0.0006", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 144.382, + "text": "AP 浓 醉 到 五 更 不 还 家 SP", + "ph_seq": "AP n ong z ui d ao w u g eng b u h uan j ia SP", + "ph_dur": "0.29 0.09 0.178 0.06 0.193 0.045 0.178 0.06 0.178 0.06 0.193 0.045 0.163 0.075 0.402 0.075 1.428 0.095", + "ph_num": "2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest E3 E3 F#3 G3 F#3 E3 D3 E3 rest", + "note_dur": "0.38 0.238 0.238 0.238 0.238 0.238 0.238 0.477 1.428 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0", + "f0_seq": "129.1 129.1 129.0 128.8 129.0 129.3 129.0 129.0 128.9 128.9 128.9 128.5 128.8 128.5 128.4 128.7 128.7 129.0 128.8 128.9 128.9 129.2 129.2 129.5 129.3 129.4 129.3 129.0 128.6 128.3 125.1 125.5 128.2 130.7 135.6 140.7 145.9 151.8 157.6 161.6 164.3 166.1 166.8 167.0 165.7 164.2 160.7 155.9 151.8 154.1 157.5 160.7 164.0 166.6 166.5 163.6 163.7 163.5 164.0 164.3 164.2 164.4 165.0 164.1 163.4 163.2 160.8 157.7 153.5 161.5 168.0 175.2 181.8 183.4 184.7 184.3 183.6 183.8 185.3 185.9 186.3 186.2 186.3 186.6 186.7 186.5 186.6 185.8 185.2 183.2 182.8 184.3 188.3 192.9 196.2 198.5 200.2 199.1 197.7 196.0 195.9 196.1 195.4 195.9 195.9 195.6 196.1 195.7 192.4 183.4 185.0 189.2 194.5 199.0 203.1 195.9 190.1 190.6 188.6 187.2 184.7 183.9 184.1 184.6 185.9 187.3 187.7 185.9 180.5 174.3 166.3 164.2 160.7 162.2 165.3 162.0 162.5 162.4 163.4 164.7 165.7 167.0 166.9 166.0 164.4 162.0 159.0 153.6 148.6 147.0 146.7 147.1 147.6 147.7 148.0 147.3 144.1 143.4 143.7 144.2 144.8 146.2 147.1 147.2 146.3 146.2 145.7 145.6 145.1 144.9 145.3 146.5 146.3 146.0 146.5 147.4 147.9 147.8 147.3 147.4 147.1 147.4 147.7 147.5 147.1 147.5 147.7 147.2 144.6 139.9 143.0 146.2 149.8 153.7 158.4 162.6 165.4 167.7 
167.5 167.5 167.3 166.3 165.3 164.6 163.0 161.7 161.0 159.8 159.0 160.3 161.4 163.5 164.9 166.5 166.9 166.7 165.9 165.0 164.6 163.9 163.2 163.0 163.0 161.8 161.3 161.0 162.1 163.0 164.4 165.6 166.9 167.3 167.7 167.7 166.5 165.2 164.0 163.1 162.3 162.1 162.1 162.1 161.9 161.7 162.1 163.3 163.8 164.9 166.6 167.9 168.8 169.2 169.2 169.2 168.3 167.9 164.5 162.2 160.9 160.1 159.7 160.7 163.6 166.6 169.3 171.7 173.4 174.8 175.0 174.4 171.5 166.5 162.0 160.0 158.7 158.4 158.8 160.8 164.7 168.7 172.7 175.0 176.3 176.7 175.4 172.3 167.1 160.3 156.1 153.4 153.4 154.6 157.1 160.0 165.2 171.5 176.4 180.4 180.4 177.7 173.0 168.4 162.3 156.2 152.6 151.3 152.0 154.1 157.7 163.4 169.6 171.3 170.1 171.1 171.8 172.3 170.1 167.4 165.9 163.6 163.7 164.0 163.1 162.7 163.0 162.9 163.0 163.7", + "f0_timestep": "0.011609977324263039", + "energy": "0.001 0.0004 0.0013 0.0017 0.0026 0.0029 0.0036 0.0045 0.0044 0.0053 0.0059 0.0062 0.0064 0.007 0.0078 0.0073 0.0072 0.0065 0.0054 0.005 0.0051 0.0036 0.0033 0.0025 0.0017 0.0015 0.0041 0.0164 0.0264 0.0317 0.0389 0.0437 0.047 0.0499 0.0542 0.0585 0.0605 0.0619 0.0616 0.059 0.0585 0.0586 0.0581 0.0591 0.0592 0.0549 0.0499 0.0412 0.0283 0.0182 0.0145 0.0142 0.024 0.041 0.0542 0.0657 0.0735 0.0755 0.0758 0.0757 0.0735 0.073 0.0728 0.0715 0.0702 0.0638 0.0541 0.0414 0.0249 0.0101 0.008 0.0078 0.0323 0.0536 0.0699 0.0808 0.0854 0.084 0.0816 0.0803 0.079 0.0771 0.078 0.0771 0.0755 0.0752 0.0723 0.0698 0.0653 0.0591 0.0547 0.0505 0.05 0.0586 0.0667 0.0734 0.0768 0.0752 0.0726 0.0701 0.0704 0.0685 0.0678 0.0678 0.066 0.066 0.0624 0.0543 0.0433 0.031 0.0184 0.0116 0.0115 0.0215 0.0413 0.0546 0.0643 0.0694 0.0695 0.0682 0.0663 0.0653 0.0623 0.0612 0.0602 0.0603 0.0606 0.0592 0.0544 0.0458 0.0351 0.0221 0.0167 0.0203 0.0363 0.0477 0.0571 0.0642 0.0643 0.0642 0.0627 0.06 0.0595 0.0583 0.0565 0.0512 0.0446 0.0354 0.0251 0.0173 0.0092 0.0075 0.0073 0.0069 0.023 0.0386 0.0509 0.0595 0.0644 0.0639 0.0637 0.0644 0.0641 0.0627 0.0613 0.0595 0.0579 0.0575 0.0563 0.0567 0.056 0.0548 0.0543 0.0536 0.0526 0.0527 0.0531 0.0532 0.0536 0.0533 0.0537 0.0541 0.0534 0.054 0.0532 0.0532 0.0503 0.0449 0.037 0.0282 0.0187 0.0123 0.0101 0.008 0.0094 0.0261 0.0414 0.0537 0.0628 0.0661 0.0643 0.0626 0.0629 0.0624 0.0618 0.061 0.0585 0.0575 0.0562 0.0561 0.0576 0.0587 0.061 0.0612 0.0615 0.0617 0.0616 0.0619 0.062 0.0624 0.0624 0.0632 0.0629 0.063 0.0624 0.0629 0.0636 0.0636 0.065 0.0647 0.0655 0.0669 0.0675 0.0695 0.0703 0.0702 0.07 0.0689 0.0688 0.0695 0.0704 0.0714 0.0712 0.0718 0.0715 0.0711 0.0713 0.0711 0.071 0.0716 0.072 0.0716 0.0719 0.0714 0.0719 0.0724 0.0724 0.0726 0.0705 0.0692 0.0673 0.0653 0.0632 0.0626 0.0611 0.0597 0.0591 0.0584 0.0577 0.0571 0.0564 0.0564 0.0549 0.0532 0.0515 0.0488 0.0472 0.0456 0.0449 0.0439 0.0437 0.0429 0.042 0.0414 0.0399 0.0395 0.0376 0.0367 0.0357 0.0344 0.0327 0.0309 0.0298 0.0291 0.0279 0.0263 0.0254 0.0247 0.0243 0.0248 0.0249 0.0249 0.0242 0.0232 0.0217 0.0198 0.0181 0.0171 0.0163 0.0163 0.0162 0.016 0.0156 0.0161 0.0163 0.0153 0.0142 0.0116 0.0094 0.0059 0.0024 0.0009 0.0001 0.0 0.0 0.0002 0.0001 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0004 0.0008 0.0012 0.0018 0.0028 0.0034 0.0044 0.0049 0.0052 0.0061 0.0068 0.0073 0.0073 0.0073 0.0071 0.007 0.0069 0.0067 0.0062 0.0051 0.0044 0.0034 0.0027 0.0022 0.0009 0.0016 0.0041 0.0062 0.0064 0.0057 0.0038 0.0006 0.0005 0.0004 0.0009 0.0009 0.0009 0.0008 0.0007 0.0005 0.0006 0.0006 0.0006 0.0005 0.0005 0.0006 0.0005 0.0028 0.0065 0.0093 0.0132 0.0153 0.0145 0.0126 0.0082 0.0032 0.0018 
0.0017 0.0018 0.0016 0.0016 0.0017 0.0016 0.0017 0.0014 0.0011 0.0011 0.0011 0.001 0.0018 0.0039 0.0042 0.0047 0.0046 0.0035 0.0029 0.0025 0.0022 0.0021 0.0018 0.0017 0.0015 0.0013 0.0012 0.001 0.0009 0.0008 0.0006 0.0007 0.0005 0.0005 0.0005 0.0005 0.0004 0.0006 0.0006 0.0005 0.0005 0.0005 0.0006 0.0005 0.0005 0.0003 0.0005 0.0005 0.0006 0.0007 0.0008 0.0018 0.0043 0.0068 0.0088 0.0104 0.0103 0.0088 0.0066 0.003 0.0015 0.001 0.0007 0.0008 0.0007 0.0006 0.0005 0.0005 0.0004 0.0002 0.0004 0.0002 0.0001 0.0 0.0001 0.0002 0.0006 0.001 0.0012 0.0009 0.0009 0.0007 0.0006 0.0006 0.0006 0.0005 0.0005 0.0006 0.0006 0.0006 0.001 0.0035 0.0048 0.0063 0.0071 0.0067 0.0062 0.0047 0.0038 0.0027 0.0016 0.0015 0.0012 0.0012 0.0013 0.001 0.0009 0.0009 0.001 0.0012 0.0013 0.0011 0.0014 0.0012 0.001 0.0009 0.0008 0.0009 0.0007 0.0007 0.0005 0.0006 0.0006 0.0005 0.0002 0.0004 0.0005 0.0005 0.0005 0.0004 0.0006 0.0011 0.0037 0.007 0.0084 0.0095 0.0094 0.0076 0.0061 0.0044 0.0031 0.0025 0.0022 0.002 0.0021 0.0015 0.0016 0.0012 0.0013 0.0014 0.0014 0.0014 0.0014 0.0015 0.0017 0.0015 0.0018 0.0017 0.0017 0.0018 0.0016 0.0014 0.0015 0.0011 0.0012 0.0012 0.0013 0.0013 0.0013 0.0014 0.0013 0.0016 0.0015 0.0014 0.0015 0.0015 0.0014 0.0014 0.0015 0.0014 0.0015 0.0015 0.0015 0.0013 0.0014 0.0013 0.0013 0.0012 0.0012 0.0013 0.0013 0.0017 0.0016 0.0018 0.0017 0.0016 0.0016 0.0018 0.002 0.002 0.0019 0.0017 0.0017 0.0016 0.0016 0.0018 0.0018 0.0017 0.0018 0.0017 0.0017 0.0015 0.0017 0.0018 0.002 0.0018 0.0019 0.0015 0.0016 0.0014 0.0012 0.0014 0.0012 0.001 0.0011 0.0008 0.0009 0.0009 0.0008 0.0009 0.001 0.0008 0.0007 0.0008 0.0007 0.0006 0.0006 0.0006 0.0007 0.0007 0.0006 0.0004 0.0004 0.0004 0.0005 0.0004 0.0005 0.0005 0.0004 0.0002 0.0001 0.0 0.0001 0.0003 0.0002 0.0004 0.0003 0.0002 0.0004 0.0005 0.0011 0.0015 0.0015 0.0013 0.001 0.0006 0.0003 0.0001 0.0002 0.0003 0.0005", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 148.448, + "text": "SP AP 漫 说 道 无 瑕 少 年 事 AP 敢 夸 玉 带 宝 剑 青 骢 马 SP", + "ph_seq": "SP AP m an sh uo d ao w u x ia sh ao n ian sh ir AP g an k ua y v d ai b ao j ian q ing c ong m a SP", + "ph_dur": "0.11 0.4 0.09 0.163 0.075 0.193 0.045 0.416 0.06 0.163 0.075 0.163 0.075 0.371 0.105 0.402 0.075 0.476 0.178 0.06 0.178 0.06 0.163 0.075 0.178 0.06 0.193 0.045 0.163 0.075 0.163 0.075 0.194 0.045 0.178 0.06 1.19 0.19", + "ph_num": "1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 D4 B3 B3 rest B3 A3 G3 G3 E3 G3 A3 G3 B3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.238 0.238 0.476 0.238 0.239 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.238 0.239 0.238 1.19 0.19", + "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "171.5 172.2 172.1 171.6 171.4 171.7 171.5 171.7 171.7 171.6 171.5 172.0 171.6 171.8 171.5 171.4 171.5 171.4 171.3 171.2 171.3 171.3 171.1 171.2 171.4 171.2 171.2 171.5 171.1 171.4 171.1 171.0 170.8 171.1 171.0 171.0 171.2 170.8 171.0 170.3 170.9 170.5 169.9 169.4 169.0 168.5 168.0 167.7 165.7 165.7 169.5 173.2 179.6 188.9 199.0 208.4 220.1 230.7 240.7 246.1 247.9 246.7 245.0 242.6 239.3 236.0 233.4 230.2 227.8 228.6 228.5 229.3 230.6 224.8 220.3 219.2 217.9 218.6 219.2 219.6 220.8 221.0 220.1 219.9 219.5 218.3 210.6 218.0 226.4 233.6 244.3 254.5 252.2 253.8 250.8 252.3 252.3 251.9 248.8 244.7 240.8 237.6 235.9 236.5 237.8 240.7 245.2 251.0 254.2 255.8 253.3 249.4 243.1 238.3 235.6 235.6 236.1 237.5 240.4 243.3 245.7 247.7 248.4 248.1 246.0 244.0 240.2 236.1 233.6 232.5 231.5 235.2 236.5 240.6 243.9 245.6 246.1 
247.2 248.0 247.9 248.1 247.4 247.5 248.4 246.8 244.4 240.6 232.3 229.8 230.3 229.7 229.6 228.8 227.8 227.0 218.3 218.5 218.6 219.5 220.9 222.3 223.4 224.0 223.3 221.2 219.2 215.8 208.6 212.7 217.9 223.7 229.7 236.2 243.4 250.4 258.4 251.7 253.0 252.3 252.6 249.6 244.7 240.2 238.5 240.0 242.2 244.3 246.3 248.7 249.3 249.0 247.0 247.2 245.9 244.8 244.1 243.1 243.2 243.4 245.7 246.4 247.8 248.8 248.9 249.0 247.7 246.9 248.3 250.5 250.6 250.6 251.4 255.6 263.4 273.0 285.1 297.6 302.1 303.0 302.8 300.2 296.1 293.2 292.3 292.6 293.9 294.2 296.0 298.2 300.2 298.8 295.9 289.7 281.1 268.8 255.1 245.0 243.1 242.1 241.2 243.1 246.0 248.5 250.4 252.1 252.6 251.9 250.1 246.0 242.1 233.8 232.5 237.6 241.3 245.1 249.1 253.7 255.8 253.6 253.6 253.0 251.7 248.8 245.8 242.3 239.0 234.5 232.1 232.7 236.1 240.1 246.2 252.2 257.8 260.6 261.9 259.6 254.8 246.6 237.0 227.7 224.2 223.6 227.9 233.5 241.8 250.1 259.0 266.9 270.1 268.8 260.9 250.3 235.7 221.2 215.6 212.0 210.1 208.1 205.2 202.8 199.9 196.9 194.1 191.1 188.7 185.9 182.7 180.3 177.5 174.7 172.1 169.2 165.8 162.8 160.7 158.0 156.2 153.1 157.9 163.6 175.7 187.3 200.3 214.7 230.4 243.1 250.6 250.3 249.1 245.6 241.6 235.6 228.9 225.0 227.6 231.4 235.4 237.6 232.1 226.0 223.6 221.1 221.4 221.0 219.3 218.1 218.5 219.0 218.6 218.1 218.1 215.1 209.9 205.5 199.2 193.4 189.9 187.5 186.8 189.3 192.1 194.2 195.5 196.6 197.5 197.5 197.3 196.9 195.2 195.0 195.4 190.7 193.3 194.5 196.7 197.6 200.8 203.0 204.2 201.9 199.5 198.0 196.5 195.9 196.5 196.4 196.3 196.7 196.1 194.4 191.6 187.7 181.0 178.6 174.7 172.8 168.3 167.1 167.9 163.5 163.1 161.9 162.0 163.8 166.0 167.0 166.8 165.8 165.6 164.9 164.7 164.9 164.9 162.2 159.3 164.8 173.1 180.9 188.3 196.7 200.1 197.0 196.1 195.5 195.5 194.8 195.3 194.7 194.8 196.3 196.9 196.2 194.1 189.1 194.3 200.4 206.0 212.4 218.8 225.1 231.4 228.2 223.5 221.9 221.3 220.9 219.4 219.1 219.5 220.0 220.1 219.7 217.9 215.0 207.9 197.5 192.8 194.8 195.6 197.4 199.7 197.1 195.8 194.6 195.0 194.7 195.3 195.9 196.1 196.5 196.7 197.6 198.2 199.2 198.2 197.0 195.8 196.5 197.4 199.7 202.7 208.2 211.7 215.2 217.3 219.3 220.9 220.5 220.2 220.8 221.0 220.4 221.0 220.8 221.0 221.9 222.4 222.1 222.6 223.8 226.9 232.4 239.0 244.3 247.9 250.3 250.6 251.6 251.4 249.7 247.3 244.5 242.5 240.7 240.6 240.9 242.4 244.0 246.9 250.1 254.2 257.7 258.6 257.6 254.1 250.8 244.9 239.5 236.5 235.5 236.5 238.5 241.7 244.1 246.1 248.0 250.0 251.3 251.2 248.9 246.9 245.0 242.7 241.0 240.0 241.3 245.1 248.0 248.2 247.4 247.7 248.8 249.4 249.5 247.5 245.4 244.6 244.5 245.7 248.5 251.6 251.1 250.7 251.2 252.7 252.7 251.1 248.0 246.2 243.7 243.9 244.8 247.9 251.4 254.0 256.0 258.2 257.5 254.5 250.3 246.1 241.7 235.5 229.0 223.0 210.8 211.1 210.8 210.8 210.8 210.2 209.8 209.4 209.1 208.4 208.4 207.2 208.0 207.8 207.9 208.6", + "f0_timestep": "0.011609977324263039", + "energy": "0.0 0.0004 0.0005 0.0006 0.0009 0.0004 0.0003 0.0 0.0006 0.0008 0.0003 0.001 0.0012 0.0009 0.001 0.0011 0.0017 0.0014 0.0016 0.002 0.002 0.0017 0.0023 0.0023 0.0023 0.0023 0.0027 0.0021 0.0019 0.0018 0.0018 0.002 0.0021 0.0023 0.0023 0.0014 0.0012 0.0012 0.0004 0.0006 0.0006 0.0005 0.0001 0.0003 0.0006 0.0005 0.002 0.0176 0.0289 0.0371 0.0466 0.0517 0.0552 0.0568 0.0573 0.0566 0.0529 0.0503 0.0495 0.0509 0.053 0.0545 0.0553 0.0553 0.0542 0.0489 0.0418 0.033 0.0215 0.0181 0.0166 0.0226 0.0395 0.0552 0.0682 0.077 0.081 0.0833 0.0836 0.0842 0.0809 0.0792 0.0775 0.0754 0.069 0.0576 0.0433 0.0239 0.0089 0.0051 0.0084 0.0377 0.0593 0.0779 0.0912 0.0958 0.0958 0.0919 0.0895 0.0869 0.083 0.0811 0.0777 
0.0766 0.077 0.0755 0.0766 0.0776 0.0803 0.0808 0.0826 0.0815 0.0802 0.0801 0.0773 0.0766 0.075 0.0728 0.0731 0.0747 0.0756 0.0773 0.078 0.0763 0.0757 0.0724 0.0705 0.0681 0.0654 0.0629 0.0624 0.0636 0.0638 0.0665 0.0682 0.068 0.0682 0.0687 0.0683 0.0694 0.0694 0.0692 0.0703 0.0689 0.07 0.0681 0.0608 0.0496 0.0353 0.0201 0.0156 0.0158 0.0146 0.0284 0.0457 0.0574 0.0711 0.0763 0.0802 0.0812 0.0804 0.0807 0.0791 0.0774 0.075 0.0712 0.065 0.0539 0.042 0.0277 0.0208 0.0197 0.0186 0.0188 0.0373 0.0555 0.0677 0.0808 0.0867 0.0889 0.0902 0.0895 0.0874 0.0849 0.0848 0.0853 0.0874 0.0911 0.0916 0.0941 0.0951 0.0961 0.0948 0.0921 0.0895 0.0843 0.0819 0.0828 0.0839 0.0862 0.089 0.0888 0.0877 0.0866 0.0836 0.08 0.0782 0.0752 0.0731 0.0723 0.0719 0.0724 0.072 0.0751 0.0784 0.0823 0.0872 0.092 0.0981 0.1028 0.1057 0.1059 0.106 0.1046 0.1037 0.104 0.101 0.0987 0.0942 0.0882 0.0839 0.0804 0.0797 0.0807 0.0799 0.0796 0.0802 0.0802 0.0804 0.0798 0.0785 0.0764 0.0753 0.0736 0.0729 0.0741 0.0751 0.0758 0.0717 0.0611 0.0485 0.0344 0.0259 0.0252 0.025 0.028 0.0407 0.0545 0.0689 0.0812 0.0876 0.0911 0.0913 0.0889 0.0863 0.0822 0.0793 0.0757 0.0749 0.073 0.0726 0.0731 0.0739 0.0754 0.0761 0.0765 0.0756 0.0733 0.0709 0.0673 0.0643 0.0587 0.054 0.0509 0.0486 0.0469 0.0476 0.0485 0.0501 0.0525 0.0533 0.052 0.0483 0.0408 0.0321 0.0212 0.0104 0.0039 0.0026 0.0033 0.0036 0.0041 0.0042 0.0052 0.0052 0.0045 0.0032 0.003 0.0017 0.0015 0.0012 0.0012 0.0012 0.0006 0.0021 0.0018 0.0113 0.03 0.048 0.0629 0.0706 0.0737 0.0703 0.0654 0.0617 0.0591 0.0592 0.0601 0.0646 0.0661 0.0679 0.0663 0.0586 0.0496 0.0352 0.02 0.01 0.0217 0.042 0.0584 0.0704 0.0777 0.0792 0.0783 0.0765 0.075 0.0727 0.0726 0.0717 0.07 0.0692 0.0668 0.066 0.0634 0.059 0.0533 0.0466 0.0472 0.0531 0.0622 0.0686 0.0742 0.0765 0.0769 0.0761 0.0766 0.0763 0.0747 0.0746 0.0703 0.0621 0.0522 0.0384 0.0194 0.0065 0.0036 0.0058 0.017 0.039 0.0571 0.0713 0.0795 0.0822 0.0806 0.076 0.0753 0.0718 0.0689 0.068 0.065 0.063 0.0591 0.0509 0.0405 0.0283 0.0158 0.007 0.007 0.018 0.0359 0.0491 0.0596 0.067 0.0676 0.0667 0.0653 0.0637 0.0633 0.0615 0.0592 0.0567 0.0526 0.0487 0.0408 0.0335 0.0248 0.0155 0.0102 0.0089 0.0182 0.0353 0.0481 0.0606 0.0668 0.0683 0.0677 0.0644 0.0638 0.0607 0.0589 0.0577 0.0552 0.0517 0.0448 0.0341 0.0237 0.0151 0.0133 0.0127 0.0115 0.027 0.0454 0.0573 0.0671 0.0701 0.0696 0.0679 0.0686 0.0699 0.0696 0.0701 0.0699 0.0694 0.0697 0.0665 0.0598 0.0491 0.037 0.026 0.0168 0.0279 0.0429 0.0551 0.0639 0.0697 0.0704 0.0684 0.0681 0.0671 0.0656 0.0645 0.0635 0.0642 0.0653 0.0667 0.0683 0.0668 0.0669 0.0657 0.0662 0.0667 0.068 0.0726 0.0811 0.0891 0.0959 0.1024 0.1052 0.1078 0.1087 0.1104 0.1086 0.1064 0.1038 0.1007 0.0985 0.0947 0.0894 0.0823 0.0745 0.0696 0.0655 0.064 0.0651 0.0668 0.0721 0.0779 0.082 0.0869 0.0882 0.0892 0.0889 0.0861 0.0839 0.0793 0.0754 0.072 0.0699 0.0703 0.0721 0.0764 0.0797 0.0844 0.088 0.0908 0.0907 0.0896 0.0861 0.0823 0.078 0.0732 0.07 0.0677 0.0675 0.0713 0.0749 0.0772 0.0795 0.0787 0.0786 0.0795 0.0776 0.0744 0.0704 0.0638 0.0604 0.0594 0.0597 0.0608 0.0618 0.064 0.063 0.064 0.0639 0.0632 0.0628 0.0609 0.0579 0.0552 0.0538 0.0538 0.0548 0.0554 0.0558 0.0555 0.0563 0.0568 0.0566 0.0553 0.0514 0.0468 0.0426 0.039 0.0366 0.0363 0.0369 0.0369 0.0369 0.0361 0.0351 0.0338 0.0315 0.0283 0.0247 0.0204 0.0153 0.0108 0.0072 0.0041 0.0019 0.0005 0.0008 0.0011 0.0007 0.0006 0.0004 0.0 0.0001 0.0 0.0 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0 0.0 0.0001 0.0001 0.0001 0.0002 0.0001 0.0003 0.0003 0.0004 0.0005 
0.0007 0.0009 0.0011 0.0011 0.0011 0.0014 0.0015 0.0017 0.0017 0.0019 0.0022 0.0024 0.0027 0.0027 0.0026 0.0025 0.0023 0.0022 0.002 0.002 0.0021 0.0021 0.0019 0.0019 0.0018 0.0015 0.0014 0.0014 0.0012 0.0009 0.0009 0.0007 0.0004 0.0005 0.0002 0.0003 0.0001 0.0003 0.0004 0.0005 0.001 0.0017 0.0022 0.0022 0.002 0.0016 0.0014 0.001 0.0006 0.0003 0.0005 0.0008 0.001 0.0016 0.0067 0.0108 0.0138 0.0165 0.0177 0.0171 0.0155 0.0129 0.0093 0.0056 0.0017 0.0012 0.0012 0.0012 0.0013 0.0012 0.0011 0.001 0.0012 0.0011 0.0011 0.0017 0.0026 0.0032 0.0037 0.0045 0.0049 0.0044 0.004 0.0029 0.0024 0.0021 0.002 0.0019 0.002 0.0019 0.0016 0.0015 0.0012 0.0013 0.0013 0.0014 0.0015 0.0014 0.0017 0.0014 0.0015 0.0014 0.0012 0.0011 0.0008 0.0008 0.0009 0.0009 0.001 0.0009 0.0009 0.0009 0.0009 0.0007 0.0006 0.0007 0.0005 0.0005 0.0005 0.0005 0.0004 0.0004 0.0003 0.0004 0.0005 0.0004 0.0002 0.0005 0.0004 0.0005 0.0006 0.0006 0.0009 0.0008 0.0013 0.0034 0.0067 0.0107 0.0136 0.0154 0.0155 0.0141 0.0114 0.0081 0.0056 0.003 0.0026 0.0024 0.0019 0.0019 0.0017 0.0017 0.0019 0.002 0.002 0.0025 0.0068 0.0132 0.0191 0.0225 0.0234 0.0221 0.0175 0.0138 0.0099 0.0048 0.0037 0.003 0.0024 0.0023 0.0015 0.0019 0.0018 0.0019 0.002 0.002 0.0021 0.0021 0.0018 0.0018 0.0017 0.0015 0.001 0.0012 0.001 0.0009 0.0011 0.0012 0.001 0.0012 0.001 0.0012 0.001 0.001 0.0009 0.0009 0.0008 0.0006 0.0005 0.0003 0.0006 0.0009 0.0013 0.0015 0.0019 0.002 0.002 0.0021 0.0018 0.0016 0.0017 0.0016 0.0015 0.0015 0.0016 0.0021 0.0028 0.0026 0.0027 0.0024 0.0022 0.0018 0.002 0.0019 0.0018 0.0024 0.0023 0.002 0.0019 0.0014 0.0013 0.001 0.0009 0.0006 0.001 0.0009 0.0012 0.0018 0.0064 0.0112 0.016 0.0212 0.0244 0.0254 0.0235 0.019 0.0139 0.0081 0.0046 0.0036 0.0027 0.0021 0.0023 0.0022 0.0025 0.0025 0.0026 0.0025 0.0025 0.0025 0.0022 0.0022 0.0022 0.0019 0.0018 0.0018 0.002 0.0019 0.0021 0.0021 0.002 0.0019 0.0014 0.0009 0.001 0.0009 0.0009 0.0009 0.0009 0.0012 0.001 0.0008 0.0012 0.0013 0.0018 0.0018 0.0016 0.0021 0.0026 0.0033 0.0039 0.0045 0.0054 0.0053 0.0049 0.0041 0.0027 0.002 0.0016 0.0012 0.0008 0.0009 0.0006 0.0016 0.0036 0.0107 0.0211 0.0227 0.0229 0.0207 0.0065 0.0033 0.0028 0.0026 0.002 0.0017 0.0008 0.0006 0.0006 0.0005 0.0007 0.0017 0.0033 0.0046 0.0074 0.0076 0.0062 0.0053 0.0028 0.0017 0.0019 0.0021 0.002 0.0019 0.0016 0.0016 0.0015 0.0015 0.0014 0.0014 0.0014 0.0014 0.0013 0.0013 0.0013 0.0012 0.0009 0.0011 0.001 0.0013 0.0011 0.0012 0.0013 0.0013 0.0013 0.0011 0.001 0.0011 0.0009 0.001 0.0007 0.0013 0.0022 0.0023 0.0038 0.0055 0.0063 0.0061 0.0054 0.0041 0.0026 0.0025 0.002 0.0021 0.0019 0.002 0.002 0.002 0.002 0.0015 0.0016 0.0013 0.001 0.0012 0.0014 0.0017 0.0019 0.002 0.0021 0.002 0.002 0.002 0.0013 0.0014 0.0014 0.0016 0.0013 0.0014 0.0014 0.0011 0.0009 0.0009 0.0008 0.0028 0.0062 0.0086 0.0097 0.0097 0.0085 0.0062 0.004 0.0025 0.0016 0.0016 0.0013 0.001 0.001 0.0008 0.0009 0.0008 0.0009 0.0008 0.0031 0.0056 0.0083 0.0108 0.0116 0.0121 0.0111 0.0094 0.0072 0.0037 0.0018 0.0014 0.0011 0.001 0.0009 0.0006 0.0005 0.0005 0.0004 0.0005 0.0002 0.0004 0.0007 0.004 0.008 0.0097 0.0101 0.0091 0.0052 0.0025 0.0012 0.0011 0.0011 0.0009 0.0007 0.0006 0.0005 0.0005 0.0003 0.0003 0.0003 0.0007 0.0001 0.0006 0.0006 0.0005 0.0004 0.0005 0.0007 0.001 0.0014 0.0015 0.0018 0.0018 0.0018 0.0017 0.0017 0.0015 0.0014 0.0012 0.0011 0.001 0.0011 0.0011 0.0012 0.0013 0.0013 0.0015 0.0018 0.0021 0.002 0.002 0.0017 0.0015 0.0015 0.0015 0.0015 0.0013 0.0014 0.0014 0.0013 0.0013 0.0011 0.0012 0.001 0.0011 0.0011 0.001 0.0011 0.001 0.0011 0.0013 0.0013 0.0015 0.0014 
0.0016 0.0016 0.0009 0.0011 0.001 0.001 0.0011 0.0013 0.0012 0.0011 0.0012 0.0011 0.0011 0.0012 0.001 0.0009 0.0008 0.0008 0.0007 0.0007 0.0007 0.0007 0.0007 0.0007 0.0008 0.0009 0.0006 0.0006 0.0006 0.0008 0.0007 0.0007 0.0005 0.0006 0.0006 0.0006 0.0006 0.0008 0.0007 0.0007 0.0007 0.0008 0.0009 0.0005 0.0008 0.0005 0.0006 0.0005 0.0005 0.0005 0.0005 0.0004 0.0005 0.0006 0.0005 0.0005 0.0003 0.0005 0.001 0.0013 0.0013 0.0012 0.0006 0.0004 0.0 0.0002 0.0002 0.0001 0.0002 0.0002 0.0004 0.0006 0.0003 0.0007", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 155.334, + "text": "AP 眠 星 子 AP 枕 霜 花 AP 旧 茅 草 也 比 神 仙 榻 AP 交 游 任 意 南 北 SP", + "ph_seq": "AP m ian x ing z i0 AP zh en sh uang h ua AP j iu m ao c ao y E b i sh en x ian t a AP j iao y ou r en y i n an b ei SP", + "ph_dur": "0.29 0.09 0.163 0.075 0.193 0.045 1.191 0.163 0.075 0.163 0.075 0.163 0.075 0.953 0.163 0.075 0.178 0.06 0.178 0.06 0.163 0.075 0.654 0.06 0.163 0.075 0.387 0.09 0.193 0.045 1.19 0.148 0.09 0.163 0.075 0.163 0.075 0.179 0.06 0.416 0.06 0.193 0.045 1.666 0.095", + "ph_num": "2 2 2 1 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest B3 D4 E4 rest D4 E4 A3 rest B3 A3 B3 D4 B3 A3 B3 G3 rest G3 E3 G3 A3 B3 A3 rest", + "note_dur": "0.38 0.238 0.238 1.191 0.238 0.238 0.238 0.953 0.238 0.238 0.238 0.238 0.714 0.238 0.477 0.238 1.19 0.238 0.238 0.238 0.239 0.476 0.238 1.666 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "184.5 184.9 185.0 185.0 184.9 185.0 185.2 185.2 184.9 184.8 184.6 184.6 184.2 184.2 184.3 184.0 183.7 183.9 183.8 184.2 183.7 183.7 183.9 183.6 183.5 183.2 183.2 181.5 181.7 179.3 174.4 174.9 182.3 188.1 194.9 203.9 211.9 223.0 230.2 240.2 246.6 250.2 251.6 249.7 245.9 243.8 241.6 238.4 240.5 255.1 266.6 278.9 290.6 302.6 294.2 295.2 296.4 296.1 296.0 294.1 293.2 292.8 293.1 294.3 293.4 292.0 288.3 279.7 270.3 274.8 282.4 296.0 311.0 325.5 325.1 327.4 333.0 335.6 336.5 335.5 332.4 330.1 326.4 322.6 320.5 317.7 315.2 315.2 318.8 320.7 323.2 326.6 328.7 329.4 329.9 328.2 326.2 324.8 325.1 325.7 326.9 327.4 327.2 325.8 326.0 328.4 329.3 329.4 329.9 328.3 326.8 327.9 328.2 327.5 327.6 328.8 329.4 330.2 330.9 331.1 330.8 330.2 329.3 327.8 327.0 326.1 324.4 323.9 322.9 322.3 323.4 325.3 327.1 328.5 330.2 331.6 333.1 334.8 335.8 335.8 334.4 332.8 329.2 325.8 321.7 319.5 319.6 321.1 324.6 328.9 334.1 337.3 338.8 338.7 338.8 335.8 331.8 327.6 323.4 319.0 315.1 315.8 318.4 322.8 328.0 332.0 336.8 340.1 342.4 343.4 338.6 333.8 328.1 321.9 310.5 295.1 279.6 269.9 263.5 258.7 253.8 249.1 243.9 239.5 233.9 229.3 222.8 217.3 212.0 208.0 204.9 200.8 197.4 193.6 189.3 185.4 180.4 178.8 184.5 200.6 217.2 234.0 249.4 267.9 282.3 294.8 300.8 301.1 295.7 287.5 270.0 273.6 284.1 294.7 305.8 315.7 326.5 339.9 335.5 335.3 333.8 334.0 333.3 332.4 333.1 333.5 332.6 330.5 325.2 315.9 307.0 292.0 274.1 260.2 247.3 235.6 227.6 221.7 216.3 216.1 218.8 221.3 224.2 224.3 224.3 221.8 218.0 215.4 214.4 213.9 214.1 216.0 219.3 223.3 226.4 229.2 230.7 231.2 229.3 223.3 216.9 210.7 206.1 207.0 209.8 215.5 219.1 223.6 227.6 228.8 229.0 226.9 223.7 219.3 214.6 209.8 208.1 208.0 209.7 212.6 217.5 224.3 229.5 232.0 232.9 232.5 228.7 222.7 214.9 207.1 202.5 202.2 203.2 207.0 213.4 222.2 230.3 235.4 239.2 240.0 238.5 233.8 225.1 213.7 203.0 200.9 200.2 203.8 211.9 222.5 231.7 238.8 242.7 242.4 237.8 232.4 221.4 208.1 196.3 188.5 187.7 187.0 185.6 184.6 183.1 181.7 180.8 179.8 178.9 178.0 177.3 177.2 176.4 174.5 172.9 171.6 171.1 170.1 169.3 168.8 169.6 170.3 174.4 179.3 184.1 
194.5 206.5 220.5 232.4 242.7 248.2 248.6 245.5 241.2 236.0 231.9 227.6 224.3 222.5 220.5 220.9 219.4 219.1 218.1 219.2 220.9 222.1 221.5 219.0 217.0 216.3 216.1 216.8 218.3 217.8 214.7 207.5 215.3 224.2 233.7 242.5 253.3 248.5 243.9 243.2 244.6 245.3 245.5 245.9 245.4 245.2 245.2 246.7 247.7 249.8 248.5 245.6 243.4 245.0 248.4 252.3 252.8 254.6 257.7 258.6 259.8 260.1 259.8 260.3 260.1 259.6 260.1 259.9 259.7 261.6 264.8 268.6 275.9 283.3 291.0 297.1 301.5 302.2 301.9 301.0 297.6 294.0 291.8 289.4 287.3 286.5 288.1 290.9 292.8 294.9 297.0 298.8 298.7 297.9 296.5 293.0 290.4 288.9 288.5 289.0 289.9 290.1 291.0 293.2 296.7 297.9 297.0 294.6 291.7 288.7 286.5 272.0 260.7 257.6 258.2 257.0 256.4 256.4 250.7 248.7 247.0 245.6 246.5 248.0 248.6 247.8 247.0 246.7 246.3 246.2 245.0 239.4 222.3 221.2 221.1 221.2 221.5 222.5 221.7 221.3 219.3 221.4 222.5 220.8 220.0 217.9 216.0 215.8 216.3 215.8 216.0 216.2 217.4 219.4 220.6 220.5 219.2 218.8 217.6 217.2 217.8 218.7 219.1 218.6 218.6 218.7 219.2 220.2 221.3 222.4 221.2 218.5 213.2 211.9 221.0 229.7 238.8 247.5 258.2 268.7 256.4 251.3 248.1 246.4 247.5 248.0 247.9 248.1 248.5 247.2 248.3 246.6 242.2 235.1 227.9 217.4 208.3 200.9 193.5 186.0 184.6 187.2 189.7 192.7 194.6 196.4 197.6 197.7 195.2 193.8 192.6 193.7 194.4 194.3 194.9 196.0 196.5 196.3 197.2 197.1 196.8 197.5 197.0 197.0 195.9 195.2 195.8 194.8 192.7 191.2 190.4 189.4 189.2 190.0 191.9 194.9 198.9 202.0 204.7 205.6 206.1 204.0 199.5 195.0 190.5 186.9 185.2 185.3 188.1 191.7 197.5 204.3 208.7 211.4 212.0 210.2 204.4 198.2 192.3 187.1 182.5 180.8 182.5 187.1 193.6 199.9 205.6 209.7 211.6 210.7 204.4 197.1 188.6 179.2 176.2 175.2 178.3 183.5 191.0 200.2 207.8 211.9 213.7 213.1 209.6 201.3 189.9 179.2 173.0 172.7 174.9 179.0 187.4 200.6 210.7 215.9 217.1 213.1 204.3 190.1 177.2 176.0 174.3 172.7 172.4 170.9 169.6 168.5 167.1 166.4 165.6 164.7 163.2 161.8 160.8 159.3 158.2 157.3 156.2 155.7 154.6 154.6 154.0 154.4 155.1 155.2 156.1 157.6 163.4 172.1 181.3 188.7 194.7 198.3 197.5 193.8 188.9 185.0 181.5 177.9 174.8 172.5 169.6 166.9 163.8 162.1 162.5 164.6 166.6 165.2 164.8 164.2 163.9 164.0 164.1 165.1 165.6 166.9 167.8 166.3 164.9 165.3 166.0 171.5 177.3 184.1 189.2 192.0 194.5 194.1 194.2 192.6 191.6 190.3 190.2 190.1 191.2 191.4 192.9 193.9 194.0 193.4 193.7 195.8 197.1 195.0 195.8 195.3 195.6 195.5 195.4 194.0 193.5 192.5 193.5 194.8 195.3 197.4 201.1 206.6 212.0 217.9 220.5 221.4 219.9 218.3 217.2 215.4 216.3 216.9 219.3 220.1 220.4 220.3 220.4 220.8 219.6 218.7 216.9 215.7 215.2 215.1 217.2 220.2 229.3 238.6 246.1 246.4 245.6 246.7 247.1 245.3 244.8 245.0 244.8 244.0 243.2 242.9 240.5 237.1 232.3 224.2 217.2 207.8 207.6 218.9 209.6 205.1 200.0 198.5 199.2 199.5 199.8 200.7 201.1 201.5 202.0 202.4 202.6 202.8 202.1 202.0 202.1 202.4 203.6 206.1 212.2 219.7 226.4 231.0 231.7 230.2 226.8 221.9 217.1 213.1 210.7 210.8 212.8 216.2 220.4 225.8 229.9 232.0 232.3 230.6 227.1 221.5 214.5 209.6 206.7 206.3 209.0 213.0 216.3 218.2 220.3 221.9 221.6 221.0 219.1 218.0 217.0 215.7 215.7 218.4 222.5 224.0 222.9 221.9 220.4 218.2 216.0 214.3 214.3 214.5 214.4 214.7 216.5 219.8 223.8 227.4 229.1 229.1 227.7 224.5 221.3 217.9 214.8 212.6 212.1 213.3 215.0 217.4 220.3 222.8 225.9 227.7 228.6 227.0 224.4 221.6 218.5 215.8 213.7 212.5 212.6 214.3 216.0 218.3 221.2 224.6 227.4 229.0 229.9 229.0 227.2 223.7 221.2 217.8 214.6 213.2 212.8 212.6 214.6 218.5 223.8 228.1 231.2 232.3 230.8 228.0 223.4 218.6 214.0 211.3 210.2 210.6 213.1 219.2 223.2 228.4 232.6 230.9 232.1 233.1 231.7 231.4 230.1 227.4 222.0 219.3 
216.4 213.4 211.7 209.9 208.6 207.3 207.0", + "f0_timestep": "0.011609977324263039", + "energy": "0.0008 0.001 0.0014 0.0024 0.0029 0.0031 0.0043 0.0045 0.005 0.0055 0.006 0.0063 0.0072 0.0073 0.0077 0.0073 0.0066 0.0064 0.0055 0.0044 0.0036 0.0034 0.0026 0.0014 0.0016 0.0012 0.0015 0.0048 0.0292 0.0405 0.0521 0.0639 0.0689 0.0723 0.073 0.0724 0.069 0.0667 0.0645 0.0633 0.0648 0.0681 0.0709 0.073 0.0724 0.0668 0.0564 0.0442 0.0289 0.0183 0.0155 0.0162 0.0255 0.047 0.0636 0.0773 0.0855 0.0869 0.0873 0.0839 0.0845 0.0818 0.0791 0.0794 0.0785 0.079 0.0744 0.0643 0.0503 0.032 0.0194 0.0163 0.0194 0.0384 0.054 0.0676 0.0821 0.089 0.0952 0.0992 0.0996 0.0992 0.0968 0.0965 0.0945 0.0926 0.0937 0.0923 0.0943 0.0935 0.0931 0.0922 0.0921 0.0917 0.0922 0.0926 0.0931 0.0938 0.0944 0.0951 0.0964 0.0964 0.0966 0.0965 0.0968 0.0972 0.0986 0.0986 0.0994 0.1002 0.1004 0.1008 0.0996 0.101 0.1017 0.1014 0.1025 0.1017 0.1017 0.102 0.1017 0.1023 0.1036 0.104 0.1065 0.1067 0.1077 0.1073 0.1071 0.1073 0.1071 0.1062 0.1055 0.1053 0.1042 0.1028 0.1013 0.0984 0.0967 0.0965 0.0983 0.1014 0.1032 0.1053 0.1054 0.1028 0.1002 0.0966 0.0933 0.0924 0.0905 0.0894 0.087 0.0858 0.0851 0.0875 0.0888 0.0902 0.0886 0.0866 0.0846 0.0809 0.079 0.0764 0.0741 0.073 0.0726 0.0722 0.0717 0.0704 0.0699 0.0687 0.0676 0.0626 0.0533 0.0431 0.0292 0.0178 0.0116 0.0086 0.0062 0.0052 0.0046 0.0039 0.0036 0.0036 0.0035 0.0037 0.003 0.0031 0.002 0.0017 0.0037 0.0066 0.0104 0.0287 0.0462 0.0614 0.072 0.0789 0.0812 0.081 0.0802 0.075 0.0711 0.0709 0.0722 0.0763 0.076 0.0699 0.0579 0.0433 0.0292 0.0205 0.021 0.0197 0.023 0.0458 0.0643 0.081 0.0934 0.0974 0.0969 0.0944 0.0906 0.086 0.0819 0.079 0.0754 0.0702 0.0638 0.0536 0.0421 0.0309 0.0193 0.0146 0.033 0.0504 0.0678 0.0819 0.0882 0.0944 0.0981 0.1011 0.1032 0.1013 0.0985 0.0959 0.0954 0.0944 0.0935 0.0931 0.0926 0.0925 0.0929 0.0915 0.0906 0.0911 0.0913 0.0922 0.0926 0.0893 0.0869 0.0831 0.0794 0.0774 0.077 0.0778 0.0805 0.0822 0.0824 0.0839 0.0831 0.0819 0.0794 0.0757 0.0715 0.0695 0.0666 0.0655 0.0635 0.0644 0.0665 0.069 0.0709 0.0716 0.0716 0.0683 0.066 0.0603 0.0546 0.0508 0.0454 0.0426 0.0416 0.0412 0.0446 0.0466 0.05 0.051 0.0499 0.0484 0.0447 0.0403 0.0356 0.0316 0.028 0.0263 0.0251 0.0255 0.027 0.0285 0.0302 0.0307 0.031 0.0291 0.0261 0.0206 0.015 0.0104 0.0067 0.0048 0.0037 0.0029 0.0031 0.0027 0.0025 0.0029 0.0019 0.0026 0.0025 0.0021 0.0014 0.0018 0.0013 0.0029 0.0056 0.0089 0.0108 0.0252 0.0426 0.0581 0.071 0.0777 0.0801 0.0795 0.0766 0.0734 0.0712 0.0703 0.0715 0.0753 0.0774 0.0782 0.0787 0.0779 0.0772 0.0779 0.0783 0.0807 0.0835 0.0856 0.0875 0.0885 0.0879 0.0903 0.092 0.0918 0.0945 0.0927 0.0931 0.0922 0.088 0.0835 0.0749 0.0619 0.0476 0.0348 0.0223 0.0187 0.026 0.046 0.0621 0.0768 0.0876 0.092 0.095 0.0967 0.0977 0.0952 0.092 0.0895 0.0867 0.0858 0.0827 0.0792 0.0753 0.0737 0.0737 0.0754 0.08 0.0862 0.0904 0.0947 0.0967 0.0989 0.1014 0.1019 0.1024 0.1026 0.1032 0.1017 0.1009 0.0963 0.0913 0.0872 0.0822 0.0823 0.0848 0.0893 0.0932 0.0962 0.0975 0.099 0.1003 0.1006 0.0998 0.0963 0.0941 0.0921 0.092 0.0924 0.0937 0.0957 0.097 0.1007 0.1018 0.1054 0.1091 0.113 0.117 0.118 0.1195 0.1185 0.1173 0.1163 0.1144 0.114 0.1132 0.1128 0.1141 0.1148 0.1154 0.1089 0.0985 0.0803 0.0589 0.0362 0.0198 0.0074 0.0045 0.0262 0.0449 0.0591 0.073 0.0788 0.0804 0.0813 0.0822 0.0845 0.0852 0.0851 0.0826 0.0805 0.0763 0.0697 0.0586 0.0472 0.0329 0.0234 0.0206 0.0185 0.0178 0.0391 0.0567 0.0732 0.0895 0.094 0.0965 0.0972 0.0965 0.0979 0.0969 0.0943 0.0917 0.0904 0.0869 0.085 0.085 0.0822 0.0816 0.0823 
0.08 0.0785 0.078 0.0773 0.0785 0.0778 0.0781 0.0751 0.0734 0.0749 0.0748 0.0756 0.0751 0.0703 0.0605 0.0478 0.0346 0.0219 0.0183 0.0175 0.0142 0.0247 0.0445 0.0598 0.0738 0.0823 0.0823 0.0823 0.0794 0.0773 0.0744 0.0728 0.071 0.0707 0.0708 0.0703 0.0663 0.0579 0.0467 0.0332 0.0193 0.0129 0.0285 0.0405 0.053 0.0638 0.071 0.0736 0.0771 0.0802 0.0815 0.0835 0.0822 0.0815 0.0823 0.0825 0.0857 0.0907 0.0941 0.097 0.0975 0.0977 0.097 0.0966 0.097 0.0958 0.0968 0.0965 0.0965 0.0964 0.0955 0.094 0.0925 0.0906 0.0879 0.087 0.0854 0.084 0.0818 0.081 0.0823 0.0838 0.0852 0.0845 0.0842 0.0848 0.083 0.0803 0.0758 0.0698 0.0653 0.0614 0.0596 0.0588 0.0603 0.0637 0.0663 0.0686 0.0688 0.0686 0.0658 0.0607 0.0577 0.0525 0.0493 0.0469 0.0444 0.0441 0.0456 0.0492 0.0507 0.0534 0.0533 0.0517 0.0495 0.0451 0.0424 0.0384 0.035 0.0333 0.0307 0.03 0.0315 0.0331 0.0363 0.0383 0.0395 0.0388 0.0367 0.0329 0.0288 0.0245 0.0217 0.0194 0.0186 0.0187 0.0189 0.0211 0.0218 0.0235 0.0222 0.0197 0.0161 0.0114 0.0069 0.0037 0.0019 0.0022 0.0019 0.0026 0.0023 0.0024 0.0022 0.0017 0.0016 0.0014 0.0019 0.002 0.0023 0.0012 0.0017 0.0036 0.0065 0.0078 0.0107 0.0244 0.0392 0.0493 0.0578 0.0615 0.0625 0.0628 0.0622 0.0624 0.0621 0.0653 0.0663 0.0695 0.0682 0.0663 0.0665 0.0646 0.0641 0.064 0.0635 0.0645 0.0658 0.0683 0.0695 0.0716 0.0715 0.0733 0.0728 0.0736 0.0723 0.0731 0.0745 0.0749 0.0753 0.0725 0.0714 0.0681 0.0642 0.0593 0.0536 0.0496 0.0527 0.0593 0.067 0.0716 0.0735 0.0734 0.0727 0.0713 0.0697 0.0667 0.0665 0.0652 0.0646 0.0656 0.0649 0.0648 0.0647 0.065 0.0659 0.0678 0.0687 0.0709 0.0745 0.0761 0.0776 0.0794 0.0775 0.0776 0.0781 0.076 0.0748 0.073 0.0696 0.0658 0.0637 0.0623 0.0638 0.0661 0.0688 0.0702 0.0724 0.0733 0.0731 0.0734 0.074 0.0727 0.0742 0.0755 0.0756 0.0788 0.0809 0.0818 0.0819 0.0798 0.077 0.0756 0.0728 0.0694 0.066 0.0636 0.0611 0.0648 0.0722 0.0813 0.0913 0.0968 0.0995 0.0978 0.0943 0.0907 0.0876 0.0839 0.0802 0.0786 0.0748 0.0724 0.0677 0.0587 0.0473 0.0327 0.0238 0.038 0.0572 0.0725 0.0853 0.0925 0.0922 0.0903 0.0882 0.0844 0.0852 0.0887 0.0903 0.0933 0.0947 0.0956 0.0951 0.0924 0.0881 0.0802 0.0734 0.0678 0.0621 0.0621 0.0621 0.0646 0.0696 0.0722 0.0775 0.0803 0.08 0.0799 0.074 0.0681 0.0627 0.0593 0.0588 0.0609 0.0643 0.0684 0.0727 0.0751 0.0783 0.0787 0.0762 0.0724 0.0675 0.0637 0.0618 0.0627 0.0641 0.0669 0.0699 0.0715 0.0723 0.0722 0.0702 0.0707 0.0711 0.0693 0.0692 0.0666 0.0663 0.0673 0.0697 0.0713 0.0718 0.0735 0.0723 0.0721 0.0735 0.073 0.0724 0.0718 0.0687 0.0671 0.0677 0.0668 0.0691 0.0704 0.0708 0.0713 0.0699 0.0684 0.066 0.0647 0.064 0.0619 0.0619 0.0618 0.0612 0.0627 0.0636 0.0647 0.0658 0.0663 0.0659 0.0645 0.0632 0.0623 0.0588 0.0576 0.0575 0.0559 0.0556 0.055 0.0547 0.0556 0.0569 0.0579 0.0591 0.06 0.06 0.0581 0.0567 0.0554 0.0523 0.0502 0.047 0.0445 0.044 0.0422 0.0414 0.0401 0.0393 0.0397 0.0393 0.0391 0.0393 0.0377 0.036 0.0329 0.029 0.0272 0.0253 0.0235 0.0226 0.0221 0.0213 0.0215 0.0217 0.0222 0.0219 0.0212 0.0193 0.0166 0.0127 0.009 0.0052 0.0032 0.0017 0.0012 0.0005 0.0004 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0003 0.0008 0.0014 0.002 0.0027 0.0032 0.004 0.0044 0.0055 0.0062 0.0069 0.0075 0.008 0.0078 0.0072 0.007 0.0062 0.006 0.0053 0.0047 0.0042 0.0032 0.0025 0.0019 0.0012 0.0008 0.0007 0.0007 0.0008 0.0012 0.0012 0.0012 0.0016 0.0019 0.0019 0.002 0.0021 0.0024 0.0023 0.0018 0.0017 0.0017 0.0019 0.002 0.0022 0.003 0.006 0.0099 0.0126 0.0151 0.0163 0.016 0.0145 0.0117 0.0086 0.0049 0.0024 0.0018 0.0016 0.0014 0.0013 0.0013 0.0012 0.001 0.0008 
0.0008 0.001 0.0016 0.0032 0.0057 0.0103 0.0138 0.0152 0.0151 0.0124 0.0077 0.0033 0.0025 0.0022 0.0025 0.0025 0.0025 0.0022 0.0022 0.0021 0.002 0.0022 0.0024 0.0022 0.0023 0.0024 0.002 0.0022 0.0022 0.0023 0.0023 0.0023 0.002 0.002 0.002 0.0021 0.0023 0.0021 0.0022 0.0022 0.0025 0.0026 0.0027 0.0028 0.0029 0.0029 0.0027 0.0029 0.0025 0.0024 0.0025 0.0024 0.0027 0.0026 0.0026 0.0024 0.0024 0.0024 0.0025 0.0025 0.0028 0.0026 0.0025 0.0026 0.0023 0.0025 0.0026 0.0029 0.0028 0.0029 0.0026 0.0024 0.0024 0.0025 0.0023 0.0027 0.0023 0.0024 0.0019 0.0017 0.0019 0.0022 0.0021 0.0026 0.0027 0.0025 0.0023 0.0018 0.0014 0.0012 0.0013 0.0017 0.002 0.002 0.002 0.002 0.0017 0.0017 0.0017 0.0016 0.0017 0.0018 0.0016 0.0018 0.0016 0.0016 0.0014 0.0015 0.0014 0.0012 0.0015 0.0013 0.0018 0.0026 0.0031 0.004 0.0042 0.0043 0.0041 0.0035 0.0033 0.003 0.0028 0.0031 0.0028 0.0022 0.0012 0.0032 0.0062 0.0072 0.0071 0.006 0.0046 0.0038 0.0044 0.0053 0.0056 0.0055 0.0049 0.004 0.0025 0.0016 0.0014 0.0019 0.0036 0.0079 0.0125 0.0156 0.0188 0.0211 0.0205 0.0185 0.015 0.0103 0.0045 0.0024 0.0019 0.0016 0.0016 0.0013 0.0014 0.0012 0.0013 0.0012 0.0012 0.0013 0.0021 0.0031 0.005 0.008 0.0086 0.0084 0.0076 0.0048 0.0016 0.0015 0.0016 0.0017 0.0018 0.0018 0.0018 0.0019 0.002 0.002 0.0018 0.0018 0.0022 0.0024 0.0025 0.0023 0.0025 0.002 0.0018 0.0019 0.002 0.0023 0.0024 0.0021 0.002 0.002 0.0021 0.0023 0.0025 0.0023 0.0022 0.0025 0.0021 0.0021 0.0025 0.0024 0.0023 0.0024 0.0021 0.002 0.0022 0.0023 0.0023 0.0023 0.0021 0.0018 0.0017 0.0017 0.0021 0.0023 0.0019 0.0018 0.0017 0.0013 0.0013 0.0014 0.0012 0.0013 0.0013 0.0013 0.0011 0.0014 0.0015 0.0015 0.0015 0.0012 0.001 0.001 0.0007 0.0006 0.0007 0.0007 0.0006 0.0008 0.0009 0.0011 0.0011 0.0011 0.0011 0.0009 0.0009 0.0011 0.0017 0.0023 0.0029 0.0031 0.0035 0.0039 0.0035 0.0036 0.0033 0.0025 0.0019 0.0012 0.0008 0.0013 0.0039 0.0068 0.0091 0.0106 0.0114 0.0106 0.009 0.0067 0.0027 0.0024 0.0023 0.0025 0.0022 0.0015 0.0013 0.001 0.0012 0.0011 0.001 0.0009 0.0007 0.001 0.0009 0.0009 0.0013 0.0014 0.0015 0.0016 0.0019 0.002 0.002 0.002 0.002 0.002 0.0019 0.0019 0.0017 0.0013 0.0013 0.0019 0.0057 0.0115 0.0168 0.0188 0.0183 0.0152 0.0087 0.0047 0.0028 0.0025 0.0021 0.0017 0.0018 0.0018 0.0016 0.0015 0.0012 0.0013 0.0014 0.0014 0.0015 0.0013 0.0015 0.0015 0.0018 0.002 0.0019 0.0021 0.0019 0.002 0.0022 0.0023 0.0023 0.0022 0.0022 0.0023 0.0025 0.0025 0.0027 0.0026 0.0027 0.0028 0.003 0.0031 0.0027 0.0028 0.0025 0.0025 0.0023 0.0023 0.0025 0.0024 0.0024 0.0022 0.002 0.0021 0.002 0.0018 0.0018 0.002 0.0021 0.0023 0.0024 0.0027 0.0026 0.0025 0.0024 0.0021 0.0021 0.0018 0.0018 0.002 0.0022 0.0023 0.0024 0.0023 0.0025 0.0027 0.0024 0.0023 0.0025 0.0021 0.0029 0.0026 0.0023 0.0021 0.0033 0.0037 0.004 0.004 0.003 0.002 0.0016 0.0017 0.0016 0.0015 0.0016 0.002 0.0022 0.0028 0.0027 0.0051 0.0117 0.016 0.0198 0.0216 0.0204 0.0173 0.0125 0.008 0.0045 0.0035 0.0035 0.0031 0.0028 0.0025 0.0025 0.0024 0.0024 0.0026 0.0024 0.002 0.0016 0.0013 0.0011 0.0011 0.001 0.0008 0.0009 0.0008 0.0008 0.0009 0.0008 0.0005 0.0005 0.0005 0.0006 0.0008 0.001 0.001 0.0013 0.0043 0.0096 0.0132 0.0168 0.0183 0.0181 0.0159 0.0129 0.0095 0.0057 0.0034 0.0024 0.0017 0.0014 0.0014 0.0013 0.0014 0.0013 0.001 0.0007 0.0006 0.0005 0.0003 0.0011 0.0028 0.0044 0.0052 0.005 0.0045 0.0036 0.0024 0.0021 0.0024 0.0028 0.0028 0.0025 0.0023 0.0021 0.0019 0.0018 0.0016 0.0016 0.0018 0.002 0.002 0.002 0.0021 0.0019 0.0017 0.0018 0.0017 0.002 0.002 0.0019 0.0021 0.0025 0.0026 0.0026 0.0024 0.0023 0.0023 0.0024 0.0026 0.0026 0.0028 
0.0028 0.0026 0.0023 0.0024 0.0026 0.0026 0.0024 0.0026 0.0025 0.0023 0.0022 0.002 0.0018 0.0017 0.0017 0.0018 0.0021 0.002 0.0021 0.0022 0.0019 0.0021 0.0021 0.0022 0.0018 0.0019 0.0013 0.0013 0.0014 0.0015 0.0015 0.0013 0.0014 0.0013 0.0013 0.0014 0.0014 0.0015 0.0011 0.0009 0.0006 0.0006 0.0007 0.0006 0.0007 0.0007 0.0008 0.0009 0.0009 0.0008 0.0005 0.0008 0.0007 0.0004 0.0005 0.0003 0.0005 0.0004 0.0005 0.0003 0.0007 0.0003 0.0005 0.001 0.0013 0.0011 0.0012 0.0013 0.0011 0.0017 0.0016 0.002 0.0025 0.0024 0.002 0.0021 0.0016 0.0015 0.0013 0.001 0.0005 0.0017 0.0045 0.0067 0.0081 0.0087 0.0084 0.007 0.0054 0.004 0.0026 0.0023 0.0021 0.0019 0.0017 0.0016 0.0015 0.0014 0.0013 0.0015 0.0012 0.0011 0.0014 0.0012 0.0014 0.0013 0.0016 0.0016 0.0016 0.0016 0.0017 0.0015 0.0014 0.001 0.001 0.001 0.001 0.0009 0.001 0.0011 0.0009 0.0014 0.0014 0.0015 0.0013 0.0015 0.0014 0.0017 0.0016 0.0017 0.0017 0.0016 0.0016 0.0017 0.0016 0.0013 0.001 0.0009 0.0006 0.0006 0.0004 0.0005 0.0004 0.0005 0.0004 0.0005 0.0007 0.0008 0.0013 0.0017 0.0017 0.0017 0.0017 0.0016 0.0018 0.0016 0.0019 0.0019 0.002 0.0017 0.0017 0.0016 0.0016 0.0018 0.0018 0.002 0.0019 0.0021 0.0017 0.0019 0.0016 0.0014 0.0014 0.0012 0.0012 0.0012 0.0014 0.0015 0.0016 0.0017 0.0014 0.002 0.0021 0.0019 0.0019 0.0019 0.0016 0.0015 0.0013 0.0017 0.002 0.0022 0.0019 0.0018 0.0017 0.0017 0.0015 0.0011 0.0012 0.0008 0.0005 0.0005 0.0003 0.0005 0.0004 0.0002 0.0002 0.0002 0.0005 0.001 0.0011 0.0012 0.0016 0.0018 0.002 0.0023 0.0021 0.0015 0.0016 0.0013 0.0016 0.0016 0.0016 0.0014 0.0017 0.0016 0.0017 0.0018 0.002 0.0021 0.0019 0.0018 0.0019 0.0019 0.0017 0.0015 0.0016 0.0016 0.0018 0.0017 0.0016 0.0015 0.0015 0.0013 0.0013 0.0014 0.0013 0.0016 0.0015 0.0017 0.0016 0.0017 0.0016 0.0013 0.0013 0.0011 0.0011 0.0012 0.0012 0.0016 0.0017 0.002 0.0019 0.0019 0.0016 0.0016 0.0018 0.0016 0.0018 0.0018 0.0017 0.0016 0.0015 0.0015 0.0017 0.0017 0.0017 0.0018 0.0016 0.0016 0.0014 0.0014 0.0015 0.0018 0.0018 0.002 0.0017 0.0016 0.0017 0.0016 0.0018 0.0017 0.0014 0.0017 0.0015 0.0012 0.0013 0.0014 0.0011 0.0011 0.0016 0.0012 0.0012 0.0012 0.0012 0.0011 0.0011 0.0009 0.0011 0.0008 0.0009 0.0008 0.0008 0.0011 0.0011 0.001 0.001 0.001 0.001 0.0008 0.0009 0.001 0.001 0.0011 0.001 0.0008 0.0008 0.0009 0.001 0.001 0.0009 0.0008 0.0009 0.0008 0.0007 0.001 0.0007 0.0009 0.0007 0.0006 0.0004 0.0006 0.0003 0.0002 0.0001 0.0002 0.0002 0.0003 0.0003 0.0008 0.0003 0.0004 0.0009 0.0014 0.0014 0.0018 0.0018 0.0012 0.001 0.0006 0.0002 0.0 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 166.333, + "text": "SP AP 洒 落 不 计 冬 夏 SP", + "ph_seq": "SP AP s a l uo b u j i d ong x ia SP", + "ph_dur": "0.082 0.4 0.09 0.178 0.06 0.193 0.045 0.163 0.075 0.431 0.045 0.163 0.075 1.905 0.143", + "ph_num": "1 2 2 2 2 2 2 1 1", + "note_seq": "rest rest G3 E3 G3 A3 B3 B3 rest", + "note_dur": "0.172 0.4 0.238 0.238 0.238 0.476 0.238 1.905 0.143", + "note_slur": "0 0 0 0 0 0 0 0 0", + "f0_seq": "167.2 167.4 167.2 167.1 167.3 167.4 167.2 167.5 166.9 166.9 167.1 167.2 167.2 166.7 167.1 167.0 167.1 167.2 167.0 167.0 167.1 167.0 167.1 167.0 166.9 167.1 167.1 167.0 166.9 167.0 167.2 167.1 167.1 167.0 167.0 167.1 167.2 166.7 166.9 166.6 166.7 166.6 166.3 166.3 166.5 166.2 166.0 165.7 165.6 165.0 163.7 161.0 161.2 164.4 170.1 175.3 182.7 188.2 192.4 196.0 196.8 194.9 192.6 187.7 182.4 177.7 173.7 170.6 169.2 168.3 164.9 163.9 163.9 164.4 165.7 167.4 169.0 168.5 167.1 165.1 163.9 162.2 159.4 156.4 155.4 160.9 165.3 171.2 178.1 185.7 193.5 193.7 195.6 196.4 196.6 196.0 195.1 
195.6 196.1 195.3 194.6 194.9 194.6 192.5 186.3 191.3 197.0 203.8 211.4 219.0 226.6 223.2 221.9 221.9 221.3 220.0 218.5 215.5 213.1 211.5 211.1 213.0 215.3 219.4 221.5 224.6 226.4 224.8 223.3 221.6 219.6 218.2 217.1 216.5 218.0 220.5 221.7 223.2 224.7 224.5 223.5 221.9 220.3 212.6 205.8 206.0 218.7 229.5 239.5 249.9 258.8 255.5 247.8 246.2 246.2 245.0 244.1 245.4 245.8 247.0 247.8 248.3 248.1 247.4 242.1 237.0 237.5 238.2 238.8 240.1 240.4 241.8 242.5 233.2 226.8 224.6 223.3 224.3 224.4 224.8 225.4 225.1 225.0 226.6 226.3 226.2 227.7 228.3 230.5 235.9 241.6 246.8 250.7 251.0 251.1 250.2 248.6 246.9 244.9 244.6 244.7 244.5 245.6 245.5 245.3 245.1 246.1 246.5 246.4 245.5 245.2 246.3 246.8 247.9 248.4 249.2 249.8 250.0 249.9 248.8 248.1 247.6 246.2 243.1 240.4 239.1 238.9 241.1 242.4 245.9 249.7 254.3 258.3 260.8 262.0 260.5 257.9 252.0 246.4 240.9 236.8 235.7 236.4 238.2 241.2 245.7 250.2 253.4 255.7 257.9 258.7 256.6 253.6 249.8 245.7 242.3 240.0 238.6 240.0 241.7 245.2 247.4 249.3 250.0 251.9 253.0 251.5 251.8 248.5 247.2 245.2 243.8 242.0 240.1 240.5 241.5 243.1 245.5 247.2 249.0 250.9 252.3 253.1 253.1 251.8 249.4 247.5 244.0 241.4 239.6 238.8 239.0 239.8 242.1 245.1 248.4 252.1 255.7 257.9 258.0 256.5 255.0 252.5 249.1 244.4 240.9 239.7 240.2 242.7 245.7 248.3 251.9 255.9 258.7 259.6 259.1 256.6 253.3 251.4 247.8 245.9 246.0 245.1 244.9 245.1 245.3 246.1 247.7 249.1 250.6 252.4 252.7 253.5 252.1 251.6 250.4 248.9 247.5 240.9 239.1 240.4 238.7 237.9 235.8 233.5 232.0 229.9 229.2 227.5", + "f0_timestep": "0.011609977324263039", + "energy": "0.0001 0.0003 0.0003 0.0001 0.0 0.0 0.0 0.0 0.0002 0.0003 0.0006 0.001 0.0009 0.0013 0.0011 0.0011 0.001 0.0011 0.0014 0.0014 0.0018 0.0014 0.001 0.0015 0.0023 0.0016 0.0012 0.0018 0.0019 0.0023 0.0021 0.0018 0.0016 0.0016 0.0013 0.001 0.0016 0.0008 0.0007 0.0012 0.0006 0.0021 0.0039 0.0057 0.0093 0.0102 0.0107 0.0087 0.0149 0.0316 0.0427 0.0522 0.0584 0.0586 0.0586 0.0572 0.0583 0.0586 0.0596 0.0601 0.0618 0.0636 0.0638 0.0624 0.0588 0.0531 0.0482 0.0452 0.0461 0.0497 0.0536 0.0585 0.0614 0.0628 0.065 0.0655 0.0659 0.0653 0.0634 0.0593 0.0536 0.0441 0.0317 0.0204 0.0075 0.0036 0.0023 0.0056 0.0116 0.0298 0.0431 0.0553 0.0635 0.0659 0.0664 0.0642 0.0628 0.0619 0.0601 0.0601 0.0591 0.0563 0.0488 0.0387 0.0276 0.015 0.0102 0.0104 0.0109 0.0204 0.0354 0.0494 0.0612 0.0684 0.0697 0.0686 0.0654 0.0626 0.0602 0.0583 0.0567 0.0553 0.0562 0.0556 0.0569 0.0584 0.0594 0.0604 0.061 0.0617 0.0612 0.061 0.0613 0.0609 0.0616 0.062 0.0619 0.0619 0.0616 0.0607 0.0597 0.0558 0.0479 0.0388 0.0247 0.0122 0.0055 0.0016 0.0051 0.0126 0.0357 0.0541 0.0675 0.0768 0.0783 0.0776 0.0746 0.074 0.0748 0.0739 0.0743 0.073 0.0706 0.0666 0.0553 0.0432 0.0279 0.0169 0.0168 0.0167 0.0156 0.0248 0.0417 0.0602 0.074 0.0837 0.0881 0.0903 0.0926 0.0947 0.0963 0.0957 0.0938 0.09 0.0835 0.0773 0.0716 0.0662 0.0632 0.0638 0.069 0.0756 0.0821 0.086 0.0896 0.0908 0.0927 0.0916 0.0917 0.0904 0.0894 0.0908 0.0922 0.0948 0.0978 0.1 0.1026 0.1033 0.1027 0.1023 0.1024 0.1031 0.1052 0.1088 0.1103 0.1131 0.1149 0.1147 0.1153 0.1158 0.1162 0.1166 0.1153 0.1131 0.1107 0.1089 0.1092 0.1099 0.1103 0.1105 0.1115 0.1144 0.1172 0.1198 0.1202 0.1204 0.1193 0.1164 0.1124 0.108 0.1028 0.0989 0.0961 0.0943 0.0952 0.0965 0.1008 0.1047 0.1098 0.1134 0.1146 0.1169 0.1158 0.1129 0.1087 0.1035 0.0989 0.0976 0.0972 0.0958 0.0986 0.0988 0.1002 0.1022 0.1037 0.1046 0.1061 0.1072 0.1069 0.1065 0.104 0.102 0.1004 0.0999 0.099 0.0989 0.0994 0.0987 0.1006 0.1016 0.1015 0.1018 0.1018 0.1017 0.1014 0.0998 0.0972 0.0944 0.092 
0.0884 0.0854 0.082 0.0791 0.0767 0.0768 0.0777 0.0797 0.0824 0.0851 0.0874 0.0878 0.0867 0.0836 0.079 0.074 0.0683 0.0631 0.0598 0.057 0.0552 0.0565 0.0578 0.0611 0.0628 0.064 0.0638 0.0618 0.0594 0.0556 0.0512 0.0479 0.0444 0.0419 0.0403 0.0376 0.0368 0.0357 0.0349 0.0345 0.0347 0.0348 0.0335 0.0321 0.03 0.0273 0.0248 0.0214 0.0178 0.0139 0.0095 0.0042 0.002 0.0008 0.0004 0.0 0.0 0.0001 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0001 0.0002 0.0002 0.0003 0.0004 0.0001 0.0002 0.0005 0.0009 0.0009 0.001 0.0012 0.0014 0.0012 0.0012 0.0013 0.0014 0.0015 0.0017 0.0017 0.0018 0.0017 0.0015 0.0017 0.0018 0.002 0.002 0.002 0.0022 0.002 0.0019 0.0019 0.0015 0.0012 0.0012 0.001 0.0009 0.0008 0.0006 0.0004 0.0011 0.0036 0.0063 0.0091 0.0107 0.0114 0.011 0.0093 0.0073 0.005 0.0032 0.0023 0.0022 0.0023 0.0019 0.0019 0.0017 0.0016 0.0014 0.0015 0.0016 0.0018 0.0016 0.0012 0.0011 0.0008 0.0006 0.0006 0.0008 0.0009 0.0008 0.0011 0.0011 0.0012 0.0012 0.0013 0.0011 0.0009 0.001 0.001 0.0009 0.0007 0.0006 0.0007 0.0011 0.0013 0.0017 0.0019 0.0017 0.0015 0.0009 0.0006 0.0003 0.0002 0.0001 0.0004 0.0002 0.0004 0.0001 0.0004 0.0008 0.0014 0.0027 0.0053 0.0073 0.0093 0.0105 0.0108 0.0097 0.008 0.0059 0.0033 0.002 0.0016 0.0019 0.0016 0.0016 0.0016 0.0017 0.0018 0.0018 0.002 0.002 0.0019 0.0017 0.0016 0.0015 0.0016 0.0017 0.0016 0.0017 0.0014 0.0013 0.0017 0.0015 0.0015 0.0016 0.0014 0.0012 0.0013 0.0011 0.0011 0.0011 0.0029 0.0034 0.0036 0.0035 0.0044 0.0041 0.0039 0.0038 0.0025 0.0015 0.0009 0.0008 0.0007 0.0006 0.0004 0.0005 0.0005 0.0005 0.0008 0.0011 0.0043 0.0093 0.0125 0.0147 0.0171 0.0172 0.0166 0.0146 0.0112 0.0067 0.0039 0.0027 0.0019 0.0013 0.0014 0.0013 0.0014 0.0015 0.0011 0.0012 0.0015 0.0014 0.0017 0.0017 0.0017 0.0017 0.0013 0.0013 0.0012 0.001 0.0011 0.0011 0.0011 0.0013 0.0012 0.0011 0.0011 0.001 0.001 0.0011 0.001 0.0011 0.001 0.0011 0.0011 0.001 0.0012 0.0013 0.0014 0.0018 0.0017 0.0017 0.0016 0.0016 0.0019 0.0018 0.0019 0.0019 0.0019 0.0021 0.002 0.0021 0.002 0.002 0.002 0.0021 0.002 0.0019 0.0019 0.0018 0.002 0.002 0.0018 0.0021 0.0021 0.0021 0.0019 0.0017 0.0016 0.0013 0.0015 0.0016 0.0016 0.0019 0.0017 0.0018 0.0018 0.0018 0.0022 0.0022 0.0021 0.0019 0.0016 0.0016 0.0015 0.0014 0.0015 0.0015 0.0019 0.002 0.0019 0.0018 0.0018 0.0018 0.0018 0.0021 0.0021 0.0021 0.0022 0.002 0.0018 0.0017 0.0016 0.0017 0.0017 0.0017 0.0018 0.0015 0.0013 0.0016 0.0015 0.0016 0.0019 0.0017 0.0016 0.0015 0.0011 0.0012 0.0013 0.0012 0.0011 0.0014 0.0015 0.0014 0.0013 0.0013 0.0012 0.0013 0.0014 0.0014 0.0014 0.0012 0.001 0.001 0.0008 0.0007 0.0008 0.0009 0.0009 0.0008 0.0008 0.0009 0.0008 0.0008 0.0007 0.0007 0.0006 0.0009 0.0006 0.0005 0.0005 0.0005 0.0004 0.0004 0.0004 0.0005 0.0004 0.0005 0.0005 0.0005 0.0007 0.0006 0.0005 0.0005 0.0011 0.0017 0.0019 0.002 0.0012 0.0006 0.0003 0.0 0.0002 0.0003 0.0002", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 171.305, + "text": "SP AP 算 冬 夏 豪 气 未 曾 罢 SP", + "ph_seq": "SP AP s uan d ong x ia h ao q i w ei z eng b a SP", + "ph_dur": "0.095 0.4 0.105 0.178 0.06 0.163 0.075 0.416 0.06 0.163 0.075 0.163 0.075 0.387 0.09 0.431 0.045 0.476 0.333", + "ph_num": "1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 G3 E3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.238 0.238 0.477 0.476 0.476 0.333", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "199.9 200.4 200.4 200.8 201.1 200.8 201.0 201.2 201.4 200.7 201.4 201.6 201.2 201.4 200.9 201.3 201.4 201.4 201.6 201.5 201.5 201.9 202.3 202.3 
202.6 202.4 202.3 202.5 202.9 202.9 202.9 203.2 203.0 203.2 203.5 203.2 203.4 203.8 203.8 203.3 203.8 203.7 203.5 204.3 204.0 204.0 204.0 203.7 204.0 204.0 204.2 203.9 203.5 203.7 204.2 206.2 208.1 210.9 222.3 231.8 238.5 244.2 246.0 246.9 244.4 241.9 237.6 230.9 227.1 221.2 213.0 220.3 225.6 221.6 219.0 218.3 219.3 220.7 221.6 221.8 221.7 221.6 221.8 222.1 220.5 216.2 207.3 213.4 221.2 229.5 238.6 247.4 257.9 257.2 257.2 257.6 256.2 252.9 248.7 243.4 238.0 235.1 235.5 236.0 238.7 242.5 246.5 251.2 255.2 257.6 257.0 254.8 248.1 243.5 237.5 232.8 233.0 235.7 240.5 248.1 251.8 254.9 253.8 253.4 249.5 243.6 233.4 222.4 225.1 231.4 239.4 247.1 254.8 262.2 255.1 250.8 250.5 250.6 250.3 249.5 248.4 246.9 246.1 246.0 245.5 243.3 236.6 231.9 229.5 228.9 228.6 229.2 228.1 228.6 225.7 220.8 220.0 219.6 219.6 220.2 221.1 221.4 221.6 221.7 222.1 221.7 222.0 221.4 219.2 216.6 214.2 215.4 226.6 241.4 250.5 256.3 256.8 257.0 254.3 252.2 247.1 244.7 243.2 242.6 243.1 242.8 244.1 247.7 252.3 255.7 257.9 255.8 252.4 249.6 246.8 243.6 241.0 239.5 238.8 238.6 243.3 250.2 257.1 261.4 260.5 255.8 245.3 226.1 220.3 217.1 215.0 213.4 211.0 208.5 207.4 204.2 197.8 191.8 191.6 190.4 190.7 189.5 189.9 191.1 192.3 192.1 192.6 194.6 195.9 197.0 196.8 195.9 195.0 195.1 194.4 194.1 194.6 195.6 196.2 197.0 196.4 195.6 195.8 196.4 196.6 196.9 195.1 192.5 185.9 178.9 174.1 172.1 170.2 168.9 167.1 165.7 161.7 159.1 161.6 164.3 166.6 167.2 166.8 165.9 164.2 161.2 157.5 155.1 153.3 153.7 155.9 159.9 165.7 169.8 173.9 176.0 176.4 174.7 170.8 166.6 161.6 156.5 154.1 154.9 158.0 160.7 164.6 168.0 171.2 172.6 174.0 174.4 172.2 168.6 163.3 157.7 156.0 156.2 157.2 159.2 159.1 159.3 159.3 159.4 160.2 160.5 160.6 161.1 161.4 161.4 161.6 161.6 162.0 162.2 162.2 162.0 161.9 161.5 160.9 161.1 160.7 160.2 160.6 160.2 160.8 160.9 159.2", + "f0_timestep": "0.011609977324263039", + "energy": "0.0007 0.0006 0.0005 0.0004 0.0004 0.0004 0.0004 0.001 0.0006 0.0006 0.0002 0.0006 0.0005 0.0006 0.0013 0.0015 0.0015 0.0017 0.0018 0.0018 0.0024 0.0023 0.0021 0.0025 0.0022 0.0031 0.0028 0.0026 0.0025 0.0026 0.0022 0.0021 0.0024 0.0023 0.0018 0.0019 0.0011 0.001 0.001 0.0005 0.0006 0.0017 0.0025 0.0046 0.0073 0.0093 0.0113 0.0122 0.0122 0.0117 0.0196 0.0411 0.0562 0.0686 0.0765 0.0785 0.0759 0.0704 0.0663 0.0622 0.0621 0.0639 0.0664 0.069 0.0697 0.0703 0.0694 0.0645 0.0547 0.0434 0.0359 0.0479 0.0613 0.0724 0.0796 0.0814 0.0813 0.0806 0.081 0.0796 0.0793 0.0787 0.0777 0.0767 0.0715 0.0604 0.0477 0.0316 0.0203 0.0173 0.0163 0.0175 0.0393 0.0609 0.0743 0.0873 0.0901 0.0894 0.0886 0.0849 0.0826 0.0779 0.0756 0.0732 0.0724 0.0729 0.0721 0.0734 0.0741 0.0741 0.0762 0.0763 0.0757 0.073 0.0686 0.0663 0.0642 0.0627 0.0636 0.0633 0.0634 0.0648 0.0654 0.0659 0.0659 0.0581 0.0484 0.0362 0.0227 0.0106 0.0054 0.0063 0.0144 0.0417 0.0594 0.0779 0.0892 0.0931 0.0943 0.092 0.0889 0.0862 0.0838 0.081 0.077 0.0667 0.0548 0.039 0.025 0.0159 0.016 0.0155 0.0142 0.0176 0.0385 0.0536 0.0656 0.075 0.0773 0.0778 0.0783 0.0773 0.0752 0.0734 0.0721 0.0693 0.0681 0.0673 0.0665 0.0643 0.0612 0.0577 0.0594 0.0681 0.077 0.0861 0.0931 0.0947 0.097 0.0946 0.0893 0.0858 0.0801 0.0777 0.0768 0.0749 0.0759 0.0762 0.0773 0.0779 0.0796 0.0818 0.0825 0.084 0.0812 0.079 0.0768 0.0761 0.0769 0.0763 0.0761 0.0743 0.074 0.0743 0.0737 0.0677 0.0559 0.0421 0.0224 0.009 0.0082 0.009 0.0105 0.0113 0.0267 0.0475 0.0611 0.0722 0.0778 0.0767 0.0755 0.074 0.0742 0.0746 0.0742 0.0746 0.073 0.0715 0.0698 0.0673 0.0652 0.0642 0.0626 0.0624 0.0623 0.0616 0.0621 0.0616 0.0608 0.0618 0.061 0.0605 0.0588 
0.057 0.0561 0.055 0.0537 0.0475 0.0392 0.0283 0.016 0.007 0.0037 0.0046 0.015 0.0313 0.044 0.0548 0.0623 0.0664 0.0674 0.0677 0.068 0.0661 0.0637 0.061 0.0565 0.0502 0.0457 0.0415 0.04 0.0403 0.0406 0.0422 0.0448 0.0454 0.0469 0.0445 0.0417 0.0383 0.0341 0.0317 0.0289 0.0266 0.0253 0.0237 0.0236 0.0232 0.0226 0.0224 0.0218 0.0211 0.0203 0.0182 0.0175 0.0168 0.0146 0.0132 0.0093 0.006 0.0039 0.0024 0.0011 0.0005 0.0008 0.0 0.0011 0.0002 0.0002 0.001 0.0013 0.0007 0.0005 0.0013 0.0014 0.0035 0.0057 0.0 0.8704 0.0184 0.0099 0.0047 0.0027 0.0021 0.0005 0.0003 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0004 0.0 0.0001 0.0004 0.0003 0.0003 0.0004 0.0003 0.0006 0.0006 0.0005 0.0006 0.0008 0.001 0.0012 0.0015 0.0019 0.0019 0.002 0.002 0.002 0.0023 0.0024 0.0025 0.0027 0.0029 0.0028 0.0029 0.0028 0.0028 0.0025 0.0024 0.0023 0.0021 0.002 0.0013 0.0013 0.0011 0.0009 0.0007 0.0006 0.0006 0.0022 0.0045 0.0074 0.0099 0.012 0.0129 0.0129 0.0121 0.0097 0.0074 0.0043 0.0023 0.0022 0.0023 0.0022 0.0022 0.0019 0.0017 0.0012 0.0012 0.0005 0.0007 0.0006 0.0005 0.0004 0.0002 0.0002 0.0003 0.0008 0.0013 0.0013 0.0012 0.0009 0.0009 0.0009 0.0008 0.0009 0.0007 0.0006 0.0007 0.0007 0.0009 0.0014 0.0048 0.0101 0.0135 0.0158 0.0175 0.0172 0.0153 0.0128 0.009 0.0038 0.0033 0.0031 0.0028 0.0024 0.0024 0.0021 0.0018 0.0017 0.0016 0.0018 0.0018 0.0017 0.0017 0.0014 0.0013 0.0014 0.0012 0.0013 0.0018 0.0016 0.0018 0.0017 0.0014 0.0016 0.0015 0.0014 0.0013 0.0012 0.0013 0.0017 0.0016 0.0015 0.0028 0.0052 0.0062 0.0069 0.0068 0.0052 0.0043 0.003 0.0019 0.0021 0.0019 0.0014 0.0016 0.0015 0.0015 0.0015 0.0014 0.0013 0.0025 0.0056 0.0086 0.0114 0.014 0.0157 0.0158 0.0152 0.0129 0.0098 0.0068 0.0035 0.0024 0.0021 0.0017 0.0018 0.0016 0.0016 0.0014 0.0011 0.0009 0.0009 0.0008 0.0007 0.0004 0.0007 0.001 0.001 0.001 0.0012 0.0013 0.0013 0.0013 0.0016 0.0015 0.0018 0.0016 0.0012 0.0014 0.0014 0.0015 0.0015 0.0015 0.0015 0.0015 0.0013 0.0014 0.0015 0.0016 0.0017 0.0019 0.0017 0.0016 0.0019 0.0017 0.0018 0.0015 0.0013 0.0011 0.0011 0.0012 0.0015 0.0024 0.0031 0.0057 0.0101 0.0127 0.0133 0.0126 0.0094 0.0048 0.0023 0.0012 0.0011 0.0012 0.0013 0.001 0.001 0.0009 0.0009 0.0008 0.0008 0.0008 0.0008 0.0006 0.0006 0.0006 0.0006 0.0006 0.0007 0.0005 0.0007 0.0005 0.0005 0.0004 0.0003 0.0004 0.0004 0.0003 0.0004 0.0004 0.0005 0.0004 0.0003 0.0006 0.0011 0.001 0.0009 0.0022 0.0033 0.0034 0.0033 0.0029 0.0019 0.0019 0.0019 0.0017 0.0018 0.0016 0.0017 0.0016 0.0015 0.0013 0.0013 0.0009 0.0013 0.0009 0.0012 0.0012 0.0014 0.0011 0.0009 0.001 0.001 0.0012 0.0011 0.001 0.001 0.0008 0.0007 0.0006 0.0004 0.0004 0.0003 0.0004 0.0003 0.0004 0.0005 0.0005 0.0007 0.0003 0.0004 0.0005 0.001 0.0015 0.0016 0.0016 0.0011 0.0009 0.0009 0.0005 0.0004 0.0005 0.0007 0.0005 0.0008 0.001 0.0009 0.001 0.0018 0.002 0.0023 0.0011 0.1417 0.0015 0.0023 0.0011 0.0005 0.0006 0.0005 0.0004 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 174.858, + "text": "AP 再 砥 砺 剑 锋 出 京 华 SP", + "ph_seq": "AP z ai d i l i j ian f eng ch u j ing h ua SP", + "ph_dur": "0.305 0.075 0.193 0.045 0.178 0.06 0.178 0.06 0.178 0.06 0.164 0.075 0.163 0.075 0.371 0.105 0.952 0.095", + "ph_num": "2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest E3 E3 F#3 G3 F#3 E3 D3 E3 rest", + "note_dur": "0.38 0.238 0.238 0.238 0.238 0.239 0.238 0.476 0.952 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0", + "f0_seq": "123.7 124.0 123.8 123.5 123.8 123.8 123.7 123.5 123.5 123.4 123.5 123.3 123.4 123.2 123.3 123.2 123.2 123.0 123.1 123.2 123.3 123.2 123.2 123.2 123.2 
123.2 123.4 123.5 123.3 123.6 123.2 123.6 123.7 124.1 123.7 127.3 130.9 136.0 143.8 150.8 154.8 158.3 161.2 163.7 164.9 166.3 165.4 163.9 162.4 162.2 159.1 156.0 157.9 157.2 162.1 161.3 162.6 163.0 164.3 164.6 164.9 164.7 164.8 165.1 165.5 165.5 165.0 165.1 163.8 164.5 165.1 168.0 176.0 180.1 182.5 183.7 185.0 185.1 185.2 184.8 184.8 185.0 184.9 184.5 184.8 184.6 183.8 181.4 174.4 166.6 174.4 182.5 191.5 201.4 210.7 204.1 201.0 199.6 197.5 197.1 196.4 195.7 195.2 195.5 196.7 196.7 195.4 191.9 185.0 179.3 183.7 186.7 191.4 196.6 200.3 192.7 187.8 186.0 185.2 185.0 183.6 182.9 183.9 184.3 185.5 186.5 185.5 181.7 175.7 173.8 174.5 174.8 175.8 175.3 177.0 175.1 170.0 165.6 166.0 165.2 165.4 165.5 165.7 165.7 165.4 164.5 163.3 161.0 156.4 151.2 150.7 149.1 148.4 148.3 147.8 150.1 146.5 144.6 145.1 145.5 146.0 145.7 145.7 145.7 146.2 146.6 147.0 147.5 147.1 146.7 147.3 147.3 147.7 148.0 148.6 148.6 148.5 148.1 147.8 147.6 147.6 148.1 148.2 148.1 146.7 144.4 138.9 136.8 140.0 143.0 146.2 150.1 154.2 158.0 162.4 166.1 169.8 168.3 167.5 166.2 165.6 164.3 162.7 160.7 159.4 157.8 157.1 156.1 157.0 158.2 160.3 162.6 166.0 168.2 170.1 171.1 170.6 169.5 167.6 165.2 163.6 162.2 161.0 161.1 161.2 162.7 164.7 167.4 170.0 172.3 173.3 172.7 170.7 167.1 163.3 159.3 156.2 154.8 155.7 158.4 161.7 165.4 168.7 172.0 174.9 175.5 174.3 170.8 164.6 159.1 155.6 153.4 153.0 155.6 158.2 162.1 166.0 168.6 171.0 172.9 172.7 170.6 165.8 161.2 156.6 154.3 154.0 154.5 156.4 156.9 156.7 161.2 167.4 172.6 174.8 174.9 173.8 170.0 165.6 160.4 158.3 156.7 157.5 157.0 157.1 156.0 155.4", + "f0_timestep": "0.011609977324263039", + "energy": "0.0009 0.001 0.0015 0.0018 0.0026 0.0028 0.0033 0.0038 0.0045 0.0053 0.005 0.0059 0.0061 0.0059 0.0062 0.0061 0.0053 0.0052 0.0051 0.0044 0.0043 0.0033 0.003 0.002 0.0015 0.0022 0.0024 0.0048 0.0069 0.0093 0.0108 0.0169 0.0281 0.0375 0.0461 0.0514 0.0528 0.0544 0.0542 0.0532 0.0526 0.0527 0.0534 0.0539 0.0552 0.0561 0.0572 0.0569 0.0557 0.0515 0.0445 0.0398 0.0361 0.0405 0.0484 0.0555 0.0591 0.0603 0.0643 0.0632 0.0641 0.0654 0.0646 0.0653 0.0652 0.0665 0.0658 0.0625 0.0572 0.0488 0.041 0.0422 0.0512 0.0597 0.0654 0.0684 0.068 0.0669 0.0671 0.0653 0.0652 0.0649 0.0638 0.0636 0.0626 0.061 0.0575 0.0499 0.0391 0.027 0.0167 0.011 0.0102 0.0228 0.0442 0.0566 0.0667 0.0719 0.0703 0.0684 0.0653 0.0632 0.0612 0.0583 0.057 0.0557 0.0517 0.0455 0.0377 0.0257 0.0162 0.0068 0.0036 0.0166 0.0374 0.0502 0.061 0.0659 0.064 0.0617 0.0574 0.0562 0.0552 0.0535 0.0541 0.0517 0.0465 0.039 0.028 0.0172 0.0111 0.0102 0.0101 0.0084 0.0237 0.0374 0.0487 0.0582 0.0617 0.0634 0.0634 0.0637 0.0646 0.0636 0.0629 0.0606 0.0546 0.0452 0.0338 0.0206 0.0108 0.0081 0.0075 0.0078 0.0165 0.0328 0.0434 0.0513 0.056 0.055 0.0556 0.0557 0.0549 0.0554 0.0528 0.0525 0.0533 0.0535 0.0552 0.0552 0.0571 0.0569 0.0574 0.0586 0.0581 0.0585 0.0588 0.0582 0.0594 0.059 0.0594 0.0583 0.0556 0.0525 0.0457 0.0367 0.0279 0.0176 0.0096 0.0054 0.0046 0.0049 0.0043 0.0045 0.0055 0.0228 0.0375 0.0487 0.0592 0.0637 0.0636 0.0651 0.0643 0.0642 0.0643 0.0622 0.061 0.058 0.0565 0.0547 0.0541 0.0544 0.0531 0.0545 0.056 0.0566 0.0586 0.0585 0.0583 0.0579 0.0565 0.056 0.0531 0.0509 0.0492 0.0486 0.0488 0.0486 0.0501 0.0508 0.0514 0.051 0.0503 0.0489 0.0471 0.0451 0.0425 0.041 0.0382 0.0372 0.0372 0.0376 0.0392 0.0418 0.0432 0.0428 0.0422 0.0395 0.0357 0.0337 0.0306 0.0284 0.027 0.0256 0.0246 0.0244 0.0245 0.0243 0.0245 0.0246 0.0242 0.0233 0.0221 0.0202 0.0179 0.0168 0.0163 0.0156 0.0158 0.0157 0.0151 0.0142 0.0126 0.0122 0.0114 0.0119 0.0115 0.0108 
0.0128 0.0073 0.0036 0.002 0.0012 0.0009 0.0015 0.0007 0.0005", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0004 0.0006 0.0009 0.0013 0.0023 0.0027 0.0036 0.0046 0.0052 0.006 0.0066 0.0066 0.0069 0.0067 0.0064 0.0065 0.0064 0.0062 0.0058 0.005 0.0041 0.0029 0.0018 0.0012 0.0008 0.0012 0.0025 0.0046 0.0073 0.0103 0.012 0.0126 0.0128 0.0108 0.0085 0.0063 0.0029 0.0023 0.0022 0.0022 0.0018 0.0016 0.0016 0.0016 0.0017 0.0019 0.0019 0.0021 0.0018 0.0017 0.0014 0.0009 0.0012 0.0013 0.0015 0.0018 0.0017 0.0017 0.0016 0.0015 0.0015 0.0015 0.0014 0.0015 0.0014 0.0016 0.0016 0.0017 0.0016 0.0018 0.0016 0.0016 0.0018 0.0015 0.0019 0.0021 0.0018 0.0018 0.0018 0.0014 0.0016 0.0013 0.0015 0.0014 0.0016 0.0018 0.0017 0.0017 0.0028 0.0051 0.0078 0.0101 0.0108 0.0102 0.0086 0.0051 0.0028 0.0023 0.0015 0.0015 0.0013 0.001 0.0007 0.0007 0.0007 0.0005 0.0006 0.0004 0.0005 0.0011 0.0018 0.0025 0.0032 0.0034 0.0029 0.0021 0.0013 0.0011 0.0011 0.001 0.0009 0.0008 0.0006 0.0003 0.0002 0.0002 0.0009 0.0023 0.0056 0.0079 0.01 0.0107 0.0099 0.0078 0.0054 0.0032 0.0015 0.0008 0.0006 0.0004 0.0005 0.0006 0.0006 0.0008 0.0006 0.0007 0.0011 0.0021 0.0036 0.006 0.0076 0.0088 0.0089 0.0079 0.0064 0.0042 0.0026 0.0017 0.001 0.0006 0.0005 0.0005 0.0005 0.0005 0.0006 0.0006 0.0006 0.0003 0.0001 0.0003 0.0005 0.0001 0.0003 0.0004 0.0005 0.0005 0.0005 0.0004 0.0005 0.0004 0.0004 0.0003 0.0003 0.0002 0.0002 0.0 0.0009 0.0019 0.0026 0.0033 0.0039 0.0042 0.0043 0.0047 0.0043 0.0034 0.0026 0.0014 0.0008 0.0013 0.0014 0.0016 0.0015 0.0015 0.0013 0.0015 0.0013 0.0015 0.0016 0.0016 0.0013 0.0017 0.0014 0.0014 0.0016 0.0015 0.0015 0.0016 0.0015 0.0017 0.0014 0.0015 0.0013 0.0014 0.0016 0.0016 0.0018 0.0017 0.0017 0.0017 0.0017 0.0019 0.0021 0.0016 0.0016 0.0016 0.0015 0.0015 0.0017 0.0012 0.0013 0.0014 0.0013 0.0013 0.0013 0.0013 0.0015 0.0014 0.0014 0.0011 0.0012 0.0013 0.0009 0.001 0.0009 0.0008 0.0008 0.0008 0.0009 0.0009 0.0008 0.0009 0.0008 0.0008 0.0008 0.0007 0.0005 0.0004 0.0005 0.0003 0.0003 0.0004 0.0004 0.0004 0.0005 0.0004 0.0002 0.0006 0.0031 0.0028 0.0029 0.0026 0.0018 0.0006 0.0005 0.0004 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 178.924, + "text": "SP AP 问 来 人 胸 襟 谁 似 我 将 日 月 山 海 一 并 笑 纳 SP", + "ph_seq": "SP AP w en l ai r en x iong j in sh ui s i0 w o j iang r ir y ve sh an h ai y i b ing x iao n a SP", + "ph_dur": "0.095 0.4 0.105 0.178 0.06 0.163 0.075 0.386 0.09 0.193 0.045 0.163 0.075 0.372 0.105 0.401 0.075 0.639 0.075 0.163 0.075 0.163 0.075 0.163 0.075 0.178 0.06 0.179 0.06 0.193 0.045 0.163 0.075 0.178 0.06 1.19 0.285", + "ph_num": "1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 A3 B3 B3 A3 B3 D4 B3 B3 B3 A3 G3 G3 E3 G3 A3 G3 B3 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.476 0.238 0.238 0.477 0.238 0.238 0.714 0.238 0.238 0.238 0.238 0.239 0.238 0.238 0.238 1.19 0.285", + "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "184.9 186.0 186.0 185.9 186.0 185.8 186.2 186.5 186.4 186.7 186.1 187.0 186.5 186.9 186.8 186.5 186.5 186.5 186.6 186.6 186.4 186.8 186.8 186.8 186.6 186.9 186.7 187.0 187.1 187.0 187.5 187.5 187.3 187.4 187.5 187.1 187.2 187.3 187.6 187.4 187.4 187.3 187.1 187.1 187.2 187.0 186.9 187.3 186.9 187.6 188.9 190.3 191.9 195.3 199.8 203.9 210.1 215.2 227.3 235.3 241.6 245.8 247.8 248.2 248.6 247.6 244.6 241.2 237.2 233.8 230.4 228.0 226.1 221.4 218.7 218.5 220.0 220.7 221.4 221.9 221.7 220.9 219.5 218.9 219.0 218.6 218.2 217.9 217.8 218.9 218.8 218.9 220.6 220.8 221.8 221.8 221.9 220.7 219.2 217.5 
217.0 216.6 217.4 219.7 224.2 232.1 239.2 244.6 249.1 249.7 249.6 249.2 247.7 246.8 245.6 245.3 247.5 248.5 248.7 248.0 249.1 249.1 248.2 245.0 240.9 233.9 234.1 240.5 245.9 252.4 258.8 266.7 274.5 285.0 272.1 267.2 265.5 262.5 256.2 252.3 249.5 248.0 246.8 247.8 249.6 250.8 251.0 249.2 245.0 238.1 226.6 216.8 214.4 215.5 218.9 218.7 218.5 218.9 220.0 221.1 221.4 222.1 222.1 222.0 222.3 222.2 221.1 217.9 213.3 213.0 223.6 234.4 246.0 257.6 271.7 266.3 259.7 257.3 254.3 251.5 248.7 245.3 242.2 237.2 234.2 233.0 236.4 241.6 247.3 251.6 255.0 256.4 255.8 254.3 251.6 248.3 247.6 246.7 248.5 248.6 249.0 249.6 250.4 250.8 249.5 246.2 250.3 254.2 261.3 267.8 274.8 282.0 290.2 300.6 310.7 325.2 325.8 316.7 312.5 310.6 306.1 301.8 296.6 293.1 290.3 289.8 290.2 291.6 292.1 293.0 292.5 292.9 288.3 283.8 275.4 263.7 255.1 248.7 243.8 241.8 242.2 243.6 245.4 248.9 249.9 250.6 250.0 248.9 248.0 247.9 248.2 247.2 245.2 245.6 247.2 247.7 248.3 249.6 250.2 249.4 249.5 249.7 248.9 246.8 244.2 243.2 243.4 243.5 244.1 246.0 246.9 249.5 250.5 252.0 253.1 253.6 253.1 251.3 247.4 243.8 240.4 237.2 236.3 237.4 240.3 243.8 246.3 249.7 254.5 258.6 261.8 263.3 261.2 256.6 251.8 244.3 242.7 240.6 239.8 243.3 244.9 246.9 251.6 255.1 257.7 260.1 261.7 259.5 255.1 249.0 238.9 241.1 242.9 247.0 251.7 256.5 260.8 264.9 268.2 264.2 256.5 253.6 251.3 247.9 245.6 244.4 243.6 244.3 245.0 245.8 247.7 249.4 249.1 245.3 239.1 233.6 227.9 223.2 219.8 219.2 219.5 220.0 220.7 220.1 220.6 219.7 219.1 219.1 218.3 216.8 216.7 218.0 215.7 213.2 208.1 203.2 196.3 192.8 189.9 190.1 192.2 193.9 195.6 196.1 196.3 196.7 196.5 195.3 193.1 194.2 194.9 192.0 184.0 186.3 190.8 194.4 198.6 202.4 207.0 209.3 211.0 203.8 199.3 197.6 196.0 195.8 195.1 194.9 195.6 197.5 198.6 196.9 194.0 189.4 183.8 178.4 174.5 172.2 170.5 170.4 168.6 164.3 162.8 162.7 164.2 164.8 164.9 165.2 165.5 165.4 165.8 165.8 164.5 165.1 165.8 165.8 167.9 170.2 173.2 177.8 183.8 190.6 193.5 196.3 197.8 197.7 197.2 195.8 194.8 195.5 196.4 196.3 195.1 193.8 188.6 190.0 189.2 195.6 206.0 215.6 224.9 231.9 226.1 224.7 223.6 221.7 220.8 220.2 218.8 219.0 219.7 219.5 218.7 214.5 210.1 203.8 203.0 202.3 202.4 202.4 201.7 203.6 195.1 193.8 191.5 191.2 191.8 193.3 195.1 195.8 196.2 197.3 198.5 199.3 200.7 201.5 200.1 198.8 198.2 199.9 201.7 204.7 210.1 214.1 217.7 219.6 220.9 221.0 220.7 220.6 220.4 220.1 220.8 221.3 221.8 222.1 222.6 222.5 221.8 222.9 226.4 234.5 244.6 252.4 257.5 258.2 257.4 254.5 250.0 245.5 242.2 239.7 240.0 242.6 246.9 251.8 255.5 257.5 258.9 257.8 254.5 250.3 245.3 241.1 236.9 236.0 236.1 239.9 244.5 248.5 250.0 251.9 253.3 253.9 254.1 252.6 250.6 248.2 244.7 241.3 238.6 237.2 239.1 240.9 242.9 245.5 248.4 252.3 255.8 257.0 256.9 255.0 252.2 248.0 243.8 239.8 237.2 235.6 236.8 239.7 244.4 248.7 253.1 256.4 258.7 259.0 256.7 253.9 249.6 245.4 240.8 237.4 235.8 238.7 242.2 245.4 248.9 255.9 260.5 260.2 257.4 251.0 248.7 247.8 245.8 240.2 234.1 232.8 232.9 232.8 233.4 233.6 234.3 235.0 235.4 235.2 234.5 234.0 233.6 233.4 233.2 233.2 234.2 234.0 234.2 233.9 234.2 232.9 233.6 234.4", + "f0_timestep": "0.011609977324263039", + "energy": "0.0001 0.0004 0.0009 0.0007 0.0007 0.0006 0.0004 0.0003 0.0004 0.0008 0.0003 0.0006 0.0006 0.0007 0.0007 0.0009 0.001 0.0007 0.0017 0.0016 0.0009 0.0015 0.0017 0.0016 0.0014 0.0021 0.0018 0.0017 0.0024 0.0021 0.002 0.0025 0.0026 0.0028 0.0026 0.0023 0.0026 0.0017 0.0019 0.0016 0.0013 0.0 0.0007 0.0002 0.0007 0.0007 0.0021 0.0083 0.0229 0.0365 0.0467 0.0564 0.0617 0.0649 0.0662 0.0635 0.0608 0.0564 0.053 0.0534 0.0555 0.0599 0.0631 
0.0672 0.07 0.0737 0.0761 0.0766 0.0762 0.0743 0.0738 0.0756 0.0791 0.0835 0.0872 0.0881 0.0862 0.0853 0.085 0.0836 0.0834 0.0823 0.0807 0.079 0.0756 0.0741 0.0731 0.0742 0.0743 0.0723 0.0705 0.0714 0.0806 0.0933 0.1019 0.108 0.1073 0.1006 0.0937 0.0844 0.0758 0.0687 0.0646 0.0593 0.0573 0.0582 0.0582 0.0604 0.0621 0.0628 0.0645 0.0655 0.0655 0.0653 0.0648 0.0654 0.0678 0.0682 0.0673 0.0676 0.0669 0.0667 0.0672 0.066 0.0603 0.0509 0.0389 0.0251 0.016 0.0163 0.0149 0.0127 0.0302 0.0478 0.0616 0.0733 0.0761 0.0739 0.0702 0.0657 0.063 0.0607 0.0593 0.0589 0.0599 0.0605 0.0627 0.0636 0.0628 0.0553 0.0454 0.0338 0.0189 0.0302 0.044 0.058 0.0697 0.0746 0.0748 0.0738 0.0726 0.0728 0.0737 0.0734 0.0734 0.0724 0.0689 0.0605 0.0494 0.0356 0.0235 0.0181 0.0173 0.0158 0.0393 0.0569 0.0719 0.0864 0.0913 0.0956 0.0957 0.0939 0.0907 0.0858 0.0821 0.0803 0.0785 0.0793 0.0803 0.0818 0.0843 0.0856 0.0881 0.0888 0.0854 0.0841 0.0826 0.0801 0.0822 0.081 0.0795 0.0776 0.0733 0.0678 0.0603 0.0509 0.0393 0.028 0.0204 0.0195 0.0208 0.0188 0.0189 0.0158 0.0147 0.036 0.0507 0.064 0.0769 0.0819 0.0835 0.0855 0.0857 0.0849 0.0851 0.0853 0.0862 0.0853 0.0864 0.0866 0.0874 0.0904 0.0904 0.0936 0.0931 0.0941 0.0942 0.0916 0.0905 0.0867 0.0854 0.0852 0.0836 0.0843 0.0843 0.0841 0.0849 0.0838 0.0844 0.0836 0.0811 0.0795 0.079 0.0781 0.0816 0.0869 0.0925 0.0998 0.1028 0.1057 0.104 0.1029 0.1017 0.1002 0.1002 0.0986 0.0951 0.0948 0.0935 0.0925 0.0943 0.0954 0.0949 0.0996 0.1008 0.1025 0.1041 0.1006 0.1011 0.1004 0.1016 0.101 0.0988 0.0956 0.091 0.0902 0.0891 0.0897 0.0913 0.0928 0.0936 0.0925 0.0896 0.088 0.0881 0.0881 0.0841 0.0815 0.0736 0.0702 0.068 0.0684 0.0706 0.0717 0.0723 0.0725 0.0707 0.0698 0.0639 0.0537 0.0441 0.0296 0.0172 0.0107 0.0101 0.0111 0.0114 0.0242 0.0428 0.0586 0.0703 0.078 0.0783 0.0742 0.0682 0.0615 0.0575 0.0564 0.0555 0.0566 0.0586 0.0593 0.0621 0.0636 0.0634 0.0612 0.0577 0.0553 0.055 0.058 0.0618 0.0678 0.0713 0.0669 0.1703 0.0658 0.0747 0.075 0.0713 0.0691 0.0687 0.0673 0.0669 0.0666 0.0653 0.0648 0.0625 0.0634 0.0681 0.0721 0.0767 0.0808 0.0822 0.0849 0.0865 0.0859 0.0875 0.0847 0.0829 0.0826 0.0761 0.0676 0.0552 0.0386 0.0255 0.0162 0.0146 0.0137 0.0157 0.042 0.0565 0.0706 0.0784 0.0788 0.0779 0.0744 0.0723 0.0699 0.0673 0.0673 0.0648 0.0626 0.0599 0.055 0.0511 0.044 0.035 0.0272 0.019 0.0295 0.0415 0.0539 0.063 0.0681 0.0695 0.0708 0.0693 0.071 0.0711 0.0683 0.0681 0.0656 0.0646 0.0649 0.0618 0.0575 0.0519 0.0485 0.046 0.0496 0.0545 0.0585 0.0617 0.0635 0.0652 0.0669 0.0668 0.0662 0.0647 0.0637 0.0629 0.0618 0.0581 0.0503 0.0405 0.0274 0.0141 0.0084 0.0084 0.0188 0.0421 0.0583 0.0724 0.0806 0.0818 0.0805 0.0748 0.0723 0.0696 0.0667 0.0673 0.0664 0.0658 0.0635 0.0548 0.0444 0.032 0.0201 0.0153 0.0127 0.023 0.0425 0.0579 0.0713 0.0804 0.0817 0.0841 0.0819 0.0795 0.0775 0.0712 0.0685 0.0668 0.0661 0.0685 0.0695 0.0706 0.0719 0.0749 0.0771 0.0759 0.076 0.0815 0.0861 0.0939 0.0996 0.1002 0.1021 0.1035 0.1037 0.1057 0.1055 0.1041 0.102 0.0982 0.0936 0.0863 0.0767 0.0672 0.0591 0.0546 0.0527 0.055 0.0586 0.0634 0.0675 0.0722 0.075 0.0755 0.0748 0.0712 0.0679 0.0648 0.0636 0.0624 0.0639 0.0669 0.0692 0.0724 0.0731 0.071 0.0706 0.069 0.0686 0.0677 0.0677 0.0667 0.0667 0.0669 0.0669 0.0684 0.0677 0.069 0.0685 0.0692 0.0694 0.0689 0.0679 0.0663 0.0641 0.0619 0.0598 0.0593 0.0584 0.0585 0.0576 0.0573 0.0567 0.0562 0.0574 0.058 0.0582 0.0572 0.0558 0.053 0.0493 0.0461 0.0433 0.0413 0.0407 0.0408 0.0401 0.0387 0.0375 0.0356 0.0363 0.035 0.0352 0.0334 0.0313 0.0297 0.0275 0.0252 0.023 0.0208 0.0189 0.0183 
0.0181 0.0177 0.0183 0.0188 0.0191 0.0189 0.0187 0.0173 0.0154 0.0118 0.0083 0.0045 0.0021 0.0005 0.0004 0.0007 0.0007 0.0004 0.0013 0.0009 0.0006 0.0009 0.0005 0.0007 0.0005 0.0003 0.0004 0.0 0.0 0.0007 0.0004 0.0006 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0002 0.0 0.0001 0.0003 0.0002 0.0003 0.0002 0.0003 0.0003 0.0006 0.0007 0.0011 0.0012 0.0015 0.0013 0.0013 0.0012 0.0014 0.0015 0.0016 0.0019 0.0019 0.0022 0.002 0.0019 0.0019 0.0018 0.0018 0.0021 0.0024 0.0024 0.0027 0.0025 0.0027 0.0025 0.0026 0.0026 0.0023 0.0017 0.0013 0.0006 0.0006 0.0004 0.0001 0.0002 0.0004 0.0009 0.001 0.0013 0.0012 0.001 0.0013 0.0013 0.0015 0.0013 0.0012 0.0009 0.0005 0.0004 0.0004 0.0003 0.0004 0.0006 0.0005 0.0003 0.0005 0.0005 0.0005 0.0007 0.0008 0.001 0.0014 0.0021 0.0026 0.0029 0.0027 0.0024 0.0022 0.0021 0.0021 0.002 0.0021 0.0023 0.002 0.0018 0.0015 0.0012 0.0011 0.0009 0.0011 0.0011 0.0016 0.0018 0.0018 0.0021 0.0022 0.0024 0.0023 0.0025 0.0023 0.0023 0.0018 0.0016 0.0011 0.001 0.0008 0.0008 0.0005 0.0007 0.0004 0.0005 0.0002 0.0004 0.0005 0.0004 0.0005 0.0005 0.0004 0.0003 0.0002 0.0004 0.0006 0.0012 0.0031 0.0057 0.0094 0.0127 0.0149 0.0159 0.0156 0.0132 0.0102 0.0073 0.0029 0.0013 0.0012 0.0012 0.0012 0.0014 0.001 0.0009 0.0007 0.0005 0.0006 0.0005 0.0006 0.0006 0.0007 0.001 0.002 0.0041 0.0053 0.0059 0.0053 0.0043 0.0024 0.0016 0.0016 0.0013 0.0011 0.0008 0.0005 0.0006 0.0004 0.0005 0.0008 0.0018 0.0053 0.0105 0.0143 0.0171 0.0171 0.015 0.0111 0.0071 0.0033 0.0019 0.0017 0.0016 0.0017 0.0015 0.0016 0.0017 0.0017 0.0017 0.0017 0.0015 0.0013 0.0014 0.0013 0.0014 0.0016 0.0016 0.0016 0.0013 0.0013 0.0009 0.001 0.0009 0.0008 0.0009 0.0009 0.001 0.0027 0.0061 0.0114 0.0156 0.0183 0.0204 0.0217 0.0211 0.0206 0.0187 0.0147 0.0112 0.0072 0.0033 0.0019 0.0009 0.0012 0.0011 0.0009 0.001 0.001 0.001 0.0008 0.001 0.0008 0.0009 0.001 0.0012 0.0011 0.0013 0.0015 0.0018 0.0022 0.0022 0.0018 0.0016 0.0014 0.0011 0.0011 0.0009 0.0009 0.0008 0.0008 0.0007 0.0006 0.0006 0.0005 0.0007 0.0007 0.0005 0.0004 0.0006 0.0007 0.0009 0.0006 0.0007 0.0008 0.0011 0.0009 0.0009 0.0008 0.001 0.001 0.001 0.001 0.001 0.0011 0.001 0.001 0.0012 0.0011 0.0011 0.0011 0.0014 0.0014 0.0014 0.0014 0.0014 0.0013 0.0012 0.0011 0.0013 0.0012 0.0014 0.0014 0.0014 0.0014 0.0016 0.0016 0.0017 0.0018 0.0019 0.0018 0.0016 0.0011 0.0012 0.001 0.0008 0.0008 0.0008 0.0009 0.001 0.0012 0.0012 0.0019 0.0023 0.0031 0.0039 0.0051 0.0075 0.0095 0.0112 0.0112 0.0098 0.0075 0.0041 0.0025 0.0017 0.0017 0.0017 0.0015 0.0016 0.0014 0.0011 0.0002 0.0002 0.0002 0.0004 0.0003 0.0005 0.0006 0.0005 0.0009 0.0012 0.0011 0.0013 0.0013 0.0015 0.0022 0.0027 0.007 0.0023 0.0018 0.0019 0.0014 0.0014 0.0013 0.0013 0.0012 0.0013 0.0012 0.0012 0.0009 0.001 0.0011 0.0013 0.0014 0.0015 0.0013 0.0013 0.0015 0.0016 0.0016 0.0015 0.0017 0.0017 0.002 0.0025 0.0041 0.0071 0.0109 0.0133 0.0148 0.0139 0.0108 0.0081 0.0046 0.0029 0.0023 0.0022 0.0016 0.0016 0.0014 0.0012 0.0009 0.0009 0.0009 0.0006 0.0008 0.0006 0.0007 0.001 0.0022 0.0031 0.0039 0.004 0.0039 0.0032 0.0024 0.0024 0.0023 0.0025 0.0023 0.0022 0.002 0.0019 0.0021 0.0022 0.0022 0.0022 0.0021 0.0021 0.0017 0.0018 0.0018 0.0016 0.0018 0.0015 0.0017 0.0019 0.0018 0.0017 0.0016 0.0018 0.0016 0.0013 0.0012 0.0012 0.0013 0.0009 0.0007 0.001 0.0016 0.0019 0.0026 0.0024 0.0023 0.002 0.0016 0.0015 0.0012 0.0012 0.001 0.0009 0.0008 0.0008 0.0004 0.0003 0.0007 0.0009 0.0031 0.0073 0.0103 0.013 0.0147 0.0143 0.012 0.009 0.0064 0.0039 0.0033 0.0028 0.0025 0.0019 0.0017 0.0016 0.0016 0.0013 
0.0012 0.0008 0.0009 0.0007 0.0006 0.0008 0.0006 0.0004 0.0005 0.0005 0.0006 0.0011 0.0012 0.0015 0.0017 0.0022 0.0022 0.002 0.0018 0.0015 0.0013 0.0012 0.0012 0.0014 0.0014 0.0017 0.0015 0.0015 0.0014 0.0016 0.0018 0.0018 0.0016 0.0019 0.0018 0.0016 0.0015 0.0013 0.0011 0.001 0.001 0.001 0.0011 0.0011 0.001 0.001 0.001 0.0009 0.001 0.0011 0.0011 0.0012 0.001 0.0011 0.0011 0.0013 0.0013 0.0012 0.0013 0.0012 0.0011 0.001 0.0012 0.0009 0.0008 0.0011 0.0011 0.0008 0.0009 0.0011 0.0009 0.0009 0.0008 0.0006 0.0008 0.0006 0.0008 0.0007 0.0006 0.0007 0.0009 0.0008 0.0007 0.0009 0.0009 0.0009 0.0008 0.0008 0.0006 0.0004 0.0003 0.0005 0.0005 0.0005 0.0006 0.0005 0.0005 0.0006 0.0005 0.0007 0.0003 0.0002 0.0002 0.0002 0.0003 0.0002 0.0002 0.0003 0.0004 0.0003 0.0003 0.0004 0.0002 0.0005 0.0016 0.0019 0.002 0.002 0.0013 0.001 0.0006 0.0007 0.0007 0.0007 0.0007 0.0006 0.0006 0.0007 0.0007 0.0009 0.0009 0.0006 0.0002 0.0002 0.0 0.0 0.0003 0.0002", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 185.81, + "text": "AP 只 姓 名 SP", + "ph_seq": "AP zh ir x ing m ing SP", + "ph_dur": "0.305 0.075 0.164 0.075 0.178 0.06 0.952 0.095", + "ph_num": "2 2 2 1 1", + "note_seq": "rest B3 D4 E4 rest", + "note_dur": "0.38 0.239 0.238 0.952 0.095", + "note_slur": "0 0 0 0 0", + "f0_seq": "295.2 295.9 295.9 295.6 295.8 295.9 295.7 295.3 294.9 295.7 295.4 295.6 295.1 295.7 295.8 295.9 295.3 295.7 295.4 295.6 295.0 294.3 294.5 293.9 292.9 291.9 289.9 288.5 286.4 284.5 282.2 278.7 274.8 262.1 253.3 249.1 246.8 245.4 246.3 247.1 248.6 249.4 249.7 249.5 247.1 242.4 232.7 240.3 248.4 258.2 267.4 277.8 287.8 296.5 297.0 293.4 294.6 295.5 296.5 295.0 294.3 293.1 293.0 293.1 292.0 293.0 293.0 292.3 291.1 291.3 292.5 293.6 296.3 297.2 296.7 296.6 297.9 300.2 299.7 300.0 299.9 299.1 298.1 297.6 297.2 296.7 299.9 303.7 310.4 320.5 331.9 339.3 342.8 342.4 340.0 335.8 330.9 327.0 324.7 323.7 324.2 326.6 327.9 331.1 334.1 336.0 337.1 336.4 334.7 329.3 325.1 321.4 318.3 318.8 322.6 328.2 332.1 336.5 338.5 338.9 339.4 336.3 333.0 329.3 326.7 324.6 322.7 323.6 325.7 328.8 330.7 332.7 334.6 334.9 332.2 328.5 326.5 326.6 327.8 329.0 328.7 326.3 326.7 328.2 330.2 335.0 339.9 338.9 336.6 332.9 330.1 328.9 326.5 324.9 316.2 306.2 298.4 295.0 292.3 290.7 290.5 290.5 288.5 294.5", + "f0_timestep": "0.011609977324263039", + "energy": "0.0008 0.0003 0.0015 0.0012 0.0019 0.0029 0.0037 0.0041 0.0048 0.0053 0.0054 0.0061 0.0063 0.0066 0.0069 0.0067 0.006 0.0059 0.0052 0.0053 0.0044 0.0035 0.003 0.0025 0.0021 0.0027 0.0032 0.004 0.0076 0.0118 0.016 0.0223 0.0497 0.0707 0.0877 0.1014 0.1023 0.1011 0.1036 0.1024 0.1033 0.1037 0.1015 0.0981 0.0885 0.0736 0.0569 0.0387 0.0257 0.0215 0.021 0.0212 0.0375 0.059 0.0774 0.0937 0.1014 0.103 0.1039 0.1019 0.102 0.1017 0.0989 0.0989 0.0993 0.0994 0.101 0.099 0.0993 0.0995 0.0993 0.1003 0.1018 0.1051 0.1094 0.1135 0.1172 0.1206 0.122 0.1235 0.123 0.1192 0.1147 0.1083 0.102 0.0947 0.089 0.0818 0.078 0.0772 0.0771 0.0802 0.082 0.0847 0.0877 0.0899 0.0917 0.0924 0.0904 0.0892 0.0882 0.0872 0.0868 0.0879 0.0884 0.0903 0.093 0.0944 0.0964 0.0975 0.0965 0.0957 0.0921 0.0891 0.0876 0.0833 0.0844 0.086 0.0849 0.0867 0.0857 0.0856 0.0849 0.0828 0.0802 0.0755 0.0733 0.0712 0.0693 0.0698 0.0683 0.0683 0.0668 0.0653 0.0641 0.0632 0.0622 0.0601 0.0574 0.0546 0.0517 0.0492 0.0467 0.0451 0.0454 0.0446 0.0441 0.0432 0.0418 0.0416 0.0404 0.0385 0.0361 0.0322 0.0269 0.0204 0.0123 0.0061 0.0034 0.0014 0.0 0.0 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0003 0.0002 0.0005 0.0009 
0.0016 0.0022 0.003 0.0041 0.0045 0.0054 0.006 0.0065 0.007 0.0068 0.0067 0.0065 0.006 0.0059 0.0055 0.005 0.0041 0.0032 0.0026 0.0016 0.0017 0.0015 0.0018 0.0024 0.0044 0.0109 0.0142 0.0153 0.0152 0.012 0.0069 0.0042 0.0032 0.0024 0.0021 0.002 0.0021 0.0023 0.0023 0.0028 0.0041 0.0103 0.0153 0.0178 0.0209 0.0224 0.0222 0.0218 0.0192 0.0138 0.0091 0.0048 0.0034 0.0023 0.002 0.0018 0.0019 0.0019 0.0015 0.0012 0.0009 0.0011 0.0013 0.0013 0.0013 0.0013 0.0009 0.0013 0.0013 0.0019 0.0024 0.0024 0.003 0.0028 0.0026 0.0028 0.0025 0.0025 0.0023 0.0021 0.0018 0.0019 0.0019 0.0018 0.002 0.0017 0.0019 0.002 0.0018 0.0018 0.0017 0.0015 0.002 0.0017 0.0019 0.0019 0.0017 0.0017 0.0018 0.0018 0.0016 0.0017 0.0018 0.0019 0.0022 0.0023 0.0023 0.0024 0.0021 0.002 0.0017 0.0016 0.0016 0.0013 0.0014 0.0014 0.0014 0.0014 0.0012 0.0013 0.001 0.001 0.0012 0.0011 0.001 0.0009 0.0007 0.0008 0.0006 0.0006 0.0005 0.0006 0.0006 0.0006 0.0008 0.0006 0.0007 0.0006 0.0006 0.0004 0.0003 0.0004 0.0003 0.0005 0.0003 0.0002 0.0003 0.0004 0.0004 0.0004 0.0005 0.0009 0.0029 0.0029 0.0027 0.0027 0.0013 0.0006 0.0003 0.0004", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 187.715, + "text": "AP 不 作 答 AP 转 身 向 云 外 寄 生 涯 SP", + "ph_seq": "AP b u z uo d a AP zh uan sh en x iang y vn w ai j i sh eng y a SP", + "ph_dur": "0.335 0.045 0.178 0.06 0.178 0.06 0.953 0.163 0.075 0.163 0.075 0.163 0.075 0.163 0.075 0.579 0.135 0.164 0.075 0.371 0.105 0.163 0.075 0.952 0.095", + "ph_num": "2 2 2 1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 E4 A3 rest B3 A3 B3 D4 B3 A3 B3 G3 rest", + "note_dur": "0.38 0.238 0.238 0.953 0.238 0.238 0.238 0.238 0.714 0.239 0.476 0.238 0.952 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "207.9 208.6 208.4 208.6 208.9 209.4 208.9 208.9 208.9 208.3 208.4 208.4 208.5 208.2 208.2 208.3 208.2 207.9 207.9 208.2 208.1 208.3 208.0 208.1 208.5 207.8 207.7 207.4 207.3 207.0 206.9 206.5 207.5 212.3 227.4 242.8 256.7 268.8 282.9 290.8 296.1 298.9 299.0 296.0 294.2 292.3 287.4 276.6 269.8 279.5 290.4 300.4 312.7 325.3 327.5 330.0 330.5 332.6 334.0 335.4 335.2 332.0 326.4 321.5 316.1 304.1 279.2 268.5 259.6 249.8 240.1 230.6 222.4 213.1 208.8 209.9 213.5 218.6 222.0 223.4 221.7 218.6 214.1 209.8 206.1 204.2 202.3 203.9 210.1 217.6 224.0 228.4 229.3 228.8 226.4 220.7 214.1 208.4 205.3 205.2 208.4 213.3 217.7 222.1 227.3 230.3 231.0 230.4 227.0 222.2 216.3 211.4 208.9 208.0 209.9 213.9 218.9 225.3 229.8 232.5 233.6 232.4 226.8 218.4 210.6 204.4 202.3 202.0 206.5 211.8 218.0 225.8 231.7 234.6 235.4 234.4 231.1 222.3 212.0 202.0 198.5 199.5 204.7 213.3 223.6 234.6 242.9 246.2 245.6 238.7 225.6 211.1 192.8 190.6 188.8 187.6 186.7 186.4 186.0 186.1 186.0 185.8 185.6 185.0 183.2 182.2 180.8 180.4 180.8 179.8 179.3 177.9 177.1 176.6 175.2 174.2 173.6 174.8 181.1 189.8 197.7 204.0 215.2 226.6 237.6 246.5 250.1 248.9 246.0 241.3 232.1 222.6 225.5 226.1 228.2 231.9 234.5 230.7 225.0 222.1 217.8 216.9 216.6 218.2 218.7 219.1 219.4 218.7 217.1 214.0 208.8 214.9 221.3 227.9 233.5 240.0 247.3 256.0 255.6 248.0 247.0 246.3 245.4 245.5 245.4 244.2 245.1 245.8 247.4 248.5 249.8 248.3 245.6 241.7 244.5 248.9 256.4 258.4 259.3 260.2 261.1 261.9 261.8 261.9 261.6 261.4 261.1 261.4 260.6 261.0 262.1 266.1 271.3 280.4 288.7 297.4 302.8 304.9 304.1 302.3 299.1 295.2 290.8 288.6 286.0 284.5 285.1 287.4 291.4 296.0 300.1 302.0 300.5 300.2 299.4 296.9 293.7 290.5 286.4 284.6 284.3 284.9 287.0 288.5 290.6 294.7 298.8 301.0 298.0 292.3 288.6 282.5 275.1 268.2 261.3 257.0 252.0 250.5 247.6 246.6 248.6 247.0 
247.0 247.9 247.9 247.6 246.3 246.6 246.7 245.3 242.1 239.2 232.5 225.0 217.3 214.2 213.0 211.8 211.0 210.3 210.9 213.6 216.9 218.3 218.4 218.4 217.4 217.2 218.1 217.9 219.2 217.2 218.4 217.7 218.5 218.2 217.9 218.8 219.3 221.1 220.7 220.6 220.6 220.4 219.6 220.0 220.0 220.3 220.7 219.9 214.3 206.6 212.7 218.9 226.2 233.3 240.4 247.9 254.7 262.9 270.8 259.5 253.4 249.0 246.9 246.0 245.9 246.2 247.3 247.1 247.4 247.3 246.0 243.3 237.8 229.5 216.7 203.2 192.1 184.3 181.7 184.3 188.3 191.0 194.5 195.5 197.2 198.0 197.6 196.2 194.3 193.2 191.9 191.1 192.4 194.3 196.8 199.5 202.6 204.3 206.2 206.0 202.8 196.5 190.6 186.4 184.8 184.6 187.1 190.5 195.0 199.2 203.2 206.1 207.0 206.6 204.1 199.1 192.9 186.4 182.1 181.8 182.9 187.8 193.7 199.7 203.9 206.7 207.7 207.4 205.1 200.6 194.7 187.9 182.2 179.9 180.5 182.7 187.0 192.5 197.2 200.5 202.2 203.6 203.4 201.1 197.0 191.6 184.6 181.7 180.7 182.8 187.9 193.1 197.2 199.3 200.1 201.9 202.4 202.0 198.6 195.3 194.0 193.8 193.3 191.5 189.3 188.4 187.6 186.7 186.1 186.8", + "f0_timestep": "0.011609977324263039", + "energy": "0.001 0.0019 0.0008 0.0015 0.0014 0.0021 0.0021 0.0025 0.0035 0.0037 0.0039 0.0044 0.0044 0.0054 0.0055 0.0058 0.0062 0.0057 0.0051 0.0044 0.0045 0.0036 0.0035 0.0031 0.0023 0.0017 0.0009 0.0005 0.0009 0.0025 0.0215 0.0372 0.0509 0.0628 0.0688 0.0712 0.0699 0.0684 0.0667 0.0648 0.0618 0.0596 0.0596 0.059 0.0619 0.0629 0.057 0.0493 0.0348 0.0193 0.0126 0.0138 0.0178 0.0383 0.059 0.0726 0.0833 0.0871 0.0864 0.0871 0.0863 0.0852 0.083 0.0767 0.0653 0.0507 0.0327 0.01 0.0021 0.0005 0.0031 0.0103 0.0332 0.0477 0.0601 0.0685 0.071 0.0721 0.0744 0.0754 0.0749 0.0752 0.0733 0.0709 0.0698 0.065 0.0604 0.0568 0.0546 0.0571 0.0597 0.064 0.0658 0.0665 0.066 0.0662 0.0642 0.0599 0.0554 0.0487 0.0456 0.045 0.046 0.0476 0.049 0.0514 0.0538 0.0551 0.0564 0.0542 0.0506 0.047 0.0432 0.0412 0.0405 0.04 0.0397 0.04 0.0412 0.0426 0.0435 0.0437 0.0424 0.0405 0.0371 0.0341 0.0317 0.0295 0.0292 0.0287 0.0288 0.029 0.0304 0.0316 0.0324 0.0329 0.0314 0.0301 0.0276 0.0248 0.0219 0.0202 0.0183 0.0173 0.0167 0.0185 0.0214 0.023 0.0239 0.0224 0.0204 0.0171 0.0137 0.0091 0.0048 0.0027 0.0023 0.0021 0.002 0.0022 0.0026 0.0027 0.0029 0.003 0.0025 0.0021 0.0022 0.002 0.002 0.0018 0.002 0.0025 0.0045 0.0072 0.0121 0.0305 0.0504 0.0642 0.0747 0.0783 0.075 0.0674 0.0588 0.0527 0.05 0.0538 0.0564 0.0562 0.0554 0.0491 0.0415 0.0326 0.022 0.0166 0.0157 0.0153 0.0385 0.0539 0.0656 0.0769 0.0776 0.0774 0.0763 0.0745 0.0712 0.071 0.0704 0.0686 0.0681 0.06 0.048 0.0363 0.0234 0.0191 0.0195 0.0202 0.0313 0.0524 0.069 0.084 0.0919 0.0923 0.0923 0.0888 0.0867 0.0838 0.081 0.0826 0.0863 0.079 0.3494 0.0876 0.0817 0.0755 0.0729 0.0721 0.0795 0.0855 0.09 0.0945 0.0968 0.0987 0.0993 0.1007 0.1015 0.1005 0.1005 0.0983 0.0931 0.0877 0.0809 0.0759 0.0738 0.0756 0.0802 0.0846 0.0883 0.0894 0.0901 0.0911 0.0922 0.0909 0.0883 0.0847 0.0798 0.078 0.0778 0.0772 0.0777 0.0785 0.0788 0.0794 0.0794 0.0798 0.08 0.0817 0.0821 0.082 0.082 0.0829 0.0837 0.0839 0.0831 0.0808 0.0784 0.077 0.075 0.0731 0.0702 0.0678 0.0654 0.0626 0.0605 0.0583 0.0558 0.0548 0.0547 0.0598 0.0672 0.0751 0.0809 0.0823 0.081 0.0796 0.0782 0.0772 0.0756 0.0727 0.0717 0.0687 0.0617 0.0522 0.0397 0.0225 0.0128 0.011 0.0119 0.0135 0.0219 0.0337 0.0443 0.0552 0.0632 0.0664 0.0684 0.0692 0.0703 0.0695 0.0697 0.0688 0.0681 0.0677 0.0673 0.0688 0.0691 0.0705 0.0724 0.0726 0.0726 0.073 0.0733 0.0744 0.0744 0.076 0.0747 0.0734 0.0731 0.0696 0.067 0.0614 0.0516 0.0392 0.0267 0.0151 0.0127 0.0129 0.0138 0.0131 0.0101 0.0279 0.0438 
0.0565 0.067 0.0717 0.0716 0.0703 0.0698 0.0681 0.0661 0.0651 0.0642 0.0651 0.0664 0.067 0.0664 0.0644 0.0609 0.058 0.0536 0.0508 0.0515 0.0558 0.0603 0.0648 0.0685 0.0727 0.0768 0.0793 0.0803 0.0801 0.0787 0.0763 0.0735 0.07 0.0656 0.0637 0.0628 0.0618 0.0635 0.0668 0.0688 0.0706 0.0689 0.066 0.0628 0.0589 0.0554 0.052 0.0492 0.0477 0.0484 0.0492 0.0505 0.0545 0.0564 0.0565 0.0552 0.0507 0.0465 0.0404 0.0364 0.0329 0.0301 0.029 0.0289 0.0321 0.0337 0.0376 0.0415 0.0422 0.0443 0.0426 0.0387 0.0344 0.0295 0.0254 0.0238 0.0219 0.0206 0.0204 0.0218 0.0237 0.0262 0.027 0.028 0.0277 0.0268 0.0244 0.0219 0.0187 0.0164 0.0147 0.0129 0.0132 0.0132 0.014 0.0139 0.0142 0.0136 0.0133 0.0126 0.0108 0.0091 0.0065 0.0036 0.0026 0.0019 0.0014 0.0006 0.0008 0.0 0.0008", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0004 0.0005 0.001 0.0014 0.0022 0.0027 0.0035 0.0039 0.0042 0.0045 0.0047 0.0049 0.0054 0.0058 0.0058 0.006 0.006 0.0056 0.0056 0.0053 0.0043 0.0034 0.0032 0.0027 0.0023 0.0018 0.0012 0.0007 0.0008 0.002 0.0023 0.0024 0.0026 0.0022 0.0019 0.002 0.0019 0.002 0.0016 0.0012 0.0008 0.0005 0.0005 0.0004 0.0008 0.0011 0.0011 0.0022 0.0049 0.008 0.0115 0.0149 0.0156 0.0145 0.012 0.0059 0.0019 0.0015 0.0012 0.0011 0.001 0.0011 0.0011 0.001 0.0012 0.0021 0.0022 0.0019 0.0011 0.0007 0.0027 0.0043 0.0046 0.0045 0.0034 0.0025 0.0025 0.0025 0.0024 0.002 0.0018 0.002 0.0021 0.0023 0.002 0.0019 0.0018 0.0016 0.0015 0.0016 0.0017 0.0019 0.0019 0.0018 0.0017 0.0019 0.0018 0.0016 0.0014 0.0015 0.0012 0.0013 0.0015 0.0016 0.0014 0.0015 0.0014 0.0013 0.0013 0.0012 0.0013 0.0013 0.0011 0.0012 0.0011 0.001 0.0009 0.001 0.001 0.0007 0.001 0.0009 0.001 0.0013 0.0013 0.0012 0.0011 0.0009 0.0009 0.0009 0.0008 0.0007 0.0008 0.0006 0.0007 0.0008 0.0009 0.0007 0.0006 0.0008 0.0007 0.0007 0.0006 0.0004 0.0004 0.0002 0.0003 0.0005 0.0004 0.0006 0.0005 0.001 0.0012 0.0012 0.0018 0.0015 0.0013 0.0013 0.0016 0.002 0.0024 0.0028 0.0028 0.0031 0.0026 0.002 0.0018 0.0013 0.0017 0.0018 0.0015 0.0019 0.0046 0.0069 0.0074 0.0073 0.0061 0.0045 0.0031 0.003 0.0032 0.0029 0.0024 0.0022 0.0017 0.0014 0.0014 0.0013 0.0011 0.0011 0.0035 0.0071 0.0117 0.0152 0.0164 0.0148 0.0115 0.0072 0.0031 0.0024 0.0022 0.0019 0.0016 0.0013 0.0011 0.0012 0.0013 0.0015 0.0019 0.0041 0.0094 0.0131 0.0163 0.0185 0.0188 0.0179 0.0164 0.013 0.0095 0.0054 0.0028 0.0023 0.0027 0.0022 0.0018 0.0017 0.0018 0.0017 0.0019 0.0022 0.0387 0.0011 0.0022 0.0018 0.0013 0.0015 0.0019 0.002 0.0025 0.002 0.0019 0.0017 0.0023 0.0023 0.0025 0.0024 0.002 0.0019 0.0022 0.0019 0.0018 0.0021 0.0015 0.0014 0.0016 0.0015 0.0017 0.0015 0.0014 0.0016 0.0015 0.0015 0.0013 0.0012 0.0009 0.001 0.001 0.001 0.0009 0.001 0.0009 0.0008 0.0008 0.0006 0.0008 0.001 0.0008 0.0007 0.0005 0.0006 0.0006 0.0007 0.0008 0.0007 0.0008 0.0008 0.0008 0.0008 0.0007 0.0006 0.0004 0.0005 0.0005 0.0006 0.0006 0.0007 0.0009 0.001 0.0012 0.0016 0.0016 0.0018 0.002 0.0022 0.0021 0.0022 0.0023 0.0022 0.0022 0.0018 0.0017 0.0017 0.0027 0.0044 0.0067 0.0094 0.0116 0.0121 0.0112 0.0088 0.0052 0.0028 0.002 0.0018 0.0016 0.0015 0.0017 0.0018 0.0018 0.0017 0.0018 0.0022 0.0021 0.0022 0.0021 0.0018 0.0018 0.0016 0.0018 0.0019 0.0018 0.0018 0.0017 0.0019 0.0019 0.002 0.002 0.0019 0.0019 0.0021 0.0042 0.0073 0.0099 0.0115 0.0129 0.0136 0.0141 0.014 0.0126 0.0102 0.0068 0.0026 0.0014 0.0013 0.001 0.0012 0.0011 0.001 0.0007 0.0006 0.0006 0.0004 0.0003 0.0006 0.0006 0.0004 0.0006 0.0006 0.0006 0.0005 0.001 0.0015 0.0019 0.002 0.0021 0.0019 0.0019 0.0018 0.0019 0.0019 0.0019 0.0018 0.0017 0.0016 0.0016 
0.0018 0.0019 0.0019 0.0017 0.0013 0.0014 0.0013 0.0015 0.0015 0.0016 0.0015 0.0019 0.0013 0.0012 0.0015 0.0014 0.0015 0.0014 0.0012 0.001 0.0011 0.0012 0.0013 0.0011 0.0012 0.001 0.0011 0.0008 0.0006 0.0006 0.0007 0.0007 0.0007 0.0007 0.0009 0.0009 0.0009 0.001 0.001 0.0007 0.0007 0.0008 0.0006 0.0006 0.0007 0.0004 0.0006 0.0007 0.0006 0.0008 0.0006 0.0008 0.0005 0.0007 0.0004 0.0003 0.0003 0.0003 0.0001 0.0003 0.0002 0.0003 0.0003 0.0002 0.0002 0.0001 0.0005 0.001 0.0012 0.0015 0.0015 0.0011 0.0009 0.0004 0.0005 0.0001 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 193.191, + "text": "AP 不 必 英 雄 名 讳 AP 记 两 个 旧 事 抵 酒 价 SP", + "ph_seq": "AP b u b i y ing x iong m ing h ui AP j i l iang g e j iu sh ir d i j iu j ia SP", + "ph_dur": "0.335 0.045 0.194 0.045 0.178 0.06 0.163 0.075 0.371 0.105 0.163 0.075 0.238 0.148 0.09 0.178 0.06 0.193 0.045 0.163 0.075 0.357 0.12 0.193 0.045 0.579 0.135 0.386 0.09 1.429 0.095", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest G3 E3 G3 A3 B3 A3 rest G3 E3 G3 A3 B3 D4 D3 E3 rest", + "note_dur": "0.38 0.239 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.477 0.238 0.714 0.476 1.429 0.095", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "143.9 144.1 144.1 144.2 143.9 144.0 143.9 144.0 144.3 144.1 144.1 143.8 143.8 143.9 143.7 143.5 143.5 143.6 143.6 143.5 143.4 143.4 143.3 143.4 143.5 143.2 143.4 143.3 142.8 142.5 142.5 142.5 142.2 142.2 145.9 151.2 156.1 167.0 177.6 188.4 192.3 194.6 196.1 195.5 192.5 188.4 183.5 175.7 174.7 175.2 174.1 173.6 173.8 169.0 165.1 162.9 162.7 163.3 163.9 164.3 164.4 164.5 164.9 165.0 164.8 165.4 166.2 165.9 166.9 168.7 170.3 172.4 181.1 187.0 192.5 194.0 196.6 196.7 196.2 196.7 196.1 196.3 197.3 197.5 197.4 197.2 193.5 186.7 190.4 195.7 201.8 208.3 216.0 224.2 230.8 230.6 227.2 226.9 223.3 220.5 217.2 212.3 208.2 208.2 208.9 211.2 216.2 222.5 226.0 230.2 229.6 225.6 218.1 210.3 203.8 201.8 202.3 203.4 207.5 212.3 218.2 222.7 225.1 225.4 223.6 220.9 217.1 215.9 221.3 224.2 227.5 231.3 233.2 237.1 241.6 244.9 245.9 247.5 247.7 246.8 246.8 247.0 247.0 248.5 247.5 246.1 243.3 236.6 226.7 215.8 215.8 217.5 219.9 222.0 224.7 226.4 223.7 225.6 226.8 226.9 225.1 222.0 216.3 209.1 201.9 197.2 197.0 199.4 206.0 215.0 222.2 228.5 232.0 230.9 224.9 217.7 212.4 206.0 203.5 200.2 198.1 195.8 194.2 192.9 191.0 189.3 187.7 185.8 184.9 183.1 181.8 179.6 178.2 176.8 175.0 173.2 171.8 170.1 169.9 170.6 170.6 169.9 171.2 175.6 184.2 191.7 197.3 199.9 199.5 197.9 194.8 189.0 183.5 177.1 171.9 167.8 166.5 166.3 164.1 162.8 162.8 163.0 164.5 166.3 166.4 166.6 166.8 166.9 166.8 165.4 163.6 159.5 156.1 158.0 164.9 177.1 189.4 198.0 195.5 196.4 196.7 195.4 195.0 195.2 195.1 195.2 195.3 194.4 193.9 192.1 188.0 181.0 184.8 188.1 192.0 195.3 198.3 200.7 202.2 200.9 197.5 197.6 197.6 198.3 198.5 197.2 195.9 195.4 194.8 194.3 195.5 199.6 207.7 214.7 218.4 220.3 220.2 219.2 216.7 215.3 214.5 212.8 213.3 214.3 216.6 219.1 222.2 222.9 222.9 218.4 212.8 217.7 223.1 228.5 234.5 240.2 245.9 251.1 257.7 264.9 252.9 249.1 249.0 249.0 247.7 246.6 245.8 245.7 246.2 247.1 247.2 247.1 246.4 231.8 228.2 232.1 242.7 254.3 268.4 264.8 261.8 262.3 263.0 262.5 262.0 261.8 261.8 262.4 262.9 261.5 262.2 263.7 265.0 267.1 267.9 274.7 283.2 291.6 297.2 298.3 297.6 296.3 293.4 289.7 288.1 287.3 286.7 288.8 290.6 293.3 294.6 295.4 295.3 294.4 293.5 290.3 286.9 283.9 282.9 283.9 285.5 287.6 290.7 293.6 296.8 296.2 292.8 286.0 279.4 266.2 254.1 244.8 234.6 224.8 214.8 205.6 196.9 189.6 181.7 174.7 167.6 156.3 
148.2 143.8 143.2 143.7 143.5 143.4 143.4 143.6 142.7 140.9 140.1 139.8 141.9 143.3 144.4 145.4 146.6 147.5 148.0 148.1 147.8 148.6 148.0 147.0 146.8 146.9 147.3 148.7 148.3 147.7 145.6 141.3 140.9 142.0 143.4 144.9 146.3 147.9 149.7 151.6 151.1 150.7 146.2 145.6 146.0 146.4 147.6 147.7 148.3 147.9 146.7 145.9 145.6 145.8 145.8 146.1 146.1 146.8 147.5 148.4 150.5 153.4 159.4 164.9 169.5 171.3 171.2 170.2 168.5 164.8 161.0 157.6 155.2 155.4 157.6 162.0 166.8 170.6 172.8 174.6 175.5 174.6 171.8 169.0 164.9 160.2 157.6 156.7 156.5 157.9 160.9 164.6 168.1 171.5 173.9 175.0 174.5 173.1 170.7 167.1 162.9 159.4 158.3 158.2 158.2 159.6 162.7 165.9 168.9 171.6 174.0 174.7 174.9 173.9 170.9 166.5 161.4 158.4 157.5 156.9 157.3 159.8 163.4 167.3 171.0 175.3 177.6 177.7 177.0 174.2 169.6 162.6 156.7 152.0 150.3 151.2 153.4 157.6 163.6 169.7 175.0 179.0 180.3 178.6 174.3 169.2 163.1 155.0 151.3 150.4 148.9 148.7 151.5 155.1 160.3 164.0 166.8 169.3 171.2 171.6 170.5 168.8 165.6 163.4 164.1 163.5 162.1 161.0 161.2 160.4 160.4 160.5", + "f0_timestep": "0.011609977324263039", + "energy": "0.001 0.001 0.0016 0.0018 0.0031 0.0042 0.0052 0.0057 0.0062 0.0065 0.0069 0.008 0.0076 0.0081 0.0074 0.0077 0.0075 0.0066 0.0062 0.0054 0.0044 0.0038 0.0038 0.0033 0.0029 0.0022 0.0019 0.0016 0.0016 0.0047 0.0199 0.0393 0.0556 0.0692 0.0762 0.079 0.0755 0.0705 0.0679 0.0647 0.0648 0.0663 0.0666 0.0664 0.065 0.0611 0.0534 0.0421 0.029 0.013 0.0103 0.0263 0.048 0.0605 0.0702 0.0764 0.0765 0.077 0.0764 0.0755 0.0761 0.0766 0.0782 0.0792 0.079 0.0771 0.0765 0.0713 0.0655 0.0592 0.0527 0.0518 0.0593 0.0701 0.0771 0.0817 0.0846 0.081 0.0803 0.0817 0.0806 0.0782 0.0785 0.0746 0.0713 0.0682 0.058 0.0462 0.0337 0.0225 0.0185 0.0205 0.019 0.0322 0.0602 0.0785 0.0917 0.0996 0.0957 0.0925 0.0889 0.0862 0.0831 0.0783 0.0755 0.0719 0.0696 0.0672 0.0664 0.0649 0.0656 0.0667 0.0682 0.0739 0.0729 0.0727 0.0698 0.0666 0.067 0.0682 0.0684 0.0694 0.0691 0.0666 0.0672 0.0666 0.067 0.0686 0.0706 0.0726 0.0753 0.0773 0.0779 0.0808 0.0832 0.084 0.0861 0.0859 0.0849 0.0858 0.0846 0.0829 0.0816 0.0784 0.0775 0.0738 0.0702 0.0605 0.0468 0.0326 0.0158 0.0098 0.0094 0.0091 0.0249 0.0448 0.0597 0.0754 0.0835 0.0866 0.0865 0.0847 0.0818 0.0754 0.0706 0.0624 0.0555 0.0528 0.0498 0.0497 0.049 0.0489 0.0489 0.047 0.0419 0.0347 0.0252 0.0163 0.0079 0.0048 0.0046 0.0046 0.0047 0.0043 0.0028 0.0034 0.0034 0.0026 0.0023 0.0018 0.0026 0.0045 0.0072 0.0102 0.0115 0.0261 0.0425 0.055 0.0628 0.0662 0.0641 0.0612 0.06 0.057 0.0567 0.0578 0.0613 0.065 0.0671 0.0684 0.0679 0.0664 0.064 0.0609 0.0578 0.0567 0.0579 0.0605 0.063 0.0665 0.066 0.065 0.0647 0.0613 0.0596 0.0579 0.0553 0.0563 0.0549 0.0517 0.0462 0.0366 0.0269 0.0165 0.0098 0.0115 0.0331 0.0515 0.0636 0.0737 0.0775 0.0784 0.0772 0.076 0.0745 0.0715 0.0702 0.0663 0.0575 0.0465 0.0323 0.0152 0.0068 0.0098 0.0121 0.0128 0.0117 0.0297 0.0482 0.0623 0.0754 0.081 0.0841 0.0846 0.0842 0.0816 0.0782 0.0723 0.0641 0.0584 0.053 0.052 0.0553 0.0587 0.0613 0.0629 0.0642 0.0654 0.0661 0.0665 0.0669 0.0678 0.0675 0.067 0.0668 0.0661 0.0641 0.0601 0.0516 0.0413 0.0301 0.0182 0.0151 0.0153 0.0169 0.0179 0.0172 0.0153 0.0175 0.0363 0.0498 0.0609 0.0715 0.0747 0.0772 0.0774 0.0775 0.0772 0.0762 0.077 0.0755 0.0695 0.0589 0.0452 0.0277 0.0128 0.0054 0.0084 0.0324 0.0527 0.0672 0.081 0.0872 0.0895 0.0919 0.0929 0.0932 0.094 0.0917 0.0889 0.0868 0.0824 0.0777 0.0717 0.0675 0.0669 0.0692 0.0725 0.0733 0.0745 0.0743 0.075 0.0777 0.0776 0.0772 0.077 0.0771 0.0768 0.0785 0.0801 0.0801 0.081 0.0801 0.08 0.0799 0.0786 0.0785 0.0779 
0.0765 0.0754 0.0747 0.073 0.0719 0.0717 0.0713 0.0725 0.0715 0.0659 0.0554 0.0397 0.0233 0.0092 0.0087 0.0101 0.011 0.0116 0.0115 0.0104 0.0082 0.009 0.0236 0.0398 0.0506 0.0599 0.0627 0.0612 0.0615 0.0598 0.06 0.0596 0.0587 0.0566 0.0553 0.0544 0.0525 0.0522 0.0527 0.0529 0.0551 0.0558 0.0566 0.057 0.0563 0.0575 0.0579 0.0576 0.0583 0.0581 0.0581 0.0567 0.0549 0.0489 0.041 0.03 0.0184 0.0097 0.0084 0.009 0.0093 0.0088 0.0073 0.0193 0.0411 0.0523 0.0604 0.0671 0.0649 0.0655 0.0665 0.0672 0.0667 0.0656 0.0652 0.0638 0.0637 0.0655 0.0656 0.0648 0.0652 0.0615 0.0582 0.056 0.0516 0.0503 0.0504 0.0502 0.053 0.0539 0.0548 0.0561 0.0553 0.0543 0.0526 0.0498 0.0473 0.0455 0.0439 0.0446 0.0459 0.0482 0.0509 0.0545 0.0567 0.0572 0.0566 0.0541 0.0516 0.0497 0.0475 0.0466 0.0446 0.0438 0.0421 0.0439 0.0459 0.0479 0.0515 0.0529 0.0529 0.0519 0.0492 0.0452 0.0425 0.0416 0.041 0.04 0.0399 0.0395 0.039 0.0404 0.0415 0.0442 0.0464 0.0479 0.0486 0.0476 0.0447 0.0412 0.0388 0.0377 0.0357 0.0348 0.0338 0.0333 0.034 0.035 0.036 0.0377 0.04 0.0405 0.0403 0.0379 0.0349 0.0317 0.0285 0.027 0.0246 0.0243 0.023 0.0224 0.0226 0.0218 0.0222 0.023 0.0225 0.0219 0.021 0.0193 0.0183 0.0172 0.0152 0.0143 0.0129 0.0128 0.0123 0.013 0.0139 0.0141 0.0138 0.0133 0.0128 0.0121 0.0115 0.0102 0.0088 0.0051 0.0031 0.0012 0.0009 0.0011 0.0005 0.0009 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0006 0.0012 0.0023 0.0035 0.0048 0.0057 0.0063 0.0066 0.0072 0.0072 0.0079 0.0084 0.008 0.0079 0.008 0.0077 0.0075 0.0067 0.0055 0.0042 0.0037 0.0036 0.0033 0.0028 0.0022 0.0019 0.0016 0.002 0.0025 0.0037 0.0042 0.0034 0.0031 0.0012 0.0007 0.0008 0.001 0.0008 0.0008 0.0007 0.0005 0.0003 0.0005 0.0003 0.0008 0.0009 0.0014 0.0016 0.0019 0.0015 0.0013 0.0017 0.002 0.002 0.002 0.002 0.0019 0.0019 0.002 0.0019 0.0015 0.0015 0.0018 0.0018 0.0019 0.0019 0.0017 0.002 0.0024 0.0026 0.0026 0.0027 0.0024 0.0022 0.0022 0.0023 0.0019 0.002 0.0016 0.0012 0.0012 0.0012 0.0013 0.0016 0.0022 0.0062 0.0134 0.0167 0.0193 0.0211 0.0197 0.019 0.0171 0.0131 0.01 0.0056 0.0032 0.0023 0.0022 0.002 0.0017 0.0014 0.0016 0.0014 0.0011 0.0012 0.0009 0.001 0.0008 0.001 0.0008 0.001 0.0012 0.001 0.0009 0.0006 0.0007 0.0007 0.0007 0.0007 0.0005 0.0005 0.0006 0.0006 0.0005 0.0004 0.0003 0.0005 0.0006 0.0005 0.0006 0.0006 0.0008 0.001 0.0013 0.0011 0.0011 0.0011 0.001 0.0008 0.0005 0.0005 0.0005 0.0003 0.0005 0.0004 0.0007 0.0034 0.0075 0.0098 0.0114 0.0117 0.0095 0.0072 0.0046 0.0022 0.0015 0.0014 0.0015 0.0018 0.002 0.0023 0.0022 0.002 0.002 0.0018 0.0015 0.0016 0.0014 0.0013 0.0012 0.001 0.001 0.0013 0.0015 0.0017 0.0016 0.0023 0.0029 0.0032 0.0037 0.0041 0.0041 0.0038 0.0033 0.0022 0.0013 0.0011 0.0014 0.0022 0.0044 0.0077 0.01 0.0108 0.0107 0.009 0.007 0.0052 0.0034 0.0021 0.0018 0.0015 0.0019 0.002 0.0019 0.0017 0.0018 0.0014 0.0012 0.0012 0.0013 0.0012 0.0012 0.0013 0.0013 0.0016 0.0016 0.0016 0.0021 0.0019 0.0021 0.002 0.0017 0.0015 0.0011 0.0009 0.0009 0.0008 0.0006 0.0005 0.0006 0.0012 0.0034 0.0047 0.0051 0.0049 0.0044 0.0023 0.0015 0.0013 0.0012 0.0015 0.0014 0.0015 0.0015 0.0017 0.0018 0.0017 0.0022 0.0025 0.0037 0.0059 0.0095 0.0121 0.0132 0.013 0.0114 0.008 0.0057 0.0046 0.0015 0.0015 0.0014 0.0015 0.0015 0.0013 0.0012 0.0011 0.0011 0.0011 0.001 0.0008 0.0009 0.0007 0.0006 0.0006 0.0008 0.0008 0.0007 0.0007 0.0007 0.0006 0.0006 0.0006 0.0005 0.0006 0.0004 0.0014 0.0044 0.0071 0.0106 0.0133 0.015 0.0167 0.0177 0.0183 0.0175 0.0155 0.0122 0.008 0.0049 0.0025 0.0019 0.0017 0.0013 0.0013 0.0013 0.0014 0.0013 0.0011 0.0012 
0.0014 0.0011 0.0015 0.0014 0.0028 0.005 0.0055 0.0058 0.0051 0.0031 0.0025 0.0023 0.0021 0.002 0.0018 0.0019 0.0019 0.0022 0.0023 0.0022 0.0018 0.0019 0.0014 0.0015 0.0017 0.0018 0.0019 0.0018 0.0014 0.0014 0.0013 0.0014 0.0013 0.0013 0.0012 0.0013 0.0015 0.0016 0.0016 0.0015 0.0014 0.0013 0.0012 0.0012 0.0015 0.0015 0.0013 0.0013 0.0015 0.0014 0.0016 0.0018 0.0019 0.0017 0.0016 0.0016 0.0018 0.0027 0.0036 0.0056 0.0083 0.0104 0.0117 0.0132 0.0132 0.0122 0.0105 0.0084 0.0063 0.005 0.0041 0.0031 0.0017 0.0009 0.001 0.0007 0.0009 0.001 0.001 0.0009 0.001 0.0011 0.0009 0.0009 0.0008 0.0008 0.0009 0.0008 0.0008 0.0008 0.0008 0.0009 0.0009 0.0009 0.0007 0.0009 0.0006 0.0006 0.0006 0.0006 0.0008 0.0022 0.0035 0.0057 0.0074 0.009 0.0098 0.0095 0.0084 0.007 0.0045 0.0027 0.0018 0.0021 0.0018 0.0021 0.0017 0.0016 0.0015 0.0015 0.0018 0.0016 0.0015 0.0015 0.0014 0.0014 0.0015 0.0013 0.0017 0.0017 0.0018 0.002 0.0019 0.0021 0.0022 0.0019 0.0018 0.0018 0.002 0.0022 0.0023 0.0022 0.0016 0.0014 0.0013 0.0012 0.0013 0.0015 0.0015 0.0014 0.0016 0.0018 0.0018 0.0018 0.0017 0.0015 0.0017 0.0015 0.0015 0.0012 0.0014 0.0013 0.0014 0.0015 0.0013 0.0014 0.0014 0.0014 0.0014 0.0017 0.0016 0.0015 0.0013 0.001 0.0009 0.0009 0.001 0.0012 0.0014 0.0013 0.0012 0.0014 0.0013 0.0013 0.0016 0.0017 0.0016 0.0015 0.0014 0.001 0.0009 0.001 0.001 0.0012 0.0012 0.0011 0.0011 0.001 0.001 0.0011 0.0012 0.001 0.001 0.0009 0.0005 0.0005 0.0004 0.0004 0.0005 0.0003 0.0003 0.0006 0.0006 0.0007 0.0007 0.0007 0.0008 0.0006 0.0006 0.0003 0.0006 0.0001 0.0002 0.0 0.0 0.0002 0.0001 0.0001 0.0002 0.0001 0.0002 0.001 0.0017 0.0016 0.0017 0.0011 0.0007 0.0007 0.0006 0.0003 0.0 0.0", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 200.114, + "text": "SP AP 向 江 南 折 过 花 AP 对 春 风 与 红 蜡 AP 多 情 总 似 我 风 流 爱 天 下 AP 人 世 肯 相 逢 AP 知 己 幸 有 七 八 AP 邀 我 拍 坛 去 醉 眼 万 斗 烟 霞 AP 向 江 北 饮 过 马 AP 对 西 风 与 黄 沙 AP 无 情 也 似 我 迎 剑 锋 斩 桃 花 AP 人 世 能 相 逢 AP 谢 青 山 催 白 发 AP 慷 慨 唯 霜 雪 相 赠 眉 间 一 道 疤 AP 当 此 世 赢 输 都 算 闲 话 来 换 杯 陈 酒 天 纵 我 潇 洒 AP 风 流 不 曾 老 AP 弹 铗 唱 作 年 华 AP 凭 我 纵 马 去 过 剑 底 杯 中 觅 生 涯 SP", + "ph_seq": "SP AP x iang j iang n an zh e g uo h ua AP d ui ch un f eng y v h ong l a AP d uo q ing z ong s i0 w o f eng l iu ai t ian x ia AP r en sh ir k en x iang f eng AP zh ir j i x ing y ou q i b a AP y ao w o p ai t an q v z ui y En w an d ou y En x ia AP x iang j iang b ei y in g uo m a AP d ui x i f eng y v h uang sh a AP w u q ing y E s i0 w o y ing j ian f eng zh an t ao h ua AP r en sh ir n eng x iang f eng AP x ie q ing sh an c ui b ai f a AP k ang k ai w ei sh uang x ve x iang z eng m ei j ian y i d ao b a AP d ang c i0 sh ir y ing sh u d ou s uan x ian h ua l ai h uan b ei ch en j iu t ian z ong w o x iao s a AP f eng l iu b u c eng l ao AP t an j ia ch ang z uo n ian h ua AP p ing w o z ong m a q v g uo j ian d i b ei zh ong m i sh eng y a SP", + "ph_dur": "0.08 0.4 0.12 0.193 0.045 0.193 0.045 0.179 0.06 0.193 0.045 0.163 0.075 0.476 0.193 0.045 0.163 0.075 0.193 0.045 0.193 0.045 0.178 0.06 0.194 0.045 0.476 0.193 0.045 0.163 0.075 0.193 0.045 0.163 0.075 0.163 0.075 0.163 0.075 0.386 0.09 0.417 0.461 0.075 0.356 0.12 0.476 0.163 0.075 0.163 0.075 0.179 0.06 0.163 0.075 0.401 0.075 0.476 0.148 0.09 0.178 0.06 0.163 0.075 0.163 0.075 0.163 0.075 0.194 0.045 0.476 0.148 0.09 0.178 0.06 0.178 0.06 0.178 0.06 0.163 0.075 0.178 0.06 0.417 0.06 0.163 0.075 0.193 0.045 0.326 0.15 0.356 0.12 0.476 0.133 0.105 0.193 0.045 0.194 0.045 0.193 0.045 0.193 0.045 0.178 0.06 0.476 0.193 0.045 0.163 0.075 0.178 0.06 0.178 0.06 0.164 0.075 0.163 0.075 
0.476 0.178 0.06 0.163 0.075 0.178 0.06 0.163 0.075 0.178 0.06 0.163 0.075 0.417 0.06 0.193 0.045 0.178 0.06 0.416 0.06 0.401 0.075 0.476 0.148 0.09 0.164 0.075 0.178 0.06 0.163 0.075 0.401 0.075 0.476 0.133 0.105 0.163 0.075 0.163 0.075 0.178 0.06 0.194 0.045 0.178 0.06 0.476 0.163 0.075 0.193 0.045 0.178 0.06 0.163 0.075 0.163 0.075 0.163 0.075 0.432 0.045 0.193 0.045 0.178 0.06 0.193 0.045 0.193 0.045 0.416 0.06 0.714 0.194 0.045 0.386 0.09 0.356 0.12 0.519 0.195 0.163 0.075 0.193 0.045 0.164 0.075 0.163 0.075 0.178 0.06 0.654 0.06 0.178 0.06 0.193 0.045 0.178 0.06 0.193 0.045 0.178 0.06 0.417 0.06 0.416 0.06 0.356 0.12 0.371 0.105 0.476 0.164 0.075 0.178 0.06 0.193 0.045 0.163 0.075 0.416 0.06 0.476 0.178 0.06 0.193 0.045 0.163 0.075 0.194 0.045 0.163 0.075 0.163 0.075 0.476 0.178 0.06 0.163 0.075 0.178 0.06 0.178 0.06 0.163 0.075 0.179 0.06 0.401 0.075 0.193 0.045 0.193 0.045 0.178 0.06 0.163 0.075 0.163 0.075 0.163 0.075 0.476 0.238", + "ph_num": "1 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest rest B3 B3 A3 G3 A3 B3 rest B3 B3 A3 B3 E4 D4 rest D4 D4 B3 D4 E4 G4 E4 D4 B3 A3 B3 rest B3 B3 A3 G3 A3 rest A3 A3 G3 A3 B3 E3 rest E3 E3 D3 E3 G3 A3 A3 G3 A3 D4 B3 rest B3 B3 A3 G3 A3 B3 rest B3 B3 A3 B3 E4 D4 rest D4 D4 B3 D4 E4 G4 E4 D4 B3 A3 B3 rest B3 B3 A3 G3 A3 rest G3 A3 B3 A3 B3 D4 rest D4 D4 B3 A3 B3 E4 D4 E4 F#4 E4 D4 E4 rest C4 D#4 F4 F4 F4 C4 C4 F4 D#4 D#4 D#4 C4 D#4 F4 G#4 F4 D#4 C4 A#3 C4 rest C4 C4 A#3 G#3 A#3 rest A#3 A#3 G#3 A#3 C4 F3 rest F3 F3 D#3 F3 G#3 A#3 A#3 G#3 A#3 C4 F4 D#4 C4 rest", + "note_dur": "0.2 0.4 0.238 0.238 0.239 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.238 0.239 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.476 0.239 0.238 0.476 0.476 0.476 0.238 0.238 0.239 0.238 0.476 0.476 0.238 0.238 0.238 0.238 0.238 0.239 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.477 0.238 0.238 0.476 0.476 0.476 0.238 0.238 0.239 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.239 0.238 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.477 0.238 0.238 0.476 0.476 0.476 0.238 0.239 0.238 0.238 0.476 0.476 0.238 0.238 0.238 0.238 0.239 0.238 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.477 0.238 0.238 0.238 0.238 0.476 0.714 0.239 0.476 0.476 0.714 0.238 0.238 0.239 0.238 0.238 0.714 0.238 0.238 0.238 0.238 0.238 0.477 0.238 0.238 0.476 0.476 0.476 0.239 0.238 0.238 0.238 0.476 0.476 0.238 0.238 0.238 0.239 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.238 0.239 0.476 0.238 0.238 0.238 0.238 0.238 0.238 0.476 0.238", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "146.0 146.3 145.9 146.1 146.0 145.6 146.3 146.1 145.9 146.0 146.2 146.3 146.4 146.2 146.3 146.3 146.4 146.4 146.2 146.4 146.5 146.4 146.2 146.4 146.3 146.5 146.4 146.3 146.7 146.2 146.4 146.4 146.5 146.7 146.4 146.3 146.5 146.6 146.7 146.7 146.6 146.2 146.3 146.4 146.3 145.9 145.8 146.1 146.1 146.3 146.2 146.7 146.9 147.3 152.3 160.7 177.4 189.5 207.6 223.1 236.1 249.3 252.0 249.8 246.3 240.7 233.4 221.7 213.0 222.4 232.4 244.1 254.2 255.2 252.8 252.7 251.2 247.8 244.7 242.9 
241.8 243.6 244.7 245.4 252.3 257.3 258.2 256.3 253.9 248.8 242.2 235.6 227.6 219.8 217.7 216.3 216.6 218.9 221.3 221.8 221.7 222.3 221.3 218.7 216.7 213.7 210.0 203.2 195.3 194.9 195.0 195.1 194.7 196.4 192.5 193.0 192.9 193.8 196.0 197.1 197.9 197.6 197.2 195.9 194.5 193.1 188.9 186.0 180.4 187.3 194.7 203.3 213.1 221.9 221.9 222.2 222.2 221.4 221.2 220.6 219.8 219.6 218.9 218.1 218.1 216.3 211.7 203.6 207.2 212.2 218.0 224.3 230.5 238.1 247.6 250.8 253.4 256.1 257.6 256.5 253.0 248.8 243.3 238.7 235.8 233.9 234.6 239.3 243.9 248.9 253.9 257.4 259.3 258.8 256.3 252.1 244.6 235.2 228.0 223.2 222.2 223.8 228.4 238.0 249.6 259.2 263.8 265.5 262.6 255.7 246.5 235.5 230.8 225.9 222.8 218.8 214.9 211.1 208.0 204.6 200.9 197.2 194.1 190.5 187.1 183.9 180.2 177.1 173.8 170.6 167.6 164.2 161.3 158.3 155.9 153.2 151.8 158.9 174.0 188.6 206.7 222.5 238.8 249.6 253.8 256.2 253.5 247.4 240.6 224.9 225.6 231.1 236.0 242.2 247.7 253.6 258.3 255.6 251.2 250.6 248.3 247.5 246.4 246.6 246.5 246.5 247.5 245.5 238.4 230.1 210.6 204.1 210.2 214.4 218.2 222.4 222.9 218.7 219.9 220.1 220.1 218.6 219.0 220.2 220.4 220.1 220.3 221.4 221.4 221.7 222.9 223.1 224.4 224.0 225.3 229.7 238.2 243.9 245.2 245.4 246.1 245.8 246.1 247.0 247.5 248.4 250.1 250.5 249.9 249.5 245.3 238.6 231.3 230.3 229.1 226.4 223.5 221.5 220.2 223.5 240.4 255.4 277.3 298.8 313.2 322.8 329.6 332.3 332.5 332.6 330.6 329.7 329.3 327.5 325.1 320.0 313.6 311.2 307.5 301.4 294.0 290.6 291.3 291.6 293.2 294.0 293.9 292.9 290.0 288.3 287.6 287.0 287.9 289.1 291.1 294.4 298.2 303.4 306.0 305.1 302.3 297.6 291.0 286.6 282.8 281.7 284.4 288.1 292.8 299.4 303.6 308.1 309.4 310.7 308.8 304.9 299.7 294.4 289.9 288.4 277.7 268.6 262.2 256.1 250.1 244.8 239.3 234.6 229.7 224.6 219.3 214.8 209.8 205.1 200.6 196.2 191.8 187.5 182.6 180.0 180.3 199.8 221.5 247.9 273.9 291.9 303.2 308.9 307.8 303.5 297.9 291.7 284.8 277.3 271.3 277.6 282.8 287.9 292.4 297.9 298.3 294.5 294.5 294.2 294.3 294.3 295.7 296.0 294.7 293.8 293.2 293.0 290.0 286.2 277.4 268.2 265.3 274.8 287.1 297.0 311.5 306.9 304.2 301.9 294.9 284.3 273.4 265.2 260.2 255.6 253.0 250.5 248.2 241.9 249.5 257.6 266.3 274.1 283.1 291.4 300.0 306.7 297.8 291.8 291.6 292.1 293.3 294.2 295.2 294.7 295.4 294.7 294.8 293.6 293.9 292.2 288.6 286.2 293.2 302.0 314.7 323.4 330.6 332.8 333.2 332.8 331.6 330.0 328.6 328.8 329.5 330.4 330.5 329.5 327.1 319.8 301.8 314.6 328.3 344.6 361.8 379.9 394.6 392.8 395.5 395.1 393.0 387.0 381.8 374.9 369.5 364.8 366.7 371.0 380.5 387.2 394.1 396.5 395.6 394.7 391.2 390.2 391.2 391.7 390.4 388.1 386.7 387.1 386.6 386.8 389.0 389.6 389.7 391.6 392.9 393.9 392.6 391.1 387.5 385.0 380.8 376.2 368.8 364.3 359.4 354.1 349.1 344.8 341.5 337.3 335.6 334.7 332.9 331.3 330.5 328.1 328.3 328.2 329.9 331.4 329.7 327.0 317.5 306.4 299.0 292.3 291.6 291.3 291.0 293.2 294.9 294.9 293.8 291.4 287.5 283.4 278.8 275.6 271.5 267.2 262.1 257.0 252.0 248.2 247.0 246.8 246.8 247.1 246.1 245.3 243.3 243.5 244.5 246.1 247.5 249.1 248.3 248.9 248.6 248.8 247.6 245.6 245.0 245.4 245.2 245.8 247.2 248.7 250.8 250.1 249.0 248.2 247.9 247.3 246.1 240.1 230.4 222.6 221.8 220.2 220.0 219.3 218.9 218.4 218.6 217.1 217.6 218.7 219.3 219.4 219.2 218.2 218.1 218.5 218.4 218.4 218.2 218.1 218.7 218.9 219.9 220.1 220.5 220.2 219.2 219.1 219.4 220.4 220.5 221.0 221.2 222.8 222.8 223.6 222.8 221.6 218.2 213.2 206.2 210.6 217.1 222.4 228.2 234.5 239.9 246.9 253.3 258.4 258.1 258.6 258.3 254.8 251.8 247.8 242.5 240.4 239.6 238.2 239.4 242.1 246.6 251.0 256.2 258.7 260.5 259.6 257.3 253.6 245.3 234.8 227.7 224.8 
225.6 230.4 237.8 246.5 254.8 260.8 265.2 267.7 266.7 260.4 250.6 238.5 226.9 218.2 210.4 203.8 198.8 194.0 189.6 185.5 181.7 178.4 174.5 171.0 167.9 165.3 161.6 158.6 156.1 153.0 150.5 148.4 145.8 145.6 150.5 157.4 165.9 173.6 185.8 199.7 219.1 235.6 250.6 257.3 256.0 249.8 239.9 232.4 229.5 228.5 232.8 236.0 239.6 243.9 248.6 253.3 258.9 262.0 259.9 260.2 258.9 255.9 252.8 250.6 249.0 248.0 248.7 249.2 249.9 247.9 241.3 221.5 222.2 224.0 225.4 228.0 230.7 230.8 222.7 219.2 219.0 220.8 220.9 221.5 221.0 220.4 218.1 217.1 215.5 213.2 206.7 194.7 192.3 190.6 188.6 186.5 184.1 182.8 181.4 183.3 184.5 187.5 189.5 192.1 194.2 195.4 196.6 195.9 194.5 194.0 193.8 193.4 192.8 194.1 194.8 194.6 194.1 194.8 195.4 195.0 194.7 195.4 195.9 197.6 198.7 199.4 199.0 197.0 195.5 193.9 188.7 180.9 168.7 165.9 179.0 191.5 204.4 218.0 231.5 232.1 233.9 232.1 230.1 225.2 220.5 215.8 213.6 212.1 213.1 215.9 220.4 225.8 231.2 235.0 237.8 237.3 234.2 229.2 222.6 214.8 209.4 206.2 205.1 205.2 208.7 214.9 220.8 227.2 233.0 237.4 240.7 242.1 240.0 234.6 223.4 209.5 198.1 194.0 191.6 188.7 186.5 184.4 182.1 180.2 178.0 176.1 173.9 172.2 169.5 167.9 166.1 164.5 162.3 160.6 159.0 157.3 155.8 153.6 152.1 150.0 148.5 146.8 152.5 166.0 175.3 190.6 201.8 214.0 222.1 226.3 226.3 223.8 219.6 215.2 208.3 198.8 201.7 205.0 209.5 213.4 216.8 220.5 220.8 220.9 222.1 222.5 222.4 221.5 220.7 219.9 220.5 220.6 218.4 210.5 201.6 200.9 201.7 201.5 200.8 200.1 200.0 198.7 194.3 194.6 195.3 196.1 197.3 198.5 197.8 197.6 197.5 196.8 195.5 195.1 195.7 196.4 196.8 198.2 200.0 204.9 212.8 217.5 221.1 220.7 222.3 222.8 222.5 221.5 220.4 220.1 219.9 220.2 219.8 218.2 212.7 206.4 213.4 221.1 229.7 238.2 246.7 255.4 265.6 258.0 249.6 247.5 247.6 247.3 247.8 248.9 249.8 249.8 248.2 245.1 235.7 215.3 209.5 203.9 192.6 182.4 172.5 164.9 156.7 156.5 159.3 164.5 166.7 168.9 170.0 168.7 162.9 156.4 151.2 148.1 147.5 149.3 153.0 159.8 168.2 175.8 181.5 183.5 182.1 175.7 166.7 158.3 150.8 147.0 146.2 148.4 152.7 157.9 166.5 173.6 178.4 181.0 180.4 176.0 167.0 157.4 147.7 145.4 144.1 143.6 143.2 143.4 143.1 143.4 143.1 143.0 143.0 142.9 142.9 142.8 143.0 143.0 143.2 144.0 143.9 144.2 144.3 143.6 144.1 143.7 137.8 139.8 141.6 143.9 145.5 146.0 148.2 152.0 157.7 162.0 164.9 165.9 165.9 164.9 163.8 161.7 161.3 160.0 159.8 161.5 164.3 165.2 165.5 165.6 166.3 165.8 164.9 164.6 164.4 163.5 162.0 157.9 153.7 195.2 249.2 307.1 311.9 313.7 315.9 320.0 272.7 168.5 151.0 148.3 147.5 148.0 148.0 148.0 148.2 148.3 147.4 147.1 146.2 144.6 145.9 147.8 149.3 151.4 153.1 154.3 154.2 155.9 163.3 164.4 164.7 163.8 163.6 163.3 164.1 163.9 163.4 163.1 163.6 164.1 163.1 161.5 157.4 163.6 169.6 176.0 182.6 189.1 195.6 198.7 197.7 197.3 197.1 195.9 195.5 195.2 196.4 196.7 197.1 196.6 195.5 191.0 180.3 179.4 186.9 194.7 203.1 211.1 221.8 224.1 223.4 224.1 223.9 221.4 216.3 212.3 208.1 204.0 201.4 200.6 202.0 204.9 210.9 217.6 225.0 229.6 231.2 230.3 225.2 218.5 211.2 207.4 205.6 205.4 208.1 212.4 215.9 219.5 223.1 225.5 228.1 229.7 227.5 225.2 220.9 219.6 218.1 219.4 220.3 223.2 222.0 221.4 223.4 222.3 222.1 222.0 220.1 219.1 218.9 219.2 219.4 220.3 220.4 217.9 214.2 209.1 202.2 197.2 193.7 192.4 193.0 194.0 194.2 195.4 196.3 197.1 196.5 196.7 196.6 196.7 195.2 194.9 195.0 196.5 195.6 194.9 195.3 195.0 208.9 222.2 226.1 226.3 225.2 225.8 225.7 224.7 222.7 220.2 219.8 219.4 218.6 217.9 217.6 218.3 218.0 217.9 218.6 218.5 219.1 220.1 220.4 221.1 220.6 220.0 220.4 220.5 222.2 222.0 221.2 220.8 221.0 221.0 221.3 220.6 217.9 216.4 215.0 212.4 213.7 223.0 233.9 242.8 249.2 254.9 
257.2 258.1 258.9 259.4 257.6 255.1 255.0 255.1 255.7 256.4 259.2 262.9 265.9 276.9 285.5 293.7 296.6 299.7 298.8 297.3 294.9 293.1 292.3 293.3 294.3 294.4 291.8 285.6 275.8 269.9 267.7 265.1 263.0 260.7 258.2 255.7 252.8 251.0 247.2 243.1 243.3 248.9 252.8 255.3 254.7 252.3 247.7 240.9 235.3 232.6 232.9 236.2 242.7 249.6 256.4 261.2 265.0 265.5 264.2 259.5 252.1 240.5 229.9 224.3 224.4 226.4 230.9 239.2 249.2 258.8 266.1 269.8 270.0 265.8 257.8 246.8 237.3 227.5 216.8 213.8 210.7 207.8 204.7 202.5 199.9 197.8 195.2 192.8 190.6 188.0 185.7 183.3 181.0 179.0 176.7 174.3 172.3 169.6 166.5 163.9 161.3 159.2 160.6 169.0 181.0 190.6 203.9 216.3 233.9 244.7 250.6 250.7 248.1 244.2 238.3 230.0 223.4 222.1 230.8 240.2 250.5 253.3 249.8 249.0 248.2 246.9 245.9 244.8 244.7 244.2 245.8 246.5 247.7 249.7 247.2 244.2 235.3 229.2 228.3 230.0 241.9 233.0 226.8 222.1 220.8 219.8 219.0 218.8 219.7 220.4 220.3 219.6 219.3 219.7 219.4 216.5 212.1 206.2 200.9 196.4 191.0 190.3 189.7 190.6 191.2 193.9 196.3 197.1 197.5 197.0 196.8 196.0 196.1 196.5 195.8 194.0 188.8 185.5 195.5 205.0 215.5 225.6 225.4 222.8 220.9 220.4 220.4 220.6 221.2 221.5 221.3 221.4 221.6 220.9 220.4 220.0 219.2 219.3 221.2 222.2 223.7 225.4 225.5 227.1 227.3 229.8 230.3 231.4 230.0 230.1 228.7 228.0 227.9 228.0 228.6 230.9 236.9 243.0 249.6 254.7 257.6 258.3 258.2 257.3 255.4 250.5 242.7 236.4 231.9 230.6 232.0 238.3 246.3 254.5 261.5 265.5 267.1 266.0 258.4 248.5 234.8 229.8 225.5 220.7 216.9 212.1 208.0 204.1 200.9 197.2 193.8 190.7 187.6 184.7 181.3 178.4 175.6 172.0 169.2 165.9 163.1 160.3 157.1 154.3 151.0 151.3 160.2 176.8 190.9 208.5 225.2 240.8 251.3 254.4 255.0 251.3 246.1 238.3 225.3 228.5 233.2 238.3 243.7 248.1 253.6 251.2 248.6 249.9 249.0 248.1 247.0 246.0 247.3 246.8 246.8 245.5 243.5 239.0 230.1 215.6 211.0 213.9 217.5 220.1 222.0 219.4 218.1 219.6 219.7 220.2 220.1 221.1 221.6 220.5 219.7 219.5 220.6 220.5 221.4 221.8 223.3 224.2 224.3 227.4 236.8 244.1 247.8 248.1 248.5 247.9 247.1 246.3 246.7 247.7 248.7 249.1 249.3 247.9 243.1 232.7 224.4 229.7 234.2 238.5 242.5 250.9 253.9 261.4 275.3 294.5 310.5 319.3 329.8 331.6 330.0 330.6 329.6 327.0 320.4 309.4 299.5 301.2 303.5 305.7 308.8 310.1 308.1 300.8 297.1 297.6 297.8 300.1 300.5 298.7 295.3 291.4 286.9 281.2 277.7 277.1 279.9 283.9 289.1 296.4 301.1 307.3 310.4 311.4 308.5 301.8 290.8 282.3 276.5 274.1 277.0 282.8 290.9 296.9 303.1 309.6 314.0 315.1 309.4 295.9 282.6 278.0 271.9 266.4 259.3 251.1 244.6 240.1 235.2 230.6 225.7 221.7 217.1 212.3 208.7 205.1 201.3 198.5 195.0 192.2 189.0 185.6 182.5 183.0 193.8 204.1 217.0 232.5 253.9 273.6 291.6 299.0 302.4 299.7 293.5 288.5 281.7 273.5 270.3 275.8 282.2 288.1 294.0 300.9 305.2 300.8 297.1 297.0 295.9 294.8 293.9 294.4 293.7 293.0 293.8 294.3 292.8 292.2 289.4 287.1 283.5 276.6 272.6 269.7 276.0 278.5 279.6 278.8 278.1 273.4 268.1 263.4 261.2 257.7 254.9 252.8 249.8 244.5 238.3 246.2 255.6 265.9 275.0 285.4 295.2 304.0 298.2 293.9 292.3 291.7 291.8 292.8 293.2 294.0 294.6 295.4 294.8 294.5 293.5 293.2 291.4 289.5 288.6 292.8 299.4 309.8 317.9 323.2 326.9 329.1 329.8 329.9 329.0 329.5 329.0 329.0 329.5 330.0 330.6 331.4 325.9 320.5 317.9 318.5 326.9 336.3 341.9 345.1 348.3 351.9 353.8 353.5 353.6 352.3 352.0 351.0 349.3 350.6 356.8 367.0 376.0 386.8 394.1 398.3 398.4 396.6 392.5 389.6 387.3 386.5 387.7 388.7 389.8 389.7 390.4 391.3 391.9 392.1 391.8 387.6 376.7 347.9 336.5 340.2 345.7 351.2 360.8 366.9 363.1 359.9 354.6 352.0 347.7 341.5 338.0 331.5 328.6 326.8 328.0 329.9 329.2 324.5 315.1 301.3 289.4 280.9 287.0 289.7 
294.6 293.7 294.1 292.5 290.4 292.6 294.1 295.7 296.7 296.8 296.7 295.0 290.6 283.8 272.5 257.0 234.6 238.5 238.7 241.2 241.8 242.4 243.6 245.0 248.3 248.7 249.2 249.2 248.9 247.3 243.0 242.3 240.1 239.4 237.4 237.9 237.3 240.0 241.9 245.0 246.9 247.3 247.5 248.2 249.8 250.0 249.0 248.7 248.6 248.8 247.0 244.3 238.4 230.8 225.0 221.4 217.4 215.2 212.0 208.9 205.2 204.2 208.7 213.9 218.6 222.9 225.1 223.9 222.6 220.4 218.1 217.3 217.6 218.2 219.3 220.1 221.3 221.6 223.1 223.0 223.0 222.0 220.3 218.8 218.3 217.9 217.7 218.8 219.6 220.9 222.6 221.5 221.2 219.7 215.7 209.3 199.5 189.9 197.8 205.2 213.6 222.6 231.7 239.8 248.5 254.1 257.3 258.4 256.5 251.7 247.9 243.6 238.1 235.1 234.9 237.6 242.8 249.0 254.4 260.3 262.8 264.0 262.7 258.9 254.5 247.2 237.2 230.5 228.5 227.1 230.0 239.3 249.8 257.7 264.3 267.3 267.7 265.3 255.9 245.8 238.9 230.2 223.7 215.7 209.8 204.5 199.8 196.0 192.8 189.2 186.2 183.1 179.7 176.7 173.8 171.0 168.5 165.5 162.3 159.4 155.7 151.8 150.2 151.7 156.9 163.7 168.8 177.3 190.5 204.1 220.1 234.4 247.7 254.0 254.0 249.4 242.7 234.1 228.1 231.9 236.6 240.1 244.5 249.0 254.1 259.5 254.9 251.8 251.6 251.2 250.0 249.0 248.1 247.4 247.2 248.7 249.0 249.0 246.6 241.5 234.4 228.2 222.9 222.0 221.0 222.1 218.6 216.9 216.2 216.6 217.5 219.6 220.5 218.8 216.3 215.9 215.4 213.7 207.3 198.9 196.5 195.6 195.0 194.0 193.4 191.6 190.9 191.1 189.5 189.0 189.8 190.9 192.9 194.2 195.5 195.6 195.2 195.1 194.2 193.8 193.8 194.0 194.4 195.0 195.1 195.0 195.4 196.1 195.4 194.7 195.6 196.6 197.5 198.4 199.0 198.0 196.4 195.3 192.7 185.3 172.1 166.7 176.6 187.1 197.3 209.3 221.7 234.9 234.6 233.8 231.7 228.3 223.6 218.9 214.1 212.2 211.4 212.6 215.8 220.4 226.8 231.9 236.0 238.0 237.2 233.0 225.5 217.7 209.7 206.1 204.4 205.0 207.8 214.0 221.5 227.2 232.3 236.2 236.2 232.9 228.7 221.5 221.1 219.9 219.2 217.1 212.1 208.1 204.6 201.0 198.2 194.8 192.2 188.9 186.7 183.8 181.6 179.5 175.0 170.9 166.3 163.7 161.6 159.5 157.8 155.8 153.8 150.9 148.9 146.1 147.3 153.2 159.6 166.0 173.5 180.7 188.6 195.7 198.7 199.4 197.8 196.0 192.0 184.1 184.1 191.0 196.4 202.5 208.5 215.6 218.0 219.4 220.4 220.4 220.1 219.0 219.2 219.0 219.7 220.7 220.5 219.3 218.2 214.5 208.8 219.3 228.4 238.3 248.4 260.6 253.5 250.5 249.6 247.8 247.4 246.5 246.7 245.1 245.7 247.2 247.5 245.7 242.9 238.4 229.0 229.7 230.2 230.8 231.3 229.7 224.4 219.0 218.6 218.0 220.2 220.9 221.3 221.8 221.8 221.8 221.0 218.0 214.9 207.8 211.6 219.8 229.1 237.3 247.0 256.3 251.4 249.6 250.9 250.1 249.4 248.7 246.7 245.5 246.0 247.5 248.7 249.2 248.3 243.4 237.7 227.4 236.1 245.8 257.1 268.2 280.4 294.1 301.1 300.5 302.6 305.6 307.3 305.5 302.1 296.3 291.9 288.9 286.4 284.9 287.4 291.6 297.1 301.7 305.5 306.0 304.7 301.0 297.4 292.8 285.9 279.5 274.7 274.6 276.7 283.4 288.7 296.6 301.5 303.5 303.9 303.3 302.8 299.0 294.4 290.1 285.7 279.5 273.7 265.6 257.6 252.3 246.0 241.2 235.9 231.6 226.7 222.8 218.9 214.4 210.8 207.7 204.3 201.9 198.0 194.4 191.2 188.3 185.0 188.0 198.8 215.9 234.6 255.1 277.9 291.9 300.0 301.8 297.1 293.0 291.1 291.7 291.0 288.8 283.1 278.3 282.0 285.8 289.8 293.8 294.0 293.6 293.9 293.6 294.2 293.1 293.9 294.5 293.4 290.5 289.0 285.6 285.5 278.3 272.4 257.6 249.2 247.1 246.7 246.5 247.2 247.5 246.3 246.1 246.5 248.7 249.9 249.8 247.9 245.6 243.9 241.4 238.5 233.1 230.4 229.7 228.5 228.3 227.3 225.8 225.6 223.2 220.6 218.8 217.4 217.5 218.6 219.7 221.5 221.8 221.9 221.1 220.6 218.5 212.7 217.9 223.3 230.0 237.0 245.9 251.5 250.1 247.8 248.4 248.4 247.7 247.5 248.3 248.2 249.5 250.3 250.9 250.8 245.0 232.5 243.2 254.2 266.5 
280.2 295.0 311.5 327.8 338.6 334.4 335.1 334.5 334.6 332.8 330.7 327.5 324.0 323.9 322.3 323.4 326.7 328.7 331.6 336.3 337.3 336.4 334.6 334.2 332.5 329.9 326.6 324.3 322.5 325.3 328.1 329.6 331.5 329.6 329.3 328.6 327.8 322.7 313.2 290.3 279.9 283.0 285.1 287.4 288.2 287.9 287.8 289.5 290.8 291.8 293.3 294.3 295.3 295.0 293.7 293.7 295.1 295.5 294.7 293.4 294.7 295.3 302.6 310.4 320.8 327.6 332.6 331.8 332.5 332.8 330.6 328.2 327.6 326.4 328.2 329.4 329.3 327.0 324.6 320.2 311.3 317.1 337.1 353.1 370.7 388.6 379.2 372.3 371.6 371.3 369.8 367.3 365.0 365.7 366.4 366.7 366.5 366.9 363.9 361.4 357.4 351.4 345.7 344.1 340.1 338.6 334.7 331.0 329.4 329.3 329.2 330.6 332.2 333.5 335.0 333.3 330.5 327.8 321.9 302.8 284.0 267.9 268.0 271.5 276.2 281.8 285.4 285.4 288.2 291.4 298.6 301.2 303.1 300.9 298.1 293.9 291.0 289.7 289.4 289.7 291.2 291.9 292.7 292.3 292.0 291.6 292.5 294.1 294.4 293.0 293.2 293.8 294.9 295.3 296.4 295.8 294.9 293.3 288.4 281.3 269.9 274.0 283.2 290.5 299.7 311.0 323.4 334.7 331.7 334.6 337.9 339.3 337.1 333.3 328.7 328.2 325.8 324.3 324.0 323.5 323.5 323.3 324.0 323.3 323.5 323.9 323.7 326.0 327.4 329.9 333.2 335.7 338.7 340.3 339.1 338.5 334.7 329.0 324.2 319.3 317.6 318.9 322.4 326.0 331.3 335.4 339.9 342.4 343.4 342.3 339.3 333.4 326.4 319.5 314.5 314.5 317.9 323.3 328.4 336.6 345.7 352.3 352.3 347.4 338.0 325.6 308.5 289.7 272.7 268.1 263.8 258.4 253.6 248.6 243.1 237.7 232.8 228.1 223.8 220.1 216.0 211.4 206.8 202.8 198.7 195.0 191.4 188.6 184.7 179.9 182.9 190.7 197.8 206.1 213.8 218.7 223.2 226.9 229.8 230.3 231.4 233.9 240.6 245.2 251.5 256.5 261.3 263.6 263.6 263.1 261.3 260.7 259.6 260.6 261.8 261.3 261.4 262.4 262.7 262.9 260.9 258.2 248.3 250.2 258.9 266.2 274.7 284.1 294.7 304.5 317.4 326.5 320.4 319.7 319.5 318.9 317.3 314.9 311.5 308.8 305.5 302.7 301.4 301.7 302.8 307.1 311.3 316.1 317.5 317.6 315.6 315.0 314.7 315.0 316.6 316.9 318.1 318.2 315.5 312.0 303.6 293.9 297.6 302.8 307.4 313.4 318.4 322.7 327.1 331.7 338.3 337.7 324.6 315.7 315.1 315.4 315.8 314.4 312.7 312.1 312.4 312.6 310.8 310.2 310.1 308.5 308.7 309.4 312.3 317.0 324.2 337.8 345.6 352.8 356.7 355.7 354.0 351.8 350.0 347.3 345.3 344.8 344.8 346.9 348.8 350.4 352.5 353.1 352.6 353.2 353.8 354.2 352.0 351.6 349.4 348.2 348.2 347.3 349.5 350.5 351.6 351.7 352.7 352.7 352.3 350.7 349.1 347.9 347.4 347.3 348.5 347.9 345.2 345.4 346.9 349.0 350.7 352.5 352.4 352.6 352.1 352.1 351.5 351.5 353.0 349.9 344.8 331.9 332.6 340.4 345.4 350.0 352.4 353.9 354.8 350.8 350.7 351.0 351.5 350.3 349.5 347.5 347.4 348.2 347.0 344.3 336.9 323.7 307.4 286.2 275.4 274.6 276.1 274.9 271.6 270.5 274.8 276.3 275.7 274.6 272.3 269.2 265.3 262.8 262.5 260.9 258.6 253.5 244.8 249.1 254.2 258.2 261.6 265.3 267.2 266.0 262.3 261.7 262.2 262.7 263.2 264.5 263.8 263.1 264.6 264.6 263.0 259.9 253.6 251.7 273.1 291.2 311.9 331.6 351.3 353.8 353.3 353.3 352.2 350.9 349.0 347.9 348.6 349.0 349.0 346.9 345.1 340.2 329.1 317.1 300.7 309.5 316.4 324.2 328.8 333.5 318.5 312.9 313.5 314.9 316.6 313.4 311.9 307.8 304.4 302.3 301.1 301.1 301.5 303.7 307.2 310.4 313.7 315.1 316.7 317.5 317.1 315.3 312.9 308.6 305.7 305.2 307.2 307.8 307.3 307.1 309.4 311.7 314.3 316.5 317.8 319.7 320.8 319.8 318.1 315.5 312.5 309.1 306.3 303.6 301.4 301.6 303.1 306.6 312.5 318.0 320.4 322.1 322.4 320.7 317.5 309.8 302.3 298.4 297.0 299.2 301.6 304.9 308.0 309.3 310.2 311.7 312.3 313.8 314.4 313.2 310.4 310.4 312.4 310.4 307.0 294.7 285.4 292.4 304.0 315.5 324.1 331.7 320.2 313.8 312.0 310.8 311.0 311.3 310.8 310.8 311.0 311.1 310.3 307.7 303.2 295.9 
288.7 266.4 261.0 261.5 275.4 270.1 264.5 261.3 261.2 259.7 259.0 259.7 260.9 261.2 261.4 262.2 262.6 263.2 263.1 262.6 260.4 259.3 257.8 268.9 281.4 293.9 307.5 309.0 309.3 311.2 309.5 309.3 309.2 308.9 309.1 308.6 308.5 308.6 308.5 308.9 307.5 299.6 290.5 305.1 320.4 335.9 348.7 352.1 350.0 351.0 351.3 349.5 349.0 347.6 348.6 347.7 348.2 347.6 347.4 341.3 332.0 324.6 322.9 323.9 326.3 332.5 340.2 347.7 360.2 370.3 376.5 380.4 382.2 382.6 381.2 380.6 378.4 377.3 375.8 376.1 379.6 388.6 399.2 407.1 413.2 415.7 416.4 418.4 419.5 418.2 416.9 413.9 413.2 412.6 414.7 414.3 414.4 412.8 410.0 406.6 397.7 375.9 356.8 361.1 364.2 369.6 374.4 380.9 385.4 377.9 371.9 367.6 365.9 361.9 359.9 357.0 353.7 351.1 349.2 347.1 346.2 346.6 348.8 350.0 349.4 346.7 340.1 331.8 326.5 323.1 318.4 315.4 312.9 311.3 312.8 313.4 314.2 314.8 315.2 311.7 308.5 302.5 296.4 287.8 279.7 270.4 264.3 260.3 257.7 259.5 261.9 263.2 264.3 264.4 265.5 265.0 264.3 261.6 261.2 259.5 259.5 261.4 262.4 263.5 265.2 266.3 263.9 265.2 265.1 264.6 263.1 261.9 260.3 259.7 260.9 261.4 264.7 266.4 264.5 262.0 255.0 248.4 253.0 255.1 257.7 260.0 262.0 263.5 265.8 268.2 268.1 257.6 250.1 244.3 243.0 242.1 240.0 239.8 237.7 236.2 235.9 235.6 234.4 234.3 234.3 234.6 234.8 233.6 233.7 232.7 232.7 235.5 237.3 237.7 236.9 235.3 234.4 234.3 234.1 234.6 234.4 232.6 226.4 227.7 232.1 237.8 243.1 249.3 255.7 261.9 269.7 276.5 268.9 268.2 269.7 270.7 270.3 267.0 263.1 259.8 258.9 257.2 255.8 255.1 255.1 257.0 261.1 263.8 268.7 270.6 270.4 267.2 266.2 262.1 257.7 254.7 250.9 248.0 247.2 248.1 251.6 256.7 261.0 266.1 270.0 274.8 277.0 276.6 272.9 263.9 255.1 249.1 245.4 242.1 235.8 229.8 227.9 225.0 222.8 221.2 219.2 218.2 217.3 215.6 214.5 212.2 212.0 210.7 209.8 209.4 208.6 207.3 205.4 204.5 204.3 205.8 209.0 213.9 221.4 231.4 246.1 253.4 260.1 262.6 264.7 265.4 265.3 263.6 261.6 259.4 258.3 257.2 257.8 259.8 258.6 258.5 260.7 260.5 260.4 259.7 259.0 258.6 259.4 261.4 261.7 261.1 257.4 247.2 238.6 230.1 233.7 233.9 241.5 239.1 237.2 234.2 232.0 232.9 232.8 234.0 234.6 234.6 233.8 232.7 231.6 229.6 222.9 215.2 203.3 203.3 203.7 204.0 204.3 204.2 204.7 202.8 205.8 208.0 209.9 209.3 209.6 207.3 203.6 201.3 200.0 198.8 199.8 201.8 204.3 205.6 206.6 208.1 208.3 207.8 207.6 207.2 207.6 207.5 207.4 207.6 207.2 207.3 207.5 207.7 207.4 207.3 207.4 207.2 207.4 208.1 207.2 207.2 207.9 208.6 214.5 227.4 235.5 238.6 242.9 244.7 242.8 240.6 235.8 230.6 227.4 226.2 225.8 227.5 231.0 235.5 241.2 245.0 246.6 247.1 244.5 241.8 237.3 230.8 223.8 219.4 216.9 218.0 220.3 224.7 230.9 238.5 245.2 250.4 252.4 251.9 248.8 241.1 234.8 227.9 225.1 221.6 218.6 211.1 199.4 196.4 193.7 190.7 188.4 185.4 182.5 179.5 177.0 174.0 171.6 168.7 165.8 162.9 160.3 157.8 155.1 152.4 149.5 151.1 154.4 162.7 173.7 185.9 198.0 213.4 225.4 234.5 235.7 235.6 233.2 232.0 230.2 228.9 228.1 226.0 225.0 226.2 229.4 233.4 234.6 232.1 231.7 232.3 232.9 233.3 233.9 233.7 233.3 232.1 229.4 224.3 219.7 212.5 207.0 208.6 208.7 209.7 209.9 210.7 206.5 207.0 206.8 206.5 206.7 207.1 207.0 207.2 207.4 207.9 209.5 210.0 210.3 209.2 204.8 202.5 204.4 210.3 218.4 225.6 232.7 233.2 233.4 234.4 234.6 235.0 235.3 235.0 233.9 233.0 232.7 233.9 233.9 231.7 229.3 222.5 221.4 224.0 233.0 244.1 256.4 261.6 265.1 264.2 263.7 261.5 261.2 262.3 262.5 263.7 263.2 262.6 258.5 250.8 238.0 223.5 216.7 208.9 200.4 192.5 184.9 178.4 173.9 172.4 174.5 177.7 180.6 181.4 181.6 179.8 173.2 167.9 163.4 161.4 160.9 163.1 168.6 175.6 182.3 187.8 191.8 193.0 191.2 186.0 177.3 167.0 160.3 157.5 157.2 159.1 162.4 171.4 179.0 185.2 
190.3 191.8 191.1 187.0 180.7 171.5 171.0 170.2 169.2 168.0 167.3 165.9 164.7 163.9 163.3 162.6 162.0 161.3 160.7 159.7 157.7 156.4 155.0 153.9 152.8 152.4 151.6 151.6 150.6 150.0 148.9 147.4 147.3 148.7 150.4 152.8 157.7 164.1 169.2 172.7 175.3 176.8 178.1 176.9 175.4 174.7 174.1 174.4 175.3 175.2 175.7 175.6 175.8 175.0 175.6 176.7 175.7 174.8 174.1 174.3 174.8 174.6 171.8 163.8 155.5 156.0 157.4 157.9 158.6 158.9 158.5 157.4 156.7 154.8 154.2 155.2 155.9 156.9 157.1 157.4 157.7 157.6 156.6 155.9 154.8 153.9 154.5 155.6 157.1 160.9 165.7 169.7 173.2 175.4 176.1 175.5 175.6 175.3 174.9 174.2 174.0 174.1 173.6 172.5 167.7 169.1 177.0 183.3 190.7 197.5 204.2 214.1 209.4 208.6 207.8 206.5 206.1 206.5 206.2 205.9 205.9 207.4 207.5 207.0 202.8 190.9 188.6 191.4 196.3 199.2 206.1 200.3 202.3 202.0 204.4 205.4 206.2 207.4 207.6 207.3 205.6 204.0 203.0 202.9 205.9 212.0 221.3 230.2 236.1 237.2 236.7 234.3 232.1 229.8 229.3 229.6 230.7 231.3 233.1 233.8 233.4 233.5 233.1 231.6 228.6 227.0 221.6 218.2 224.6 231.2 238.4 243.4 248.3 237.8 236.0 235.2 234.7 233.0 232.0 232.0 230.2 230.8 231.9 233.0 233.4 233.1 231.5 227.1 219.7 209.1 200.1 202.1 205.2 209.5 206.0 206.8 206.6 207.6 207.4 207.6 208.6 209.0 209.1 208.7 208.8 207.2 204.1 193.7 192.7 203.0 213.0 223.6 235.0 233.7 233.3 232.9 232.6 233.8 234.3 233.9 232.9 232.7 232.9 233.3 233.4 233.3 229.5 225.1 217.4 221.2 233.0 242.9 253.9 264.4 271.2 267.1 261.9 261.8 261.8 262.2 262.3 262.6 262.3 261.8 262.2 262.7 263.8 264.6 263.5 263.2 269.1 276.6 293.0 314.3 332.0 347.3 355.0 354.7 351.9 349.5 348.5 347.2 346.9 347.4 348.4 348.1 345.9 338.4 324.4 329.9 333.6 337.8 342.9 346.6 352.9 348.5 334.5 323.8 317.5 312.8 312.2 312.4 313.5 314.7 313.9 312.9 310.9 308.3 303.4 294.6 284.0 271.9 259.6 250.2 247.9 248.1 251.1 254.0 258.1 261.8 262.5 263.5 264.1 263.2 261.0 259.8 257.5 255.3 255.3 257.3 259.3 262.2 266.0 271.1 274.6 277.9 278.3 275.3 270.2 264.6 256.4 248.8 245.0 244.7 246.6 250.4 256.2 261.7 267.8 272.6 276.3 275.6 270.4 264.2 257.1 251.4 247.6 243.9 236.2 234.9 235.1 234.8 234.6 234.3 234.3 234.5 234.6 234.3 234.6 234.4 234.5 234.8 234.6 234.8 234.6 235.5 235.4 238.0", + "f0_timestep": "0.011609977324263039", + "energy": "0.0001 0.0003 0.0 0.0 0.0001 0.0 0.0 0.0002 0.0008 0.0011 0.0009 0.0006 0.0011 0.0013 0.0014 0.0018 0.0015 0.0019 0.0023 0.0022 0.002 0.0028 0.0022 0.0027 0.0027 0.003 0.0029 0.0024 0.0025 0.0018 0.0017 0.0019 0.002 0.0015 0.0017 0.0009 0.001 0.0007 0.0009 0.0011 0.002 0.003 0.0048 0.0072 0.0088 0.0111 0.0115 0.0124 0.0133 0.0125 0.0186 0.0317 0.0469 0.059 0.0706 0.0796 0.0827 0.0819 0.0797 0.0729 0.0705 0.0704 0.0737 0.0789 0.0795 0.0748 0.0661 0.0513 0.0352 0.0256 0.0223 0.027 0.0461 0.0649 0.0849 0.0947 0.0968 0.0926 0.0822 0.0752 0.0732 0.0705 0.0701 0.0721 0.0729 0.0739 0.0732 0.0728 0.0737 0.0748 0.0753 0.0762 0.0753 0.0727 0.0739 0.0719 0.0707 0.0682 0.0634 0.0619 0.0588 0.0608 0.0616 0.0642 0.0687 0.0703 0.0715 0.0639 0.0521 0.0375 0.0213 0.0154 0.0285 0.0506 0.0668 0.0798 0.0889 0.0898 0.0887 0.0869 0.0842 0.0808 0.0777 0.0749 0.0703 0.0617 0.0514 0.0387 0.023 0.013 0.0085 0.0089 0.0302 0.0506 0.0669 0.0801 0.089 0.0936 0.0963 0.0976 0.0961 0.0941 0.091 0.0854 0.0772 0.065 0.0514 0.0362 0.0231 0.0145 0.012 0.0127 0.0126 0.0288 0.0511 0.0714 0.0916 0.1039 0.1095 0.1104 0.1067 0.102 0.095 0.0885 0.0823 0.0788 0.078 0.0778 0.0795 0.0806 0.082 0.0816 0.0795 0.0758 0.0727 0.071 0.0697 0.0686 0.0677 0.0645 0.06 0.0559 0.052 0.0518 0.0543 0.0559 0.0572 0.0571 0.0566 0.0563 0.0542 0.047 0.038 0.0255 0.0131 0.0044 0.0038 0.0044 
0.0055 0.0055 0.006 0.0065 0.0064 0.0051 0.0038 0.0038 0.0031 0.0024 0.0013 0.0018 0.0015 0.001 0.004 0.0109 0.0407 0.0609 0.0765 0.088 0.0885 0.0867 0.0855 0.0823 0.081 0.0822 0.0821 0.0862 0.085 0.0793 0.0699 0.0539 0.0388 0.0287 0.0273 0.0295 0.03 0.0325 0.0534 0.0706 0.0846 0.0945 0.0944 0.0914 0.0878 0.0831 0.0798 0.0785 0.0786 0.0762 0.0695 0.0588 0.0437 0.0282 0.0159 0.009 0.0146 0.0405 0.0594 0.0744 0.0857 0.0874 0.0853 0.0816 0.079 0.0766 0.076 0.0772 0.0783 0.0806 0.0813 0.0802 0.0789 0.0777 0.0729 0.0674 0.0614 0.0573 0.0586 0.0646 0.0737 0.0816 0.0869 0.0902 0.0913 0.0912 0.0897 0.0871 0.0849 0.0837 0.0842 0.0864 0.0867 0.0783 0.0653 0.0458 0.0247 0.0161 0.0325 0.053 0.0721 0.0856 0.0956 0.0968 0.0936 0.0929 0.0884 0.0867 0.0854 0.0853 0.0866 0.0882 0.0921 0.094 0.0893 0.5694 0.0986 0.099 0.092 0.0899 0.0988 0.107 0.1183 0.1213 0.12 0.1197 0.1151 0.114 0.1112 0.1082 0.1032 0.0984 0.0933 0.0895 0.089 0.0906 0.0941 0.0979 0.1007 0.1028 0.1041 0.1015 0.0998 0.0968 0.0919 0.0863 0.0788 0.0715 0.0667 0.0644 0.0642 0.0654 0.0658 0.0649 0.0636 0.0621 0.0608 0.0565 0.0491 0.0383 0.0241 0.0109 0.0046 0.0042 0.0051 0.0053 0.0057 0.0058 0.0055 0.0045 0.0035 0.0025 0.0013 0.0008 0.0005 0.0007 0.0 0.0016 0.0084 0.0228 0.0493 0.068 0.0842 0.095 0.1012 0.1031 0.1014 0.1005 0.0956 0.0965 0.0976 0.1014 0.1007 0.0944 0.0816 0.0618 0.0428 0.019 0.0152 0.0182 0.025 0.055 0.0807 0.0983 0.111 0.1125 0.1095 0.108 0.1061 0.1061 0.1053 0.1045 0.104 0.1052 0.1053 0.1042 0.0952 0.0793 0.0586 0.0336 0.0187 0.0408 0.0622 0.0772 0.0896 0.0958 0.0943 0.093 0.0923 0.0892 0.0886 0.088 0.0868 0.0821 0.0731 0.0602 0.0464 0.0374 0.0374 0.0415 0.0438 0.0449 0.0449 0.0614 0.0761 0.0897 0.0996 0.1018 0.1011 0.1001 0.1007 0.099 0.0984 0.0996 0.0987 0.1003 0.1015 0.0989 0.0962 0.0928 0.092 0.0951 0.1056 0.1185 0.122 0.286 0.1287 0.1304 0.1264 0.1203 0.117 0.1152 0.1138 0.1134 0.1132 0.1057 0.0906 0.0707 0.0464 0.0202 0.0097 0.0073 0.0049 0.0342 0.0692 0.096 0.1161 0.1273 0.1297 0.1277 0.1248 0.1223 0.1178 0.1158 0.1115 0.1095 0.1092 0.1072 0.1065 0.1077 0.1076 0.1058 0.1027 0.0968 0.0924 0.0916 0.0907 0.0923 0.0942 0.0939 0.0944 0.0945 0.0933 0.0928 0.0933 0.0934 0.094 0.0943 0.095 0.0966 0.0968 0.0978 0.0994 0.0993 0.0989 0.0994 0.0996 0.1008 0.1029 0.1042 0.1053 0.1061 0.1066 0.1081 0.1102 0.1128 0.112 0.1116 0.1082 0.1051 0.1024 0.1013 0.1005 0.1006 0.1015 0.0999 0.1008 0.0993 0.0989 0.0997 0.1 0.1002 0.1017 0.1017 0.103 0.1049 0.1039 0.1032 0.1004 0.1 0.1005 0.0996 0.0993 0.0991 0.0978 0.0973 0.0974 0.097 0.0972 0.0965 0.0957 0.0948 0.0928 0.0934 0.0923 0.0939 0.095 0.0962 0.0971 0.1 0.1006 0.1005 0.1017 0.1007 0.1013 0.1021 0.1036 0.1039 0.1039 0.1039 0.1042 0.1033 0.1026 0.1015 0.0989 0.0949 0.0875 0.0735 0.058 0.0422 0.0235 0.0124 0.0122 0.0122 0.0144 0.0148 0.0403 0.0621 0.0776 0.0913 0.0967 0.0978 0.0971 0.0956 0.0943 0.0935 0.0917 0.093 0.0922 0.0921 0.0941 0.0927 0.0934 0.0928 0.0914 0.0939 0.0942 0.0941 0.0951 0.092 0.0898 0.0907 0.0882 0.0878 0.0884 0.0877 0.0881 0.0826 0.0743 0.0606 0.045 0.0317 0.0305 0.0317 0.0365 0.0397 0.0386 0.0432 0.0616 0.0787 0.0979 0.1078 0.107 0.1049 0.099 0.097 0.095 0.0916 0.089 0.0844 0.0823 0.0793 0.0775 0.0763 0.076 0.0767 0.0759 0.0752 0.0748 0.0734 0.0716 0.0705 0.0675 0.0646 0.0621 0.0596 0.0592 0.0584 0.0588 0.0588 0.0576 0.0566 0.0549 0.0523 0.0488 0.0433 0.0345 0.0244 0.0153 0.0086 0.0053 0.0053 0.0066 0.0068 0.0074 0.0086 0.0085 0.0089 0.0079 0.0069 0.0076 0.0064 0.0053 0.0044 0.0038 0.0126 0.0247 0.041 0.063 0.0808 0.0962 0.1049 0.1032 0.0947 0.0818 0.0699 0.0645 
0.0704 0.0743 0.0781 0.0784 0.073 0.0682 0.0649 0.0679 0.0762 0.0834 0.0823 0.0752 0.0623 0.0578 0.0659 0.0784 0.0929 0.0992 0.1025 0.1026 0.1011 0.1004 0.0986 0.0985 0.0977 0.0932 0.0833 0.0681 0.0491 0.0264 0.0163 0.0165 0.0184 0.0419 0.0618 0.0699 0.0756 0.0735 0.0673 0.0679 0.0715 0.0719 0.0734 0.0758 0.0766 0.077 0.0732 0.0641 0.0525 0.0373 0.0298 0.0301 0.0299 0.0405 0.0586 0.0724 0.0824 0.0906 0.09 0.0921 0.0923 0.0894 0.0911 0.0879 0.0888 0.0883 0.0881 0.0884 0.0878 0.0893 0.088 0.0879 0.0878 0.0857 0.0863 0.0846 0.078 0.0751 0.07 0.0695 0.0714 0.0741 0.0775 0.0781 0.0791 0.0797 0.076 0.0689 0.0562 0.0399 0.0248 0.0094 0.0047 0.0081 0.0343 0.0565 0.0761 0.0914 0.0991 0.1002 0.097 0.0943 0.0919 0.0871 0.0854 0.0814 0.0767 0.0768 0.0755 0.0761 0.0789 0.0805 0.0802 0.0794 0.0778 0.0735 0.0698 0.0647 0.0616 0.0585 0.0573 0.0574 0.0566 0.0569 0.0569 0.0565 0.057 0.0574 0.0584 0.0582 0.0567 0.0543 0.048 0.0395 0.0292 0.0185 0.0103 0.0044 0.0031 0.0039 0.0045 0.0045 0.0042 0.0034 0.0035 0.003 0.0014 0.0017 0.0009 0.0003 0.0006 0.0015 0.0021 0.0067 0.0116 0.0154 0.0275 0.0503 0.0706 0.0843 0.0891 0.0866 0.0787 0.0753 0.0769 0.0816 0.0863 0.086 0.0848 0.0788 0.0703 0.0595 0.0441 0.0314 0.0214 0.0221 0.0219 0.039 0.0567 0.0709 0.0831 0.0884 0.0895 0.0899 0.09 0.0883 0.0889 0.086 0.0804 0.074 0.0613 0.0483 0.035 0.0234 0.0208 0.0213 0.0214 0.0347 0.0523 0.0672 0.0797 0.0838 0.0848 0.0844 0.0819 0.0834 0.0822 0.0806 0.0817 0.0812 0.0813 0.0812 0.0814 0.0811 0.0813 0.0826 0.0839 0.0882 0.0923 0.0913 0.0897 0.0847 0.0818 0.0817 0.0807 0.082 0.0812 0.0809 0.0808 0.0778 0.0701 0.0576 0.043 0.0266 0.0222 0.0249 0.0259 0.0256 0.0294 0.0511 0.0678 0.0806 0.0901 0.088 0.086 0.0842 0.0832 0.0804 0.0804 0.079 0.0722 0.0623 0.0474 0.0288 0.0105 0.0075 0.0035 0.0186 0.0403 0.0561 0.0688 0.0756 0.0763 0.075 0.074 0.073 0.0691 0.068 0.0652 0.0624 0.0608 0.0573 0.0547 0.0534 0.0541 0.0528 0.0528 0.0536 0.0522 0.0529 0.0505 0.0474 0.0455 0.0418 0.0386 0.0342 0.0304 0.0277 0.026 0.0263 0.0267 0.0275 0.0292 0.0301 0.0292 0.0268 0.0228 0.0189 0.0127 0.0085 0.0046 0.0032 0.0032 0.0028 0.0033 0.0038 0.0041 0.0041 0.0036 0.0029 0.0024 0.0018 0.0012 0.0011 0.0003 0.0001 0.0 0.001 0.0039 0.0181 0.0307 0.0425 0.0526 0.0585 0.0607 0.0615 0.0623 0.061 0.0605 0.0584 0.0554 0.0559 0.0559 0.0566 0.057 0.0571 0.056 0.0557 0.0548 0.0543 0.0558 0.0596 0.0643 0.0677 0.07 0.0728 0.0727 0.0735 0.0711 0.0667 0.0612 0.0515 0.0399 0.0274 0.0113 0.0022 0.0055 0.0068 0.0117 0.0137 0.0127 0.0215 0.0375 0.0489 0.0581 0.0632 0.0642 0.0641 0.0642 0.0656 0.0643 0.0655 0.0616 0.0585 0.0567 0.0481 0.0405 0.0305 0.0175 0.0088 0.0077 0.0214 0.0345 0.0453 0.055 0.0587 0.0593 0.06 0.0591 0.0585 0.0565 0.0551 0.0536 0.0534 0.0517 0.0481 0.0406 0.0317 0.0218 0.0139 0.0119 0.0126 0.0246 0.04 0.0528 0.0631 0.0701 0.0708 0.0714 0.071 0.0717 0.071 0.0704 0.0709 0.0681 0.0625 0.0522 0.0394 0.0248 0.0141 0.0158 0.017 0.0251 0.0445 0.0633 0.0775 0.0884 0.0903 0.0876 0.0828 0.0782 0.0745 0.0711 0.0693 0.0659 0.0639 0.064 0.0663 0.0687 0.072 0.0743 0.0739 0.0751 0.0736 0.072 0.0712 0.0701 0.0687 0.0694 0.0684 0.0687 0.0685 0.0681 0.0682 0.0689 0.0702 0.0713 0.0715 0.0717 0.0725 0.0726 0.0733 0.0741 0.0754 0.0765 0.0774 0.0782 0.0802 0.0816 0.0826 0.0825 0.0824 0.0799 0.0786 0.0772 0.0755 0.0749 0.0745 0.0735 0.073 0.0719 0.0712 0.0702 0.0715 0.0764 0.0826 0.0888 0.0922 0.0919 0.0908 0.0901 0.0872 0.0841 0.082 0.076 0.0734 0.0714 0.0696 0.0686 0.0659 0.0595 0.0525 0.0448 0.0445 0.0599 0.0752 0.088 0.0957 0.096 0.0935 0.0885 0.0868 0.0853 0.0839 0.0849 
0.0836 0.0847 0.0851 0.0867 0.0875 0.0881 0.0887 0.0888 0.0896 0.0893 0.0896 0.0884 0.0876 0.0879 0.0873 0.0868 0.0868 0.0854 0.0847 0.0829 0.0816 0.0796 0.0778 0.076 0.072 0.0689 0.0656 0.0627 0.0645 0.0715 0.0797 0.0906 0.1006 0.1055 0.1128 0.1146 0.1155 0.1161 0.1126 0.1091 0.0969 0.1397 0.0872 0.0833 0.0768 0.0727 0.0725 0.0751 0.0795 0.0857 0.0903 0.0932 0.0943 0.0945 0.0931 0.0921 0.0886 0.0857 0.0832 0.0824 0.0792 0.0694 0.0575 0.041 0.0248 0.0201 0.0215 0.0215 0.0213 0.0204 0.0182 0.0229 0.0422 0.0599 0.0762 0.0883 0.0926 0.0933 0.0921 0.0893 0.0866 0.0831 0.0785 0.075 0.0727 0.0708 0.0699 0.0685 0.0676 0.069 0.0693 0.0699 0.0695 0.067 0.0668 0.066 0.0639 0.0613 0.0574 0.0551 0.0524 0.0499 0.0493 0.0489 0.048 0.0476 0.047 0.0459 0.0449 0.0424 0.0384 0.0309 0.0224 0.0147 0.0065 0.0045 0.0043 0.0043 0.0042 0.0035 0.0034 0.0024 0.0025 0.0016 0.0021 0.0019 0.003 0.0045 0.0066 0.01 0.0123 0.0146 0.0148 0.0282 0.0464 0.0604 0.0721 0.0788 0.0778 0.075 0.0721 0.0668 0.0666 0.0674 0.0689 0.073 0.0738 0.0732 0.0677 0.0572 0.0442 0.0297 0.0173 0.0225 0.0455 0.0624 0.0794 0.0905 0.0913 0.0917 0.0855 0.0833 0.0802 0.0762 0.0754 0.0737 0.0742 0.0742 0.0684 0.0589 0.0457 0.0283 0.0147 0.0299 0.0542 0.0689 0.0811 0.0879 0.0861 0.0857 0.0838 0.0821 0.0805 0.0805 0.0806 0.0804 0.0814 0.0811 0.0806 0.0803 0.0758 0.0722 0.0686 0.0636 0.0662 0.0682 0.071 0.0749 0.0737 0.0736 0.074 0.0726 0.0742 0.074 0.0719 0.071 0.0694 0.0682 0.0633 0.0547 0.0428 0.0283 0.0159 0.0098 0.0111 0.0299 0.0521 0.0691 0.0819 0.0889 0.0901 0.0909 0.0905 0.0908 0.0897 0.0876 0.0876 0.0855 0.0835 0.0825 0.0809 0.0796 0.081 0.0816 0.0827 0.0864 0.0925 0.1007 0.1087 0.1141 0.1184 0.1165 0.115 0.1136 0.1078 0.1042 0.0979 0.0903 0.0855 0.081 0.0808 0.0823 0.0862 0.0893 0.0905 0.0922 0.0905 0.0871 0.085 0.0809 0.0772 0.0724 0.0651 0.0589 0.0532 0.0502 0.0492 0.052 0.0548 0.0553 0.0563 0.0549 0.0518 0.047 0.0381 0.0269 0.0137 0.0045 0.0038 0.0045 0.0048 0.005 0.0056 0.0058 0.006 0.0057 0.0049 0.0041 0.0037 0.0033 0.0023 0.0022 0.0022 0.0018 0.0015 0.0032 0.0083 0.0363 0.0563 0.0727 0.0845 0.0876 0.0868 0.0831 0.0813 0.0789 0.083 0.0843 0.0853 0.0828 0.0758 0.0655 0.0514 0.0365 0.0249 0.0221 0.0245 0.0282 0.0475 0.0657 0.0775 0.0881 0.0898 0.0886 0.0878 0.0852 0.0829 0.0812 0.0797 0.0815 0.08 0.0724 0.0616 0.0436 0.0245 0.0119 0.0067 0.0144 0.0412 0.0576 0.073 0.0845 0.0846 0.0842 0.0795 0.0755 0.0727 0.0721 0.0717 0.073 0.0733 0.0722 0.0713 0.0678 0.0652 0.0624 0.0606 0.0624 0.0658 0.072 0.0784 0.0823 0.0865 0.0881 0.088 0.0883 0.0855 0.0841 0.0828 0.0826 0.0819 0.0776 0.0682 0.0545 0.0372 0.0202 0.013 0.0109 0.0108 0.0262 0.0465 0.0653 0.0791 0.0898 0.095 0.0951 0.0954 0.0942 0.0902 0.0879 0.0847 0.0817 0.0797 0.073 0.0608 0.0469 0.0308 0.0187 0.017 0.0207 0.0398 0.0589 0.0775 0.0938 0.1035 0.1096 0.1108 0.1122 0.1113 0.109 0.1056 0.0992 0.0919 0.0887 0.0873 0.0877 0.0894 0.0923 0.0951 0.0978 0.0971 0.0961 0.0953 0.093 0.09 0.084 0.0764 0.0691 0.0644 0.0627 0.0629 0.0646 0.065 0.065 0.0639 0.0623 0.0598 0.0544 0.047 0.0357 0.0205 0.0082 0.0035 0.0036 0.0041 0.0046 0.0037 0.0044 0.0041 0.0037 0.0029 0.0025 0.0015 0.0014 0.0011 0.0009 0.0001 0.0 0.006 0.0255 0.0424 0.0605 0.0762 0.084 0.0871 0.0862 0.083 0.0816 0.0825 0.0822 0.0857 0.0883 0.0872 0.0854 0.0778 0.0638 0.047 0.0279 0.0177 0.0193 0.0203 0.0343 0.0625 0.0811 0.0939 0.1006 0.0961 0.0924 0.0896 0.0905 0.0898 0.09 0.0911 0.0918 0.0925 0.0935 0.0928 0.0919 0.0899 0.0877 0.0855 0.0846 0.0872 0.0911 0.093 0.0967 0.0979 0.0967 0.0972 0.0951 0.0953 0.0947 0.0939 0.0914 0.0833 
0.0721 0.0564 0.0404 0.0291 0.0273 0.0274 0.0274 0.0274 0.046 0.0657 0.0803 0.092 0.0966 0.0969 0.0968 0.0959 0.0956 0.0949 0.0956 0.0971 0.0967 0.0969 0.0957 0.0948 0.0937 0.0914 0.0905 0.093 0.0978 0.1031 0.1073 0.1086 0.1091 0.1102 0.113 0.1151 0.1166 0.1148 0.1131 0.1105 0.1102 0.108 0.1041 0.1011 0.095 0.0905 0.0893 0.0903 0.0935 0.1012 0.1065 0.1112 0.116 0.1172 0.1183 0.1156 0.1115 0.1077 0.1044 0.1021 0.1004 0.0998 0.099 0.0989 0.0991 0.1 0.1011 0.1033 0.1041 0.1037 0.1027 0.101 0.0976 0.0945 0.0922 0.0906 0.0884 0.0874 0.0874 0.087 0.0904 0.0937 0.0924 0.0834 0.0683 0.0469 0.0274 0.0202 0.0202 0.0305 0.0549 0.0724 0.0884 0.1002 0.1044 0.1066 0.1086 0.1049 0.0988 0.0944 0.0881 0.086 0.0874 0.0869 0.0843 0.0762 0.0614 0.044 0.0224 0.0126 0.0354 0.0589 0.0803 0.0984 0.1076 0.1107 0.1082 0.103 0.0972 0.0921 0.0874 0.0866 0.0871 0.0853 0.0784 0.065 0.0466 0.0272 0.0162 0.0158 0.0278 0.0498 0.0685 0.0841 0.0978 0.1023 0.1032 0.1047 0.1026 0.1011 0.099 0.097 0.0934 0.0917 0.0883 0.0841 0.0824 0.0778 0.0772 0.0756 0.0742 0.0755 0.0746 0.0745 0.0756 0.0744 0.0733 0.0712 0.0701 0.0693 0.069 0.0683 0.0656 0.0588 0.0498 0.0373 0.024 0.0139 0.01 0.0111 0.0138 0.0281 0.0398 0.0534 0.0656 0.0726 0.0792 0.0831 0.0845 0.0842 0.0828 0.0811 0.0786 0.0777 0.0779 0.0787 0.0789 0.0808 0.0798 0.0826 0.0839 0.0815 0.0816 0.0786 0.0773 0.078 0.0766 0.0756 0.0771 0.0759 0.0749 0.0745 0.0727 0.0701 0.0659 0.0592 0.0475 0.0357 0.0211 0.0129 0.0098 0.01 0.024 0.0452 0.0653 0.0831 0.0964 0.1048 0.1076 0.1068 0.1034 0.0973 0.0881 0.0817 0.0762 0.073 0.0729 0.0721 0.0745 0.0773 0.0804 0.0811 0.0797 0.0764 0.0727 0.0708 0.0707 0.0701 0.0716 0.0691 0.066 0.0618 0.0571 0.0571 0.0592 0.0597 0.0617 0.0631 0.0614 0.0605 0.0546 0.046 0.0345 0.0201 0.0078 0.0046 0.0051 0.0054 0.0054 0.0054 0.0055 0.0053 0.0054 0.0049 0.0044 0.0038 0.0024 0.0028 0.0025 0.0023 0.0049 0.0166 0.0275 0.042 0.0592 0.072 0.0834 0.0881 0.0859 0.0801 0.0716 0.0661 0.0615 0.0633 0.0673 0.0711 0.0729 0.0702 0.0626 0.052 0.0433 0.0365 0.0353 0.0373 0.0366 0.0367 0.0451 0.0561 0.0729 0.0863 0.091 0.0925 0.0899 0.0882 0.0866 0.0856 0.0861 0.0857 0.0847 0.0855 0.0836 0.0809 0.0816 0.0798 0.0796 0.0824 0.0848 0.0863 0.0863 0.0827 0.0796 0.077 0.0761 0.0772 0.0774 0.0775 0.0774 0.0752 0.0715 0.0638 0.0535 0.0404 0.0294 0.0242 0.0243 0.0232 0.0277 0.0493 0.066 0.0792 0.0908 0.092 0.0929 0.092 0.09 0.0881 0.0869 0.0888 0.0866 0.0871 0.0846 0.0837 0.084 0.0818 0.0818 0.0804 0.0791 0.0803 0.0767 0.0725 0.0682 0.0655 0.0665 0.0663 0.0691 0.0692 0.0693 0.0711 0.0687 0.0656 0.0557 0.0422 0.0268 0.0114 0.0051 0.005 0.0045 0.0319 0.0551 0.0756 0.0922 0.1007 0.1029 0.1013 0.0967 0.0918 0.0861 0.0821 0.0781 0.0746 0.0732 0.0726 0.0734 0.0762 0.0776 0.0778 0.0771 0.0739 0.0688 0.0651 0.0603 0.0559 0.0535 0.0516 0.0522 0.0523 0.0526 0.053 0.0536 0.0534 0.0526 0.0505 0.0479 0.0461 0.0446 0.0411 0.036 0.0271 0.0185 0.0094 0.0044 0.0037 0.0043 0.0046 0.0045 0.0045 0.0035 0.003 0.0027 0.0022 0.0018 0.0036 0.0047 0.0061 0.009 0.0116 0.0138 0.0131 0.0249 0.0403 0.0527 0.0632 0.0706 0.0711 0.0705 0.0711 0.0677 0.0687 0.0709 0.0714 0.0718 0.0669 0.0573 0.0456 0.0317 0.0184 0.0139 0.0151 0.0155 0.0306 0.0518 0.067 0.0799 0.0851 0.0855 0.0835 0.0832 0.0829 0.081 0.0811 0.079 0.0785 0.0751 0.0658 0.0534 0.0404 0.0275 0.0203 0.0192 0.0325 0.0554 0.0741 0.0886 0.0981 0.0993 0.0978 0.0959 0.0908 0.0864 0.0834 0.0788 0.0767 0.0775 0.0738 0.064 0.0521 0.0361 0.0216 0.0182 0.0259 0.0494 0.0658 0.0803 0.0903 0.0903 0.091 0.0886 0.0872 0.0869 0.0852 0.085 0.0828 0.075 0.0626 0.0471 
0.0251 0.0073 0.0044 0.0066 0.0299 0.0561 0.0747 0.091 0.0999 0.1012 0.1002 0.0962 0.0949 0.0923 0.0911 0.0907 0.0895 0.0885 0.0827 0.0748 0.0615 0.0459 0.0263 0.0109 0.0054 0.0042 0.0061 0.0412 0.0676 0.0856 0.1008 0.1055 0.1045 0.1061 0.1065 0.1037 0.1007 0.0948 0.0891 0.0869 0.0884 0.092 0.0969 0.1004 0.1017 0.1032 0.1021 0.1009 0.101 0.0995 0.0973 0.0953 0.0908 0.0863 0.0829 0.0805 0.0794 0.0787 0.0787 0.0767 0.0742 0.0712 0.0695 0.0683 0.0637 0.0547 0.0419 0.0261 0.0098 0.0041 0.0038 0.0042 0.0038 0.0039 0.0039 0.0036 0.0032 0.0025 0.0021 0.0016 0.0014 0.0011 0.0012 0.0024 0.0068 0.0096 0.0135 0.0262 0.041 0.0514 0.0608 0.0663 0.0712 0.075 0.0737 0.0761 0.0728 0.0705 0.0686 0.0662 0.0651 0.0642 0.0639 0.0588 0.0496 0.0384 0.0204 0.011 0.0356 0.0601 0.0828 0.0963 0.1017 0.1012 0.0978 0.0962 0.0942 0.0917 0.0906 0.0882 0.0881 0.0908 0.0915 0.0914 0.0881 0.084 0.0812 0.0814 0.0888 0.0958 0.0979 0.7742 0.0994 0.1001 0.0944 0.0897 0.0881 0.0832 0.0836 0.0829 0.0793 0.0745 0.0634 0.0481 0.0338 0.0232 0.0221 0.0232 0.0221 0.027 0.0456 0.0606 0.0735 0.0838 0.0838 0.0823 0.0798 0.0775 0.0753 0.0754 0.0749 0.0731 0.0705 0.062 0.0511 0.0377 0.0266 0.018 0.0163 0.0288 0.0491 0.0676 0.0812 0.0902 0.0906 0.0909 0.0919 0.0918 0.0906 0.0898 0.0882 0.0842 0.0806 0.0692 0.0529 0.0375 0.0243 0.0227 0.0259 0.0254 0.0254 0.0498 0.0685 0.0855 0.1018 0.1065 0.1104 0.1107 0.1116 0.1098 0.1084 0.1086 0.1084 0.1106 0.1094 0.1079 0.1063 0.1044 0.1028 0.102 0.0996 0.0969 0.0937 0.0895 0.0856 0.0848 0.084 0.0829 0.0821 0.0802 0.08 0.0816 0.0824 0.0851 0.0862 0.0814 0.0716 0.0563 0.0353 0.0206 0.017 0.03 0.0507 0.0705 0.0889 0.102 0.1075 0.109 0.1057 0.1 0.0948 0.0883 0.0867 0.087 0.0887 0.0895 0.0885 0.087 0.0852 0.0866 0.0875 0.0917 0.0967 0.0997 0.1059 0.1102 0.1144 0.1153 0.1164 0.1129 0.1088 0.1037 0.0976 0.0942 0.0923 0.0889 0.0783 0.0645 0.0471 0.0288 0.0212 0.0199 0.0365 0.0585 0.0732 0.0871 0.0953 0.098 0.0994 0.0993 0.0968 0.094 0.091 0.0906 0.0903 0.0899 0.0897 0.0884 0.087 0.0852 0.0838 0.084 0.0858 0.0894 0.0909 0.0919 0.0918 0.0916 0.0924 0.0931 0.0935 0.0953 0.097 0.0988 0.0995 0.0947 0.0833 0.067 0.0476 0.0243 0.0119 0.0052 0.0088 0.0386 0.0717 0.0968 0.1145 0.1237 0.1232 0.1205 0.117 0.115 0.1113 0.1076 0.104 0.1023 0.1022 0.1024 0.1047 0.1036 0.1036 0.1028 0.1015 0.1021 0.102 0.1034 0.1038 0.1046 0.1036 0.102 0.1017 0.1009 0.1001 0.0992 0.0982 0.0939 0.0842 0.0683 0.0478 0.0229 0.008 0.0011 0.0018 0.015 0.0496 0.0788 0.103 0.1238 0.1307 0.1317 0.1277 0.1237 0.1207 0.1192 0.1193 0.1188 0.1204 0.1209 0.1212 0.1215 0.1211 0.1189 0.1184 0.1182 0.1171 0.1176 0.1178 0.1175 0.1183 0.1195 0.122 0.1236 0.1259 0.1307 0.1335 0.1341 0.1341 0.1323 0.1293 0.1287 0.1297 0.1292 0.1304 0.1311 0.1292 0.1261 0.1257 0.1262 0.1289 0.1323 0.133 0.132 0.1289 0.1266 0.1231 0.1179 0.1107 0.1033 0.0955 0.0884 0.0827 0.0779 0.0747 0.0655 0.053 0.0381 0.0171 0.0047 0.0035 0.0059 0.0064 0.0055 0.0056 0.0051 0.0042 0.0041 0.0033 0.0025 0.0021 0.0016 0.0011 0.001 0.0015 0.001 0.0018 0.0048 0.0321 0.0567 0.0766 0.0936 0.1009 0.1027 0.1024 0.099 0.0957 0.0895 0.0819 0.0762 0.0727 0.0686 0.0693 0.0682 0.0708 0.0739 0.0775 0.0798 0.0807 0.0812 0.0819 0.0822 0.0821 0.0818 0.0805 0.08 0.0794 0.0789 0.079 0.0766 0.0707 0.0601 0.0461 0.0315 0.0195 0.0218 0.0238 0.0239 0.0211 0.0151 0.0334 0.0534 0.0704 0.0847 0.0921 0.0927 0.0967 0.0981 0.0977 0.098 0.0966 0.0951 0.0953 0.0941 0.0939 0.0947 0.095 0.0949 0.0965 0.0962 0.0959 0.0952 0.094 0.0942 0.0944 0.0951 0.095 0.0913 0.0866 0.0791 0.0665 0.0529 0.0381 0.0279 0.0294 0.0301 0.0318 
0.0327 0.0303 0.0285 0.0256 0.0426 0.0587 0.0713 0.0847 0.0882 0.0899 0.0928 0.0955 0.0977 0.0995 0.0996 0.0983 0.0972 0.0951 0.0929 0.0891 0.0861 0.0833 0.0815 0.08 0.0805 0.0826 0.0848 0.0867 0.0863 0.0883 0.0898 0.0936 0.0957 0.096 0.0961 0.0948 0.0936 0.0931 0.0936 0.0945 0.0953 0.0967 0.099 0.1017 0.1028 0.1036 0.1031 0.1006 0.0982 0.0942 0.0919 0.0894 0.0894 0.0899 0.0911 0.0916 0.0907 0.0909 0.0902 0.0899 0.0907 0.0913 0.0904 0.0947 0.096 0.0986 0.1025 0.101 0.101 0.0994 0.0966 0.0958 0.0952 0.0942 0.0938 0.093 0.0923 0.0912 0.084 0.0716 0.055 0.0362 0.0257 0.0254 0.0243 0.0267 0.048 0.0657 0.0844 0.0984 0.1027 0.1046 0.1014 0.1005 0.1 0.0983 0.0982 0.0958 0.0931 0.0867 0.0748 0.0598 0.0416 0.0306 0.0307 0.0514 0.0719 0.0882 0.0988 0.1034 0.1023 0.0997 0.0973 0.0933 0.0906 0.0858 0.0838 0.0829 0.0782 0.0679 0.0545 0.0372 0.0215 0.0181 0.0191 0.0188 0.0297 0.0514 0.0676 0.0845 0.0944 0.0985 0.0989 0.0958 0.0927 0.0876 0.0848 0.0839 0.08 0.0737 0.0625 0.0478 0.0326 0.021 0.0172 0.0163 0.0267 0.052 0.0718 0.0877 0.0985 0.1009 0.1004 0.0982 0.0957 0.0921 0.0883 0.0846 0.0817 0.0807 0.0792 0.0725 0.0623 0.0477 0.0284 0.015 0.0136 0.0228 0.045 0.0666 0.0838 0.0964 0.101 0.1036 0.105 0.107 0.1081 0.1064 0.1054 0.105 0.106 0.1075 0.1108 0.1115 0.1116 0.1111 0.1106 0.1109 0.1104 0.1104 0.1099 0.1115 0.1139 0.1172 0.1176 0.1171 0.1155 0.1118 0.1111 0.1129 0.1132 0.1139 0.1141 0.1128 0.1135 0.1137 0.1163 0.1179 0.1184 0.1198 0.1175 0.1168 0.1153 0.1115 0.1093 0.1051 0.1026 0.1002 0.0981 0.0947 0.0925 0.0909 0.0916 0.0928 0.0923 0.0906 0.0858 0.0835 0.0837 0.0901 0.0995 0.1048 0.1084 0.108 0.1036 0.1016 0.0998 0.0979 0.0961 0.0929 0.0898 0.0856 0.0792 0.0685 0.0548 0.0401 0.0245 0.0141 0.0118 0.0274 0.0527 0.0735 0.0898 0.1002 0.1019 0.1018 0.098 0.0934 0.0881 0.0813 0.0796 0.0794 0.0812 0.0791 0.0711 0.0593 0.0413 0.0222 0.0249 0.0532 0.0763 0.0974 0.1112 0.1138 0.1134 0.112 0.1096 0.1063 0.1044 0.1011 0.0991 0.0982 0.0958 0.0941 0.0861 0.0725 0.0549 0.0343 0.0206 0.0176 0.0238 0.0468 0.067 0.0844 0.0986 0.1024 0.1024 0.1005 0.0962 0.0911 0.0865 0.0836 0.0823 0.0838 0.0846 0.0763 0.0644 0.0501 0.0296 0.0168 0.0221 0.0453 0.0651 0.0817 0.0958 0.1028 0.106 0.1067 0.1064 0.1032 0.0993 0.098 0.0941 0.0888 0.0796 0.0637 0.0461 0.024 0.0138 0.0118 0.0135 0.0276 0.0513 0.071 0.0887 0.103 0.1092 0.1144 0.1164 0.1153 0.1125 0.108 0.104 0.1008 0.0976 0.0971 0.0936 0.0922 0.093 0.0905 0.0921 0.0927 0.0927 0.0927 0.0917 0.0925 0.0931 0.0942 0.0933 0.0927 0.0915 0.0895 0.0883 0.0875 0.085 0.0779 0.0653 0.0488 0.0285 0.0171 0.0241 0.0261 0.0342 0.0565 0.0728 0.0904 0.1029 0.1083 0.1128 0.1133 0.113 0.112 0.1089 0.106 0.1051 0.1034 0.1045 0.1039 0.1018 0.1002 0.0972 0.095 0.095 0.0954 0.0957 0.0958 0.0948 0.093 0.0906 0.0889 0.0885 0.0896 0.0907 0.0932 0.0943 0.0956 0.0958 0.0956 0.0952 0.0929 0.0925 0.0903 0.0907 0.0936 0.0988 0.1074 0.1131 0.1185 0.1217 0.1239 0.1245 0.1238 0.1222 0.12 0.1212 0.1219 0.1243 0.1249 0.1248 0.1263 0.1259 0.1275 0.1287 0.1296 0.1295 0.1283 0.1252 0.1219 0.1207 0.1192 0.1171 0.114 0.1091 0.1003 0.0887 0.0716 0.0517 0.0314 0.021 0.0213 0.0207 0.0201 0.0194 0.0175 0.0162 0.0455 0.0651 0.0801 0.0956 0.0988 0.1024 0.1071 0.1078 0.1061 0.1062 0.1046 0.1018 0.1038 0.1009 0.0983 0.0999 0.0986 0.0988 0.1018 0.1005 0.1001 0.1007 0.101 0.101 0.1023 0.0995 0.095 0.0917 0.0869 0.0845 0.0785 0.0684 0.0541 0.0386 0.0264 0.0246 0.0256 0.025 0.0246 0.0214 0.026 0.046 0.0638 0.083 0.0965 0.1023 0.1039 0.1043 0.1029 0.1021 0.1004 0.096 0.0927 0.0899 0.0882 0.0877 0.0888 0.0904 0.0938 0.0971 
0.0986 0.0998 0.0997 0.0985 0.0961 0.0902 0.0867 0.0799 0.0745 0.0722 0.07 0.0692 0.0686 0.0687 0.0676 0.0651 0.0626 0.0601 0.0571 0.0536 0.0483 0.0422 0.0343 0.0259 0.0166 0.0093 0.006 0.0051 0.0053 0.0067 0.0064 0.0067 0.0062 0.0051 0.0044 0.0032 0.0031 0.0023 0.0023 0.0027 0.0059 0.0375 0.0602 0.0777 0.0893 0.0865 0.0789 0.0681 0.0598 0.055 0.0536 0.0562 0.0614 0.0666 0.0719 0.0743 0.0765 0.0789 0.0784 0.0795 0.0808 0.0795 0.083 0.086 0.0904 0.0948 0.0965 0.0948 0.094 0.0925 0.092 0.0931 0.0923 0.0916 0.0929 0.0902 0.0863 0.0779 0.0626 0.047 0.0316 0.0307 0.048 0.0628 0.0755 0.0835 0.0854 0.0867 0.0851 0.0858 0.0841 0.0832 0.0832 0.0815 0.0788 0.073 0.0616 0.0466 0.0325 0.0212 0.0191 0.0207 0.0182 0.033 0.052 0.0671 0.0805 0.089 0.0902 0.092 0.0917 0.0892 0.0859 0.0824 0.0804 0.0795 0.0794 0.0778 0.0776 0.0754 0.0747 0.0745 0.0718 0.0743 0.0754 0.0764 0.0764 0.0762 0.0749 0.0741 0.0732 0.0714 0.0705 0.0709 0.0712 0.0727 0.0722 0.0729 0.0724 0.0718 0.0716 0.0685 0.0667 0.0649 0.0708 0.0801 0.0893 0.0977 0.1017 0.1047 0.1044 0.1032 0.1003 0.0949 0.0895 0.0851 0.0818 0.0816 0.082 0.0824 0.0855 0.0873 0.0887 0.0907 0.0897 0.0872 0.0854 0.0828 0.0789 0.0776 0.0749 0.0722 0.0703 0.0679 0.067 0.0667 0.0669 0.0677 0.0661 0.0648 0.0639 0.0612 0.0582 0.0511 0.0409 0.0297 0.0164 0.0065 0.0048 0.0045 0.0035 0.0037 0.0037 0.0035 0.0031 0.0028 0.0018 0.0018 0.0014 0.002 0.0017 0.0029 0.0046 0.0069 0.01 0.0265 0.0439 0.0583 0.0699 0.0768 0.0781 0.0764 0.0752 0.0723 0.0734 0.0732 0.0739 0.0724 0.071 0.0709 0.0711 0.0698 0.0631 0.0519 0.0386 0.0211 0.0239 0.0416 0.0587 0.0727 0.0828 0.0873 0.087 0.086 0.0839 0.0807 0.078 0.0749 0.0703 0.0626 0.051 0.0373 0.0241 0.0166 0.0166 0.017 0.0262 0.0452 0.0577 0.0675 0.0742 0.0757 0.0764 0.0762 0.0742 0.0706 0.0663 0.063 0.0622 0.0634 0.0644 0.0611 0.053 0.0424 0.0291 0.0184 0.0168 0.0249 0.0472 0.0645 0.0788 0.0908 0.0931 0.0954 0.0972 0.0962 0.0958 0.0936 0.091 0.0885 0.0867 0.0832 0.0796 0.0754 0.0706 0.0679 0.0674 0.0707 0.0737 0.0799 0.085 0.0877 0.0913 0.0906 0.0884 0.0853 0.0811 0.0781 0.0758 0.0738 0.0714 0.0641 0.0536 0.0403 0.0254 0.0136 0.012 0.013 0.0268 0.0417 0.0556 0.0678 0.076 0.0816 0.0837 0.0835 0.0827 0.0801 0.0756 0.0708 0.0655 0.0604 0.0594 0.0568 0.0551 0.0555 0.0541 0.0547 0.0543 0.0523 0.0506 0.0486 0.0454 0.0429 0.0391 0.0351 0.0323 0.0292 0.0277 0.0273 0.0276 0.0283 0.0284 0.0277 0.0271 0.0244 0.0213 0.0168 0.0117 0.0073 0.0033 0.0029 0.0027 0.0027 0.0042 0.0039 0.0047 0.0045 0.0049 0.0041 0.0038 0.0038 0.0036 0.0039 0.0031 0.0032 0.0026 0.003 0.0077 0.0169 0.0307 0.0444 0.0549 0.0616 0.0647 0.0637 0.061 0.0593 0.0577 0.056 0.0591 0.0603 0.0624 0.0651 0.0657 0.0657 0.0659 0.0676 0.0691 0.0716 0.073 0.0725 0.0737 0.0745 0.0754 0.0766 0.0775 0.0784 0.0785 0.0792 0.0774 0.0765 0.0749 0.0697 0.0599 0.0476 0.0326 0.0193 0.0159 0.0168 0.0172 0.024 0.0424 0.0534 0.0638 0.069 0.0679 0.0681 0.0645 0.063 0.0609 0.0596 0.0603 0.0617 0.0635 0.0632 0.0631 0.0616 0.0589 0.0588 0.0575 0.0588 0.0649 0.0674 0.0717 0.0751 0.0746 0.0755 0.0759 0.073 0.0715 0.0695 0.0678 0.0661 0.0613 0.0517 0.0402 0.0256 0.0141 0.0134 0.015 0.0144 0.0248 0.0451 0.0585 0.0688 0.0762 0.0777 0.078 0.0774 0.0756 0.072 0.0698 0.0673 0.0632 0.0592 0.0491 0.0382 0.0251 0.014 0.0085 0.0167 0.041 0.058 0.0731 0.0844 0.0871 0.0893 0.0899 0.0895 0.0897 0.0855 0.0812 0.0745 0.0666 0.0611 0.0578 0.0578 0.0612 0.0668 0.0717 0.0755 0.078 0.0777 0.0782 0.0794 0.0788 0.0806 0.081 0.0829 0.0849 0.0847 0.0848 0.0837 0.0816 0.0783 0.0693 0.0563 0.0408 0.0231 0.0138 0.0135 0.0145 0.0262 
0.0469 0.064 0.0773 0.0866 0.0888 0.0895 0.0891 0.0869 0.082 0.0779 0.0752 0.0744 0.0755 0.0773 0.0754 0.0673 0.0551 0.0389 0.0205 0.0192 0.0455 0.0634 0.0753 0.087 0.0891 0.0883 0.0883 0.0862 0.0859 0.0854 0.0839 0.0825 0.0782 0.0694 0.0573 0.0426 0.0245 0.0106 0.0088 0.0165 0.0488 0.0697 0.0866 0.1004 0.1032 0.1053 0.1047 0.1043 0.1032 0.1004 0.099 0.0938 0.0888 0.0836 0.0733 0.0583 0.0413 0.0222 0.0123 0.0144 0.0164 0.0321 0.059 0.0778 0.0913 0.0999 0.0986 0.0939 0.0912 0.0867 0.084 0.0829 0.0812 0.0809 0.0799 0.0788 0.0765 0.0744 0.0729 0.0723 0.0754 0.0828 0.091 0.0974 0.1018 0.104 0.1043 0.1043 0.1031 0.1009 0.0977 0.0944 0.0925 0.0895 0.0856 0.0747 0.0606 0.0435 0.0301 0.0249 0.0253 0.0253 0.0287 0.0501 0.0659 0.0843 0.0963 0.0984 0.1003 0.0991 0.1004 0.0989 0.0998 0.0967 0.0946 0.0935 0.0887 0.0908 0.0919 0.0865 1.0 0.0799 0.0873 0.0911 0.098 0.1002 0.1047 0.1068 0.104 0.1069 0.1052 0.1059 0.1046 0.1022 0.0987 0.0924 0.0901 0.0868 0.0858 0.0866 0.0878 0.0902 0.0905 0.0914 0.0898 0.0898 0.0887 0.0845 0.0785 0.0682 0.058 0.0495 0.045 0.0437 0.0438 0.0452 0.0455 0.0447 0.0437 0.0417 0.0397 0.0375 0.0338 0.0297 0.0242 0.0184 0.0134 0.008 0.0039 0.0005 0.0011 0.0007 0.0009 0.0006 0.001 0.0004 0.0007 0.0008 0.0004 0.0002 0.0001 0.0003 0.0 0.0006 0.0005 0.0018", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0001 0.0002 0.0002 0.0002 0.0002 0.0004 0.0005 0.0004 0.0008 0.0009 0.0014 0.0014 0.0012 0.0014 0.0016 0.0018 0.0018 0.0021 0.0024 0.0024 0.0028 0.0028 0.0029 0.0031 0.0033 0.0035 0.0035 0.0031 0.0029 0.0024 0.0021 0.002 0.002 0.0021 0.0016 0.0013 0.001 0.0009 0.0008 0.0013 0.002 0.003 0.0047 0.0065 0.0092 0.0116 0.0125 0.014 0.0149 0.0143 0.0144 0.0147 0.0144 0.0208 0.0443 0.0712 0.087 0.0895 0.0797 0.0519 0.0209 0.0022 0.0007 0.0008 0.001 0.0012 0.0009 0.0044 0.011 0.0155 0.0188 0.0222 0.0206 0.0175 0.0136 0.0055 0.002 0.0019 0.0022 0.0019 0.0019 0.0019 0.0018 0.0013 0.0013 0.0009 0.0005 0.0007 0.0006 0.0009 0.0007 0.0009 0.0012 0.0017 0.0019 0.002 0.0021 0.002 0.0019 0.0018 0.0016 0.0014 0.0015 0.0015 0.0012 0.0011 0.0013 0.0022 0.0049 0.0078 0.0127 0.0165 0.0177 0.0171 0.0132 0.008 0.0029 0.0019 0.0021 0.0022 0.0024 0.0026 0.0022 0.002 0.0016 0.0016 0.0013 0.0013 0.0018 0.0023 0.0027 0.003 0.0029 0.0025 0.0021 0.0011 0.0011 0.0012 0.0015 0.0013 0.0014 0.0014 0.0014 0.0014 0.0016 0.0015 0.0021 0.0036 0.0057 0.0083 0.0108 0.0126 0.0124 0.0112 0.009 0.0052 0.0023 0.0019 0.002 0.0022 0.0024 0.0027 0.0025 0.0026 0.0026 0.0026 0.0021 0.002 0.0022 0.0022 0.0021 0.0022 0.0021 0.002 0.002 0.0021 0.0022 0.0022 0.0022 0.0021 0.0021 0.0021 0.0021 0.002 0.002 0.0018 0.0017 0.0016 0.0018 0.0018 0.0016 0.0015 0.0014 0.0013 0.0019 0.0023 0.0031 0.004 0.0047 0.0055 0.0059 0.0061 0.0059 0.0053 0.0045 0.0033 0.003 0.0022 0.0013 0.0009 0.0005 0.0007 0.0022 0.0083 0.0175 0.0186 0.0193 0.0168 0.0069 0.0038 0.004 0.0037 0.0031 0.0022 0.0016 0.0014 0.0012 0.0013 0.0044 0.0084 0.0165 0.0233 0.0302 0.0347 0.0331 0.0294 0.0221 0.0107 0.0035 0.0019 0.0016 0.0011 0.0009 0.0008 0.0007 0.0008 0.0006 0.0005 0.0006 0.0005 0.0012 0.0018 0.0038 0.005 0.0052 0.0049 0.0037 0.0018 0.0011 0.0011 0.0012 0.0012 0.001 0.0009 0.001 0.0011 0.001 0.0013 0.0011 0.001 0.0009 0.0008 0.0008 0.0008 0.0009 0.001 0.0014 0.0017 0.0018 0.0019 0.0016 0.0015 0.0015 0.0015 0.0017 0.0018 0.002 0.002 0.002 0.002 0.0022 0.0036 0.0085 0.0126 0.0132 0.0134 0.0116 0.0074 0.0059 0.0041 0.0036 0.0029 0.0025 0.0019 0.0011 0.0014 0.0012 0.0016 0.0018 0.0019 0.0019 0.0028 0.0034 0.0759 0.0025 0.0033 0.0029 0.0022 0.0022 0.0025 
0.0029 0.0029 0.0024 0.0021 0.002 0.0019 0.0021 0.0024 0.0024 0.0025 0.0022 0.002 0.0017 0.0015 0.0016 0.0015 0.0016 0.0014 0.0014 0.0015 0.0016 0.0015 0.0016 0.0016 0.0015 0.0015 0.0014 0.0016 0.0016 0.0014 0.0015 0.0012 0.0012 0.0013 0.0011 0.0012 0.0015 0.002 0.0027 0.003 0.0033 0.0038 0.0045 0.0053 0.0054 0.0056 0.0051 0.0042 0.0033 0.0027 0.0015 0.0012 0.001 0.001 0.001 0.0032 0.0065 0.0121 0.0166 0.0171 0.0161 0.0122 0.0069 0.0039 0.0033 0.0028 0.0023 0.0017 0.0017 0.002 0.0025 0.0031 0.0039 0.0059 0.0087 0.0109 0.0147 0.0192 0.0202 0.0191 0.0159 0.0107 0.0037 0.0025 0.0021 0.0013 0.0013 0.0014 0.0013 0.0012 0.001 0.0009 0.0007 0.0007 0.0018 0.0037 0.0068 0.0109 0.0141 0.0141 0.0128 0.0088 0.0032 0.0021 0.0018 0.0014 0.0015 0.0014 0.0015 0.0016 0.002 0.0034 0.0107 0.0215 0.0297 0.037 0.0433 0.0476 0.0489 0.0475 0.0422 0.0334 0.023 0.0108 0.0036 0.0025 0.0021 0.0019 0.0017 0.0018 0.0018 0.0019 0.0018 0.0019 0.0021 0.0021 0.0016 0.0015 0.0017 0.0018 0.0022 0.0025 0.0028 0.0159 0.0023 0.0024 0.0022 0.0018 0.0018 0.0018 0.0018 0.0022 0.002 0.002 0.002 0.0032 0.0053 0.0056 0.0061 0.0061 0.005 0.0041 0.0029 0.0022 0.0017 0.002 0.0022 0.0023 0.0019 0.0019 0.0016 0.0014 0.0013 0.0014 0.0013 0.0011 0.0012 0.0011 0.0011 0.001 0.0011 0.0012 0.0013 0.0014 0.0016 0.0016 0.0017 0.0014 0.0014 0.0011 0.0013 0.0014 0.0017 0.0018 0.0017 0.0017 0.0016 0.0017 0.0017 0.0018 0.002 0.0022 0.0024 0.0025 0.0026 0.0028 0.0026 0.0025 0.0025 0.0022 0.0021 0.0021 0.002 0.0019 0.0019 0.0019 0.0017 0.002 0.0015 0.0015 0.0014 0.0014 0.0014 0.0014 0.0013 0.0015 0.0013 0.0011 0.001 0.0009 0.0009 0.001 0.0009 0.0007 0.0009 0.001 0.0012 0.0012 0.0016 0.0018 0.0024 0.0028 0.0028 0.0028 0.0027 0.0025 0.0023 0.0025 0.0024 0.0021 0.0022 0.0021 0.0023 0.0025 0.0027 0.0025 0.0025 0.0024 0.0025 0.0024 0.0023 0.0024 0.0023 0.0024 0.0028 0.0028 0.0028 0.0026 0.0022 0.002 0.0021 0.0024 0.0025 0.0024 0.0022 0.0022 0.0025 0.0039 0.0056 0.0071 0.0093 0.0113 0.012 0.0113 0.0095 0.0071 0.0042 0.0032 0.0026 0.0024 0.0022 0.0022 0.0022 0.0023 0.0023 0.0023 0.0022 0.0023 0.0023 0.0023 0.0023 0.0019 0.0021 0.002 0.0018 0.0019 0.002 0.0019 0.0017 0.0016 0.0016 0.0015 0.0015 0.0016 0.0019 0.0025 0.0079 0.0139 0.0178 0.0246 0.0307 0.0351 0.0411 0.0454 0.0456 0.0426 0.035 0.0229 0.0104 0.0043 0.0037 0.0035 0.0038 0.0034 0.0034 0.0033 0.0027 0.0024 0.0023 0.0024 0.0028 0.0025 0.0024 0.0022 0.0018 0.0018 0.0023 0.0028 0.0028 0.0031 0.003 0.0031 0.0028 0.0026 0.0028 0.0027 0.0028 0.0025 0.0019 0.0018 0.0015 0.0018 0.0019 0.0018 0.0017 0.0021 0.002 0.0026 0.0041 0.0051 0.0061 0.0067 0.007 0.0084 0.0094 0.0097 0.0097 0.009 0.0077 0.0064 0.0052 0.0035 0.0019 0.0017 0.0018 0.002 0.0026 0.0027 0.0037 0.0054 0.0062 0.0064 0.0059 0.0044 0.002 0.0008 0.001 0.001 0.0038 0.0163 0.029 0.0423 0.0572 0.0687 0.0831 0.0894 0.0852 0.0744 0.0531 0.0319 0.0188 0.0119 0.0046 0.0028 0.0027 0.0026 0.0025 0.0027 0.0027 0.0033 0.0037 0.0038 0.0042 0.0056 0.0079 0.0119 0.0146 0.015 0.0142 0.0115 0.0073 0.0036 0.003 0.0024 0.002 0.0018 0.0017 0.0016 0.0013 0.0017 0.0017 0.0022 0.0056 0.0121 0.0192 0.0241 0.0277 0.029 0.0264 0.0221 0.0165 0.0081 0.0045 0.0036 0.0033 0.0028 0.0024 0.0024 0.0022 0.0025 0.0023 0.0024 0.0023 0.0021 0.002 0.002 0.002 0.0018 0.0019 0.0016 0.0014 0.0014 0.0014 0.0019 0.0022 0.0023 0.0019 0.001 0.0011 0.0012 0.0014 0.0014 0.0013 0.0013 0.003 0.0038 0.0047 0.005 0.0046 0.0037 0.003 0.002 0.0015 0.0015 0.0018 0.0019 0.0019 0.0018 0.0016 0.0012 0.0012 0.0012 0.0009 0.0013 0.0012 0.0009 0.0008 0.0007 0.0009 0.0008 0.0008 0.0008 0.0006 0.0008 0.0007 
0.0007 0.0009 0.0009 0.0009 0.0008 0.0008 0.0006 0.0006 0.0006 0.0005 0.0008 0.0008 0.0011 0.0019 0.0029 0.0031 0.0032 0.003 0.0028 0.0031 0.0041 0.0043 0.0045 0.0044 0.0037 0.0028 0.0024 0.0019 0.0011 0.0007 0.0008 0.0014 0.0028 0.0066 0.0106 0.0129 0.0147 0.0137 0.0111 0.008 0.0044 0.0037 0.0037 0.0041 0.0039 0.0032 0.0026 0.002 0.0018 0.0023 0.0021 0.0031 0.0071 0.0113 0.0187 0.0247 0.0268 0.0262 0.0213 0.0137 0.0071 0.0023 0.0021 0.0017 0.0016 0.0017 0.002 0.0019 0.0023 0.005 0.0109 0.0168 0.0196 0.0215 0.0215 0.0201 0.0197 0.0184 0.0152 0.012 0.0067 0.003 0.0024 0.0021 0.0019 0.0015 0.0012 0.0008 0.0007 0.0007 0.0005 0.0005 0.0005 0.0007 0.0009 0.0012 0.0017 0.0019 0.0023 0.0021 0.002 0.0019 0.0018 0.0021 0.0021 0.0019 0.0017 0.0017 0.0017 0.002 0.0037 0.008 0.0123 0.0171 0.0206 0.0242 0.0271 0.0275 0.0256 0.0217 0.0147 0.0078 0.0025 0.0018 0.0015 0.0015 0.0014 0.0013 0.0013 0.0013 0.0012 0.0012 0.0013 0.0016 0.0017 0.0015 0.0015 0.0026 0.0027 0.0033 0.0041 0.0036 0.0036 0.004 0.0038 0.0039 0.0039 0.0036 0.0035 0.0032 0.0028 0.0026 0.0024 0.0023 0.0023 0.0023 0.0024 0.0021 0.0018 0.0017 0.0014 0.0016 0.0015 0.0014 0.0011 0.0011 0.0009 0.0009 0.0008 0.0007 0.0007 0.001 0.0006 0.0008 0.0008 0.0011 0.0011 0.001 0.0012 0.0014 0.0015 0.0016 0.0015 0.0021 0.0028 0.0036 0.0044 0.0043 0.0042 0.0037 0.0029 0.0023 0.0018 0.0012 0.0009 0.0001 0.0002 0.0009 0.0053 0.0116 0.0138 0.0144 0.014 0.0084 0.0044 0.0031 0.0023 0.0018 0.0014 0.0011 0.001 0.001 0.0008 0.001 0.0009 0.0008 0.0007 0.0005 0.0005 0.0005 0.0005 0.0004 0.0007 0.0007 0.001 0.001 0.001 0.001 0.0011 0.0012 0.0011 0.0013 0.0018 0.0026 0.0022 0.0018 0.0018 0.0018 0.0039 0.0051 0.0051 0.0049 0.0038 0.0032 0.0025 0.0024 0.0021 0.0021 0.0017 0.0017 0.0017 0.0016 0.0016 0.0014 0.0015 0.0015 0.0019 0.003 0.0031 0.0037 0.0036 0.0033 0.003 0.0023 0.002 0.0021 0.0019 0.0018 0.0014 0.0011 0.0011 0.0011 0.001 0.0007 0.0005 0.0004 0.0019 0.0052 0.0084 0.0107 0.012 0.0111 0.0097 0.0076 0.0057 0.0037 0.002 0.0014 0.0014 0.0014 0.0014 0.0015 0.0014 0.0011 0.0012 0.0012 0.0015 0.0027 0.0051 0.0084 0.0125 0.0151 0.0148 0.0134 0.0097 0.0042 0.002 0.0016 0.0018 0.0019 0.002 0.002 0.0018 0.0019 0.0018 0.0019 0.002 0.0018 0.0019 0.0019 0.0018 0.0016 0.0017 0.0018 0.0019 0.002 0.0019 0.002 0.0016 0.0016 0.0018 0.0016 0.0016 0.0017 0.0015 0.0015 0.0014 0.0013 0.0014 0.0014 0.0014 0.0016 0.0014 0.0013 0.0016 0.0014 0.0014 0.0014 0.0013 0.0013 0.0014 0.0015 0.0012 0.0011 0.0012 0.0011 0.0011 0.0008 0.0008 0.0006 0.0006 0.0006 0.0007 0.0008 0.001 0.0012 0.0016 0.0018 0.002 0.0019 0.0019 0.0017 0.0017 0.0016 0.0013 0.0011 0.0008 0.0006 0.0006 0.0007 0.0005 0.0005 0.0011 0.0021 0.0025 0.0028 0.0026 0.0023 0.0021 0.0019 0.0016 0.0015 0.0015 0.0015 0.0013 0.0015 0.0011 0.0011 0.0011 0.0012 0.0012 0.0011 0.0009 0.001 0.0011 0.0011 0.0011 0.0011 0.001 0.0012 0.0012 0.001 0.0009 0.0011 0.0015 0.0012 0.0013 0.0013 0.0015 0.0015 0.0015 0.0015 0.0016 0.0019 0.0025 0.0027 0.0031 0.0027 0.0026 0.0027 0.0028 0.0031 0.0032 0.0032 0.0032 0.0031 0.004 0.0025 0.0023 0.0018 0.0016 0.0018 0.0018 0.0018 0.0017 0.0014 0.0014 0.0009 0.0009 0.0007 0.0007 0.0007 0.0006 0.0004 0.0007 0.0007 0.0023 0.0084 0.013 0.016 0.0193 0.0224 0.0228 0.0236 0.0231 0.0206 0.0179 0.0134 0.0097 0.0045 0.0036 0.0035 0.0032 0.0032 0.0035 0.0036 0.0035 0.0034 0.0028 0.0023 0.0024 0.0024 0.0024 0.0021 0.0021 0.002 0.0018 0.002 0.0021 0.0024 0.0027 0.0026 0.0026 0.0024 0.002 0.0023 0.0018 0.0019 0.0017 0.0014 0.0014 0.0013 0.0015 0.0015 0.0017 0.0016 0.0018 0.0018 0.0019 0.0024 0.0026 0.0032 0.0042 0.0045 
0.0044 0.0043 0.0031 0.0022 0.0021 0.0021 0.0025 0.0038 0.0059 0.0087 0.0114 0.0133 0.0148 0.0148 0.0131 0.0109 0.0081 0.0057 0.0043 0.0041 0.0043 0.004 0.0036 0.0028 0.0015 0.0012 0.0012 0.0011 0.001 0.001 0.0025 0.0063 0.0122 0.0156 0.0175 0.0168 0.0131 0.0088 0.0029 0.0025 0.0022 0.0022 0.0018 0.0017 0.0016 0.0017 0.0014 0.0012 0.0012 0.0006 0.0004 0.0003 0.0008 0.0022 0.004 0.0045 0.0045 0.0043 0.0026 0.0022 0.0021 0.0021 0.0021 0.0016 0.0015 0.0015 0.0015 0.0017 0.0017 0.0018 0.0018 0.0019 0.0016 0.0017 0.0021 0.0023 0.0024 0.0025 0.0021 0.002 0.0019 0.0015 0.001 0.0009 0.0008 0.0006 0.0007 0.0005 0.0003 0.0003 0.0005 0.001 0.0022 0.0053 0.0072 0.0087 0.0087 0.0072 0.0055 0.002 0.0012 0.0009 0.0009 0.0012 0.0011 0.0009 0.0007 0.0008 0.001 0.0013 0.0011 0.001 0.0009 0.0009 0.0009 0.0007 0.001 0.0016 0.002 0.0023 0.0026 0.0027 0.0028 0.0028 0.0026 0.0023 0.0023 0.0022 0.0023 0.0023 0.0024 0.0024 0.0022 0.0024 0.0019 0.002 0.0019 0.0018 0.002 0.0022 0.002 0.0022 0.0021 0.0019 0.0018 0.0017 0.0017 0.0014 0.0014 0.0013 0.0012 0.0014 0.0016 0.0012 0.0015 0.0015 0.0017 0.0017 0.0018 0.0024 0.003 0.0039 0.0044 0.0046 0.0055 0.0052 0.0052 0.0049 0.0038 0.0033 0.0025 0.002 0.0015 0.0009 0.0006 0.0011 0.0019 0.005 0.007 0.0073 0.0071 0.0049 0.0037 0.0035 0.0041 0.004 0.0036 0.0024 0.0018 0.001 0.0019 0.0029 0.0067 0.0115 0.0162 0.0199 0.0232 0.0257 0.0247 0.0218 0.0171 0.0091 0.0044 0.0029 0.002 0.0016 0.0016 0.0016 0.0014 0.0014 0.0015 0.0017 0.0015 0.0013 0.0022 0.0033 0.0045 0.0054 0.0052 0.0048 0.0038 0.0023 0.0015 0.0013 0.0013 0.0012 0.0012 0.0012 0.0009 0.0009 0.0007 0.0008 0.0008 0.0005 0.0004 0.0006 0.0008 0.0007 0.001 0.0013 0.0014 0.0015 0.0017 0.0013 0.0013 0.0013 0.0015 0.0018 0.0018 0.0018 0.0017 0.0015 0.0018 0.0022 0.0046 0.008 0.0097 0.0108 0.0112 0.01 0.0083 0.0066 0.0042 0.0027 0.0023 0.0022 0.0021 0.0021 0.0016 0.0017 0.0013 0.0013 0.0011 0.0016 0.0023 0.0055 0.0095 0.0141 0.0182 0.0198 0.0187 0.0152 0.0109 0.0057 0.003 0.003 0.0025 0.0026 0.0026 0.0025 0.0025 0.0025 0.0025 0.0023 0.0022 0.0022 0.0025 0.0024 0.0024 0.002 0.0018 0.0016 0.0014 0.0015 0.0016 0.0016 0.0019 0.0019 0.0018 0.0016 0.0017 0.0016 0.0013 0.0014 0.0013 0.0013 0.0013 0.0013 0.0012 0.0011 0.0012 0.001 0.0014 0.0021 0.0035 0.0043 0.0047 0.0049 0.0045 0.0044 0.0041 0.0034 0.0031 0.0025 0.0022 0.0022 0.0015 0.0006 0.0008 0.0063 0.014 0.0158 0.0165 0.0149 0.0075 0.0041 0.0035 0.0027 0.0024 0.0019 0.0013 0.0013 0.0014 0.0012 0.0016 0.0025 0.0048 0.0074 0.0109 0.0148 0.0187 0.0205 0.0195 0.0169 0.0112 0.0052 0.0024 0.0021 0.0019 0.0016 0.0017 0.0015 0.0011 0.0009 0.0009 0.0009 0.001 0.001 0.0011 0.0013 0.0015 0.0017 0.002 0.0018 0.0021 0.002 0.0022 0.0021 0.0024 0.0027 0.0028 0.0027 0.0028 0.0026 0.0027 0.0029 0.0092 0.0163 0.0222 0.0282 0.0306 0.0307 0.03 0.0261 0.0219 0.017 0.0099 0.0036 0.0021 0.0016 0.0016 0.0019 0.0018 0.0016 0.0017 0.0015 0.0014 0.0013 0.0013 0.0016 0.0015 0.0019 0.0019 0.0017 0.0016 0.0017 0.0018 0.002 0.0022 0.0019 0.0022 0.0023 0.0022 0.0023 0.0026 0.0028 0.003 0.0029 0.0029 0.0025 0.0025 0.0025 0.0024 0.0028 0.003 0.0033 0.0035 0.0035 0.0037 0.0037 0.0035 0.0036 0.0029 0.0029 0.0025 0.0025 0.0025 0.0025 0.0023 0.0025 0.0026 0.0023 0.0023 0.0023 0.0021 0.002 0.002 0.002 0.0018 0.002 0.0021 0.0022 0.0022 0.002 0.002 0.0018 0.0019 0.0019 0.0025 0.0027 0.0059 0.0092 0.0143 0.0182 0.0208 0.021 0.018 0.014 0.009 0.0042 0.003 0.0031 0.0026 0.0024 0.0025 0.0023 0.002 0.0015 0.0014 0.0014 0.0013 0.0009 0.0015 0.0033 0.0048 0.0055 0.005 0.0045 0.003 0.0021 0.0024 0.0021 0.0021 0.0021 0.0018 
0.0015 0.0013 0.0009 0.0008 0.0008 0.0009 0.0013 0.0035 0.0064 0.0115 0.0147 0.0155 0.0147 0.0119 0.0064 0.0038 0.0032 0.0031 0.0031 0.0028 0.0027 0.0026 0.0025 0.0025 0.0025 0.0025 0.0023 0.0021 0.0016 0.0015 0.0013 0.0014 0.0014 0.0014 0.0015 0.0011 0.0011 0.001 0.0008 0.0005 0.0004 0.0007 0.001 0.0008 0.0009 0.0008 0.0014 0.0025 0.0044 0.0069 0.0081 0.0082 0.0074 0.0056 0.0037 0.0027 0.0024 0.0023 0.0021 0.0016 0.0015 0.0014 0.0014 0.0015 0.0014 0.0013 0.0013 0.0014 0.0016 0.0016 0.0015 0.0015 0.0013 0.0012 0.0007 0.001 0.001 0.0011 0.0011 0.0011 0.001 0.0011 0.0012 0.001 0.0011 0.0011 0.0012 0.0018 0.0042 0.0064 0.007 0.0083 0.0085 0.0071 0.0063 0.0047 0.0025 0.0024 0.0021 0.0022 0.0024 0.0025 0.0029 0.0027 0.0024 0.0023 0.002 0.0021 0.0019 0.0022 0.0023 0.0021 0.0019 0.002 0.002 0.0019 0.0018 0.0023 0.0023 0.0023 0.0022 0.002 0.0019 0.0021 0.002 0.0019 0.0017 0.0017 0.0016 0.0018 0.0019 0.0021 0.0019 0.0021 0.0021 0.0019 0.0033 0.0045 0.005 0.0056 0.0058 0.0056 0.0056 0.0054 0.0047 0.0043 0.0038 0.0032 0.0028 0.0021 0.0015 0.0013 0.0018 0.0019 0.0021 0.0022 0.0025 0.0037 0.0045 0.0059 0.0059 0.0059 0.0051 0.0037 0.0018 0.0013 0.001 0.0011 0.0025 0.0113 0.0206 0.0278 0.0334 0.0396 0.0412 0.0407 0.0374 0.0293 0.0205 0.0122 0.0068 0.0034 0.0025 0.0026 0.0027 0.0023 0.0021 0.0023 0.0025 0.0025 0.0025 0.0023 0.0019 0.0015 0.0012 0.001 0.0011 0.0015 0.0017 0.002 0.0022 0.0022 0.002 0.0019 0.0015 0.0015 0.0016 0.0018 0.0017 0.0019 0.0041 0.0095 0.0159 0.0197 0.0237 0.024 0.0225 0.0196 0.0148 0.0101 0.0058 0.0037 0.0029 0.0024 0.0024 0.0023 0.0019 0.0019 0.0021 0.0018 0.0015 0.0017 0.0014 0.0011 0.0014 0.0015 0.0012 0.0011 0.0011 0.0008 0.0008 0.0008 0.0009 0.0011 0.0011 0.0009 0.0007 0.0005 0.0005 0.0009 0.001 0.0011 0.0019 0.0035 0.0042 0.0049 0.0054 0.005 0.0044 0.0039 0.0028 0.0019 0.0021 0.0021 0.0023 0.0024 0.0022 0.0018 0.0015 0.0012 0.0012 0.0012 0.0013 0.0014 0.0012 0.0011 0.001 0.0012 0.0011 0.0012 0.001 0.0009 0.0007 0.0006 0.0006 0.0007 0.0008 0.0007 0.0008 0.0003 0.0004 0.0006 0.0006 0.0006 0.0005 0.0007 0.0006 0.0015 0.0023 0.0027 0.0025 0.0023 0.0026 0.0034 0.0042 0.0044 0.0044 0.0035 0.0026 0.0022 0.0016 0.0016 0.0031 0.0046 0.0071 0.0097 0.0125 0.0139 0.0143 0.0146 0.013 0.0105 0.008 0.0039 0.0028 0.0027 0.0027 0.0023 0.0021 0.0017 0.0015 0.0016 0.0014 0.0018 0.003 0.0064 0.0101 0.013 0.0157 0.0163 0.0146 0.0116 0.0067 0.0038 0.0023 0.0021 0.0016 0.0013 0.0014 0.0013 0.0012 0.0012 0.001 0.0011 0.0023 0.0068 0.0112 0.015 0.0176 0.0177 0.0152 0.0116 0.0077 0.0034 0.0033 0.0029 0.0023 0.0021 0.0019 0.0014 0.0012 0.0012 0.0012 0.0005 0.0007 0.003 0.0073 0.0123 0.017 0.0188 0.018 0.0152 0.0092 0.0046 0.0027 0.0026 0.0025 0.0022 0.0021 0.0019 0.0019 0.0018 0.0018 0.0017 0.0015 0.0026 0.0031 0.003 0.0026 0.0034 0.0045 0.0049 0.0049 0.0038 0.0027 0.0029 0.0026 0.0024 0.0021 0.0018 0.0021 0.0019 0.0021 0.0018 0.0017 0.0016 0.0017 0.0025 0.0033 0.0037 0.0043 0.0043 0.004 0.0035 0.0028 0.0023 0.0022 0.0026 0.0025 0.0025 0.0025 0.0025 0.0021 0.002 0.0017 0.0014 0.0014 0.0014 0.0014 0.0015 0.0016 0.0015 0.0016 0.0012 0.0015 0.0017 0.0017 0.0019 0.0016 0.0017 0.0018 0.0017 0.0017 0.0015 0.0013 0.0014 0.0012 0.0011 0.001 0.0008 0.0012 0.0016 0.0024 0.0023 0.0024 0.0026 0.0028 0.0037 0.0039 0.004 0.004 0.0036 0.0032 0.0031 0.0021 0.0019 0.0018 0.0013 0.0022 0.0025 0.005 0.007 0.0082 0.0098 0.0201 0.0367 0.0497 0.0552 0.0561 0.0487 0.0356 0.0196 0.0042 0.0012 0.0011 0.0008 0.0007 0.0007 0.0009 0.0011 0.0014 0.004 0.0065 0.0094 0.0097 0.0089 0.0072 0.0034 0.0034 0.0034 0.0032 0.0029 0.0022 
0.002 0.002 0.0021 0.0022 0.0023 0.0026 0.0024 0.0021 0.0012 0.0012 0.0018 0.0026 0.0038 0.0029 0.1129 0.003 0.0038 0.003 0.003 0.0023 0.0032 0.002 0.0022 0.0022 0.0023 0.0045 0.0105 0.0144 0.0192 0.0226 0.0238 0.0233 0.0197 0.0146 0.0084 0.0034 0.0025 0.0018 0.0015 0.0015 0.0015 0.0019 0.0021 0.002 0.0013 0.0012 0.0023 0.0055 0.0103 0.0128 0.0147 0.015 0.0128 0.0102 0.007 0.0036 0.0027 0.0024 0.0022 0.0019 0.0021 0.0023 0.0024 0.0023 0.0028 0.0032 0.006 0.012 0.0161 0.0204 0.0242 0.0258 0.0266 0.0251 0.0207 0.0153 0.0083 0.0036 0.0032 0.0034 0.0033 0.0033 0.0029 0.0026 0.0023 0.002 0.0019 0.0019 0.0019 0.0019 0.0018 0.0016 0.0015 0.0014 0.0013 0.0013 0.0012 0.0013 0.0011 0.0011 0.0012 0.0011 0.0009 0.0009 0.0009 0.0009 0.0008 0.0008 0.0005 0.0008 0.0052 0.0097 0.0143 0.0169 0.0168 0.015 0.0112 0.0062 0.0027 0.0027 0.0027 0.0026 0.0024 0.0024 0.0022 0.0018 0.0015 0.0015 0.0014 0.0012 0.001 0.0007 0.0007 0.0011 0.0015 0.0021 0.0024 0.0023 0.0022 0.0022 0.0025 0.0026 0.0029 0.0027 0.0024 0.0022 0.0019 0.002 0.0025 0.0028 0.0056 0.0098 0.0151 0.0192 0.0211 0.0201 0.0171 0.0126 0.0073 0.0037 0.0034 0.0032 0.0029 0.0032 0.0031 0.0027 0.002 0.0018 0.0017 0.0018 0.0017 0.0016 0.0017 0.0018 0.0021 0.0024 0.0024 0.0027 0.0025 0.0023 0.0024 0.0022 0.002 0.0021 0.002 0.002 0.0023 0.0021 0.0025 0.0025 0.0025 0.0032 0.0028 0.0026 0.0041 0.0069 0.0073 0.0074 0.0068 0.0041 0.0037 0.0033 0.0029 0.0028 0.0027 0.0021 0.0021 0.0018 0.0018 0.0016 0.0018 0.002 0.0024 0.0029 0.0027 0.0026 0.0021 0.0017 0.0018 0.0017 0.0016 0.0015 0.0016 0.0016 0.0014 0.0015 0.0016 0.0014 0.0014 0.0014 0.0017 0.0027 0.0028 0.003 0.0027 0.0019 0.0014 0.002 0.0027 0.003 0.0032 0.0035 0.0033 0.0028 0.0025 0.0018 0.0018 0.0016 0.0013 0.0018 0.0019 0.0018 0.002 0.002 0.002 0.0019 0.0019 0.002 0.0019 0.002 0.0022 0.0022 0.002 0.002 0.0018 0.0017 0.0017 0.0021 0.002 0.0023 0.0027 0.0028 0.0031 0.0031 0.0029 0.0028 0.0027 0.0023 0.0023 0.0022 0.0021 0.0021 0.0025 0.0025 0.0026 0.0028 0.0024 0.0024 0.0023 0.0022 0.0019 0.0018 0.0017 0.0014 0.0016 0.0017 0.0017 0.0018 0.0019 0.0011 0.0022 0.0031 0.0038 0.0043 0.0042 0.0042 0.004 0.0038 0.0034 0.0029 0.0022 0.0015 0.0015 0.0014 0.0012 0.0012 0.001 0.0012 0.0026 0.0041 0.0049 0.0051 0.0046 0.0044 0.0043 0.0044 0.004 0.0032 0.0029 0.0021 0.0019 0.0015 0.0015 0.0014 0.0014 0.0014 0.0013 0.0013 0.0011 0.001 0.001 0.0009 0.0009 0.0008 0.0006 0.0008 0.0009 0.0007 0.0004 0.0007 0.0007 0.0012 0.005 0.0094 0.0138 0.0188 0.023 0.0257 0.0248 0.0221 0.0169 0.0093 0.0074 0.0037 0.0024 0.0022 0.0023 0.0023 0.0022 0.0024 0.0024 0.0023 0.0021 0.002 0.0019 0.0022 0.0021 0.0022 0.0023 0.0023 0.0022 0.0023 0.0023 0.0021 0.002 0.0021 0.0023 0.0026 0.0025 0.0029 0.0048 0.011 0.018 0.0225 0.0276 0.0301 0.0307 0.0321 0.0337 0.0328 0.0313 0.0265 0.0192 0.0132 0.0076 0.0035 0.0028 0.0023 0.002 0.002 0.0022 0.0023 0.0028 0.0029 0.0033 0.0034 0.0032 0.0035 0.0034 0.0035 0.0036 0.0035 0.0035 0.0035 0.003 0.0026 0.0024 0.0025 0.0029 0.0034 0.0031 0.0032 0.0036 0.0033 0.0033 0.0033 0.0034 0.0034 0.0032 0.0033 0.0036 0.0038 0.0039 0.0036 0.0035 0.0035 0.0036 0.0038 0.0037 0.0037 0.0032 0.0028 0.0027 0.0027 0.0024 0.0026 0.0025 0.0027 0.0033 0.0037 0.0035 0.0033 0.003 0.0024 0.0023 0.0023 0.0021 0.0018 0.0018 0.0021 0.0021 0.002 0.0019 0.0017 0.0019 0.0024 0.0055 0.0103 0.0149 0.019 0.0235 0.0257 0.0256 0.0232 0.0186 0.0121 0.0066 0.0024 0.0022 0.0018 0.0017 0.0016 0.0016 0.0013 0.0009 0.0009 0.0008 0.0006 0.0006 0.0008 0.0011 0.0013 0.0021 0.002 0.0017 0.0017 0.0015 0.0012 0.0013 0.001 0.0012 0.0011 0.0009 0.0012 0.0013 
0.0018 0.0029 0.0059 0.0094 0.0118 0.0148 0.0173 0.0187 0.0189 0.0169 0.0138 0.009 0.0043 0.0029 0.0023 0.0019 0.0021 0.0022 0.0023 0.0021 0.0017 0.0011 0.0014 0.003 0.0069 0.011 0.0139 0.0161 0.0168 0.0151 0.0123 0.0089 0.0056 0.0026 0.0024 0.0021 0.002 0.0018 0.0014 0.0012 0.001 0.0012 0.001 0.0008 0.0007 0.0028 0.0064 0.0089 0.0121 0.0136 0.0128 0.0114 0.008 0.003 0.0018 0.0016 0.002 0.0022 0.0021 0.002 0.0017 0.0018 0.0019 0.002 0.0021 0.0024 0.0023 0.0021 0.002 0.0019 0.002 0.0018 0.0017 0.002 0.0021 0.0021 0.0022 0.002 0.0022 0.0022 0.0022 0.0022 0.0027 0.0026 0.0028 0.0028 0.0028 0.0026 0.0025 0.0024 0.0023 0.0022 0.0023 0.0023 0.0025 0.0022 0.0021 0.0021 0.0021 0.0022 0.0021 0.0022 0.0021 0.0019 0.0019 0.0017 0.0021 0.0021 0.002 0.0018 0.0014 0.0017 0.0022 0.0026 0.003 0.003 0.0029 0.0027 0.0025 0.0023 0.0022 0.0023 0.0024 0.0023 0.0022 0.0024 0.0025 0.0036 0.006 0.0078 0.0103 0.0109 0.0095 0.0081 0.0037 0.0021 0.0025 0.0023 0.0022 0.0019 0.0018 0.0015 0.0015 0.0012 0.0009 0.0009 0.0009 0.0006 0.0004 0.0007 0.0012 0.0018 0.0022 0.0019 0.0019 0.002 0.0021 0.0025 0.0023 0.0023 0.0021 0.0021 0.002 0.0018 0.0021 0.0022 0.0023 0.0024 0.003 0.0048 0.0086 0.0123 0.0156 0.0152 0.0138 0.0105 0.0034 0.0029 0.0032 0.0024 0.0021 0.0019 0.0015 0.0014 0.0009 0.0008 0.0005 0.0005 0.0009 0.0018 0.0053 0.0094 0.013 0.0149 0.0143 0.0126 0.0085 0.0037 0.0025 0.0019 0.0018 0.0018 0.0019 0.002 0.0018 0.0017 0.002 0.0023 0.0025 0.0029 0.0037 0.006 0.0096 0.0113 0.0114 0.0105 0.0076 0.004 0.0036 0.0037 0.0033 0.003 0.0026 0.0021 0.002 0.0019 0.0026 0.0026 0.0025 0.0022 0.0023 0.0021 0.0024 0.0023 0.002 0.002 0.0016 0.0016 0.0017 0.0017 0.0018 0.0019 0.0018 0.0017 0.0017 0.0018 0.0016 0.0014 0.0015 0.0037 0.0057 0.0095 0.0155 0.021 0.0253 0.0247 0.0218 0.0163 0.0083 0.0032 0.0026 0.0028 0.0025 0.0025 0.002 0.0015 0.0015 0.0014 0.0012 0.0011 0.0011 0.0011 0.0011 0.0011 0.001 0.001 0.0012 0.0011 0.0011 0.0009 0.0008 0.0009 0.0007 0.0007 0.0008 0.0012 0.0012 0.0013 0.0014 0.0013 0.0015 0.0015 0.0012 0.0013 0.0012 0.0012 0.0011 0.0008 0.001 0.0009 0.001 0.0013 0.0013 0.0015 0.0014 0.0016 0.0017 0.0019 0.0023 0.0024 0.0025 0.0025 0.0022 0.0019 0.0022 0.0022 0.0023 0.0018 0.0019 0.0022 0.0023 0.0026 0.0027 0.0027 0.0028 0.0024 0.0025 0.0046 0.011 0.0155 0.0184 0.0213 0.022 0.022 0.0215 0.0204 0.0183 0.0149 0.0111 0.0077 0.0046 0.0028 0.0029 0.0028 0.0029 0.0027 0.0026 0.0025 0.0024 0.002 0.0022 0.002 0.0019 0.0017 0.0016 0.0016 0.0016 0.0016 0.0015 0.0018 0.0018 0.0017 0.0018 0.0018 0.0014 0.0015 0.0011 0.0012 0.0024 0.008 0.0151 0.0193 0.0223 0.0252 0.0259 0.0264 0.0275 0.0254 0.0201 0.0158 0.0088 0.0029 0.0025 0.0024 0.0025 0.0026 0.0023 0.0021 0.0021 0.0019 0.0016 0.0014 0.0015 0.0017 0.002 0.002 0.0019 0.002 0.0015 0.0018 0.0018 0.0017 0.002 0.0018 0.0017 0.0018 0.0016 0.0015 0.0015 0.0016 0.0018 0.0016 0.0017 0.0015 0.0015 0.0014 0.0015 0.0016 0.0018 0.0017 0.0018 0.0025 0.0033 0.0034 0.0039 0.0044 0.0047 0.0061 0.0067 0.007 0.0066 0.0059 0.0047 0.0036 0.0032 0.0027 0.0026 0.0029 0.0031 0.0035 0.0035 0.0029 0.0025 0.0021 0.0017 0.0012 0.0008 0.0005 0.0005 0.0004 0.0003 0.0002 0.0003 0.0006 0.0005 0.0007 0.0008 0.0009 0.0008 0.0011 0.0013 0.0018 0.002 0.0021 0.0021 0.0018 0.0017 0.0017 0.0016 0.0013 0.0013 0.001 0.001 0.001 0.0011 0.0012 0.0011 0.0009 0.001 0.0011 0.0013 0.0014 0.0015 0.001 0.0009 0.0007 0.0007 0.0005 0.0006 0.0006 0.0005 0.0006 0.0008 0.0012 0.0018 0.0041 0.01 0.0163 0.0187 0.0189 0.0174 0.0117 0.0075 0.0052 0.0025 0.0023 0.0022 0.0021 0.0027 0.0023 0.0016 0.0017 0.0017 0.0016 0.0017 
0.0014 0.0014 0.001 0.0011 0.0009 0.0011 0.0008 0.001 0.0009 0.0007 0.0009 0.001 0.0008 0.001 0.0007 0.0006 0.0006 0.0004 0.0004 0.0005 0.0005 0.0006 0.0008 0.0008 0.0008 0.0008 0.0007 0.0011 0.0016 0.0021 0.0022 0.0024 0.0024 0.0024 0.0023 0.0023 0.0021 0.0021 0.0019 0.0017 0.0015 0.0017 0.0015 0.0017 0.0013 0.0014 0.0015 0.0016 0.0017 0.0018 0.0017 0.0017 0.0017 0.0016 0.0013 0.0013 0.0012 0.0012 0.0011 0.001 0.0008 0.0006 0.0008 0.0007 0.0009 0.0009 0.0008 0.0007 0.0011 0.0013 0.0016 0.0022 0.0027 0.003 0.0035 0.0033 0.0033 0.003 0.0025 0.0023 0.0017 0.0015 0.0013 0.0012 0.0021 0.0036 0.0055 0.0065 0.0064 0.0057 0.0044 0.004 0.0038 0.0037 0.0037 0.0034 0.0033 0.0024 0.0017 0.0013 0.0012 0.0011 0.0009 0.0008 0.0008 0.0018 0.0044 0.0081 0.0115 0.0135 0.0128 0.0112 0.0072 0.0036 0.0034 0.0029 0.0019 0.0019 0.0018 0.0018 0.0019 0.0017 0.0016 0.0032 0.0054 0.0081 0.014 0.016 0.016 0.0148 0.01 0.0058 0.003 0.0026 0.0025 0.0022 0.0016 0.0015 0.0013 0.001 0.0006 0.0007 0.0006 0.0005 0.0004 0.0011 0.0048 0.0097 0.0139 0.0172 0.0179 0.0163 0.0129 0.0077 0.0025 0.0021 0.0019 0.0019 0.0017 0.0015 0.0015 0.0019 0.0019 0.0023 0.0021 0.0015 0.0013 0.0009 0.0008 0.0008 0.0012 0.0014 0.0018 0.002 0.002 0.0019 0.002 0.0018 0.0016 0.0012 0.0011 0.001 0.001 0.0012 0.001 0.0038 0.0075 0.0093 0.0107 0.0108 0.0094 0.0078 0.006 0.0048 0.003 0.0023 0.0025 0.0025 0.0027 0.0029 0.0028 0.0029 0.0029 0.0027 0.0026 0.0023 0.0021 0.0023 0.0024 0.0025 0.0024 0.0021 0.002 0.002 0.0017 0.0016 0.0015 0.0012 0.0012 0.0011 0.001 0.0008 0.0008 0.0009 0.0011 0.0012 0.0013 0.0009 0.001 0.0011 0.0012 0.0012 0.0013 0.0014 0.002 0.0032 0.0039 0.0048 0.0052 0.0052 0.0054 0.0051 0.0049 0.0046 0.004 0.003 0.0021 0.0016 0.0011 0.0009 0.0012 0.0032 0.0065 0.0079 0.009 0.0084 0.0059 0.0037 0.0025 0.002 0.0014 0.001 0.0009 0.0006 0.0007 0.0006 0.0005 0.0004 0.0008 0.0006 0.0006 0.0005 0.0006 0.0007 0.0008 0.0008 0.0009 0.0011 0.0011 0.0012 0.0011 0.001 0.0014 0.0015 0.0017 0.0016 0.0015 0.0015 0.0024 0.0057 0.0105 0.0141 0.0167 0.0168 0.0139 0.0103 0.0049 0.0022 0.0009 0.001 0.0009 0.0007 0.0006 0.0005 0.0004 0.0004 0.0006 0.0004 0.0007 0.0007 0.0005 0.0006 0.0004 0.0004 0.0007 0.0009 0.0014 0.0014 0.0018 0.0018 0.002 0.0019 0.0021 0.0018 0.0017 0.0017 0.0016 0.0016 0.0022 0.0048 0.0081 0.0112 0.0144 0.0156 0.0153 0.0134 0.0103 0.0071 0.0043 0.002 0.0015 0.0012 0.0011 0.001 0.001 0.0011 0.0009 0.001 0.0006 0.0008 0.0014 0.0022 0.0027 0.0055 0.0066 0.0065 0.0062 0.0043 0.0016 0.0013 0.0012 0.0012 0.0011 0.0011 0.0011 0.0014 0.0015 0.0013 0.0014 0.0012 0.0012 0.0014 0.0016 0.0014 0.0015 0.0012 0.0013 0.0014 0.0012 0.0014 0.0013 0.0012 0.0012 0.0012 0.0011 0.0011 0.0012 0.0015 0.0019 0.0019 0.0023 0.0038 0.008 0.0116 0.0143 0.0155 0.015 0.0123 0.0092 0.0059 0.0025 0.002 0.0016 0.0017 0.0015 0.0015 0.0013 0.0011 0.0006 0.0006 0.0005 0.0005 0.0004 0.0004 0.0014 0.0031 0.0054 0.0061 0.0063 0.0061 0.0041 0.0032 0.0028 0.0023 0.002 0.0022 0.0022 0.0022 0.0021 0.0019 0.0017 0.0018 0.0023 0.003 0.003 0.0036 0.0047 0.005 0.005 0.0044 0.0033 0.0029 0.0027 0.0026 0.0025 0.0024 0.0022 0.0023 0.0022 0.0022 0.0021 0.002 0.0018 0.0029 0.0045 0.01 0.0157 0.0179 0.0182 0.0164 0.011 0.0057 0.0034 0.0024 0.0019 0.0018 0.0015 0.0011 0.0011 0.001 0.001 0.0007 0.0007 0.0007 0.0006 0.0004 0.0008 0.0012 0.0016 0.0018 0.0019 0.0017 0.0016 0.0015 0.0021 0.0021 0.0022 0.0023 0.0023 0.0025 0.0025 0.0027 0.0078 0.0133 0.0173 0.0213 0.0246 0.0241 0.0232 0.0201 0.0147 0.0099 0.0052 0.0027 0.0026 0.0026 0.0022 0.002 0.0016 0.0017 0.0013 0.0013 0.0014 0.002 0.0021 
0.0034 0.0029 0.2076 0.0039 0.005 0.0038 0.0026 0.0025 0.002 0.0021 0.0021 0.002 0.0018 0.0017 0.0017 0.002 0.002 0.0021 0.0019 0.0017 0.0016 0.0017 0.0015 0.0016 0.0015 0.0014 0.0013 0.0013 0.0013 0.0015 0.0018 0.0018 0.0018 0.0014 0.0012 0.0007 0.0006 0.0007 0.0008 0.0007 0.0005 0.0006 0.0003 0.0004 0.0006 0.0003 0.0003 0.0006 0.0012 0.0015 0.0013 0.0012 0.0005 0.0003 0.0 0.0 0.0002 0.0001 0.0001 0.0004 0.0003 0.0005 0.0008 0.0006 0.0004 0.0003 0.0004 0.0004", + "breathiness_timestep": "0.011609977324263039" + }, + { + "offset": 246.286, + "text": "AP 当 此 世 生 死 也 算 闲 话 AP 来 换 场 豪 醉 不 负 天 纵 潇 洒 AP 风 流 不 曾 老 AP 弹 铗 唱 作 年 华 AP 凭 我 自 由 去 只 做 狂 人 不 谓 侠 SP", + "ph_seq": "AP d ang c i0 sh ir sh eng s i0 y E s uan x ian h ua AP l ai h uan ch ang h ao z ui b u f u t ian z ong x iao s a AP f eng l iu b u c eng l ao AP t an j ia ch ang z uo n ian h ua AP p ing w o z i0 y ou q v zh ir z uo k uang r en b u w ei x ia SP", + "ph_dur": "0.336 0.045 0.416 0.06 0.341 0.135 0.594 0.12 0.163 0.075 0.164 0.075 0.163 0.075 0.163 0.075 0.178 0.06 0.476 0.133 0.105 0.178 0.06 0.178 0.06 0.193 0.045 0.178 0.06 0.194 0.045 0.3652 0.1108 0.193 0.045 0.178 0.06 0.371 0.105 0.371 0.105 0.477 0.163 0.075 0.178 0.06 0.193 0.045 0.163 0.075 0.416 0.06 0.476 0.178 0.06 0.193 0.045 0.164 0.075 0.193 0.045 0.163 0.075 0.178 0.06 0.476 0.178 0.06 0.193 0.045 0.163 0.075 0.163 0.075 0.164 0.075 0.163 0.075 0.416 0.06 0.193 0.045 0.163 0.075 0.193 0.045 0.193 0.045 0.356 0.12 2.381 0.5", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D#4 F4 F4 F4 C4 C4 F4 D#4 rest D#4 D#4 C4 D#4 F4 G#4 F4 D#4 C4 A#3 C4 rest C4 C4 A#3 G#3 A#3 rest G#3 A#3 G#3 A#3 C4 D#4 rest D#4 D#4 C4 A#3 C4 F4 D#4 F4 G4 F4 D#4 F4 rest", + "note_dur": "0.381 0.476 0.476 0.714 0.238 0.239 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.238 0.239 0.476 0.238 0.238 0.476 0.476 0.477 0.238 0.238 0.238 0.238 0.476 0.476 0.238 0.238 0.239 0.238 0.238 0.238 0.476 0.238 0.238 0.238 0.238 0.239 0.238 0.476 0.238 0.238 0.238 0.238 0.476 2.381 0.5", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", + "f0_seq": "149.4 149.3 149.0 148.9 149.0 149.3 149.4 149.6 149.9 149.9 150.1 149.9 150.0 150.1 149.9 149.9 149.7 150.1 150.0 149.7 149.9 149.7 149.6 149.6 149.5 149.5 149.6 149.4 149.3 148.9 148.9 148.0 151.4 165.2 172.4 185.3 196.6 207.8 216.3 222.5 228.1 231.1 231.9 230.7 229.2 227.6 225.8 227.8 235.2 248.8 259.3 265.4 266.9 265.5 264.4 262.3 261.4 259.7 260.8 260.5 260.2 260.8 261.9 261.5 262.6 261.6 258.5 250.4 240.9 251.4 262.2 275.1 288.8 304.7 310.6 312.1 315.8 318.7 319.7 318.5 316.8 314.0 311.2 308.1 306.7 304.1 303.5 301.8 304.9 309.4 313.7 318.0 320.3 320.7 318.6 316.0 314.2 313.6 313.9 314.3 315.6 314.4 309.0 300.6 291.7 299.1 305.7 312.6 320.1 328.3 334.9 342.5 350.7 361.1 366.7 367.6 346.1 340.0 338.2 333.8 329.3 325.4 322.4 320.5 320.1 318.9 317.8 316.6 315.2 316.9 316.3 319.0 322.5 327.0 336.3 343.3 348.5 352.4 352.4 351.4 350.7 346.0 343.8 344.1 345.3 348.5 350.0 350.8 351.6 350.8 350.0 347.7 346.3 346.1 346.2 346.3 345.4 348.0 348.9 352.4 354.8 355.0 353.4 351.2 347.3 340.0 332.5 332.8 336.7 340.5 344.3 348.5 350.9 355.9 359.3 363.3 361.1 356.9 353.4 353.1 351.0 350.5 350.1 348.9 348.5 348.3 349.2 349.1 347.0 344.3 336.0 328.5 338.6 347.3 356.6 366.0 375.5 360.5 354.5 351.3 351.2 348.4 347.5 346.4 346.5 346.5 347.4 347.1 345.6 343.7 338.8 329.7 319.3 304.6 293.1 283.9 276.0 270.6 267.4 265.8 265.9 
268.1 268.7 268.3 267.3 266.1 263.7 262.6 261.5 258.2 254.3 247.6 250.5 255.0 260.6 264.0 269.0 271.3 267.3 263.8 261.7 261.1 260.9 262.3 263.3 262.5 263.1 263.0 262.6 262.1 259.3 256.0 249.1 265.6 282.1 299.2 316.0 333.2 352.0 352.4 352.3 351.1 350.8 348.7 348.8 350.5 352.4 352.5 351.3 347.9 343.1 331.6 313.5 303.8 305.4 307.8 309.2 312.1 316.2 313.9 313.2 319.6 323.6 326.6 325.4 322.4 316.7 308.9 303.1 298.5 295.5 295.3 296.5 300.5 305.0 311.5 316.8 323.5 326.8 327.3 324.1 315.8 307.0 298.8 294.9 291.9 291.3 293.3 298.7 307.3 316.5 323.0 325.9 324.8 319.2 309.1 302.1 301.6 299.6 295.7 287.2 280.6 276.2 274.6 272.0 269.5 267.5 265.4 263.5 260.8 260.1 258.5 257.4 253.5 253.5 253.9 252.6 252.4 256.3 259.6 262.9 265.5 269.6 271.3 274.0 275.5 282.0 290.5 295.4 304.1 311.0 315.4 315.8 315.3 310.1 304.3 288.4 285.2 297.6 311.8 327.0 320.1 314.9 312.9 311.7 311.0 311.6 312.1 313.0 312.9 311.9 312.4 310.5 305.8 296.3 283.3 265.4 263.5 259.9 254.9 252.2 249.9 250.5 251.4 254.0 256.8 258.7 260.7 261.7 262.5 264.1 264.4 263.9 263.4 262.5 262.9 260.2 256.7 254.3 253.5 266.0 299.9 310.6 310.5 315.3 316.4 314.7 313.2 309.8 308.8 308.3 309.1 310.7 310.3 307.5 303.7 293.4 288.3 303.7 320.0 335.2 350.3 363.4 355.3 351.9 350.2 350.1 349.3 348.4 347.7 347.7 348.2 347.6 347.1 345.2 339.7 331.9 330.9 337.7 344.8 354.5 361.9 364.0 368.6 374.2 376.9 377.4 374.2 373.2 373.4 374.3 371.1 371.7 378.9 390.5 399.5 408.1 415.4 419.4 423.5 422.7 421.4 416.5 412.3 410.7 407.4 408.3 411.7 414.3 414.1 413.2 411.2 408.0 398.7 384.4 378.1 380.7 386.4 392.5 398.7 405.1 412.8 421.5 416.9 396.7 382.5 374.6 369.3 361.8 356.8 352.7 350.3 349.1 350.5 351.2 350.7 349.9 345.3 340.7 328.8 317.1 308.4 309.7 313.1 311.5 311.4 312.1 311.4 311.7 311.8 312.2 313.8 315.5 314.7 312.8 309.6 305.0 296.4 279.4 257.5 255.9 256.5 257.1 259.1 261.8 256.9 258.0 263.9 266.4 268.4 270.3 268.6 267.0 263.5 260.6 258.8 256.9 256.2 257.9 259.2 261.1 261.4 262.9 264.0 263.6 262.4 261.1 259.7 259.7 259.3 260.1 262.2 265.0 267.1 269.7 265.2 257.6 258.1 259.3 260.3 262.3 264.6 265.8 266.9 270.0 267.8 250.7 239.6 233.6 231.1 228.6 228.4 229.1 229.0 229.4 230.9 232.6 233.3 235.3 235.6 236.8 236.6 235.1 233.6 232.6 232.3 235.0 236.0 236.5 235.4 234.1 232.9 233.4 233.8 234.6 234.7 230.8 225.7 229.4 233.0 238.6 242.9 248.8 255.4 261.6 268.3 277.5 270.7 270.6 270.9 270.5 268.5 264.2 260.9 258.1 257.5 255.7 254.5 254.4 255.6 259.0 262.8 266.1 270.4 270.9 268.4 263.2 260.5 256.6 255.2 254.4 252.8 252.5 251.4 251.9 252.2 255.7 257.1 260.2 264.2 268.0 270.8 272.6 273.7 272.5 266.3 256.8 249.0 241.9 237.8 232.5 228.0 225.7 223.5 221.2 219.3 217.5 216.7 215.0 214.3 212.5 210.8 209.5 207.7 206.4 205.8 204.6 203.3 201.5 200.6 201.7 204.4 208.8 212.8 225.2 237.7 249.2 257.6 260.9 264.4 266.5 267.3 267.2 263.8 261.9 260.7 258.6 258.4 259.9 260.1 258.2 258.2 259.0 259.0 259.2 258.8 258.6 258.7 260.5 261.5 261.6 258.8 251.5 241.6 229.4 231.4 238.4 245.5 251.6 244.4 238.5 234.4 232.9 234.1 234.5 234.3 234.4 234.0 234.3 232.9 230.9 227.1 223.1 213.1 207.9 207.6 207.3 206.7 206.2 205.0 205.6 203.9 206.2 208.0 210.2 209.7 210.7 207.5 204.4 202.6 201.4 201.4 202.7 204.3 206.8 207.9 208.6 208.7 208.0 207.3 207.2 207.2 207.6 208.0 207.8 207.5 207.8 207.1 207.7 206.9 207.0 207.2 207.2 207.4 207.8 207.3 208.6 208.5 208.3 209.7 215.3 228.4 236.7 239.1 242.9 244.4 242.1 238.2 233.8 229.4 226.8 225.8 226.7 229.0 232.6 237.3 241.2 244.1 244.6 244.9 242.6 240.4 235.3 229.9 225.3 222.8 220.8 221.9 223.9 226.8 231.7 237.7 244.0 246.6 247.1 247.9 246.8 242.3 236.0 229.7 222.8 217.5 213.1 
207.5 199.0 195.5 192.4 189.2 186.4 183.5 180.2 179.4 177.0 170.6 165.4 162.8 159.4 156.6 154.1 151.8 148.8 146.5 143.2 142.9 145.6 153.5 163.8 174.1 182.8 194.5 204.0 210.2 209.7 209.3 206.9 207.4 207.2 205.5 205.6 203.8 207.9 212.5 220.9 228.6 230.2 232.0 232.1 233.3 233.4 233.4 234.5 234.0 233.4 233.0 230.2 224.8 216.2 214.0 213.6 214.2 214.1 213.8 214.4 213.8 209.7 207.7 207.4 206.5 206.8 207.2 207.2 206.7 207.8 208.2 209.4 210.1 209.5 207.9 203.4 200.4 204.0 210.4 218.2 224.9 231.5 232.0 233.7 234.8 235.1 235.7 236.0 235.1 234.1 234.2 233.5 234.1 234.5 233.2 230.6 230.2 235.2 239.1 244.4 253.6 262.0 266.0 266.5 266.7 264.2 261.8 260.9 260.9 260.9 261.8 263.6 263.5 263.2 260.8 253.7 248.3 243.6 255.7 268.3 282.5 297.8 314.1 317.2 318.1 320.6 324.3 323.6 320.6 315.6 310.1 303.5 299.4 295.9 297.0 300.2 304.6 310.0 315.8 319.5 322.0 322.1 321.3 318.9 314.8 308.7 300.0 293.5 290.0 289.6 293.1 298.3 307.5 315.3 322.0 325.0 325.2 324.5 319.1 308.8 294.8 291.5 288.8 285.9 282.3 278.5 276.2 274.5 272.7 270.3 268.4 266.9 264.3 261.0 257.3 254.1 251.1 247.9 244.7 241.8 239.2 237.3 234.5 232.2 232.9 237.2 246.2 258.1 270.4 281.4 291.8 305.8 312.7 314.7 312.0 311.2 311.5 311.8 311.8 310.2 310.2 310.8 310.9 312.6 314.1 315.4 313.5 311.8 308.9 309.4 310.0 310.2 309.2 308.5 307.8 305.9 302.4 290.8 274.9 273.9 276.3 279.4 283.7 290.0 282.8 269.3 264.7 263.1 262.4 261.8 262.4 263.0 263.1 262.3 263.9 263.9 262.7 259.9 255.0 248.3 240.8 233.1 226.8 226.2 226.7 227.3 229.6 228.9 229.9 231.5 232.6 232.9 233.0 232.1 231.8 231.9 230.3 225.5 225.2 230.2 234.5 241.7 248.3 256.9 265.6 268.6 265.0 262.2 262.2 261.8 261.5 260.7 261.1 260.5 261.3 263.1 263.3 259.9 248.5 247.1 250.1 258.0 266.4 276.7 288.3 299.1 300.5 303.1 304.7 308.6 312.3 312.9 314.1 313.8 313.7 312.4 311.9 311.6 314.4 318.5 322.6 327.3 335.0 345.0 349.4 351.9 351.7 350.3 348.2 345.3 346.2 345.8 347.1 348.8 350.0 350.6 352.0 351.4 348.8 349.6 338.0 306.8 294.0 295.2 297.9 300.8 304.9 304.8 307.2 307.2 307.9 309.9 310.3 310.4 311.1 311.3 311.8 311.7 311.6 311.3 309.9 302.8 288.0 297.2 304.2 311.8 318.8 327.9 340.5 349.5 351.5 352.2 352.7 351.3 348.8 346.1 345.0 344.5 342.5 340.8 341.1 342.6 344.6 345.8 347.3 348.8 361.7 376.5 385.4 393.1 396.5 395.8 396.0 395.1 392.7 390.0 388.4 387.0 388.0 390.6 389.6 382.2 372.2 364.4 358.4 375.7 396.6 412.0 376.0 364.9 356.8 352.3 351.3 350.2 350.1 349.0 349.8 351.8 352.2 351.9 350.9 351.3 349.5 345.4 338.5 329.8 317.9 305.6 300.9 303.2 302.9 309.8 315.1 320.6 322.9 319.3 316.1 313.2 310.9 308.9 308.0 308.2 310.0 311.2 312.1 311.4 310.0 310.8 312.8 314.0 314.6 313.3 310.6 308.7 310.2 312.1 313.9 313.3 310.7 298.9 302.3 307.3 312.2 316.8 321.1 324.9 329.5 334.0 338.3 344.1 341.7 320.3 315.8 318.7 320.6 318.4 316.7 316.3 315.5 315.9 314.3 313.4 313.2 312.5 314.5 318.8 325.6 334.8 343.9 349.6 354.9 357.9 357.8 354.9 352.2 349.7 346.6 343.8 342.6 341.2 341.7 343.5 346.6 347.7 348.0 348.9 349.6 349.7 347.8 346.9 346.8 346.5 345.9 344.8 342.7 341.9 342.0 344.6 348.1 351.6 357.5 361.6 364.6 366.8 368.0 367.3 364.4 359.6 352.0 347.5 343.2 342.0 342.7 344.5 347.0 351.6 355.4 360.4 362.1 362.0 359.9 357.1 354.6 351.6 348.0 346.1 345.9 343.4 344.0 344.5 345.7 349.7 353.8 357.3 359.1 361.0 360.5 358.8 355.7 352.9 350.8 347.9 346.1 344.0 343.3 342.5 343.1 346.3 348.7 351.7 354.4 357.6 358.3 359.8 359.8 358.1 355.7 351.9 347.1 341.4 337.4 335.6 335.8 340.1 345.6 351.9 358.0 363.5 366.1 366.7 365.9 364.0 362.0 359.5 356.0 352.7 351.5 349.8 349.1 348.7 349.4 351.7 353.3 355.2 356.7 357.2 357.2 357.0 356.3 355.5 355.8 357.0 
358.0 356.5 355.3 353.3 351.4 350.7 351.9 352.6 352.6 355.7 358.1 359.0 359.8 361.5 363.7 363.5 364.4 364.0 361.7 358.6 354.9 351.3 348.9 347.9 349.4 350.4 353.5 356.2 359.8 361.3 362.2 363.2 361.7 360.1 357.8 355.0 353.4 351.9 349.8 349.0 348.2 347.3 347.1 347.0 346.5 345.7 343.9 343.3 341.5 336.7 331.6 326.9 321.3 316.7 313.7 311.8 310.8 311.6 312.6 312.4 312.9 313.1 314.9 315.1 311.7 302.9 302.6 302.6 303.3 304.0 304.3 304.4 304.5 305.1 305.4 305.3 305.4 305.6 305.0 305.7 305.5 305.4 305.0 304.7 304.8 304.9 305.5 305.3 305.7 305.7 305.8 305.8 306.1 306.3 305.7 306.0 305.3 304.9 304.4 303.5 302.9 301.6 299.6 300.5 298.5 297.5 298.4", + "f0_timestep": "0.011609977324263039", + "energy": "0.0002 0.0013 0.0012 0.0022 0.0026 0.0036 0.004 0.0046 0.005 0.0056 0.0055 0.0062 0.0059 0.0064 0.0055 0.0054 0.0052 0.0049 0.0046 0.0041 0.0036 0.0033 0.0025 0.0019 0.0017 0.0007 0.0007 0.0014 0.0017 0.0032 0.0059 0.0285 0.0492 0.0664 0.08 0.0888 0.0929 0.0975 0.1016 0.1016 0.0994 0.093 0.0871 0.0839 0.079 0.076 0.0681 0.06 0.0567 0.0591 0.0651 0.0696 0.0748 0.0784 0.0801 0.0823 0.0824 0.0821 0.0804 0.079 0.0771 0.0754 0.0736 0.0735 0.0744 0.0735 0.0658 0.0541 0.0395 0.0263 0.0209 0.0205 0.0212 0.0364 0.0539 0.07 0.083 0.092 0.0952 0.0993 0.1022 0.1021 0.1021 0.1003 0.099 0.0991 0.0984 0.0972 0.0963 0.0961 0.0949 0.096 0.0968 0.0968 0.097 0.0967 0.0973 0.0974 0.0977 0.0963 0.0911 0.0836 0.0714 0.0564 0.0422 0.031 0.0277 0.0303 0.0319 0.0332 0.0327 0.0313 0.0287 0.0235 0.0368 0.0578 0.0715 0.0851 0.0913 0.0923 0.0953 0.1006 0.1021 0.1046 0.1055 0.1036 0.1021 0.0992 0.0962 0.0928 0.0908 0.088 0.086 0.0847 0.0845 0.0862 0.0887 0.0915 0.0931 0.0941 0.097 0.1015 0.1059 0.1089 0.1109 0.1111 0.1095 0.1092 0.1075 0.1073 0.1063 0.1059 0.1061 0.105 0.1043 0.1035 0.1014 0.0985 0.0968 0.096 0.096 0.096 0.0942 0.0871 0.0772 0.0654 0.0515 0.0417 0.0329 0.0303 0.0291 0.0289 0.0285 0.0263 0.0245 0.0315 0.0574 0.077 0.0951 0.1065 0.1087 0.1078 0.1038 0.1013 0.0963 0.0931 0.0907 0.0879 0.0821 0.0708 0.0548 0.0381 0.0249 0.0229 0.0232 0.0265 0.0464 0.0669 0.0869 0.103 0.1077 0.1105 0.1074 0.1046 0.1051 0.1018 0.1013 0.1002 0.0987 0.0983 0.0966 0.0934 0.0902 0.0869 0.0857 0.0869 0.0889 0.0927 0.0945 0.0996 0.1027 0.1039 0.1071 0.1044 0.103 0.1013 0.0995 0.0972 0.0913 0.0818 0.0663 0.0492 0.0319 0.0178 0.0165 0.0148 0.0296 0.0523 0.0725 0.0897 0.1024 0.1045 0.1031 0.1015 0.0952 0.0903 0.0864 0.0821 0.0783 0.0748 0.0647 0.0521 0.0375 0.0225 0.0171 0.0183 0.017 0.0267 0.0535 0.0716 0.0879 0.099 0.0995 0.0986 0.0942 0.0917 0.0875 0.083 0.0802 0.0772 0.0735 0.0645 0.0533 0.0374 0.0208 0.0112 0.0076 0.0166 0.045 0.0705 0.0917 0.111 0.1192 0.1251 0.1258 0.1245 0.1231 0.1168 0.1115 0.1055 0.1 0.1005 0.0996 0.0996 0.0993 0.0992 0.0984 0.0984 0.0968 0.0953 0.0966 0.097 0.0982 0.0997 0.0956 0.0904 0.083 0.0744 0.0703 0.0697 0.0687 0.0691 0.0689 0.0679 0.0661 0.0629 0.0552 0.0437 0.0301 0.0132 0.0057 0.0058 0.006 0.0053 0.0054 0.0047 0.0038 0.0028 0.0023 0.0015 0.0016 0.001 0.0028 0.0116 0.0278 0.0429 0.056 0.0648 0.0743 0.0813 0.09 0.0944 0.0956 0.0929 0.085 0.0785 0.0734 0.07 0.0684 0.0696 0.0703 0.0716 0.0734 0.0716 0.0688 0.0621 0.0504 0.0362 0.0186 0.0269 0.0526 0.0746 0.0918 0.1034 0.1079 0.1082 0.1037 0.099 0.0906 0.0806 0.0758 0.0732 0.0723 0.071 0.0616 0.0492 0.0346 0.0175 0.0119 0.0138 0.021 0.0375 0.0534 0.0697 0.0811 0.0877 0.0922 0.092 0.0892 0.0836 0.0761 0.0711 0.0683 0.0688 0.0697 0.0691 0.0661 0.0587 0.0478 0.0355 0.0295 0.0453 0.0652 0.084 0.0985 0.1067 0.1102 0.1108 0.1079 0.1045 0.1011 0.0962 0.0934 0.0887 0.0787 
0.0653 0.0486 0.0264 0.0159 0.0142 0.0176 0.0309 0.0511 0.0658 0.0783 0.0908 0.0986 0.1043 0.1068 0.1047 0.1008 0.0983 0.0965 0.0948 0.091 0.0834 0.0731 0.0586 0.0433 0.0283 0.0168 0.032 0.0557 0.0753 0.0917 0.1023 0.1056 0.1056 0.1045 0.0995 0.0943 0.089 0.0832 0.0788 0.0784 0.0795 0.0824 0.0868 0.09 0.091 0.0918 0.0914 0.0895 0.0898 0.0889 0.0904 0.0926 0.0925 0.0927 0.0916 0.0882 0.0841 0.0744 0.0605 0.0443 0.0232 0.0075 0.0028 0.0024 0.0039 0.0048 0.004 0.0298 0.0566 0.0738 0.0878 0.0956 0.0953 0.095 0.0934 0.0939 0.0925 0.0902 0.09 0.0878 0.0905 0.0907 0.0877 0.077 0.0613 0.0417 0.0199 0.0101 0.0313 0.0528 0.0694 0.0825 0.0882 0.0896 0.089 0.0876 0.0857 0.0825 0.0791 0.0782 0.0766 0.075 0.0691 0.0579 0.0432 0.0236 0.015 0.0146 0.0252 0.0443 0.0631 0.0802 0.0941 0.1025 0.1064 0.1074 0.1074 0.1066 0.1045 0.1036 0.0991 0.097 0.0954 0.094 0.0943 0.0928 0.0898 0.0884 0.0862 0.0839 0.085 0.0835 0.0839 0.0839 0.0824 0.0821 0.0815 0.081 0.0801 0.0757 0.0673 0.0551 0.0408 0.0264 0.0216 0.0213 0.0205 0.0196 0.0172 0.0162 0.0435 0.063 0.0771 0.091 0.0939 0.0948 0.0962 0.0963 0.0953 0.0972 0.0968 0.0951 0.095 0.0931 0.091 0.0932 0.0956 0.0975 0.103 0.1007 0.0997 0.0989 0.0976 0.0984 0.099 0.095 0.0897 0.084 0.0797 0.0764 0.0724 0.0639 0.0508 0.0378 0.0249 0.0206 0.0202 0.0193 0.0193 0.0163 0.0158 0.0422 0.0621 0.0807 0.0935 0.0973 0.0994 0.0986 0.099 0.0976 0.0942 0.0907 0.0875 0.0863 0.0861 0.0872 0.0904 0.094 0.0976 0.1018 0.1067 0.1076 0.1094 0.1107 0.1092 0.1081 0.1028 0.0976 0.0929 0.0891 0.0873 0.0864 0.0852 0.0859 0.0813 0.0777 0.0752 0.0715 0.0692 0.0662 0.0619 0.055 0.0462 0.0371 0.0268 0.0173 0.0091 0.0067 0.0064 0.0067 0.006 0.006 0.0064 0.005 0.0058 0.0044 0.0029 0.0021 0.0022 0.0021 0.0027 0.0042 0.0362 0.0593 0.0768 0.0874 0.0865 0.0788 0.0687 0.0593 0.0559 0.0547 0.0558 0.0603 0.0651 0.0674 0.0697 0.0731 0.0745 0.075 0.0763 0.0785 0.0815 0.0859 0.0905 0.0942 0.0967 0.096 0.0936 0.0918 0.0908 0.0911 0.0923 0.0897 0.0891 0.0873 0.0787 0.0697 0.0545 0.035 0.021 0.0139 0.0265 0.0468 0.0625 0.0747 0.0821 0.0833 0.0832 0.0837 0.084 0.083 0.0831 0.081 0.0784 0.0725 0.0615 0.0464 0.0301 0.018 0.0167 0.0183 0.0189 0.016 0.0268 0.0505 0.0655 0.0796 0.0869 0.0877 0.0889 0.087 0.0864 0.0834 0.0811 0.0801 0.0791 0.0778 0.0773 0.0758 0.0732 0.0718 0.0711 0.0706 0.0732 0.0745 0.0751 0.0755 0.0741 0.073 0.0712 0.0696 0.0686 0.0685 0.0679 0.0692 0.0698 0.0709 0.0707 0.0706 0.0701 0.069 0.0667 0.0662 0.0652 0.0713 0.0844 0.0943 0.1023 0.1067 0.1059 0.1048 0.1039 0.0987 0.093 0.0882 0.0853 0.0839 0.0857 0.087 0.089 0.0912 0.0937 0.0964 0.097 0.0981 0.0967 0.0951 0.0922 0.0886 0.0842 0.0806 0.0764 0.0746 0.0729 0.072 0.0715 0.0695 0.0691 0.067 0.0649 0.0632 0.0608 0.0557 0.0471 0.0361 0.0237 0.0093 0.0036 0.0035 0.0039 0.0033 0.0036 0.0032 0.0027 0.0023 0.0022 0.0012 0.002 0.0015 0.0011 0.0013 0.0028 0.0034 0.0058 0.0089 0.0228 0.0401 0.0542 0.065 0.0727 0.074 0.0723 0.0699 0.0678 0.0672 0.0658 0.0641 0.0622 0.0612 0.0617 0.0623 0.0597 0.0524 0.0427 0.0299 0.0154 0.0196 0.0377 0.0547 0.0686 0.0792 0.0836 0.0835 0.0823 0.08 0.076 0.073 0.0697 0.0629 0.0526 0.0408 0.0259 0.0159 0.0128 0.015 0.0154 0.0239 0.0415 0.0548 0.0646 0.071 0.0731 0.0743 0.075 0.0741 0.0684 0.0638 0.0606 0.0611 0.0616 0.0626 0.0593 0.0497 0.0398 0.0267 0.0171 0.0147 0.0241 0.0464 0.0625 0.0774 0.0901 0.0946 0.0982 0.0997 0.099 0.0982 0.0964 0.0963 0.0943 0.0927 0.0907 0.0864 0.0809 0.0782 0.075 0.0755 0.0807 0.0842 0.0866 0.0901 0.0917 0.0935 0.0936 0.0916 0.0882 0.0838 0.08 0.0779 0.0764 0.0752 0.069 0.0579 0.0448 0.028 0.0155 
0.0129 0.0114 0.0224 0.0524 0.074 0.0962 0.113 0.1189 0.121 0.118 0.1138 0.1091 0.0996 0.094 0.0896 0.089 0.093 0.0958 0.0981 0.0995 0.1019 0.1026 0.1022 0.1003 0.1003 0.1018 0.1027 0.1043 0.1037 0.1021 0.0986 0.0947 0.0908 0.0868 0.0862 0.0851 0.0827 0.0818 0.0822 0.0822 0.0794 0.072 0.058 0.0406 0.0207 0.0069 0.0052 0.0059 0.0056 0.0057 0.0057 0.0051 0.0047 0.0044 0.004 0.0037 0.0028 0.0026 0.002 0.0027 0.0025 0.0072 0.0146 0.0344 0.0544 0.0716 0.0838 0.0893 0.0912 0.089 0.0861 0.0847 0.0825 0.0841 0.0865 0.088 0.0894 0.089 0.0867 0.0866 0.0859 0.0856 0.0861 0.0849 0.0845 0.0872 0.0914 0.0946 0.0983 0.1005 0.1013 0.1025 0.104 0.1055 0.1058 0.1068 0.103 0.0903 0.0743 0.0511 0.0268 0.0168 0.0162 0.0178 0.0289 0.0539 0.0698 0.0819 0.0905 0.0884 0.0883 0.0874 0.0884 0.0882 0.0884 0.0894 0.0892 0.0881 0.0843 0.0798 0.0741 0.0664 0.0587 0.055 0.058 0.0642 0.0712 0.0756 0.0776 0.0786 0.079 0.08 0.0785 0.0774 0.0766 0.0763 0.0744 0.0692 0.0602 0.0466 0.0329 0.0201 0.0131 0.0136 0.0138 0.0234 0.0459 0.0615 0.0751 0.0838 0.0856 0.0854 0.0849 0.0831 0.0802 0.0783 0.0779 0.0753 0.0701 0.0603 0.0457 0.0284 0.0153 0.0121 0.0143 0.0166 0.0267 0.0468 0.0659 0.08 0.0906 0.0951 0.0944 0.096 0.0954 0.0934 0.0921 0.0886 0.0847 0.0812 0.0793 0.0782 0.0783 0.0768 0.0764 0.0765 0.0772 0.0786 0.0801 0.0806 0.0809 0.0819 0.0825 0.0839 0.0846 0.0852 0.0865 0.0876 0.0874 0.0859 0.0767 0.0623 0.0472 0.0251 0.0122 0.0121 0.0137 0.0241 0.0456 0.0617 0.0783 0.0897 0.097 0.1062 0.1086 0.1115 0.1114 0.1073 0.1038 0.0986 0.0908 0.0785 0.0632 0.044 0.0254 0.0144 0.0097 0.0185 0.0467 0.0703 0.0894 0.1067 0.114 0.1167 0.1181 0.115 0.1109 0.1062 0.1016 0.0983 0.0949 0.0945 0.0921 0.0914 0.089 0.0838 0.0814 0.0821 0.0869 0.0964 0.1058 0.1125 0.1183 0.1192 0.1151 0.1079 0.1002 0.0942 0.0923 0.0926 0.0921 0.0894 0.0819 0.0692 0.0522 0.0333 0.019 0.0204 0.0512 0.0748 0.0921 0.1051 0.1062 0.1054 0.1023 0.101 0.0993 0.0969 0.096 0.0965 0.0986 0.0996 0.1006 0.1029 0.1033 0.1034 0.1028 0.1 0.0967 0.0981 0.103 0.1072 0.1139 0.1145 0.115 0.1148 0.1142 0.1167 0.1158 0.1147 0.1127 0.1097 0.1078 0.1068 0.1043 0.104 0.1048 0.1048 0.1057 0.1075 0.1082 0.1081 0.1072 0.1052 0.1032 0.1012 0.0989 0.0931 0.0861 0.0727 0.0551 0.0395 0.0248 0.0223 0.0237 0.0248 0.0258 0.0254 0.023 0.022 0.0331 0.0578 0.0727 0.0877 0.098 0.1003 0.1063 0.1093 0.1114 0.1122 0.1099 0.1074 0.1044 0.0997 0.0951 0.0902 0.0866 0.0849 0.0865 0.0887 0.0901 0.092 0.0914 0.0924 0.0948 0.098 0.1004 0.1012 0.1006 0.0994 0.098 0.098 0.0987 0.0997 0.1018 0.1023 0.1029 0.1038 0.104 0.1057 0.107 0.1101 0.1112 0.1138 0.1151 0.1162 0.1177 0.1166 0.1181 0.1175 0.1195 0.121 0.1233 0.1233 0.1234 0.1244 0.1252 0.1287 0.1313 0.1315 0.1294 0.1253 0.1219 0.1216 0.1199 0.1211 0.123 0.1237 0.1261 0.1266 0.1269 0.1269 0.1305 0.134 0.1373 0.1402 0.1411 0.1414 0.1395 0.1376 0.1353 0.1343 0.1342 0.1356 0.1383 0.1417 0.1442 0.1459 0.1462 0.1448 0.1443 0.1436 0.1428 0.1419 0.1402 0.1376 0.1365 0.136 0.137 0.1374 0.1396 0.1434 0.1478 0.1533 0.1574 0.1608 0.1628 0.1604 0.1569 0.1508 0.1444 0.1378 0.1339 0.1309 0.1285 0.1283 0.1284 0.128 0.1262 0.1252 0.1248 0.1254 0.1278 0.1288 0.1284 0.1281 0.1274 0.1244 0.1227 0.1202 0.1167 0.1165 0.1147 0.1158 0.1178 0.1181 0.1198 0.1214 0.1245 0.1266 0.1288 0.1285 0.1287 0.1291 0.1301 0.1303 0.1289 0.1285 0.1261 0.1254 0.1266 0.1275 0.1303 0.1339 0.1342 0.1376 0.1389 0.1405 0.1422 0.1418 0.1418 0.1398 0.1405 0.1369 0.1337 0.1304 0.1257 0.1239 0.1211 0.1205 0.1181 0.115 0.1136 0.1117 0.1118 0.1114 0.1101 0.1092 0.1061 0.1048 0.1043 0.1055 0.1063 0.1048 0.1032 
0.0986 0.0962 0.0942 0.0925 0.0913 0.0893 0.0875 0.0835 0.0783 0.0729 0.0672 0.063 0.0595 0.0544 0.0498 0.0457 0.042 0.0391 0.0369 0.0349 0.0309 0.0265 0.0223 0.0165 0.0095 0.0053 0.0023 0.0012 0.0007 0.001 0.0002 0.0003 0.0007 0.0002 0.0 0.0006 0.0001 0.0003 0.0 0.0 0.0001 0.0002 0.0 0.0 0.0 0.0 0.0 0.0002 0.0 0.0 0.0 0.0 0.0006 0.0 0.0 0.0002 0.0002 0.0001 0.0 0.0 0.0005 0.0 0.0005 0.0 0.0", + "energy_timestep": "0.011609977324263039", + "breathiness": "0.0003 0.0012 0.0018 0.0023 0.0032 0.004 0.0049 0.0056 0.0062 0.0071 0.0071 0.0073 0.007 0.0061 0.0056 0.0051 0.0048 0.0048 0.0045 0.004 0.0035 0.002 0.0014 0.0014 0.0013 0.001 0.0012 0.0017 0.0015 0.0025 0.0032 0.0035 0.004 0.0041 0.004 0.0044 0.0043 0.0045 0.0041 0.004 0.0031 0.0022 0.0021 0.0022 0.0023 0.0023 0.0022 0.0017 0.0014 0.0011 0.0009 0.001 0.0009 0.0008 0.0008 0.001 0.0011 0.001 0.0009 0.0008 0.0006 0.0007 0.0008 0.0006 0.0008 0.0008 0.0012 0.0048 0.0088 0.0151 0.0201 0.0211 0.0206 0.0163 0.0102 0.005 0.0022 0.0019 0.0023 0.002 0.0021 0.0019 0.0021 0.0023 0.0021 0.0022 0.0022 0.0026 0.003 0.003 0.0031 0.0029 0.0026 0.0026 0.0026 0.0022 0.0023 0.0025 0.0026 0.0028 0.0029 0.0032 0.0076 0.0137 0.0185 0.023 0.0278 0.0302 0.0322 0.0342 0.0358 0.0351 0.0324 0.0273 0.0197 0.0137 0.0077 0.0036 0.0028 0.0026 0.0023 0.0022 0.0024 0.0023 0.0022 0.0022 0.0024 0.0027 0.0029 0.0029 0.003 0.0029 0.003 0.0031 0.0034 0.0032 0.0028 0.0026 0.0023 0.0026 0.0026 0.0027 0.0028 0.0027 0.0029 0.0029 0.0027 0.0028 0.0026 0.0027 0.0028 0.003 0.0031 0.0029 0.0025 0.0025 0.0025 0.0025 0.0023 0.0025 0.0021 0.0021 0.0022 0.0027 0.0036 0.0077 0.0152 0.0209 0.0258 0.0282 0.0277 0.0268 0.026 0.0247 0.0222 0.0188 0.0132 0.0088 0.0043 0.0032 0.0031 0.0034 0.0033 0.0029 0.0027 0.0018 0.0016 0.0011 0.002 0.0063 0.0109 0.0153 0.0213 0.0243 0.0252 0.023 0.018 0.0133 0.0067 0.0027 0.0017 0.0018 0.0017 0.0014 0.0013 0.0013 0.0013 0.0012 0.0013 0.0012 0.0013 0.0016 0.002 0.0025 0.0028 0.0028 0.0023 0.002 0.002 0.002 0.0022 0.0022 0.002 0.002 0.0022 0.0022 0.0022 0.0024 0.0026 0.0026 0.0051 0.0093 0.0118 0.0139 0.0154 0.0149 0.013 0.0112 0.0081 0.0029 0.0021 0.0018 0.0017 0.0016 0.0014 0.0014 0.0015 0.0014 0.0013 0.0012 0.0026 0.0054 0.0096 0.0131 0.0164 0.0184 0.0177 0.0155 0.0115 0.0069 0.0037 0.0024 0.0021 0.002 0.0018 0.0015 0.0017 0.0015 0.001 0.0007 0.0007 0.0006 0.0017 0.0038 0.0052 0.0063 0.0066 0.0057 0.0044 0.0027 0.002 0.0021 0.0021 0.0022 0.0022 0.0024 0.0025 0.0024 0.0024 0.0025 0.0019 0.0018 0.0017 0.0021 0.0019 0.0019 0.0018 0.0017 0.0015 0.0016 0.0016 0.0021 0.0023 0.0025 0.0025 0.0023 0.0021 0.002 0.0019 0.0016 0.0016 0.0016 0.0017 0.0018 0.0017 0.0017 0.0014 0.0016 0.0019 0.0022 0.0034 0.0047 0.0052 0.0055 0.0057 0.0049 0.0044 0.0034 0.0021 0.0014 0.001 0.0009 0.0008 0.0012 0.001 0.001 0.0011 0.0012 0.0016 0.0024 0.0028 0.0034 0.0038 0.0038 0.0039 0.0035 0.0027 0.0022 0.0022 0.0021 0.0022 0.0019 0.0018 0.0015 0.0017 0.002 0.0033 0.0058 0.0069 0.0066 0.0062 0.0042 0.0019 0.0016 0.0018 0.0016 0.0016 0.0016 0.0014 0.001 0.0011 0.0008 0.0008 0.0007 0.0009 0.0023 0.0044 0.0074 0.0112 0.0125 0.0119 0.0105 0.0059 0.003 0.0031 0.0026 0.0024 0.002 0.0016 0.0015 0.0012 0.0012 0.0012 0.001 0.0009 0.0007 0.0009 0.0011 0.0017 0.0028 0.0035 0.0042 0.0039 0.0032 0.0028 0.0026 0.0026 0.0027 0.0026 0.0022 0.0017 0.0014 0.0014 0.0011 0.0012 0.0018 0.0031 0.0055 0.0081 0.0131 0.0168 0.0176 0.0167 0.0133 0.0072 0.0032 0.0024 0.0025 0.0021 0.0021 0.0021 0.0022 0.002 0.0019 0.0018 0.0019 0.0018 0.0019 0.0021 0.0018 0.0018 0.0018 0.0015 0.0011 0.0009 0.0009 0.0012 0.0013 
0.0012 0.0012 0.0008 0.0009 0.0009 0.0008 0.0007 0.0008 0.0009 0.0009 0.001 0.001 0.001 0.001 0.0011 0.0012 0.0015 0.0015 0.0015 0.0013 0.0011 0.0009 0.001 0.001 0.0012 0.0015 0.0019 0.0026 0.003 0.0034 0.0037 0.0042 0.0045 0.0045 0.0037 0.0028 0.002 0.0012 0.001 0.001 0.0012 0.0013 0.0012 0.0014 0.0013 0.0011 0.0011 0.0009 0.0011 0.0011 0.0012 0.0023 0.0045 0.0052 0.0053 0.0052 0.0037 0.003 0.0026 0.0025 0.0024 0.002 0.0017 0.0014 0.0012 0.0011 0.0011 0.0011 0.001 0.0009 0.0016 0.0038 0.006 0.0101 0.0125 0.0125 0.0118 0.009 0.0047 0.0023 0.0017 0.0016 0.0019 0.002 0.0019 0.0018 0.0013 0.0012 0.001 0.0011 0.0009 0.0009 0.0008 0.0008 0.0006 0.0005 0.0006 0.0009 0.0007 0.0008 0.0007 0.0009 0.0008 0.001 0.0009 0.0009 0.0012 0.0034 0.0099 0.0141 0.0178 0.0213 0.022 0.0219 0.0206 0.0175 0.0141 0.0105 0.0069 0.0047 0.0031 0.0027 0.0024 0.0025 0.0025 0.0023 0.0023 0.0018 0.0019 0.0016 0.0014 0.0014 0.0014 0.0013 0.0013 0.0014 0.0011 0.0012 0.0012 0.0014 0.0016 0.0014 0.0012 0.0013 0.001 0.0009 0.0009 0.0029 0.0097 0.0145 0.0173 0.0192 0.0204 0.0207 0.0207 0.021 0.019 0.0147 0.0116 0.0059 0.0025 0.0022 0.0019 0.002 0.0019 0.0018 0.0016 0.0017 0.0017 0.0014 0.0016 0.0015 0.0017 0.0017 0.0018 0.0017 0.0017 0.0018 0.0019 0.0018 0.0017 0.0018 0.0019 0.0016 0.0015 0.0013 0.0014 0.0018 0.0018 0.0018 0.002 0.002 0.0017 0.0015 0.0014 0.0014 0.0014 0.0016 0.002 0.0021 0.0021 0.0037 0.0042 0.0047 0.0054 0.0055 0.0055 0.0063 0.0063 0.0066 0.0064 0.0059 0.0049 0.0032 0.0023 0.0017 0.0021 0.0026 0.0031 0.0032 0.0029 0.0026 0.0023 0.002 0.0016 0.0012 0.0009 0.0005 0.0003 0.0002 0.0004 0.0004 0.0006 0.0005 0.0005 0.0008 0.0008 0.0007 0.0009 0.001 0.0013 0.0018 0.0018 0.002 0.0017 0.0017 0.0015 0.0014 0.0012 0.001 0.001 0.0009 0.0008 0.0006 0.0008 0.0005 0.001 0.0012 0.0015 0.0017 0.0016 0.0013 0.001 0.0007 0.0005 0.0006 0.0006 0.0004 0.0003 0.0004 0.0007 0.0005 0.001 0.0014 0.0023 0.0057 0.0113 0.0169 0.0181 0.0179 0.0155 0.0095 0.0068 0.0032 0.0022 0.002 0.0017 0.0016 0.0015 0.0013 0.0013 0.0012 0.0012 0.0013 0.001 0.001 0.0008 0.0005 0.0008 0.0006 0.0007 0.0007 0.0008 0.0006 0.0007 0.0009 0.0006 0.0006 0.0007 0.0006 0.0005 0.0004 0.0004 0.0006 0.0006 0.0006 0.0006 0.0006 0.0008 0.0005 0.0007 0.0009 0.0013 0.0015 0.002 0.0024 0.0023 0.0023 0.0021 0.0021 0.0017 0.0018 0.0015 0.0014 0.0012 0.0013 0.0014 0.0015 0.0015 0.0014 0.0013 0.0012 0.0013 0.0015 0.0014 0.0015 0.0016 0.0013 0.0013 0.001 0.001 0.001 0.0009 0.0008 0.0005 0.0004 0.0005 0.0005 0.0006 0.0006 0.0006 0.0007 0.0007 0.0011 0.0013 0.002 0.0024 0.0025 0.0032 0.0033 0.0033 0.0031 0.0026 0.0023 0.0013 0.0011 0.0012 0.0011 0.0009 0.0017 0.0026 0.0042 0.0052 0.0052 0.0047 0.0043 0.004 0.0035 0.0037 0.0033 0.0032 0.0025 0.0019 0.0013 0.0012 0.0009 0.0009 0.0009 0.0008 0.0009 0.0018 0.0046 0.0081 0.0106 0.0116 0.0111 0.0091 0.0057 0.0035 0.0029 0.0022 0.0015 0.0016 0.0016 0.0018 0.0019 0.0014 0.0021 0.0033 0.005 0.0069 0.0127 0.0145 0.0146 0.0138 0.0087 0.0045 0.0026 0.0023 0.0019 0.0014 0.0008 0.0007 0.0006 0.0008 0.0005 0.0003 0.0004 0.0003 0.0003 0.0008 0.0033 0.0073 0.0114 0.0141 0.014 0.0127 0.0089 0.0036 0.0014 0.0014 0.0013 0.0011 0.0011 0.0007 0.0009 0.001 0.0014 0.0014 0.0015 0.0012 0.0011 0.0005 0.0004 0.0005 0.0011 0.0014 0.0018 0.002 0.0018 0.0018 0.0016 0.0014 0.0013 0.001 0.0007 0.0007 0.0006 0.0008 0.0007 0.0025 0.0068 0.0094 0.01 0.0115 0.0108 0.0093 0.0085 0.0055 0.0018 0.002 0.0018 0.0018 0.0021 0.0023 0.0022 0.0022 0.0022 0.0022 0.002 0.0019 0.0019 0.0021 0.0022 0.0021 0.0017 0.0018 0.0016 0.0017 0.0018 0.0021 0.0021 0.0023 0.0021 0.002 
0.0022 0.0023 0.0025 0.0022 0.0018 0.0015 0.0016 0.0018 0.0016 0.0017 0.0013 0.0014 0.0015 0.002 0.0026 0.0032 0.0039 0.0046 0.0051 0.0052 0.0052 0.0047 0.0045 0.0039 0.0032 0.0029 0.002 0.0012 0.0011 0.0012 0.0021 0.0034 0.0037 0.0034 0.0032 0.0033 0.0036 0.0035 0.0034 0.0025 0.0018 0.001 0.0008 0.0006 0.0003 0.0006 0.0007 0.0009 0.0009 0.001 0.001 0.001 0.0011 0.0005 0.0007 0.0008 0.0009 0.0009 0.001 0.0013 0.0016 0.0016 0.0016 0.0015 0.0016 0.0019 0.002 0.0024 0.0041 0.0068 0.0117 0.0149 0.0161 0.015 0.0123 0.0082 0.0026 0.0013 0.0012 0.0012 0.0012 0.001 0.0012 0.0011 0.001 0.001 0.001 0.0013 0.0012 0.0016 0.0014 0.0018 0.0018 0.0016 0.002 0.0018 0.0018 0.0017 0.0017 0.0017 0.0015 0.0014 0.0013 0.0011 0.0009 0.0012 0.0012 0.0018 0.0029 0.0058 0.0094 0.0117 0.0136 0.014 0.0122 0.0101 0.0073 0.0036 0.0018 0.0014 0.0013 0.0012 0.0012 0.0012 0.0012 0.0014 0.0013 0.0012 0.0012 0.0017 0.0028 0.005 0.0095 0.0136 0.0159 0.0157 0.0135 0.0095 0.005 0.0026 0.0025 0.0026 0.0026 0.0025 0.002 0.002 0.0018 0.0019 0.0018 0.002 0.0018 0.002 0.0019 0.0017 0.0014 0.0014 0.0016 0.0015 0.0014 0.0015 0.0015 0.0018 0.0018 0.0019 0.0019 0.0019 0.0017 0.0015 0.0015 0.0018 0.0017 0.0024 0.0042 0.008 0.0114 0.0131 0.0129 0.0115 0.0086 0.0028 0.0018 0.0018 0.0016 0.0015 0.0016 0.0017 0.0016 0.0017 0.0017 0.0021 0.0019 0.0029 0.0035 0.0035 0.0055 0.0089 0.0095 0.0094 0.0086 0.0046 0.0029 0.0024 0.0023 0.0018 0.0016 0.0016 0.0013 0.0013 0.0009 0.0009 0.0009 0.0009 0.0009 0.0011 0.0012 0.0014 0.0018 0.0021 0.0024 0.0024 0.0026 0.0025 0.0026 0.0023 0.002 0.0016 0.0012 0.001 0.0009 0.0007 0.001 0.001 0.0013 0.0011 0.0009 0.0008 0.0011 0.0012 0.0011 0.0011 0.0009 0.0008 0.0009 0.001 0.001 0.0011 0.0012 0.0012 0.0012 0.0011 0.0011 0.0013 0.0016 0.0018 0.0015 0.0016 0.0016 0.0016 0.0018 0.0018 0.0018 0.0018 0.0016 0.0014 0.0017 0.0019 0.002 0.0019 0.0021 0.0017 0.0016 0.0018 0.002 0.0018 0.002 0.002 0.002 0.0018 0.0017 0.0017 0.0018 0.0018 0.0021 0.0021 0.0018 0.002 0.0019 0.0017 0.0081 0.0146 0.018 0.0225 0.0251 0.0259 0.0279 0.0281 0.0273 0.0256 0.0224 0.0183 0.0128 0.0076 0.0034 0.0019 0.0017 0.0015 0.0019 0.002 0.002 0.0021 0.002 0.0023 0.0024 0.0027 0.0024 0.0024 0.0023 0.0021 0.0021 0.0021 0.0019 0.0018 0.002 0.002 0.002 0.0019 0.0022 0.0022 0.0024 0.0026 0.0024 0.0025 0.0025 0.0027 0.0031 0.003 0.003 0.0027 0.0028 0.0028 0.0028 0.0026 0.0025 0.0025 0.0025 0.0029 0.003 0.0032 0.0032 0.0029 0.0031 0.0031 0.003 0.003 0.0031 0.0028 0.003 0.0033 0.0035 0.0032 0.0038 0.004 0.004 0.0043 0.0043 0.0046 0.004 0.0038 0.0034 0.0031 0.0032 0.0034 0.0031 0.0033 0.0035 0.0041 0.0044 0.0044 0.0044 0.0039 0.0035 0.0034 0.0035 0.0037 0.0036 0.0034 0.0032 0.0033 0.003 0.0034 0.0034 0.0036 0.0035 0.0034 0.0036 0.0037 0.0035 0.0034 0.003 0.0032 0.0034 0.0034 0.0035 0.0035 0.0037 0.0038 0.0043 0.0047 0.0045 0.0045 0.0041 0.0043 0.0041 0.0043 0.0044 0.004 0.0036 0.003 0.0029 0.0024 0.0026 0.0027 0.0027 0.0029 0.0034 0.0034 0.0035 0.0037 0.0032 0.0032 0.0029 0.0028 0.003 0.0033 0.003 0.0032 0.0031 0.0029 0.0029 0.0031 0.0032 0.0035 0.0037 0.0035 0.0037 0.0035 0.0033 0.0032 0.0034 0.0033 0.0033 0.0032 0.0027 0.0028 0.0029 0.0029 0.0033 0.0031 0.0031 0.0031 0.0029 0.0029 0.0026 0.0025 0.0024 0.0024 0.0023 0.0022 0.0023 0.0024 0.0025 0.0025 0.0025 0.0023 0.002 0.0021 0.0018 0.0019 0.0017 0.0016 0.0015 0.0015 0.0017 0.0017 0.0015 0.0016 0.0016 0.0018 0.0018 0.0016 0.0015 0.0015 0.0015 0.0015 0.0014 0.0011 0.0009 0.0006 0.0005 0.0006 0.0007 0.0006 0.0006 0.0007 0.0006 0.0008 0.0012 0.0028 0.0034 0.0034 0.0032 0.0018 0.0009 0.0009 0.0006 0.0006 
0.0005 0.0004 0.0003 0.0001 0.0003 0.0002 0.0 0.0 0.0002 0.0001 0.0 0.0001 0.0 0.0 0.0 0.0 0.0 0.0001 0.0001 0.0001 0.0 0.0002 0.0002 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0001 0.0 0.0", + "breathiness_timestep": "0.011609977324263039" + } +] \ No newline at end of file diff --git "a/samples/\344\273\231\347\221\266.ds" "b/samples/\344\273\231\347\221\266.ds" index c4d691cad..c26165cf2 100644 --- "a/samples/\344\273\231\347\221\266.ds" +++ "b/samples/\344\273\231\347\221\266.ds" @@ -1,122 +1,122 @@ [ { + "offset": 1.569, "text": "SP 你 是 不 食 烟 火 的 仙 瑶 AP 舞 凤 翔 鸾 相 交 的 SP 窈 窕 SP", "ph_seq": "SP n i sh ir b u sh ir y En h uo d e x ian y ao AP w u f eng x iang l uan x iang j iao d e SP y ao t iao SP", - "note_seq": "rest A4 A4 G4 G4 E4 E4 B4 B4 A4 A4 D5 D5 D5 D5 F#4 F#4 G4 G4 rest A4 A4 G4 G4 E4 E4 B4 B4 A4 A4 D5 D5 E5 E5 rest A4 A4 B4 B4 rest", - "note_dur_seq": "0.6 0.361 0.361 0.3619999 0.3619999 0.7220001 0.7220001 0.7229998 0.7229998 0.3620002 0.3620002 0.181 0.181 0.5419998 0.5419998 0.3610003 0.3610003 1.446 1.446 0.723 0.3610001 0.3610001 0.362 0.362 0.723 0.723 0.7219996 0.7219996 0.3620005 0.3620005 0.1809998 0.1809998 0.7230005 0.7230005 0.1799994 0.5419998 0.5419998 0.9040003 0.9040003 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.465 0.135 0.241 0.12 0.272 0.09 0.527 0.195 0.678 0.045 0.242 0.12 0.136 0.045 0.362 0.18 0.316001 0.044999 1.446 0.678 0.045 0.256001 0.105 0.242 0.12 0.603 0.12 0.571999 0.150001 0.272 0.09 0.136 0.045 0.723001 0.134999 0.045 0.422 0.12 0.904 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.465 0.135 0.241 0.12 0.272 0.09 0.527 0.195 0.678 0.045 0.242 0.12 0.136 0.045 0.362 0.18 0.316 0.045 1.446 0.678 0.045 0.256 0.105 0.242 0.12 0.603 0.12 0.572 0.15 0.272 0.09 0.136 0.045 0.723 0.135 0.045 0.422 0.12 0.904 0.4", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 1 1", + "note_seq": "rest A4 G4 E4 B4 A4 D5 D5 F#4 G4 rest A4 G4 E4 B4 A4 D5 E5 rest A4 B4 rest", + "note_dur": "0.6 0.361 0.362 0.722 0.723 0.362 0.181 0.542 0.361 1.446 0.723 0.361 0.362 0.723 0.722 0.362 0.181 0.723 0.18 0.542 0.904 0.4", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.2 407.3 408.0 409.5 411.5 414.6 417.5 421.2 424.6 428.4 431.6 434.4 436.8 438.2 439.0 439.0 438.8 438.0 437.5 436.3 435.4 434.4 433.7 433.2 432.9 432.9 432.9 432.9 432.9 432.9 432.9 432.9 433.0 433.2 433.2 433.2 433.3 433.4 433.4 433.4 433.4 433.4 433.4 433.5 433.7 433.7 434.1 434.4 434.8 435.7 436.2 437.1 438.0 439.0 440.0 441.1 442.1 443.1 444.1 444.9 445.5 446.3 446.8 447.2 447.4 447.6 447.5 446.7 445.9 444.2 442.3 439.8 437.6 434.8 431.6 428.4 425.2 421.6 418.7 415.0 412.2 409.1 406.5 404.2 402.3 400.7 399.3 398.6 398.2 397.7 396.3 393.7 389.5 385.5 380.2 375.6 371.0 367.5 364.6 362.8 386.8 386.9 387.0 387.4 387.9 388.5 389.1 389.7 390.4 391.0 391.7 392.3 392.8 393.2 393.6 393.6 393.9 394.3 395.8 397.9 400.7 404.0 407.7 411.2 415.3 419.4 423.2 426.6 429.7 431.8 433.0 433.7 432.2 429.0 
424.2 418.3 411.9 405.6 400.3 396.6 394.5 393.8 393.8 393.8 394.0 394.0 394.2 394.3 394.3 394.5 393.4 390.6 386.3 380.6 373.8 366.2 357.8 349.4 341.1 333.1 325.6 319.0 313.5 309.2 306.0 304.3 303.4 303.8 304.4 305.4 306.4 307.7 308.9 310.4 312.0 313.7 315.3 317.3 319.0 320.8 322.5 324.2 325.7 327.1 328.4 329.4 330.4 331.0 331.4 331.6 331.6 331.5 331.3 331.3 331.1 330.9 330.5 330.2 329.9 329.6 329.2 328.8 328.5 328.1 327.7 327.4 327.1 326.9 326.7 326.5 326.4 326.4 326.4 326.5 326.7 327.1 327.5 328.1 328.7 329.4 330.0 330.5 331.0 331.4 331.8 331.9 332.1 331.9 331.8 331.6 331.3 330.9 330.5 330.0 329.4 329.0 328.5 328.0 327.5 327.1 326.8 326.6 326.4 326.4 326.4 326.4 326.5 326.9 327.1 327.5 327.9 328.4 329.0 329.4 329.8 330.2 330.5 330.7 331.0 331.0 331.0 331.2 331.3 331.2 330.8 330.0 329.1 327.6 326.0 324.0 322.1 320.0 318.0 316.1 314.3 312.5 311.2 310.1 309.4 308.8 308.9 309.5 310.7 312.7 315.4 318.7 322.9 327.5 333.3 339.3 346.2 353.6 361.6 370.2 379.2 388.8 398.6 408.7 419.1 429.5 440.0 450.3 460.4 470.4 479.6 488.8 496.5 504.0 510.2 515.7 520.2 523.4 525.5 526.5 526.3 526.0 525.1 524.2 522.8 521.3 519.5 517.4 515.3 513.0 510.7 508.4 506.1 503.9 501.7 499.6 497.7 495.9 494.6 493.3 492.5 491.9 491.7 491.6 491.6 491.6 491.6 491.6 491.6 491.8 491.9 491.9 491.9 492.1 492.2 492.2 492.3 492.4 492.5 492.5 492.5 492.5 492.5 492.5 492.7 492.4 492.0 491.4 490.6 489.5 488.3 487.1 485.4 483.7 482.0 480.3 478.4 476.7 475.2 473.6 472.0 470.6 469.4 468.3 467.2 466.6 466.1 466.0 466.1 466.7 468.1 469.9 472.4 475.1 478.7 482.2 486.0 489.7 493.5 496.9 500.1 502.6 504.8 506.0 507.0 506.5 505.4 502.9 500.2 496.2 492.1 487.7 483.4 479.3 475.8 472.7 470.9 469.4 469.5 469.8 470.6 471.6 473.3 475.1 477.3 479.5 482.1 484.6 486.9 489.1 491.3 493.3 495.2 496.5 497.6 498.1 498.4 497.9 497.1 495.8 494.2 492.3 489.7 487.1 484.0 480.8 477.2 473.7 470.0 466.3 462.4 458.7 455.0 451.5 448.1 444.9 442.0 439.5 437.2 435.1 433.6 432.2 431.5 430.9 430.9 430.9 431.1 431.3 431.6 431.9 432.4 432.9 433.5 434.1 434.8 435.4 436.0 436.5 437.1 437.7 438.2 438.5 438.8 439.1 439.3 439.4 439.5 439.5 439.5 439.5 439.5 439.5 439.5 439.5 439.7 439.7 439.7 439.9 440.0 440.0 440.0 440.0 440.0 440.0 440.1 440.2 439.7 438.9 437.0 434.9 431.6 428.2 424.2 420.1 415.8 411.7 407.8 404.3 400.9 398.6 396.6 395.4 394.7 395.1 395.6 397.4 399.5 402.6 406.0 410.1 414.8 420.0 426.1 432.4 439.4 446.7 454.6 462.8 471.2 479.8 488.7 497.7 506.9 515.6 524.3 532.8 541.1 548.9 556.2 563.0 568.7 574.2 578.3 582.0 584.0 585.8 586.8 587.0 587.0 587.9 587.2 585.8 584.0 581.7 578.6 575.6 571.7 568.4 564.5 561.1 557.8 554.7 552.3 550.2 548.9 548.1 559.9 585.6 606.1 616.1 615.6 614.3 612.6 610.3 607.6 604.7 601.4 598.6 595.4 593.0 590.6 588.9 587.7 587.3 587.1 586.7 586.3 585.6 585.1 584.5 584.3 584.3 584.4 584.8 585.1 585.7 586.5 586.9 587.3 587.7 588.3 587.9 587.3 586.7 585.9 585.0 583.8 582.7 581.6 580.1 578.6 576.9 575.2 573.6 571.8 570.0 568.0 566.0 563.8 561.7 559.5 557.3 555.0 552.7 550.4 548.1 545.8 543.5 541.2 538.8 536.4 534.2 532.1 530.0 527.9 525.7 523.7 521.7 519.8 518.0 516.2 514.6 513.1 511.7 510.4 509.1 507.8 506.6 505.5 504.6 503.6 502.8 502.2 501.7 501.4 501.1 500.9 500.8 496.7 490.1 474.8 457.9 435.3 413.5 392.2 373.5 357.9 347.4 340.0 337.8 338.0 338.4 339.2 340.2 341.4 343.0 344.6 346.6 348.6 350.8 353.0 355.2 357.5 359.8 362.0 364.1 365.9 367.7 369.3 370.6 371.5 372.2 372.7 373.0 372.7 372.5 372.1 371.7 371.1 370.4 369.7 368.9 368.1 367.4 366.7 366.2 365.9 365.6 365.5 365.5 365.5 365.7 365.8 365.9 366.0 366.3 366.5 366.8 367.1 367.4 367.6 367.9 
368.1 368.3 368.6 368.8 368.9 368.9 368.9 369.1 369.0 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.8 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.7 368.8 368.9 369.0 369.1 369.3 369.6 369.7 369.9 370.2 370.5 370.7 370.9 371.0 371.2 371.4 371.6 371.7 371.7 371.8 371.9 371.8 371.7 371.7 371.6 371.5 371.4 371.3 371.1 371.0 370.7 370.6 370.4 370.2 370.0 369.9 369.7 369.5 369.3 369.2 369.1 369.0 368.9 368.7 368.7 368.7 368.7 368.7 368.8 369.0 369.5 370.1 370.8 371.7 372.8 374.0 375.2 376.6 378.1 379.5 381.0 382.4 383.9 385.3 386.8 388.3 389.6 390.9 391.9 392.8 393.6 394.2 394.7 395.0 395.2 395.0 394.9 394.8 394.6 394.5 394.3 394.0 393.7 393.3 393.1 392.8 392.4 392.1 391.7 391.4 391.1 390.9 390.6 390.5 390.4 390.3 390.2 390.2 390.2 390.2 390.2 390.2 390.4 390.4 390.4 390.5 390.6 390.8 390.9 391.2 391.4 391.5 391.6 391.7 391.8 391.8 391.9 392.0 392.0 392.0 392.1 392.2 392.0 391.8 391.6 391.2 390.9 390.2 389.6 389.0 388.3 387.4 386.5 385.7 384.7 383.7 382.6 381.5 380.5 379.5 378.4 377.3 376.2 375.2 374.1 373.1 372.2 371.4 370.5 369.7 369.0 368.4 367.8 367.2 366.9 366.5 366.2 366.0 366.0 366.1 366.7 367.8 369.8 372.4 375.4 378.8 382.4 386.0 389.4 392.1 394.5 396.3 397.4 397.5 397.0 395.9 394.5 392.5 390.4 388.0 385.7 383.2 380.9 378.8 377.1 375.7 374.9 374.4 374.4 374.7 375.6 376.7 378.1 379.7 381.6 383.7 385.8 387.9 390.0 391.8 393.6 394.7 395.8 396.4 396.9 396.5 395.9 394.5 392.9 390.8 388.6 386.2 383.8 381.4 379.1 376.9 375.2 373.6 373.0 372.6 372.8 373.2 374.3 375.6 377.1 378.9 380.9 382.9 385.0 386.9 388.7 390.3 391.7 392.5 393.2 393.6 393.2 392.7 391.8 390.8 389.4 387.9 386.3 384.5 382.8 381.5 380.2 379.2 378.5 378.1 378.0 378.1 378.4 378.9 379.6 380.4 381.4 382.4 383.6 384.8 385.9 386.9 387.8 388.6 389.4 389.9 390.4 390.6 390.6 390.6 390.6 390.6 390.6 390.7 390.8 390.9 390.9 391.0 391.1 391.1 391.1 391.2 391.3 391.3 391.3 391.3 391.3 391.6 392.0 392.3 392.7 393.0 393.4 393.8 394.1 394.5 394.9 395.2 395.6 396.0 396.3 396.7 397.0 397.4 397.8 398.1 398.5 398.9 399.2 399.6 400.0 400.3 400.7 401.1 401.5 401.8 402.2 402.6 402.9 403.3 403.7 404.0 404.4 404.8 405.2 405.5 405.9 406.3 406.7 407.0 407.4 407.8 408.2 408.5 408.9 409.3 409.7 410.0 410.4 410.8 411.2 411.6 411.9 412.3 412.7 413.1 413.4 413.8 414.2 414.6 415.0 415.4 415.7 416.1 416.5 416.9 417.3 417.7 418.0 418.4 418.8 419.2 419.6 420.0 420.4 420.7 421.1 421.5 421.9 422.3 422.7 423.1 423.5 423.8 424.2 424.6 425.0 425.4 425.8 426.2 426.6 427.0 427.4 427.8 428.2 428.6 428.9 429.3 429.7 430.1 430.5 430.9 431.3 431.7 432.1 432.5 432.9 433.3 433.7 434.1 434.5 434.9 435.3 435.7 436.1 436.5 436.9 437.3 437.7 438.1 438.5 438.9 439.3 439.5 439.5 439.7 439.9 440.2 440.5 440.8 441.0 441.3 441.6 441.9 442.2 442.4 442.5 442.7 442.8 443.0 442.9 442.8 442.7 442.5 442.0 441.6 441.2 440.8 440.4 440.0 439.6 439.2 438.7 438.3 437.9 437.6 437.5 437.5 437.5 437.5 437.5 437.6 437.8 438.1 438.4 438.7 438.8 439.0 439.3 439.5 439.5 439.7 439.4 439.2 439.0 438.6 438.0 437.0 436.0 434.7 433.0 431.4 429.5 427.4 425.3 423.1 420.6 418.4 416.0 413.7 411.5 409.1 407.1 405.2 403.2 401.5 400.0 398.8 397.7 396.7 395.9 395.3 395.0 395.0 395.0 395.1 395.3 395.5 395.8 396.2 396.6 396.9 397.3 397.6 397.8 397.9 397.9 398.4 399.2 400.9 404.0 407.3 411.2 415.8 420.9 425.7 430.5 434.7 438.2 441.1 442.8 443.2 442.5 440.6 437.9 434.3 430.1 425.5 420.6 415.8 411.0 406.5 402.5 399.0 396.3 394.6 394.0 394.7 394.9 395.0 395.0 395.1 394.8 393.8 391.7 388.9 384.8 380.4 374.7 368.9 362.6 356.1 349.3 342.6 336.1 329.9 323.8 
318.5 313.6 309.5 306.0 303.3 301.2 299.7 299.0 299.1 299.3 300.1 301.0 302.2 303.6 305.2 307.0 308.9 310.9 312.9 315.1 317.3 319.4 321.3 323.1 324.7 326.1 327.2 328.3 328.7 329.1 329.2 329.1 329.1 329.1 329.0 328.9 328.7 328.5 328.4 328.2 328.0 327.8 327.6 327.5 327.4 327.2 327.1 326.9 326.8 326.8 326.8 326.8 326.8 326.8 326.9 327.1 327.3 327.5 327.9 328.2 328.6 329.0 329.5 329.8 330.1 330.5 330.8 331.1 331.4 331.5 331.6 331.7 331.6 331.5 331.4 331.3 331.0 330.8 330.6 330.4 330.1 329.8 329.4 329.1 328.8 328.5 328.2 328.0 327.8 327.6 327.5 327.4 327.4 327.4 327.4 327.4 327.5 327.8 328.0 328.2 328.4 328.7 329.1 329.4 329.7 330.0 330.3 330.6 330.9 331.2 331.4 331.6 331.8 332.0 332.1 332.2 332.3 332.2 332.1 332.1 332.0 331.9 331.9 331.9 331.8 331.7 331.7 331.7 331.7 331.7 331.6 331.2 330.2 329.1 327.7 326.1 324.3 322.4 320.4 318.7 316.9 315.4 314.0 313.0 312.2 311.8 311.7 312.1 312.9 314.1 315.8 318.0 320.7 323.6 327.2 331.1 335.5 340.1 345.2 350.6 356.4 362.5 368.7 375.4 382.2 389.3 396.4 403.4 410.6 417.9 424.9 431.9 438.5 445.0 451.1 456.8 462.1 466.9 471.2 474.5 477.6 479.9 482.0 482.8 484.8 485.1 485.6 486.9 488.4 490.3 492.2 494.1 496.0 497.9 499.2 500.3 500.8 501.0 500.8 500.8 500.8 500.6 500.5 500.2 499.9 499.7 499.6 499.2 498.9 498.6 498.2 497.7 497.4 497.0 496.6 496.1 495.8 495.4 494.9 494.5 494.2 493.9 493.6 493.3 493.0 492.8 492.7 492.5 492.4 492.2 492.2 492.2 492.1 492.0 491.7 491.4 490.8 490.2 489.4 488.6 487.5 486.3 485.1 483.7 482.3 480.8 479.3 477.6 475.9 474.0 472.2 470.2 468.2 466.3 464.4 462.5 460.5 458.4 456.4 454.3 452.1 450.0 448.0 446.0 444.0 442.1 440.3 438.4 436.6 434.9 433.3 431.8 430.4 429.0 427.7 426.5 425.4 424.3 423.4 422.5 421.9 421.3 420.7 420.4 420.1 420.1 427.3 427.6 427.9 428.3 428.6 428.9 396.8 397.0 397.6 398.4 399.7 401.1 402.8 404.7 406.8 409.0 411.2 413.8 416.2 418.5 420.9 423.5 425.9 428.3 430.4 432.5 434.4 436.2 437.5 438.6 439.3 439.9 440.3 440.1 440.0 439.8 439.6 439.4 439.1 438.8 438.4 438.0 437.7 437.4 437.2 437.0 437.0 437.0 437.0 437.2 437.6 438.0 438.8 439.6 440.3 441.0 441.6 441.9 442.0 442.1 441.7 441.1 440.4 439.1 437.7 436.0 434.3 432.2 430.1 428.1 426.0 423.9 421.9 419.9 418.3 416.7 415.4 414.3 413.7 413.2 413.0 424.2 438.7 474.3 511.8 548.2 576.7 587.6 587.4 587.3 587.1 586.8 586.4 586.0 585.7 585.2 584.6 584.4 584.2 584.1 583.9 583.9 583.9 584.1 584.5 584.8 585.2 585.7 586.3 586.9 587.4 587.8 588.0 588.0 588.1 587.6 586.3 584.6 582.7 580.7 578.0 575.2 572.7 570.2 568.2 566.5 565.2 564.5 564.7 565.4 567.0 569.2 572.3 575.6 579.3 583.0 586.5 589.7 592.4 594.0 595.0 595.4 595.2 594.9 594.6 594.0 593.5 592.9 592.3 591.5 590.8 590.3 589.8 589.4 589.2 589.0 589.0 588.8 588.7 588.7 588.6 588.4 588.2 588.0 587.6 587.3 587.1 586.9 586.8 586.7 586.7 586.7 586.8 587.3 588.6 590.8 593.9 597.6 602.0 606.6 612.1 617.7 623.7 629.6 635.5 641.4 647.1 652.1 657.0 660.6 663.9 666.3 668.0 668.7 668.7 668.5 676.2 675.9 675.6 675.2 674.8 674.1 673.1 672.1 670.9 669.6 668.3 667.0 665.4 664.1 662.6 661.2 659.8 658.2 657.0 655.7 654.7 653.6 652.6 651.9 651.3 650.7 650.6 650.6 650.4 649.9 649.2 648.3 647.1 645.6 643.6 641.5 639.2 636.4 633.4 630.3 626.9 623.2 619.3 615.1 611.0 606.6 601.9 597.1 592.4 587.3 582.3 577.0 571.8 566.4 561.0 555.6 550.3 544.8 539.4 534.0 528.9 523.6 518.3 513.1 507.9 502.9 498.1 493.3 488.6 484.1 479.7 475.3 471.2 467.9 464.2 460.4 456.8 453.5 450.3 447.3 444.4 441.8 439.3 437.0 434.9 433.1 431.4 430.0 428.7 427.6 426.7 426.1 425.5 425.3 425.2 425.0 425.0 425.0 424.9 424.7 424.5 424.5 424.4 424.2 424.0 424.0 424.0 424.0 421.6 422.4 
423.3 424.5 425.9 427.4 429.0 430.8 432.6 434.4 436.0 437.4 438.6 439.5 440.1 440.6 440.7 440.6 440.5 440.5 440.4 440.3 440.1 440.0 439.7 439.5 439.3 439.1 438.8 438.6 438.3 438.0 437.7 437.6 437.4 437.1 436.9 436.8 436.6 436.5 436.3 436.2 436.2 436.2 436.2 436.2 436.3 436.4 436.5 436.7 436.9 437.2 437.6 438.1 438.5 438.8 439.1 439.5 439.9 440.3 440.8 441.0 441.3 441.6 441.8 442.0 442.0 442.0 442.2 442.0 441.6 441.0 440.3 439.4 438.2 437.0 435.4 433.9 432.4 430.7 428.9 427.1 425.4 423.6 421.8 420.3 418.9 417.5 416.3 415.2 414.3 413.5 412.8 412.3 412.2 413.3 416.1 421.9 429.8 440.5 452.1 464.6 476.9 487.5 496.0 501.5 504.6 504.4 504.2 504.1 503.9 503.7 503.4 502.9 502.6 502.3 501.8 501.4 500.7 500.1 499.6 499.1 498.7 498.2 497.7 497.2 496.7 496.4 496.1 495.8 495.5 495.4 495.3 495.3 495.2 495.1 494.9 494.7 494.4 494.1 493.8 493.5 493.0 492.6 492.1 491.6 491.1 490.7 490.2 489.9 489.6 489.2 488.9 488.6 488.5 488.5 488.5 488.5 488.6 488.7 488.9 489.0 489.3 489.6 489.9 490.3 490.8 491.2 491.7 492.1 492.4 492.8 493.3 493.6 493.9 494.3 494.5 494.7 494.8 495.0 495.2 495.3 495.0 494.6 494.2 493.6 492.8 491.9 490.9 489.7 488.5 487.1 485.7 484.1 482.5 480.8 479.1 477.5 475.8 474.1 472.5 471.0 469.7 468.4 467.2 466.1 465.2 464.4 463.6 463.2 462.9 462.9 463.3 463.7 465.5 467.5 470.2 473.2 476.5 480.3 484.3 488.2 492.2 495.7 499.0 501.9 504.5 506.4 507.6 508.2 508.0 507.5 506.2 504.7 502.8 500.7 498.3 495.8 493.2 490.9 488.7 486.3 484.0 482.4 481.0 480.1 479.5 479.3 479.5 479.9 480.6 481.7 482.9 484.4 486.0 487.8 489.7 491.6 493.5 495.4 497.1 498.7 500.1 501.4 502.2 502.9 503.2 503.3 502.9 502.5 502.0 501.2 500.2 499.2 498.2 496.9 495.7 494.6 493.6 492.6 491.8 491.0 490.6 490.2 490.2 490.2 490.3 490.6 491.0 491.5 492.1 492.6 493.2 494.0 494.7 495.3 495.8 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2", - "input_type": "phoneme", - "offset": 1.569 + "f0_timestep": "0.005" }, { + "offset": 13.157, "text": "SP 题 画 诗 也 难 描 SP 千 古 词 不 敢 扰 AP 华 灯 照 墨 香 里 印 色 SP 正 妙 SP", "ph_seq": "SP t i h ua sh ir y E n an m iao SP q ian g u c i0 b u g an r ao AP h ua d eng zh ao m o x iang l i y in s e SP zh eng m iao SP", - "note_seq": "rest A4 A4 B4 B4 E4 E4 D5 D5 G4 G4 A4 A4 rest B3 B3 A4 A4 G4 G4 B4 B4 G4 G4 E4 E4 rest A4 A4 G4 G4 B3 B3 A4 A4 B4 B4 A4 A4 D5 D5 D5 D5 rest D4 D4 E4 E4 rest", - "note_dur_seq": "0.578 0.361 0.361 0.362 0.362 0.5420001 0.5420001 0.5419999 0.5419999 0.362 0.362 0.723 0.723 0.3610001 0.3609998 0.3609998 0.362 0.362 0.1810002 0.1810002 0.5419998 0.5419998 0.3610001 0.3610001 0.5419998 0.5419998 0.9040003 0.1809998 0.1809998 0.3610001 0.3610001 0.5420003 0.5420003 0.1809998 0.1809998 0.1809998 0.1809998 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.5430002 0.5419998 0.5419998 1.626 1.626 0.144", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.473 0.105 0.286001 0.074999 0.241999 0.120001 0.497 0.045 0.437 0.105 0.286999 0.075001 0.723 0.211 0.15 0.301 0.059999 0.242001 0.119999 0.121001 0.059999 0.466999 0.075001 0.315998 0.045002 0.542 0.814 0.09 0.136 0.045 0.240999 0.120001 0.452 0.09 0.121 0.059999 
0.136 0.045 0.316 0.045 0.120998 0.060001 0.361 0.408 0.135 0.377001 0.164999 1.626 0.144", - "f0_timestep": "0.005", + "ph_dur": "0.473 0.105 0.286 0.075 0.242 0.12 0.497 0.045 0.437 0.105 0.287 0.075 0.723 0.211 0.15 0.301 0.06 0.242 0.12 0.121 0.06 0.467 0.075 0.316 0.045 0.542 0.814 0.09 0.136 0.045 0.241 0.12 0.452 0.09 0.121 0.06 0.136 0.045 0.316 0.045 0.121 0.06 0.361 0.408 0.135 0.377 0.165 1.626 0.144", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 1 1", + "note_seq": "rest A4 B4 E4 D5 G4 A4 rest B3 A4 G4 B4 G4 E4 rest A4 G4 B3 A4 B4 A4 D5 D5 rest D4 E4 rest", + "note_dur": "0.578 0.361 0.362 0.542 0.542 0.362 0.723 0.361 0.361 0.362 0.181 0.542 0.361 0.542 0.904 0.181 0.361 0.542 0.181 0.181 0.361 0.181 0.361 0.543 0.542 1.626 0.144", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 401.4 402.2 405.9 409.8 414.0 418.2 422.0 425.9 429.5 432.5 435.1 437.0 438.2 439.0 439.1 438.9 438.7 438.5 438.1 437.5 437.0 436.5 435.9 435.4 435.2 435.0 434.9 434.9 435.0 435.4 436.1 436.8 437.6 438.4 439.2 440.1 440.9 441.7 442.2 442.6 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.5 441.3 439.0 435.6 431.4 426.7 421.8 416.8 412.2 408.2 405.0 402.8 401.7 402.9 408.2 419.4 436.5 456.2 476.4 494.5 508.4 515.7 515.9 515.0 513.6 511.7 509.5 507.1 504.8 502.5 500.4 498.8 497.6 497.1 496.9 496.6 496.1 495.6 494.6 493.7 492.6 491.2 489.6 487.9 486.3 484.3 482.2 480.1 478.0 475.7 473.2 470.9 468.5 466.0 463.5 461.1 458.9 456.5 454.2 452.0 449.9 447.9 446.0 444.3 442.7 441.2 439.8 438.7 437.7 436.8 436.1 435.5 435.2 434.9 434.3 432.0 427.2 419.9 411.9 402.7 392.0 380.8 369.8 359.7 350.6 343.0 334.4 306.1 285.0 281.3 281.8 282.7 284.0 285.5 287.5 289.8 292.3 295.0 298.0 301.1 304.4 307.7 310.9 314.2 317.3 320.2 322.9 325.3 327.4 329.2 330.6 331.5 331.9 332.0 331.9 331.7 331.6 331.3 330.9 330.5 330.1 329.8 329.5 329.3 329.2 329.1 329.1 329.1 329.2 329.6 329.9 330.3 330.8 331.2 331.7 332.2 332.6 332.9 333.1 333.1 333.2 333.0 332.8 332.5 332.0 331.5 331.0 330.3 329.7 329.1 328.4 327.9 327.4 327.0 326.7 326.4 326.4 326.4 326.5 327.0 327.4 328.2 329.1 330.1 331.2 332.1 333.1 334.0 334.9 335.6 336.0 336.4 336.7 337.7 340.8 345.7 352.2 359.8 367.9 379.2 391.9 406.1 421.5 438.1 455.8 473.9 492.4 510.9 529.0 546.4 562.4 576.7 588.6 598.1 605.0 609.1 610.0 609.7 609.1 607.9 606.8 605.4 603.8 602.1 600.4 598.4 596.2 594.2 592.4 590.4 589.0 587.8 586.7 585.7 585.3 585.3 585.3 585.7 586.3 586.9 587.4 587.7 588.0 588.0 588.0 588.0 588.0 588.0 588.0 588.0 588.0 588.0 587.8 587.5 586.2 585.9 585.6 585.1 584.3 583.3 582.0 580.6 578.9 577.2 575.4 573.3 570.9 568.5 566.0 563.3 560.4 557.3 554.2 551.5 548.1 544.6 541.1 537.4 533.8 529.9 525.9 522.0 518.1 514.1 509.9 506.0 501.9 497.8 493.7 489.7 485.7 481.5 477.6 473.7 469.8 465.9 462.0 458.4 454.7 451.0 447.4 444.0 440.6 
437.5 434.3 431.2 428.2 425.3 422.5 419.9 417.5 415.1 412.8 410.6 408.5 406.5 404.8 403.2 401.6 400.2 399.1 397.9 397.0 396.1 395.4 394.8 394.3 393.9 393.8 390.4 392.4 394.6 397.0 399.2 401.2 402.5 403.2 403.2 402.8 402.1 400.9 399.6 398.1 396.4 394.9 393.5 392.2 391.3 390.8 390.6 390.6 390.8 391.0 391.4 391.8 392.1 392.7 393.2 393.8 394.3 394.7 395.1 395.3 395.4 395.6 395.4 395.4 395.3 395.0 394.6 394.4 394.0 393.7 393.5 393.3 393.1 393.1 393.1 392.9 392.9 392.9 392.9 392.8 392.7 392.7 392.7 392.5 392.4 392.4 392.4 392.4 392.4 392.4 392.4 392.4 392.4 392.4 392.6 392.7 392.7 392.9 392.9 392.9 392.9 393.1 393.0 392.9 392.9 392.9 392.7 392.5 392.4 392.3 392.1 392.0 391.8 391.4 391.2 391.0 390.8 390.6 390.4 390.2 390.1 389.9 389.7 389.5 389.3 389.3 389.3 389.3 389.3 389.3 389.3 389.3 389.4 389.5 389.5 389.7 389.9 390.1 390.3 390.5 390.6 390.8 391.0 391.2 391.3 391.3 391.5 391.7 391.8 391.8 391.8 391.8 392.0 392.1 392.7 393.4 394.5 395.9 397.4 399.0 400.9 403.2 405.6 408.1 410.7 413.5 416.3 419.1 421.9 424.8 427.6 430.2 432.9 435.4 437.8 440.0 442.0 443.7 445.4 446.4 447.3 447.9 448.3 448.1 447.0 444.9 441.4 437.4 433.1 428.8 424.8 421.3 418.9 417.6 417.3 417.5 418.5 419.8 421.7 424.0 426.5 429.2 432.3 435.3 438.1 440.8 443.3 445.6 447.1 448.3 449.0 449.1 448.5 447.4 445.9 444.1 442.0 439.9 437.8 436.0 434.3 433.1 432.4 432.2 432.4 432.9 433.8 434.9 436.2 437.5 438.9 440.3 441.3 442.0 442.6 442.7 442.5 442.5 442.3 442.1 441.9 441.6 441.2 441.0 440.8 440.6 440.5 438.5 435.2 432.0 428.8 425.5 422.4 419.2 416.1 413.0 409.9 406.8 403.8 400.7 397.7 394.8 391.8 388.9 386.0 383.1 380.2 377.4 374.5 371.7 369.0 366.2 363.5 360.7 358.0 355.4 352.7 350.1 347.4 344.8 342.3 339.7 337.2 334.6 332.1 329.6 327.2 324.7 322.3 319.9 317.5 315.1 312.8 310.4 308.1 305.8 303.5 301.2 299.0 296.7 294.5 292.3 290.1 288.0 285.8 283.7 281.5 279.4 277.3 275.3 273.2 271.2 269.1 267.1 265.1 263.1 261.2 259.4 257.3 255.1 253.1 251.2 249.2 247.7 246.6 246.2 246.0 245.9 245.9 245.8 245.7 245.6 245.4 245.3 245.2 245.1 245.0 245.0 245.0 245.0 245.0 245.2 245.6 246.0 246.4 246.9 247.3 247.7 247.9 248.0 248.1 248.1 248.1 248.2 248.2 248.3 248.4 248.5 251.1 253.9 257.6 262.0 267.3 273.3 280.1 287.5 295.5 303.9 312.7 321.1 328.9 337.7 346.3 354.6 362.3 369.3 375.4 380.3 383.8 386.0 387.3 387.6 388.1 388.7 389.4 390.2 390.8 391.2 391.2 390.9 390.5 390.0 387.6 388.1 388.7 389.5 391.0 392.7 394.6 396.9 399.3 401.9 404.8 407.7 410.7 413.7 416.9 419.9 422.7 425.6 428.2 430.5 432.6 434.4 435.9 437.0 437.8 438.4 438.6 439.0 439.7 441.0 442.5 443.8 445.0 446.0 446.7 444.9 443.6 441.4 438.8 435.8 432.4 428.3 424.0 419.6 415.0 410.4 405.9 401.5 397.2 393.4 390.0 386.9 384.5 382.6 380.9 380.0 379.6 379.5 379.8 380.5 381.3 382.4 383.7 385.0 386.4 387.8 389.1 390.2 391.3 392.1 392.7 393.0 393.1 393.1 393.1 393.3 393.7 394.0 394.3 394.6 395.0 395.3 395.6 395.7 395.9 396.1 396.1 396.0 395.0 393.1 390.4 387.2 383.5 379.6 375.8 372.1 368.9 366.3 364.3 368.7 370.8 374.8 380.4 387.8 395.9 404.6 413.4 421.6 429.0 434.2 437.6 439.2 439.2 439.4 439.7 440.2 440.6 441.3 441.9 442.5 443.2 443.8 444.4 444.9 445.3 445.4 443.6 444.3 445.4 447.7 450.8 454.5 458.6 463.1 468.1 473.0 478.3 483.3 488.1 492.7 496.7 500.1 502.7 504.4 505.4 505.6 505.2 504.7 504.3 503.5 502.4 501.4 500.3 499.0 497.8 496.5 495.0 493.7 492.5 491.3 490.1 489.3 488.6 488.0 487.6 487.4 487.4 487.5 487.9 488.4 489.1 489.9 490.9 492.1 493.3 494.5 495.7 496.9 498.1 499.2 500.2 500.9 501.5 501.9 502.3 498.8 499.1 499.3 498.4 495.9 492.2 487.2 480.9 473.8 465.6 456.9 447.8 438.6 429.6 420.6 
412.0 404.0 396.8 390.3 384.7 380.0 376.7 374.3 373.1 372.7 372.6 372.6 372.6 372.4 372.2 372.1 371.9 371.7 371.5 371.4 371.2 371.0 370.8 370.6 370.5 370.3 370.1 370.0 370.0 370.0 370.0 370.2 371.0 372.5 374.5 377.4 380.6 384.2 387.8 391.3 394.7 397.8 400.1 401.9 403.0 403.4 403.0 402.2 401.0 399.5 397.9 396.2 394.3 392.8 391.2 390.1 389.3 388.8 388.8 389.0 389.4 390.2 390.9 391.7 392.4 393.2 393.9 394.5 394.7 394.9 394.7 394.7 394.6 394.4 394.2 394.0 393.6 393.4 393.2 393.1 393.1 393.1 387.9 386.9 385.3 382.5 379.0 374.8 370.1 365.1 359.7 354.0 348.4 343.0 337.9 333.1 328.7 324.9 321.8 319.4 317.7 316.7 316.6 316.7 317.7 319.0 320.5 322.3 324.1 325.8 327.6 328.9 329.8 330.4 330.5 330.3 330.2 330.0 329.7 329.2 328.9 328.5 328.0 327.7 327.3 326.9 326.7 326.5 326.4 326.4 326.5 327.0 327.9 329.2 330.6 332.2 334.0 336.0 337.8 339.4 340.9 342.2 343.0 343.4 343.4 342.8 341.4 339.6 337.2 334.5 331.7 328.9 326.4 324.1 322.2 321.0 320.4 320.4 320.9 322.3 324.2 326.5 329.2 331.9 334.5 336.9 338.6 339.7 340.3 339.9 339.4 338.5 337.4 336.1 334.8 333.5 332.2 331.2 330.3 329.8 329.6 329.6 329.8 329.9 330.2 330.6 331.0 331.4 331.9 332.4 332.8 333.2 333.7 333.8 334.1 334.2 334.2 334.0 333.8 333.6 333.4 333.2 333.0 332.8 332.7 332.5 332.3 332.1 331.9 331.7 331.5 331.3 331.2 331.0 330.8 330.6 330.4 330.2 330.0 329.9 329.7 329.5 329.3 329.1 328.9 328.7 328.6 328.4 328.2 328.0 327.8 327.6 327.4 327.3 327.1 326.9 326.7 326.5 326.3 326.2 326.0 325.8 325.6 325.4 325.2 325.1 324.9 324.7 324.5 324.3 324.1 324.0 323.8 323.6 323.4 323.2 323.0 322.9 322.7 322.5 322.3 322.1 322.0 321.8 321.6 321.4 321.2 321.1 320.9 320.7 320.5 320.3 320.1 320.0 319.8 319.6 319.4 319.2 319.1 318.9 318.7 318.5 318.3 318.2 318.0 317.8 317.6 317.5 317.3 317.1 316.9 316.7 316.6 316.4 316.2 316.0 315.8 315.7 315.5 315.3 315.1 315.0 314.8 314.6 314.4 314.3 314.1 313.9 313.7 313.5 313.4 313.2 313.0 312.8 312.7 312.5 312.3 312.1 312.0 311.8 311.6 311.4 311.3 311.1 310.9 310.7 310.6 310.4 310.2 310.0 309.9 309.7 309.5 309.3 309.2 309.0 308.8 308.6 308.5 308.3 308.1 307.9 307.8 307.6 307.4 307.2 307.1 306.9 306.7 306.6 306.4 306.2 306.0 305.9 305.7 305.5 305.4 305.2 305.0 304.8 304.7 304.5 304.3 304.1 304.0 303.8 303.6 303.5 303.3 303.1 303.0 302.8 302.6 302.4 302.3 304.2 308.8 316.9 326.4 339.1 353.7 370.3 386.0 403.7 417.2 430.4 438.8 443.6 444.7 444.4 443.8 443.0 441.8 440.8 439.2 438.0 436.8 435.7 434.7 434.1 433.7 432.9 431.1 428.7 426.2 423.5 421.1 419.3 418.3 418.0 417.5 416.8 415.7 414.2 412.5 410.3 407.7 405.2 402.6 399.9 397.4 394.7 392.4 390.5 388.8 387.5 386.4 385.7 385.7 385.7 386.0 386.5 387.1 387.9 388.6 389.6 390.3 391.1 391.8 392.3 392.9 392.9 393.0 392.8 392.6 392.4 392.1 391.7 391.4 391.2 390.8 390.6 390.6 390.3 389.4 387.7 384.7 380.9 376.2 370.9 364.8 358.2 351.2 343.8 336.2 328.4 320.7 312.9 305.3 298.1 291.1 284.5 278.3 272.6 267.4 262.8 258.8 255.5 252.9 250.6 248.9 247.7 247.2 247.1 246.9 246.5 246.0 245.4 244.6 243.8 243.1 242.3 241.6 240.9 240.4 240.1 239.9 240.0 240.1 240.5 241.0 241.8 242.7 243.6 244.6 245.5 246.2 246.8 247.3 247.6 247.6 247.5 247.2 247.0 246.7 246.2 245.8 245.4 245.0 244.5 244.1 243.8 243.6 243.4 243.4 243.4 243.5 243.7 244.0 244.3 244.7 245.1 245.6 246.1 246.5 246.9 247.2 247.6 247.8 247.9 248.0 248.0 247.9 247.9 247.8 247.7 247.4 247.3 249.9 249.8 249.8 249.8 249.7 249.7 249.6 249.5 249.3 249.2 249.1 249.1 249.0 248.9 248.8 248.8 248.8 248.8 249.3 250.8 253.5 257.4 262.2 268.5 275.7 284.0 293.2 303.3 314.2 325.9 336.6 348.0 360.5 373.0 385.0 396.4 406.9 416.3 424.3 430.7 434.9 437.2 438.2 438.2 
438.2 438.4 438.8 439.2 439.7 440.1 440.5 440.9 441.4 441.8 442.2 442.5 443.0 443.2 443.4 443.6 443.7 442.0 437.4 429.9 420.9 413.2 408.6 407.2 408.1 409.8 412.8 416.7 421.5 426.9 433.0 439.7 446.6 453.7 460.8 467.4 473.2 479.2 484.5 488.9 492.4 494.6 495.6 495.9 495.9 496.1 496.5 496.7 497.0 497.4 497.7 497.9 498.0 498.0 497.3 495.4 492.6 488.9 484.6 479.6 474.3 468.6 463.0 457.8 452.8 448.2 444.3 441.4 439.2 438.0 437.5 437.5 437.6 437.8 438.1 438.6 439.0 439.4 440.0 440.5 441.0 441.6 442.0 442.5 442.7 442.9 443.1 443.2 443.1 442.9 442.7 442.5 442.3 441.9 441.4 441.0 440.6 440.2 439.7 439.3 439.1 439.0 439.0 439.0 439.2 439.6 440.3 441.1 442.0 442.7 443.2 443.6 443.8 443.8 443.8 443.8 443.8 443.8 443.8 443.8 443.8 443.8 443.8 443.8 444.5 446.8 450.7 456.1 463.2 471.8 481.6 492.3 503.8 515.7 527.8 539.9 549.9 559.6 569.2 577.2 583.4 587.3 589.5 589.7 589.9 590.3 590.8 591.4 592.0 592.8 593.5 593.9 594.2 594.2 594.3 594.0 593.7 593.1 592.6 592.0 591.4 591.0 590.7 590.4 588.7 584.2 578.1 571.1 563.8 557.0 551.2 547.7 546.1 546.4 547.5 549.1 551.2 553.7 556.9 560.4 564.2 568.0 571.8 575.6 578.9 582.0 584.1 586.0 587.5 588.3 588.6 588.2 587.7 587.1 586.3 585.3 584.3 583.4 582.3 581.4 580.8 580.4 580.2 580.6 581.6 583.4 586.0 589.4 593.0 596.6 599.6 602.1 604.1 605.2 605.7 605.0 603.8 602.1 599.9 597.4 594.7 591.9 589.0 586.2 583.8 581.8 580.1 579.1 578.6 578.7 579.2 580.1 581.3 582.9 584.8 586.8 588.8 590.7 592.1 593.2 593.9 594.5 594.2 593.9 593.3 592.6 591.9 590.7 589.6 588.7 587.7 586.7 585.8 585.0 584.4 584.0 579.5 574.1 568.6 563.3 558.0 552.7 547.5 542.3 537.2 532.1 527.1 522.1 517.2 512.3 507.5 502.7 497.9 493.2 488.6 484.0 479.4 474.9 470.4 466.0 461.6 457.2 452.9 448.6 444.4 440.2 436.0 431.9 427.8 423.8 419.8 415.8 411.9 408.0 404.2 400.4 396.6 392.8 389.1 385.5 381.8 378.2 374.6 371.1 367.6 364.1 360.7 357.3 353.9 350.6 347.3 344.0 340.7 337.5 334.3 331.2 328.1 325.0 321.9 318.9 315.8 312.9 309.9 307.0 304.1 301.2 298.4 295.6 292.8 290.0 287.3 284.6 281.9 279.2 276.6 274.0 271.4 268.8 266.3 263.8 261.3 258.8 256.4 253.9 251.5 249.2 246.8 244.5 242.2 239.9 237.6 235.4 233.2 231.0 228.8 226.6 224.5 222.4 220.3 219.2 222.0 224.8 228.6 232.8 237.4 242.4 247.7 253.2 258.7 264.3 269.8 275.1 280.0 284.6 288.5 291.9 294.4 296.2 297.4 297.7 297.6 297.4 297.3 297.0 296.7 296.5 296.2 295.9 295.5 295.2 294.9 294.6 294.3 294.2 294.1 294.0 294.0 294.0 294.1 294.3 294.4 294.6 294.9 295.4 295.7 295.9 296.3 296.7 296.9 297.1 297.4 297.5 297.6 297.6 297.7 297.5 297.4 297.2 297.0 296.7 296.3 295.9 295.7 295.4 294.9 294.7 294.5 294.3 294.3 294.3 294.4 294.5 294.9 295.4 295.8 296.2 296.7 297.2 297.5 297.8 298.0 298.2 298.2 298.1 297.9 297.8 297.5 297.1 296.6 296.3 296.0 295.5 295.1 294.8 294.5 294.2 294.0 294.0 294.0 293.8 293.8 293.8 293.8 293.6 293.5 293.5 293.4 293.2 293.1 293.0 292.8 292.5 292.4 292.2 292.1 292.0 292.0 291.8 291.7 291.6 291.6 291.6 291.6 291.6 291.6 291.7 291.8 291.9 292.1 292.3 292.4 292.6 292.8 293.0 293.2 293.3 293.4 293.6 293.7 293.8 293.8 293.8 293.9 293.8 293.8 293.8 293.8 293.8 293.8 293.7 293.7 293.5 293.5 293.5 293.7 293.5 293.5 293.4 293.2 292.9 292.6 292.4 292.1 291.8 291.5 291.1 290.8 290.5 290.2 290.0 289.7 289.6 289.5 289.5 289.5 290.0 291.2 293.3 295.9 299.0 302.7 306.7 311.0 315.2 319.4 323.5 327.2 330.4 332.9 334.6 335.6 335.9 335.7 335.4 334.9 334.3 333.7 332.9 332.1 331.2 330.4 329.5 328.8 328.2 327.5 327.1 326.6 326.4 326.4 326.4 326.6 326.8 327.3 327.7 328.2 328.9 329.5 330.1 330.8 331.3 331.8 332.3 332.6 332.9 333.1 331.2 331.3 331.5 331.6 331.8 331.9 331.9 
331.9 332.1 332.0 331.9 331.9 331.7 331.3 331.2 330.9 330.6 330.3 329.9 329.6 329.3 329.1 328.9 328.6 328.5 328.3 328.3 328.3 328.3 328.3 328.4 328.6 328.9 329.2 329.5 329.8 330.2 330.6 331.0 331.4 331.9 332.3 332.6 332.9 333.3 333.6 333.8 333.9 334.0 334.1 333.8 333.7 333.4 333.0 332.4 331.8 331.2 330.2 329.2 328.2 327.2 326.2 325.1 324.0 323.0 322.1 321.3 320.7 320.1 319.6 319.3 319.1 319.2 319.5 320.2 321.5 322.9 324.5 326.5 328.7 331.1 333.5 335.8 337.9 339.8 341.6 343.0 343.9 344.4 344.5 344.1 343.4 342.4 341.2 339.7 337.9 336.0 334.0 332.0 329.9 327.9 326.1 324.4 322.8 321.6 320.6 320.0 319.7 319.8 320.3 321.7 323.6 326.1 329.0 332.0 335.2 338.3 341.1 343.2 344.9 346.0 346.4 346.2 345.7 345.1 344.2 343.2 341.9 340.5 339.0 337.3 335.6 333.9 332.1 330.2 328.6 326.9 325.4 324.0 322.6 321.5 320.6 319.9 319.4 319.1 319.2 319.5 320.6 322.0 324.0 326.1 328.3 330.8 333.4 335.9 338.0 339.7 341.1 341.5 341.7 341.4 340.9 340.3 339.4 338.4 337.2 336.0 334.5 333.1 331.8 330.4 329.0 327.7 326.6 325.5 324.7 324.1 323.6 323.2 323.2 323.2 323.6 324.2 324.8 325.7 326.6 327.7 328.8 330.0 331.2 332.1 333.1 333.9 334.6 335.0 335.3 335.3 335.1 335.0 334.8 334.3 334.0 333.7 333.1 332.5 332.0 331.4 330.8 330.2 329.7 329.1 328.6 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3 328.3", - "input_type": "phoneme", - "offset": 13.157 + "f0_timestep": "0.005" }, { + "offset": 25.786, "text": "AP 眉 眼 里 藏 的 笑 SP 如 星 落 九 霄 AP 纤 姿 舞 的 曼 妙 SP 惹 月 色 倾 倒 SP", "ph_seq": "AP m ei y En l i c ang d e x iao SP r u x ing l uo j iu x iao AP x ian z i0 w u d e m an m iao SP r e y ve s e q ing d ao SP", - "note_seq": "rest E4 E4 D4 D4 E3 E3 G3 G3 A3 A3 A3 A3 rest A3 A3 G3 G3 A3 A3 B3 B3 B3 B3 rest E4 E4 D4 D4 E3 E3 G3 G3 A3 A3 A3 A3 rest B3 B3 A4 A4 A4 A4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.6 0.361 0.361 0.361 0.361 0.362 0.362 0.181 0.181 0.3610001 0.3610001 0.3609998 0.3609998 0.5430002 0.3609998 0.3609998 0.181 0.181 0.5420003 0.5420003 0.3609996 0.3609996 0.723 0.723 1.085 0.3610001 0.3610001 0.362 0.362 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.3620005 0.3620005 0.5419998 0.3610001 0.3610001 0.3619995 0.3619995 0.7230005 0.7230005 0.3610001 0.3610001 0.3610001 0.3610001 0.217", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.525001 0.074999 0.316 0.045 0.271 0.09 0.242001 0.119999 0.136 0.045 0.240999 0.120001 0.361 0.498 0.045 0.240997 0.120003 0.120999 0.060001 0.437001 0.105 0.241001 0.119999 0.723 0.905 0.18 0.271 0.09 0.302002 0.059998 0.316002 0.044998 0.120998 0.060001 0.240997 0.120003 0.362 0.497002 0.044998 0.316002 0.044998 0.242001 0.119999 0.528001 0.195 0.315998 0.045002 0.361 0.217", - "f0_timestep": "0.005", + "ph_dur": "0.525 0.075 0.316 0.045 0.271 0.09 0.242 0.12 0.136 0.045 0.241 0.12 0.361 0.498 0.045 0.241 0.12 0.121 0.06 0.437 0.105 0.241 0.12 0.723 0.905 0.18 0.271 0.09 0.302 0.06 0.316 0.045 0.121 0.06 0.241 0.12 0.362 0.497 0.045 0.316 0.045 0.242 0.12 0.528 0.195 0.316 0.045 0.361 0.217", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest E4 D4 E3 G3 A3 A3 rest A3 G3 A3 B3 B3 rest E4 D4 E3 G3 A3 A3 rest B3 A4 A4 D4 E4 rest", + "note_dur": "0.6 0.361 0.361 0.362 0.181 0.361 0.361 0.543 0.361 0.181 0.542 0.361 0.723 1.085 0.361 0.362 0.361 0.181 0.361 0.362 0.542 0.361 0.362 0.723 0.361 0.361 0.217", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 254.3 256.9 260.8 265.1 269.5 273.9 278.3 282.5 286.3 289.6 292.3 294.2 295.3 295.9 295.7 295.7 295.7 295.7 295.5 295.4 295.2 295.1 294.9 294.8 294.7 294.5 294.4 294.2 294.1 294.0 294.0 294.0 294.0 294.6 296.3 299.3 303.2 307.9 313.0 317.9 322.3 325.8 328.1 329.1 329.1 329.1 329.1 329.1 329.3 329.4 329.6 329.6 329.7 329.9 330.2 330.4 330.4 330.5 330.7 330.8 331.0 331.0 331.0 331.1 331.1 330.9 330.4 329.6 328.6 327.4 326.0 324.4 322.7 320.6 318.6 316.6 314.4 312.3 310.1 307.8 305.7 303.7 301.7 299.8 298.1 296.7 295.4 294.2 293.2 292.5 292.0 291.7 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.6 291.5 290.8 289.0 286.2 282.5 278.0 272.8 267.1 260.6 253.8 246.7 239.5 232.3 225.0 217.9 211.0 204.5 198.3 192.6 187.3 182.5 178.2 174.5 171.3 168.6 166.5 165.1 164.3 164.0 163.8 163.4 162.8 162.1 161.3 160.5 159.7 159.0 158.5 158.1 158.0 158.1 158.5 159.1 160.0 161.0 162.1 163.3 164.5 165.6 166.5 167.3 167.7 167.9 167.9 167.8 167.6 167.4 167.1 166.7 166.4 166.0 165.6 165.2 164.9 164.7 164.4 164.3 164.2 164.2 164.3 164.5 164.7 165.0 165.3 165.6 165.9 166.2 166.4 166.5 166.6 166.4 165.8 165.0 163.8 162.4 161.0 159.3 157.7 156.2 154.9 153.7 152.7 152.1 151.9 152.1 153.1 155.0 157.6 161.2 165.2 169.7 174.3 178.9 183.3 186.9 189.8 191.7 192.8 192.9 193.0 193.2 193.5 194.0 194.3 194.8 195.3 195.7 196.2 196.5 196.6 196.8 196.9 196.9 196.9 197.0 197.1 197.2 197.3 197.5 197.6 197.7 197.7 197.7 197.8 197.5 196.7 195.3 193.6 191.5 189.2 187.0 184.9 183.1 181.6 181.1 183.7 190.0 198.7 208.2 216.4 221.1 221.3 221.2 221.1 221.0 220.8 220.6 220.4 220.3 220.3 220.1 220.1 220.0 219.8 219.6 219.3 219.0 218.7 218.4 218.0 217.7 217.5 217.3 217.2 217.1 217.1 217.1 217.3 217.5 217.9 218.3 218.7 219.3 219.8 220.3 220.7 221.1 221.4 221.5 221.6 221.3 220.6 219.4 217.7 215.6 213.1 210.4 207.7 204.8 202.0 199.4 197.0 195.0 193.2 191.9 191.0 190.6 190.6 190.7 190.8 190.9 191.0 191.2 191.5 191.7 192.0 192.3 192.7 193.0 193.3 193.6 193.9 194.3 194.6 194.9 195.2 195.4 195.6 195.7 195.8 195.9 196.0 196.2 196.6 197.5 199.0 200.8 202.9 205.2 207.8 210.4 213.0 215.5 217.7 219.7 221.3 222.4 223.1 223.1 222.9 222.6 221.9 221.2 220.4 219.7 218.8 218.1 217.6 217.2 217.1 217.1 217.1 217.3 217.5 217.8 218.1 218.5 218.9 219.3 219.6 220.0 220.5 220.8 221.1 221.3 221.5 221.5 221.6 221.4 221.1 220.6 220.0 219.2 218.3 217.2 216.1 214.9 213.6 212.3 211.0 209.8 208.6 207.4 206.3 205.4 204.5 203.9 203.8 203.7 203.7 203.6 203.5 203.4 203.3 203.2 203.1 203.0 202.9 202.8 202.7 202.6 202.5 202.4 202.3 202.2 202.1 202.0 201.9 201.8 201.7 201.6 201.5 201.4 201.3 201.2 201.1 201.0 200.9 200.8 200.7 200.6 200.5 200.4 200.3 200.2 200.1 200.0 199.9 199.8 199.7 199.6 199.5 199.4 199.3 199.3 199.2 199.1 199.0 198.9 198.8 198.7 198.6 198.5 198.4 198.3 198.2 198.1 198.0 197.9 197.8 197.7 197.6 197.5 197.4 
197.3 197.2 197.1 197.0 196.9 196.8 196.8 196.7 196.6 196.5 196.4 196.3 196.2 196.1 196.0 195.9 195.8 195.7 195.6 195.5 195.4 195.3 195.2 195.1 195.0 194.9 194.9 194.8 194.7 194.6 194.4 194.3 194.2 194.0 193.9 193.7 193.6 193.5 193.4 193.4 193.4 193.4 193.5 193.7 194.9 196.5 198.4 200.8 203.6 206.6 209.7 212.8 215.7 218.3 220.6 222.3 223.3 223.8 223.7 223.4 223.0 222.4 221.8 221.0 220.3 219.6 218.9 218.4 218.0 217.9 217.9 218.0 218.3 218.7 219.3 219.8 220.3 220.8 221.1 221.3 221.2 220.7 219.6 217.9 215.6 212.9 209.9 206.8 203.7 200.7 197.9 195.5 193.4 191.9 191.0 190.6 190.7 191.0 191.4 191.9 192.4 193.1 193.8 194.6 195.4 196.0 196.6 197.1 197.4 197.6 197.8 197.7 197.6 197.4 197.2 196.9 196.6 196.2 195.9 195.6 195.2 194.9 194.6 194.4 194.2 194.1 194.1 194.1 194.3 194.6 195.1 195.7 196.1 196.5 196.8 196.9 196.7 196.4 196.0 195.5 195.0 194.5 194.1 193.8 193.7 193.9 194.4 195.2 196.3 197.7 199.4 201.3 203.4 205.7 208.0 210.3 212.5 214.6 216.5 218.2 219.6 220.6 221.3 221.6 221.6 221.5 221.4 221.2 221.0 220.7 220.5 220.2 219.9 219.6 219.4 219.2 219.0 218.9 218.9 218.9 218.9 219.0 219.2 219.4 219.7 220.0 220.2 220.6 220.8 221.1 221.4 221.6 221.8 221.9 222.0 222.0 221.9 221.7 221.5 221.1 220.7 220.4 220.0 219.6 219.3 218.9 218.6 218.4 218.3 218.2 218.2 218.3 218.6 219.0 219.5 220.1 220.8 221.4 222.0 222.7 223.2 223.6 223.9 224.1 224.0 223.3 221.9 219.7 217.0 213.9 210.6 207.3 204.4 201.4 198.7 196.3 194.5 193.3 192.9 192.9 192.9 192.9 193.1 193.2 193.4 195.1 195.9 198.4 202.0 206.8 212.4 220.0 220.0 220.0 220.0 220.1 220.1 220.1 220.3 220.3 220.3 220.4 220.4 220.5 220.5 220.5 220.5 220.7 220.9 221.6 222.7 224.2 225.9 227.9 230.1 232.5 234.8 237.2 239.4 241.4 243.1 244.4 245.3 245.9 246.3 246.5 246.8 247.1 247.4 247.8 248.2 248.5 248.7 249.1 249.2 249.4 249.5 249.6 251.3 250.9 250.1 249.1 247.7 245.8 243.8 241.5 239.2 237.0 234.9 232.7 230.7 228.9 227.5 226.4 225.6 225.3 225.5 226.3 227.7 229.6 232.0 234.7 237.5 240.5 243.3 245.8 248.1 249.8 250.8 251.3 251.1 251.0 250.9 250.8 250.5 250.3 250.1 249.7 249.4 249.0 248.7 248.4 248.1 247.8 247.6 247.3 247.1 247.0 246.9 246.9 246.8 246.8 246.8 246.8 246.8 246.8 246.8 246.7 246.7 246.7 246.6 246.5 246.5 246.4 246.4 246.3 246.2 246.2 246.2 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.4 246.6 247.0 247.5 247.9 248.5 249.2 250.0 250.6 251.3 252.0 252.7 253.3 253.8 254.3 254.7 255.0 255.2 255.3 255.2 254.6 253.8 252.7 251.4 250.0 248.4 246.9 245.4 244.0 242.8 242.1 241.6 241.4 241.6 241.9 242.4 243.2 244.0 245.1 246.3 247.6 248.8 250.1 251.4 252.6 253.7 254.6 255.4 255.9 256.3 256.5 256.4 256.1 255.7 255.2 254.6 253.8 252.8 251.8 250.7 249.6 248.4 247.2 246.1 245.0 244.0 243.1 242.3 241.7 241.2 240.8 240.6 240.6 240.6 241.0 241.4 242.0 242.7 243.6 244.4 245.3 246.3 247.2 247.9 248.6 249.2 249.6 249.9 250.1 250.0 249.9 249.6 249.3 249.0 248.7 248.3 248.0 248.1 248.2 248.4 248.6 248.8 249.0 249.2 249.4 249.6 249.7 249.9 250.1 250.3 250.5 250.7 250.9 251.1 251.3 251.5 251.6 251.8 252.0 252.2 252.4 252.6 252.8 253.0 253.2 253.4 253.5 253.7 253.9 254.1 254.3 254.5 254.7 254.9 255.1 255.3 255.5 255.7 255.9 256.1 256.2 256.4 256.6 256.8 257.0 257.2 257.4 257.6 257.8 258.0 258.2 258.4 258.6 258.8 259.0 259.2 259.4 259.6 259.8 260.0 260.1 260.3 260.5 260.7 260.9 261.1 261.3 261.5 261.7 261.9 262.1 262.3 262.5 262.7 262.9 263.1 263.3 263.5 263.7 263.9 264.1 264.3 264.5 264.7 264.9 265.1 265.3 265.5 265.7 265.9 266.1 266.3 266.5 266.7 266.9 267.1 267.3 267.5 267.7 267.9 268.1 268.3 268.5 268.7 268.9 269.1 269.3 269.6 269.8 270.0 270.2 270.4 270.6 
270.8 271.0 271.2 271.4 271.6 271.8 272.0 272.2 272.4 272.6 272.8 273.0 273.2 273.4 273.7 273.9 274.1 274.3 274.5 274.7 274.9 275.1 275.3 275.5 275.7 275.9 276.1 276.4 276.6 276.8 277.0 277.2 277.4 277.6 277.8 278.0 278.2 278.5 278.7 278.9 279.1 279.3 279.5 279.7 279.9 280.1 280.4 280.6 280.8 281.0 281.2 281.4 281.6 281.8 282.1 282.3 282.5 282.7 282.9 283.1 283.3 283.5 283.8 284.0 284.2 284.4 284.6 284.8 285.1 285.3 285.5 285.7 285.9 286.1 286.3 286.6 286.8 287.0 287.2 287.4 287.6 287.9 288.1 288.3 288.5 288.7 289.0 289.2 289.4 289.6 289.8 290.0 290.3 290.5 290.7 290.8 290.8 290.8 290.8 290.8 290.8 291.0 291.7 292.8 294.3 296.0 298.1 300.4 303.0 305.8 308.7 311.6 314.6 317.7 320.4 323.1 325.6 327.7 329.5 331.0 331.9 332.4 332.7 332.7 332.7 332.7 332.7 332.8 332.9 332.9 333.0 333.1 333.1 333.1 333.1 333.3 332.9 332.2 331.2 329.7 327.9 325.7 323.2 320.4 317.6 314.7 311.8 308.8 306.0 303.3 300.9 298.7 296.7 295.0 293.7 292.8 292.3 292.1 292.1 292.1 292.2 292.4 292.5 292.6 292.8 293.0 293.2 293.4 293.7 293.8 294.0 294.1 294.3 294.3 294.4 294.5 294.4 294.3 294.3 294.1 294.0 293.8 293.6 293.4 293.2 292.9 292.8 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.1 290.8 288.6 285.5 281.7 276.9 271.5 265.6 259.2 252.5 245.5 238.3 231.1 224.0 217.1 210.3 203.8 197.7 191.9 186.6 181.7 177.3 173.3 169.9 167.1 164.7 162.9 161.5 160.8 160.6 160.6 160.6 160.7 160.8 161.0 161.1 161.4 161.6 161.8 162.1 162.4 162.6 162.9 163.1 163.3 163.5 163.6 163.8 163.8 163.9 164.0 164.0 164.0 164.0 164.0 164.0 164.1 164.1 164.1 164.2 164.3 164.3 164.4 164.4 164.5 164.6 164.6 164.7 164.7 164.7 164.7 164.7 164.8 164.8 164.8 164.9 165.0 165.2 165.3 165.5 165.8 166.0 166.2 166.4 166.6 166.8 167.0 167.1 167.2 167.3 167.4 167.2 166.7 165.9 164.7 163.2 161.5 159.8 158.0 156.3 154.8 153.7 152.8 152.3 152.1 156.9 167.9 181.9 194.6 202.3 202.2 202.1 201.8 201.5 201.0 200.7 200.2 199.7 199.2 198.8 198.5 198.3 198.2 198.1 198.0 198.0 198.0 197.9 197.9 197.8 197.7 197.6 197.5 197.5 197.5 197.5 197.3 196.9 196.5 196.1 195.8 195.7 196.0 197.1 198.8 201.0 203.6 206.5 209.6 212.5 215.3 217.6 219.5 220.6 221.0 220.9 220.8 220.7 220.5 220.2 220.0 219.7 219.4 219.0 218.7 218.4 218.1 217.9 217.7 217.5 217.5 217.5 217.5 217.5 217.7 217.9 218.2 218.5 218.9 219.2 219.6 220.0 220.4 220.7 220.9 221.1 221.3 221.3 221.3 221.3 221.3 221.1 221.0 220.9 220.8 220.6 220.5 220.3 220.1 220.0 219.8 219.7 219.6 219.6 219.6 219.6 219.6 219.6 219.7 219.8 219.9 220.1 220.3 220.4 220.6 220.8 221.0 221.1 221.3 221.4 221.5 221.5 221.5 221.7 221.6 221.4 221.3 221.1 220.8 220.5 220.1 219.7 219.2 218.8 218.4 217.9 217.4 217.0 216.6 216.2 215.9 215.6 215.4 215.2 215.1 215.1 215.3 215.7 216.4 217.5 218.6 220.1 221.5 222.9 224.1 225.2 226.1 226.5 226.7 226.4 226.0 225.4 224.7 223.9 222.9 221.8 220.7 219.6 218.6 217.7 216.8 216.2 215.8 215.7 216.0 216.7 217.9 219.5 221.3 223.0 224.4 225.5 226.0 225.9 225.5 224.8 223.9 223.1 222.1 221.1 220.2 219.4 218.7 218.4 218.1 217.7 217.4 217.1 216.8 216.4 216.1 215.8 215.5 215.1 214.8 214.5 214.2 213.8 213.5 213.2 212.9 212.5 212.2 211.9 211.6 211.3 211.0 210.6 210.3 210.0 209.7 209.4 209.0 208.7 208.4 208.1 207.8 207.5 207.2 206.9 206.5 206.2 205.9 205.6 205.3 205.0 204.7 204.4 204.1 203.8 203.4 203.1 202.8 202.5 202.2 201.9 201.6 201.3 201.0 200.7 200.4 200.1 199.8 199.5 199.2 198.9 198.6 198.3 198.0 197.7 197.4 197.1 196.8 196.5 196.2 195.9 195.6 195.3 195.0 194.7 194.4 194.1 193.9 193.6 193.3 193.0 192.7 192.4 192.1 191.8 191.5 191.2 190.9 190.7 190.4 190.1 189.8 189.5 189.2 
188.9 189.1 189.8 191.0 192.7 195.0 197.7 200.9 204.4 208.3 212.4 216.6 221.0 225.4 229.7 233.8 237.7 241.2 244.1 246.5 248.3 249.5 250.0 250.0 249.8 249.6 249.3 248.8 248.3 247.9 247.5 247.0 246.6 246.4 246.1 246.1 246.1 246.1 246.3 246.5 246.8 247.1 247.4 247.8 248.0 248.4 248.6 248.9 249.0 249.1 248.1 248.1 248.3 249.0 250.5 252.7 255.8 259.6 264.0 269.0 274.6 280.7 287.4 294.6 302.2 310.1 318.2 326.4 334.5 342.6 350.3 357.7 364.5 370.0 375.2 380.0 383.8 386.5 388.2 388.8 388.8 388.8 388.8 388.8 388.8 388.8 388.8 389.0 389.1 389.1 389.1 389.1 389.1 389.1 389.1 389.2 389.6 390.9 393.3 396.6 401.2 406.3 411.8 417.6 423.4 429.0 433.9 437.8 440.8 442.6 443.3 443.1 442.8 442.5 441.9 441.3 440.6 439.7 439.1 438.5 437.8 437.3 436.9 436.5 436.5 436.2 435.4 433.9 431.2 428.1 424.4 420.4 416.1 411.5 407.0 402.6 398.6 395.0 392.0 389.5 387.8 387.0 387.3 389.7 396.9 406.8 417.7 428.4 437.3 443.1 444.0 443.8 443.8 443.8 443.7 443.4 443.2 443.0 442.8 442.4 442.1 441.8 441.4 440.9 440.5 440.3 439.9 439.5 439.1 438.6 438.2 438.0 437.6 437.2 436.9 436.7 436.5 436.3 436.1 435.9 435.7 435.7 435.7 435.7 435.5 435.3 435.1 434.8 434.4 433.9 433.3 432.5 431.7 430.8 429.7 428.7 427.7 426.3 425.1 424.0 422.6 420.9 419.3 417.6 415.9 414.2 412.2 410.2 408.2 406.3 404.3 402.1 400.0 397.7 395.4 393.1 390.8 388.4 386.2 383.7 381.3 378.9 376.4 374.0 371.5 369.0 366.7 364.3 361.8 359.2 356.8 354.4 352.0 349.6 347.3 345.0 342.6 340.3 338.0 335.9 333.7 331.5 329.4 327.2 325.2 323.2 321.3 319.4 317.8 316.0 314.2 312.4 310.8 309.2 307.5 306.0 304.5 303.1 301.8 300.5 299.2 298.1 296.9 295.8 294.8 293.8 293.0 292.1 291.4 290.7 290.0 289.5 288.9 288.5 288.0 287.7 287.4 287.3 287.2 287.1 287.0 286.6 285.4 283.7 281.5 278.9 276.0 272.7 269.3 265.9 262.5 259.4 256.4 253.7 251.5 249.9 248.6 247.8 247.7 247.9 248.9 250.2 252.1 254.3 257.0 259.8 263.1 266.6 270.1 273.7 277.2 280.7 283.9 286.6 289.0 291.0 292.5 293.3 293.7 293.5 293.5 293.4 293.3 293.3 293.2 293.0 292.9 292.8 292.8 292.6 292.6 292.6 292.6 292.8 293.2 293.8 294.3 295.2 295.9 296.6 297.4 298.1 298.6 298.9 299.1 299.3 299.0 298.1 296.9 295.4 293.6 291.6 289.5 287.5 285.6 283.9 282.5 281.4 280.9 280.7 281.1 282.3 284.1 286.5 289.6 293.2 297.3 301.7 306.3 311.0 315.6 320.1 323.9 327.5 330.3 332.4 333.8 334.4 334.1 333.7 332.9 331.9 330.9 329.8 328.9 328.0 327.5 327.4 327.4 327.4 327.7 328.2 328.7 329.3 329.9 330.7 331.5 332.2 332.9 333.7 334.4 335.1 335.7 336.4 336.8 337.1 337.2 337.5 337.3 336.7 335.7 334.5 332.9 331.1 329.2 327.3 325.3 323.5 321.9 320.5 319.5 318.9 318.6 318.7 319.1 319.9 321.1 322.5 324.0 325.8 327.6 329.4 331.1 332.5 333.6 334.5 335.0 335.4 335.2 334.8 334.2 333.5 332.5 331.5 330.4 329.3 328.3 327.4 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2 327.2", - "input_type": "phoneme", - "offset": 25.786 + "f0_timestep": "0.005" }, { + "offset": 37.012, "text": "AP 嫣 然 笑 几 分 娇 SP 不 染 风 尘 铸 的 刀 AP 悬 琴 箫 风 雅 SP 温 柔 声 色 告 SP", "ph_seq": "AP y En r an x iao j i f en j iao SP b u r an f eng ch en zh u d e d ao AP x van q in x iao f eng y a SP w en r ou sh eng s e g ao SP", - "note_seq": "rest D4 D4 B3 B3 E3 E3 G3 G3 B3 B3 A3 A3 rest G4 G4 F#4 F#4 G4 G4 F#4 F#4 G4 G4 A4 A4 B4 B4 rest A3 A3 A3 A3 B3 B3 E4 E4 D4 D4 rest B3 B3 A3 A3 E3 E3 G3 G3 E3 E3 rest", - "note_dur_seq": "0.578 0.181 0.181 0.362 0.362 0.5419999 0.5419999 0.1800001 0.1800001 0.5429999 0.5429999 
0.3610001 0.3610001 0.3610001 0.181 0.181 0.181 0.181 0.3610001 0.3610001 0.3619998 0.3619998 0.1810002 0.1810002 0.3610001 0.3610001 0.9039998 0.9039998 0.7220001 0.1809998 0.1809998 0.362 0.362 0.5420003 0.5420003 0.3610001 0.3610001 0.3619995 0.3619995 0.3610001 0.3620005 0.3620005 0.1799994 0.1799994 0.1810007 0.1810007 0.3619995 0.3619995 0.3610001 0.3610001 0.144", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.533002 0.044998 0.136002 0.044998 0.242001 0.119999 0.437 0.105 0.119999 0.060001 0.422997 0.120003 0.361 0.300999 0.060001 0.135998 0.045002 0.120999 0.060001 0.256001 0.105 0.301998 0.060001 0.136002 0.044998 0.301003 0.059998 0.904 0.616997 0.105003 0.121002 0.059998 0.242001 0.119999 0.376999 0.165001 0.316002 0.044998 0.362 0.315998 0.045002 0.316998 0.045002 0.119998 0.060001 0.121003 0.059998 0.257 0.105 0.361 0.144", - "f0_timestep": "0.005", + "ph_dur": "0.533 0.045 0.136 0.045 0.242 0.12 0.437 0.105 0.12 0.06 0.423 0.12 0.361 0.301 0.06 0.136 0.045 0.121 0.06 0.256 0.105 0.302 0.06 0.136 0.045 0.301 0.06 0.904 0.617 0.105 0.121 0.06 0.242 0.12 0.377 0.165 0.316 0.045 0.362 0.316 0.045 0.317 0.045 0.12 0.06 0.121 0.06 0.257 0.105 0.361 0.144", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest D4 B3 E3 G3 B3 A3 rest G4 F#4 G4 F#4 G4 A4 B4 rest A3 A3 B3 E4 D4 rest B3 A3 E3 G3 E3 rest", + "note_dur": "0.578 0.181 0.362 0.542 0.18 0.543 0.361 0.361 0.181 0.181 0.361 0.362 0.181 0.361 0.904 0.722 0.181 0.362 0.542 0.361 0.362 0.361 0.362 0.18 0.181 0.362 0.361 0.144", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.3 293.4 293.6 293.8 294.1 294.3 294.6 294.9 295.1 295.3 295.6 295.9 296.2 296.3 296.5 296.6 296.7 296.7 296.8 296.8 296.7 296.5 296.0 295.6 295.1 294.5 294.0 293.6 293.0 292.6 292.1 291.8 291.6 291.6 291.1 289.8 288.0 285.5 282.5 278.9 275.1 271.5 267.9 264.1 260.3 257.0 254.1 251.7 249.9 248.7 248.1 248.0 247.9 247.7 247.5 247.2 246.9 246.5 246.2 245.9 245.6 245.4 245.3 245.2 245.2 245.2 245.5 245.8 246.2 246.7 247.2 247.8 248.4 249.0 249.5 250.0 250.5 250.8 251.1 251.2 251.0 250.4 249.4 247.8 245.7 243.2 240.3 237.0 233.4 229.6 225.6 221.8 217.9 213.6 209.2 204.9 200.7 196.7 192.8 189.0 185.5 182.3 179.3 176.6 174.2 172.2 170.4 169.0 168.1 167.4 167.0 166.9 166.8 166.8 166.7 166.6 166.6 166.5 166.4 166.2 166.2 166.0 165.9 165.7 165.6 165.5 165.3 165.2 165.1 165.0 164.9 164.9 164.8 164.8 164.8 164.8 164.7 164.7 164.6 164.6 164.5 164.4 164.3 164.2 164.1 164.1 164.0 163.9 163.8 163.8 163.7 163.7 163.7 163.7 163.7 163.7 163.8 163.8 163.9 164.0 164.1 164.2 164.2 164.3 164.5 164.6 164.6 164.6 164.7 164.7 164.8 164.8 164.7 164.7 164.7 164.7 164.7 164.6 164.6 164.5 164.5 164.5 164.5 164.5 164.5 164.6 164.8 165.1 165.5 165.9 166.2 166.4 166.6 166.5 166.2 165.6 164.7 
163.7 162.4 161.1 159.8 158.4 157.1 156.0 155.0 154.3 153.7 153.5 153.7 154.4 155.8 157.8 160.5 163.7 167.2 170.8 174.3 178.4 182.3 186.1 189.5 192.4 194.6 195.9 196.6 196.5 196.5 196.4 196.3 196.3 196.2 196.2 196.2 196.3 196.4 196.6 197.0 197.2 197.4 197.1 196.1 194.6 192.0 189.1 186.1 183.2 180.6 178.6 177.5 177.3 178.1 180.5 184.1 188.6 193.9 199.7 205.5 211.0 215.8 219.4 221.6 222.4 222.3 222.3 222.2 222.0 221.9 221.7 221.5 221.3 221.1 220.9 220.7 220.5 220.4 220.2 220.0 220.0 220.0 220.0 220.3 221.1 222.3 223.9 225.9 228.1 230.5 233.2 236.0 238.8 241.4 243.7 245.9 248.0 249.7 250.9 251.8 252.1 252.0 251.8 251.5 251.3 250.9 250.4 250.0 249.6 249.2 248.9 248.6 248.5 248.5 248.5 248.5 248.6 248.8 249.0 249.2 249.4 249.7 250.0 250.2 250.4 250.7 250.9 251.2 251.3 251.5 251.5 251.5 251.6 251.4 250.7 249.6 248.2 246.6 244.7 242.6 240.2 237.8 235.4 233.1 231.1 228.9 226.8 224.9 223.3 221.9 220.8 220.1 219.7 219.5 219.1 218.4 217.3 215.8 214.2 212.4 210.5 208.5 206.7 205.0 203.4 202.2 201.3 200.6 200.4 200.5 200.8 201.6 202.8 204.3 206.1 208.1 210.2 212.5 214.7 217.0 219.1 221.0 222.6 223.8 224.8 225.1 225.1 224.8 224.0 223.1 221.9 220.6 219.3 218.0 217.0 215.8 214.9 214.4 214.0 214.2 215.0 216.3 218.1 220.1 222.0 223.5 224.4 224.8 224.6 224.0 223.2 222.1 220.9 219.6 218.4 217.2 216.3 215.6 215.2 215.1 215.2 215.5 215.9 216.5 217.3 218.1 218.9 219.8 220.5 221.1 221.6 221.9 222.0 221.9 221.7 221.5 221.1 220.8 220.4 219.9 219.5 219.1 218.8 218.5 218.4 218.2 219.8 221.7 223.6 225.6 227.6 229.6 231.6 233.6 235.7 237.7 239.8 241.9 244.0 246.2 248.3 250.5 252.7 254.9 257.2 259.4 261.7 264.0 266.3 268.6 271.0 273.4 275.8 278.2 280.6 283.1 285.6 288.1 290.6 293.1 295.7 298.3 300.9 303.6 306.2 308.9 311.6 314.3 317.1 319.9 322.7 325.5 328.4 331.2 334.2 337.1 340.0 343.0 346.0 349.1 352.1 355.2 358.3 361.5 364.6 367.8 371.1 374.3 377.6 380.9 384.2 387.6 391.0 394.4 397.9 401.4 399.4 397.3 395.4 393.5 391.9 390.8 389.9 389.3 389.3 389.3 389.5 389.7 390.0 390.4 390.9 391.3 391.9 392.3 392.7 393.1 393.4 393.6 393.6 393.5 392.6 391.2 389.3 386.5 383.9 381.1 377.8 374.5 371.5 368.7 366.4 364.5 363.3 362.8 362.8 363.0 363.4 364.0 364.9 365.8 366.7 367.7 368.6 369.5 370.4 371.3 371.9 372.4 372.6 372.8 372.1 370.5 368.0 364.7 360.8 356.4 351.4 346.4 341.5 336.9 332.5 328.7 325.7 323.3 321.8 321.2 329.6 348.6 372.6 391.8 394.4 394.3 394.3 394.3 394.1 393.9 393.7 393.5 393.4 393.4 393.2 393.1 393.1 393.1 392.9 392.8 392.4 392.1 391.7 391.1 390.5 390.1 389.7 389.4 389.3 389.3 389.3 389.6 390.2 390.9 391.8 392.9 393.8 395.0 396.0 396.9 397.9 398.6 399.0 399.2 399.4 398.8 397.5 395.6 393.0 390.0 386.7 383.0 379.6 376.0 372.7 369.8 367.3 365.3 364.1 363.6 363.4 363.5 363.7 363.8 364.2 364.5 364.9 365.3 365.9 366.4 366.9 367.6 368.2 368.7 369.2 369.8 370.1 370.5 370.9 371.1 371.3 371.5 371.7 371.6 371.5 371.4 371.1 370.9 370.6 370.2 369.8 369.3 368.9 368.6 368.1 367.7 367.3 367.0 366.8 366.4 366.4 366.4 366.4 366.4 366.4 366.6 366.9 367.1 367.4 367.7 368.1 368.6 369.1 369.6 370.0 370.5 371.1 371.6 372.1 372.7 373.1 373.5 374.1 374.4 374.8 375.1 375.2 375.4 375.6 375.6 375.6 374.5 371.5 367.8 363.4 357.9 352.5 347.3 342.9 339.6 337.6 337.1 338.1 341.2 345.7 351.5 358.1 365.3 372.8 379.8 386.0 390.8 393.9 395.6 395.6 395.6 395.7 395.9 396.1 396.3 396.5 396.8 397.1 397.3 397.5 397.5 397.5 397.3 396.1 393.8 390.0 385.6 380.5 375.1 369.6 364.5 360.1 356.5 353.9 352.4 390.2 391.1 393.9 397.5 402.1 408.0 414.3 420.8 426.7 431.7 435.5 437.9 438.4 438.2 438.0 437.8 437.4 437.0 436.5 436.1 435.7 435.3 435.0 434.9 434.9 434.9 
435.4 436.2 437.1 438.2 439.5 440.6 441.6 442.5 443.3 443.9 444.1 443.9 443.7 443.3 442.8 442.2 441.5 440.9 440.1 439.2 438.5 437.8 437.2 436.6 436.1 435.8 435.7 435.7 437.0 435.4 432.9 429.7 426.0 421.7 417.8 414.3 411.6 409.6 408.6 408.9 409.6 410.8 412.2 413.2 413.9 414.3 415.3 417.1 419.5 422.2 425.5 429.0 432.0 434.9 438.0 440.5 442.2 443.4 444.1 443.9 443.7 443.4 443.1 442.7 442.3 441.9 441.3 440.8 440.3 439.7 439.2 438.8 438.4 438.1 437.9 437.7 437.7 437.7 438.2 439.0 440.0 441.4 443.1 445.0 447.1 449.4 451.8 454.5 457.4 460.5 463.6 466.6 469.6 472.8 476.0 478.9 481.8 484.4 486.9 489.2 491.3 493.2 494.6 495.7 496.6 497.3 497.6 496.6 497.7 498.8 499.4 499.9 500.2 500.0 499.6 499.1 498.6 497.9 496.9 495.8 494.6 493.3 491.9 490.8 489.3 488.1 486.9 485.9 485.0 484.3 483.6 483.1 482.9 482.9 483.1 483.7 484.8 486.3 488.0 489.9 492.3 494.5 496.7 498.9 500.8 502.5 504.0 504.8 505.4 505.5 504.8 503.4 501.5 499.1 496.5 493.6 490.8 488.5 486.6 485.1 484.4 484.3 484.3 484.6 485.3 486.0 486.8 487.7 488.7 489.9 491.3 492.7 494.2 495.6 497.0 498.2 499.6 500.7 501.7 502.7 503.7 504.4 504.9 505.3 505.6 505.4 504.7 503.7 502.2 500.1 497.7 495.2 492.6 490.0 487.4 485.0 482.9 481.2 480.0 479.2 479.0 479.0 479.5 480.2 481.2 482.6 484.2 485.9 487.6 489.5 491.4 493.0 494.7 496.0 497.0 497.8 498.3 498.4 498.2 498.2 497.7 497.4 497.0 496.6 496.0 495.3 494.8 494.4 493.9 493.2 492.7 492.3 492.0 491.6 491.3 488.9 485.8 482.8 479.8 476.8 473.8 470.9 467.9 465.0 462.1 459.2 456.4 453.5 450.7 447.9 445.1 442.3 439.5 436.8 434.1 431.4 428.7 426.0 423.3 420.7 418.1 415.5 412.9 410.3 407.7 405.2 402.7 400.2 397.7 395.2 392.7 390.3 387.8 385.4 383.0 380.6 378.3 375.9 373.5 371.2 368.9 366.6 364.3 362.0 359.8 357.5 355.3 353.1 350.9 348.7 346.5 344.4 342.2 340.1 338.0 335.9 333.8 331.7 329.6 327.6 325.5 323.5 321.5 319.5 317.5 315.5 313.5 311.6 309.6 307.7 305.8 303.9 302.0 300.1 298.2 296.4 294.5 292.7 290.8 289.0 287.2 285.4 283.7 281.9 280.1 278.4 276.6 274.9 273.2 271.5 269.8 268.1 266.4 264.8 263.1 261.5 259.9 258.2 256.6 255.0 253.4 251.9 250.3 248.7 247.2 245.6 244.1 242.6 241.1 239.6 238.1 236.6 235.1 233.6 232.2 230.7 229.3 227.9 226.4 225.0 223.6 222.2 220.8 219.5 218.1 216.7 215.4 214.0 212.7 211.4 210.1 208.8 207.5 206.2 204.9 204.4 207.1 209.7 212.3 214.8 217.0 218.9 220.6 221.7 222.4 222.8 222.7 222.7 222.6 222.6 222.5 222.4 222.4 222.4 222.2 221.3 218.6 215.2 211.5 208.0 205.2 203.5 203.3 203.6 204.3 205.4 206.7 208.3 209.9 211.8 213.6 215.3 216.8 218.0 219.0 219.7 220.0 219.9 219.9 219.7 219.6 219.5 219.2 219.1 218.9 218.7 218.6 218.5 218.3 218.2 218.2 218.2 218.2 218.4 218.6 218.9 219.2 219.5 219.9 220.3 220.7 221.0 221.3 221.6 221.8 221.9 222.0 221.9 221.4 220.6 219.5 218.2 216.8 215.1 213.6 212.0 210.5 209.2 208.2 207.5 207.1 206.9 207.0 207.3 207.8 208.4 209.2 210.2 211.2 212.4 213.6 214.7 216.0 217.1 218.2 219.2 220.1 220.7 221.2 221.5 221.7 221.6 221.5 221.5 221.5 221.5 221.5 221.5 222.4 222.6 222.9 223.5 224.1 224.9 225.9 227.0 228.2 229.5 230.8 232.2 233.7 235.1 236.3 237.7 239.1 240.4 241.6 242.6 243.7 244.5 245.1 245.6 245.9 245.1 244.8 244.5 244.1 243.8 243.6 243.5 243.4 243.4 243.4 243.5 243.7 244.0 244.2 244.5 244.9 245.3 245.8 246.2 246.6 247.1 247.6 247.9 248.3 248.6 248.8 248.9 249.1 249.2 249.2 249.2 249.4 249.6 249.7 250.0 250.1 250.2 250.3 250.4 250.4 250.1 249.1 247.4 245.1 242.3 239.0 235.6 232.1 228.7 225.4 222.4 220.1 218.2 216.9 216.2 216.3 216.9 218.4 220.8 223.9 227.8 232.4 237.6 243.5 250.1 257.1 264.4 272.1 279.8 287.6 295.2 302.4 309.2 315.3 320.8 325.3 328.6 330.8 331.9 
332.0 331.9 333.7 333.7 333.6 333.5 333.1 332.8 332.5 332.2 331.9 331.4 331.0 330.5 330.0 329.5 329.1 328.6 328.2 327.8 327.4 327.1 327.0 326.7 326.5 326.4 326.4 326.4 326.5 326.6 326.8 327.3 327.6 328.0 328.5 329.1 329.5 330.0 330.5 331.0 331.4 331.9 332.1 332.4 332.5 332.6 331.9 329.8 326.3 321.5 315.7 308.6 300.9 292.8 284.6 276.5 268.7 261.3 254.5 248.4 243.3 239.1 235.8 233.8 232.9 233.1 234.4 237.0 240.7 245.5 251.2 257.4 264.0 270.6 277.0 282.5 287.3 290.8 292.9 293.6 293.5 293.5 293.4 293.2 292.9 292.6 292.5 292.3 292.0 291.7 291.6 291.5 291.3 291.3 291.4 291.6 292.0 292.7 293.5 294.5 295.6 296.6 297.6 298.5 299.1 299.5 299.8 299.7 299.5 299.2 299.0 298.4 297.8 297.2 296.6 295.9 295.2 294.5 293.8 293.1 292.5 292.1 291.6 291.4 291.3 291.3 291.4 291.8 292.4 293.0 293.8 294.7 295.5 296.4 297.1 297.6 298.1 298.5 298.6 298.6 298.2 297.6 296.9 295.9 295.0 294.2 293.4 292.8 291.4 290.0 288.6 287.3 285.9 284.5 283.1 281.8 280.4 279.1 277.8 276.4 275.1 273.8 272.5 271.2 269.9 268.6 267.3 266.0 264.8 263.5 262.2 261.0 259.7 258.5 257.2 256.0 254.8 253.6 252.3 251.1 249.9 248.7 247.6 246.4 245.2 244.0 242.8 241.7 240.5 239.4 238.2 237.1 236.0 234.8 233.7 232.6 231.5 230.4 229.3 228.2 227.1 226.0 224.9 223.8 222.8 221.7 220.6 219.6 218.5 217.5 217.6 217.7 217.8 217.9 218.0 218.1 218.2 218.3 218.4 218.2 218.4 218.7 219.1 219.7 220.6 221.5 222.5 223.6 224.8 226.1 227.5 228.9 230.4 231.9 233.5 235.0 236.4 237.9 239.3 240.6 241.9 242.9 243.9 244.7 245.3 245.9 246.2 246.3 246.5 246.4 246.4 246.3 246.2 246.2 246.1 246.0 245.9 245.8 245.6 245.5 245.4 245.3 245.2 245.2 245.2 245.2 245.1 244.7 244.3 243.8 243.1 242.2 241.3 240.1 239.0 237.8 236.5 235.1 233.8 232.4 231.1 229.7 228.4 227.1 225.9 224.8 223.8 222.9 222.2 221.6 221.0 220.6 220.3 220.3 220.3 220.3 220.4 220.5 220.7 220.9 221.1 221.2 221.4 221.5 221.5 221.5 221.1 220.4 219.5 218.1 216.7 215.0 212.9 210.7 208.3 205.6 202.9 200.1 197.2 194.4 191.5 188.7 185.9 183.2 180.7 178.3 176.1 174.0 172.1 170.4 169.0 167.8 166.8 166.1 165.6 165.3 165.3 165.5 165.9 166.3 166.6 166.9 166.6 165.8 164.4 162.6 160.4 158.1 155.8 153.7 151.8 150.3 149.3 150.2 151.1 152.8 155.2 158.5 162.4 166.6 171.2 175.9 180.7 185.1 188.9 192.1 194.4 195.9 196.2 196.1 196.0 195.8 195.7 195.5 195.3 195.1 195.0 195.0 195.0 195.0 195.0 195.2 195.3 195.5 195.7 195.8 196.0 196.2 196.5 196.7 196.9 197.1 197.2 197.2 197.3 197.4 197.4 197.2 196.7 196.0 195.0 193.7 192.3 190.7 188.9 187.0 185.0 183.0 180.9 178.8 176.7 174.8 172.9 171.2 169.5 168.1 166.9 165.8 164.9 164.2 163.8 163.7 164.0 164.2 164.3 164.5 164.6 164.7 164.7 164.8 164.8 164.7 164.6 164.4 164.1 163.9 163.6 163.3 163.1 162.8 162.5 162.2 162.0 161.8 161.7 161.6 161.6 161.6 161.8 162.1 162.5 162.9 163.4 164.0 164.6 165.2 165.8 166.3 166.7 167.1 167.4 167.5 167.7 167.6 167.5 167.3 167.0 166.7 166.3 165.9 165.5 165.0 164.5 164.1 163.7 163.3 162.9 162.6 162.4 162.2 162.2 162.2 162.2 162.4 162.6 162.9 163.1 163.5 163.9 164.3 164.7 165.0 165.3 165.5 165.8 165.8 165.8 165.7 165.6 165.5 165.3 165.1 164.9 164.6 164.3 164.1 163.8 163.6 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4 163.4", - "input_type": "phoneme", - "offset": 37.012 + "f0_timestep": "0.005" }, { + "offset": 48.918, "text": "SP 娇 容 调 的 色 调 SP 惊 艳 的 刚 好 SP 梨 涡 晕 的 正 俏 SP 潋 滟 如 云 渺 SP", "ph_seq": "SP j iao r ong t iao d e s e d iao SP j ing y En d e g ang h ao SP l i w o y vn d e zh eng q iao SP l ian y En r u y vn m iao SP", - "note_seq": "rest E4 E4 D4 
D4 E3 E3 G3 G3 A3 A3 A3 A3 rest A3 A3 G3 G3 A3 A3 B3 B3 B3 B3 rest E4 E4 D4 D4 B3 B3 G3 G3 A3 A3 A3 A3 rest B3 B3 A4 A4 A4 A4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.6 0.362 0.362 0.3609999 0.3609999 0.3610001 0.3610001 0.181 0.181 0.362 0.362 0.3610001 0.3610001 0.5420001 0.362 0.362 0.1799998 0.1799998 0.5430002 0.5430002 0.3610001 0.3610001 0.7229996 0.7229996 1.084 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.1799998 0.1799998 0.3620005 0.3620005 0.3610001 0.3610001 0.5419998 0.3619995 0.3619995 0.3610001 0.3610001 0.7230005 0.7230005 0.3619995 0.3619995 0.5419998 0.5419998 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.480001 0.119999 0.317002 0.044998 0.285999 0.075001 0.315998 0.045002 0.120999 0.060001 0.301999 0.060001 0.361 0.478867 0.063133 0.317002 0.044998 0.135002 0.044998 0.467999 0.075001 0.256001 0.105 0.723 1.038998 0.045002 0.317002 0.044998 0.316002 0.044998 0.317002 0.044998 0.119998 0.060001 0.242002 0.119999 0.361 0.481998 0.060001 0.302002 0.059998 0.316002 0.044998 0.662999 0.060001 0.241997 0.120003 0.542 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.48 0.12 0.317 0.045 0.286 0.075 0.316 0.045 0.121 0.06 0.302 0.06 0.361 0.4789 0.0631 0.317 0.045 0.135 0.045 0.468 0.075 0.256 0.105 0.723 1.039 0.045 0.317 0.045 0.316 0.045 0.317 0.045 0.12 0.06 0.242 0.12 0.361 0.482 0.06 0.302 0.06 0.316 0.045 0.663 0.06 0.242 0.12 0.542 0.4", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest E4 D4 E3 G3 A3 A3 rest A3 G3 A3 B3 B3 rest E4 D4 B3 G3 A3 A3 rest B3 A4 A4 D4 E4 rest", + "note_dur": "0.6 0.362 0.361 0.361 0.181 0.362 0.361 0.542 0.362 0.18 0.543 0.361 0.723 1.084 0.362 0.361 0.362 0.18 0.362 0.361 0.542 0.362 0.361 0.723 0.362 0.542 0.4", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 238.4 241.6 247.3 253.3 259.4 265.4 271.2 276.8 281.9 286.5 290.5 293.8 296.2 297.8 298.7 299.0 299.7 300.8 302.3 304.4 306.7 309.2 311.5 313.8 316.4 318.5 320.4 322.0 323.1 323.7 323.7 323.6 323.6 323.6 323.6 323.6 323.5 323.4 323.4 323.4 323.2 323.2 323.2 323.2 323.2 323.2 323.2 323.2 323.2 323.4 323.8 324.2 324.5 324.8 325.1 325.1 325.2 324.8 324.3 323.6 322.5 321.3 319.8 318.1 316.4 314.4 312.3 310.3 308.3 306.3 304.2 302.3 300.4 298.7 297.2 295.8 294.7 293.8 293.2 292.8 292.6 292.5 292.5 292.4 292.3 292.3 292.2 292.0 291.9 291.7 291.6 291.5 291.5 291.4 291.3 291.3 291.3 291.3 291.4 291.6 292.0 292.4 292.6 293.2 293.7 294.1 294.5 294.7 295.0 295.3 295.4 295.4 294.8 293.8 292.0 289.8 287.2 284.2 280.8 277.2 273.3 269.3 265.7 262.2 258.3 254.6 251.1 247.8 244.8 242.1 239.9 238.1 236.7 235.8 235.3 233.2 227.6 218.1 206.0 193.0 181.5 172.5 166.5 163.7 163.7 163.7 163.9 164.1 164.2 164.6 164.9 165.1 165.4 165.7 166.0 166.2 
166.4 166.5 166.6 166.6 166.5 166.5 166.4 166.3 166.2 166.0 165.8 165.7 165.5 165.4 165.2 165.0 164.9 164.8 164.8 164.8 164.8 164.8 164.8 164.8 164.9 165.0 165.0 165.1 165.1 165.2 165.3 165.4 165.4 165.5 165.6 165.7 165.8 165.8 165.8 165.8 165.8 165.6 165.2 164.5 163.6 162.6 161.5 160.4 159.4 158.4 157.6 157.0 156.7 157.0 157.8 159.8 162.9 166.7 171.1 175.9 180.9 185.9 190.2 193.8 196.4 197.9 198.2 198.2 198.2 198.2 198.2 198.2 198.2 198.2 198.2 198.2 198.0 197.2 195.2 192.4 189.1 185.6 182.3 179.5 177.6 176.8 176.6 177.0 177.8 178.9 180.4 182.2 184.3 186.7 189.3 192.1 195.1 198.1 201.2 204.2 207.3 210.2 212.8 215.3 217.3 219.1 220.4 221.4 221.8 222.0 221.9 221.5 221.2 220.7 220.3 220.0 219.7 219.6 219.6 219.6 219.6 219.7 219.7 219.8 219.9 220.0 220.2 220.4 220.5 220.6 220.7 220.8 220.8 220.9 220.9 221.0 221.0 220.9 220.9 220.8 220.8 220.7 220.6 220.5 220.4 220.3 220.3 220.3 220.3 219.7 218.6 217.0 214.9 212.5 209.9 207.5 205.4 203.7 202.5 201.8 201.8 202.1 202.6 203.4 204.5 205.8 207.1 208.5 210.2 212.0 213.9 215.7 217.4 219.2 220.6 221.9 223.0 223.9 224.5 224.8 224.8 224.4 223.8 222.8 221.7 220.4 219.0 217.5 216.1 214.6 213.4 212.2 211.3 210.7 210.3 210.3 210.4 210.9 211.7 212.7 213.9 215.3 216.8 218.5 220.2 221.9 223.6 225.2 226.6 227.8 228.7 229.4 229.8 229.9 229.6 228.6 227.2 225.5 223.5 221.3 219.1 216.9 215.0 213.3 212.0 211.1 210.7 210.8 211.3 212.3 213.6 215.2 216.9 218.5 219.8 220.9 221.5 221.7 221.7 221.7 221.7 221.7 221.7 221.7 221.6 221.4 221.2 220.9 220.7 220.5 220.3 220.0 219.8 219.6 219.4 219.2 218.9 218.7 218.5 218.3 218.0 217.8 217.6 217.4 217.2 216.9 216.7 216.5 216.3 216.1 215.8 215.6 215.4 215.2 215.0 214.8 214.5 214.3 214.1 213.9 213.7 213.4 213.2 213.0 212.8 212.6 212.4 212.2 211.9 211.7 211.5 211.3 211.1 210.9 210.6 210.4 210.2 210.0 209.8 209.6 209.4 209.2 208.9 208.7 208.5 208.3 208.1 207.9 207.7 207.5 207.3 207.0 206.8 206.6 206.4 206.2 206.0 205.8 205.6 205.4 205.2 205.0 204.7 204.5 204.3 204.1 203.9 203.7 203.5 203.3 203.1 202.9 202.7 202.5 202.3 202.1 201.9 201.7 201.5 201.2 201.0 200.8 200.6 200.4 200.2 200.0 199.8 199.6 199.5 199.3 199.1 198.9 198.7 198.4 198.3 198.0 197.9 197.7 197.5 197.4 197.3 197.2 197.2 197.2 197.4 197.8 198.6 199.6 201.1 202.7 204.5 206.6 208.6 210.9 212.8 215.1 216.9 218.7 220.1 221.3 222.1 222.6 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.8 222.4 221.5 220.1 218.3 216.1 213.8 210.9 208.5 205.5 203.0 200.4 198.3 196.4 194.8 193.7 193.0 192.9 192.9 192.9 193.1 193.2 193.4 193.7 194.0 194.3 194.6 194.9 195.3 195.6 195.9 196.1 196.4 196.5 196.7 196.8 196.8 196.8 196.8 196.7 196.5 196.4 196.2 196.0 195.7 195.6 195.5 195.3 195.3 195.3 194.8 193.6 191.8 189.7 187.4 185.2 183.6 182.2 181.7 192.9 218.1 220.2 219.9 219.5 219.0 218.4 217.7 217.1 216.7 216.3 216.1 216.1 216.2 216.3 216.5 216.9 217.3 217.7 218.1 218.4 218.8 219.1 219.4 219.5 219.6 219.6 219.7 219.8 219.9 220.2 220.5 220.8 221.0 221.3 221.6 221.9 222.2 222.5 222.6 222.9 223.0 223.1 223.1 223.1 223.1 223.1 222.9 222.8 222.8 222.6 222.4 222.2 222.1 221.8 221.7 221.4 221.2 221.0 220.8 220.7 220.5 220.4 220.4 220.3 220.3 220.3 220.3 220.3 220.4 220.5 220.6 220.9 221.2 221.4 221.7 221.9 222.2 222.5 222.8 222.9 223.1 223.2 223.3 223.4 223.1 222.3 220.5 218.7 216.3 213.4 210.5 207.5 204.5 201.9 199.3 197.6 196.0 195.2 195.0 196.0 198.4 202.5 207.8 214.1 221.3 228.2 235.7 241.0 246.1 248.9 250.1 250.0 250.0 250.0 249.9 249.8 249.8 249.7 249.7 249.7 249.7 249.7 249.5 249.5 249.5 249.3 249.2 249.0 248.9 248.7 248.5 248.4 248.1 247.9 247.9 247.7 247.7 247.7 
247.7 247.7 247.7 247.7 247.9 248.0 248.2 248.3 248.6 248.8 248.9 249.3 249.4 249.6 249.7 249.9 250.0 250.0 250.1 249.5 247.5 244.7 240.9 236.6 232.1 227.9 223.6 220.0 216.7 214.9 213.8 213.8 214.2 215.0 216.3 217.9 220.0 222.2 224.8 227.6 230.4 233.5 236.2 239.4 241.7 244.4 246.4 248.2 249.5 250.3 250.7 250.7 250.4 250.0 249.5 248.7 248.0 247.2 246.4 245.7 245.2 244.8 244.5 244.5 244.5 244.6 244.9 245.1 245.5 245.8 246.2 246.6 247.0 247.4 247.8 248.1 248.4 248.7 248.9 249.1 249.1 249.1 249.0 248.6 248.1 247.5 246.8 246.1 245.2 244.5 243.7 242.9 242.3 241.7 241.3 241.0 241.0 241.0 241.4 241.8 242.5 243.5 244.5 245.5 246.8 247.9 249.2 250.2 251.4 252.3 253.2 253.8 254.2 254.4 254.4 254.0 253.4 252.7 251.7 250.6 249.4 248.1 246.7 245.5 244.1 243.0 242.0 241.0 240.3 239.8 239.6 239.5 239.9 240.6 241.7 243.1 244.6 246.7 248.4 250.3 251.9 253.3 254.4 255.0 255.4 255.1 254.6 253.8 252.9 251.7 250.5 249.0 247.6 246.1 244.8 243.5 242.6 241.8 241.2 241.0 241.0 241.4 241.9 242.6 243.7 244.7 245.9 247.1 248.3 249.3 250.1 250.7 251.1 251.2 251.1 250.8 250.4 249.8 249.2 248.4 247.8 247.1 246.5 246.0 245.8 245.7 245.7 245.7 245.8 246.0 246.1 246.3 246.4 246.7 246.9 246.9 247.1 247.2 247.3 247.5 247.7 247.9 248.1 248.3 248.5 248.6 248.8 249.0 249.2 249.4 249.6 249.8 250.0 250.1 250.3 250.5 250.7 250.9 251.1 251.3 251.5 251.7 251.8 252.0 252.2 252.4 252.6 252.8 253.0 253.2 253.4 253.5 253.7 253.9 254.1 254.3 254.5 254.7 254.9 255.1 255.3 255.5 255.6 255.8 256.0 256.2 256.4 256.6 256.8 257.0 257.2 257.4 257.6 257.8 258.0 258.2 258.3 258.5 258.7 258.9 259.1 259.3 259.5 259.7 259.9 260.1 260.3 260.5 260.7 260.9 261.1 261.3 261.5 261.7 261.9 262.1 262.3 262.5 262.7 262.8 263.0 263.2 263.4 263.6 263.8 264.0 264.2 264.4 264.6 264.8 265.0 265.2 265.4 265.6 265.8 266.0 266.2 266.4 266.6 266.8 267.0 267.2 267.4 267.6 267.8 268.0 268.2 268.4 268.6 268.8 269.0 269.2 269.4 269.6 269.8 270.0 270.2 270.5 270.7 270.9 271.1 271.3 271.5 271.7 271.9 272.1 272.3 272.5 272.7 272.9 273.1 273.3 273.5 273.7 273.9 274.1 274.3 274.5 274.7 275.0 275.2 275.4 275.6 275.8 276.0 276.2 276.4 276.6 276.8 277.0 277.2 277.4 277.7 277.9 278.1 278.3 278.5 278.7 278.9 279.1 279.3 279.5 279.7 280.0 280.2 280.4 280.6 280.8 281.0 281.2 281.4 281.6 281.9 282.1 282.3 282.5 282.7 282.9 283.1 283.3 283.5 283.8 284.0 284.2 284.4 284.6 284.8 285.0 285.3 285.5 285.7 285.9 286.1 286.3 286.5 286.8 287.0 287.2 287.4 287.6 288.1 288.7 289.1 289.6 290.1 290.5 290.9 291.4 291.7 292.0 292.4 292.6 292.8 293.0 293.0 293.0 293.4 294.3 296.4 298.8 302.6 306.4 311.1 315.7 320.4 324.6 328.9 332.4 335.6 337.3 338.6 338.6 338.4 338.0 337.5 336.9 336.0 335.2 334.3 333.5 332.6 331.7 331.1 330.6 330.2 330.0 330.0 329.8 329.7 329.5 329.3 329.1 328.9 328.7 328.5 328.5 328.1 327.1 325.3 322.5 319.5 315.5 311.6 307.4 303.5 299.9 296.7 294.4 292.5 291.6 291.6 291.6 291.8 292.1 292.3 292.6 293.0 293.2 293.4 293.5 293.6 293.5 293.5 293.5 293.5 293.5 293.4 293.3 293.3 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.3 293.2 293.0 292.7 292.2 291.5 290.6 289.6 288.3 287.0 285.5 283.8 281.9 280.0 278.0 275.9 273.8 271.7 269.6 267.4 265.3 263.3 261.3 259.3 257.4 255.7 254.0 252.4 251.1 249.9 248.7 247.8 247.1 246.4 245.9 245.7 245.7 245.7 245.7 245.7 245.9 246.0 246.2 246.3 246.5 246.6 246.7 246.8 246.8 246.5 247.2 247.9 248.7 249.5 250.2 250.8 251.1 251.2 251.1 251.1 250.8 250.5 250.2 249.9 249.5 249.1 248.7 248.3 248.0 247.7 247.4 247.3 247.2 247.2 247.2 247.2 247.3 247.5 247.6 247.8 247.9 248.1 248.3 248.4 248.5 248.6 248.7 248.7 248.7 248.7 248.7 248.5 248.4 248.2 
248.1 247.9 247.7 247.6 247.4 247.4 247.3 247.2 247.2 246.8 245.4 242.7 239.4 234.5 229.7 224.1 218.6 213.3 208.0 203.8 199.7 196.9 194.7 193.7 193.4 193.4 193.5 193.6 193.7 193.9 194.1 194.4 194.6 194.8 195.1 195.3 195.6 195.8 196.1 196.2 196.5 196.6 196.7 196.8 196.8 196.8 196.5 196.1 195.3 194.2 193.1 191.6 190.3 188.8 187.5 186.2 185.1 184.1 183.5 183.1 182.9 183.6 185.9 190.5 195.8 202.5 209.1 215.4 220.8 224.1 225.7 225.5 225.2 224.9 224.4 223.8 223.1 222.4 221.8 221.0 220.5 220.1 219.8 219.6 219.6 219.6 219.6 219.7 219.8 220.0 220.1 220.4 220.6 220.8 221.0 221.2 221.4 221.5 221.7 221.8 221.9 221.9 222.0 221.9 221.5 220.9 219.9 219.0 217.8 216.3 214.8 213.3 211.6 210.2 208.7 207.5 206.4 205.6 204.8 204.4 204.3 204.3 204.6 204.9 205.5 206.2 207.0 207.9 208.8 209.9 210.9 212.1 213.2 214.2 215.3 216.3 217.2 217.9 218.5 219.1 219.4 219.5 219.6 219.7 219.8 220.1 220.5 221.0 221.4 221.9 222.4 222.9 223.4 223.8 224.2 224.5 224.6 224.8 224.8 224.6 224.1 223.5 222.5 221.6 220.4 219.4 218.2 217.1 216.2 215.5 215.0 214.8 214.7 214.9 215.3 216.0 216.8 217.7 218.8 219.8 220.9 221.9 222.8 223.5 224.1 224.4 224.6 224.5 224.1 223.6 222.9 222.0 221.1 220.1 219.2 218.2 217.5 216.7 216.1 215.8 215.7 215.7 215.8 216.2 216.6 217.0 217.7 218.3 218.8 219.5 220.0 220.5 220.9 221.2 221.3 220.8 221.5 221.4 221.4 221.4 221.3 221.3 221.2 221.2 221.1 221.1 221.0 221.0 220.9 220.9 220.8 220.8 220.7 220.7 220.7 220.6 220.6 220.5 220.5 220.4 220.4 220.3 220.3 220.2 220.2 220.1 220.1 220.0 220.0 220.0 219.9 219.9 219.8 219.8 219.7 219.7 219.6 219.6 219.5 219.5 219.4 219.4 219.3 219.3 219.3 219.2 219.2 219.1 219.1 219.0 219.0 218.9 218.9 218.8 218.8 218.7 218.7 218.7 218.6 218.6 218.5 218.5 218.4 218.4 218.3 218.3 218.2 218.2 218.1 218.1 218.1 218.0 218.0 217.9 217.9 217.8 217.8 217.7 217.7 217.6 217.6 217.5 217.5 217.5 217.4 217.4 217.3 217.3 217.2 217.2 217.1 217.9 221.9 225.4 229.3 232.5 236.1 239.0 241.9 244.2 246.1 247.5 248.4 248.8 248.7 248.5 248.1 247.6 246.9 246.3 245.7 245.1 244.4 244.0 243.7 243.4 243.4 243.4 243.6 243.9 244.5 245.1 245.8 246.6 247.4 248.2 248.8 249.6 250.1 250.6 251.0 251.1 251.2 251.0 250.6 250.2 249.6 248.8 248.1 247.2 246.4 245.6 244.8 244.1 243.6 243.2 243.0 242.9 242.2 241.0 239.1 236.7 234.4 231.7 229.7 227.8 226.7 226.1 226.3 227.4 228.9 231.9 235.0 239.0 244.2 250.0 256.4 263.9 271.4 280.7 289.0 299.4 308.9 319.6 330.0 340.4 351.1 360.6 371.1 379.1 387.9 394.8 401.0 405.7 409.0 411.1 411.9 411.6 410.8 409.8 408.4 406.7 404.8 403.1 401.2 399.5 397.9 396.8 396.0 395.6 395.8 396.7 398.7 401.6 404.9 409.5 414.0 419.2 424.5 429.7 434.8 439.2 443.6 446.4 448.7 450.1 450.5 450.2 449.9 449.3 448.4 447.6 446.4 445.4 444.1 443.1 441.9 440.8 439.6 438.7 437.9 437.0 436.7 436.5 436.6 438.2 441.7 446.2 450.6 455.1 457.7 458.6 458.2 457.5 456.5 455.3 453.7 451.8 450.0 447.8 445.9 444.0 442.3 440.8 439.5 438.6 438.0 437.7 437.5 437.5 437.5 437.4 437.2 437.2 436.9 436.7 436.6 436.3 435.8 435.4 435.2 434.8 434.4 434.0 433.4 432.9 432.3 431.8 431.2 430.7 430.0 429.3 428.8 428.1 427.3 426.6 425.9 425.0 424.3 423.5 422.7 421.9 421.1 420.1 419.2 418.4 417.4 416.5 415.5 414.4 413.6 412.6 411.7 410.7 409.6 408.6 407.7 406.8 405.6 404.6 403.6 402.6 401.5 400.6 399.7 398.8 397.7 396.8 395.7 394.7 393.7 392.7 391.7 390.8 389.9 388.8 387.9 386.9 386.2 385.2 384.4 383.4 382.5 381.8 380.8 380.2 379.2 378.5 377.8 377.1 376.4 375.6 374.9 374.2 373.6 373.0 372.3 371.8 371.2 370.6 370.2 369.7 369.2 368.7 368.3 367.8 367.4 367.1 366.7 366.5 366.2 365.8 365.5 365.3 365.3 365.1 364.9 364.8 364.7 364.7 364.7 364.7 
363.8 361.3 356.4 350.6 342.9 334.5 325.6 315.8 307.1 297.2 289.8 281.8 275.7 270.4 266.6 264.0 263.0 262.8 263.0 263.1 263.4 263.8 264.3 264.9 265.6 266.4 267.2 268.1 269.1 270.1 271.4 272.4 273.7 274.9 276.2 277.5 278.9 280.2 281.4 282.9 284.1 285.3 286.5 287.7 288.9 290.0 291.0 292.0 292.8 293.6 294.3 294.9 295.4 295.7 296.0 296.2 296.4 296.2 296.2 296.0 295.8 295.6 295.4 295.1 294.8 294.3 294.0 293.8 293.4 293.1 292.7 292.3 292.0 291.8 291.7 291.5 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.3 291.4 291.5 291.5 291.5 291.5 291.5 291.5 291.6 291.6 291.6 291.6 291.6 291.6 291.7 291.8 291.8 291.8 291.8 291.8 291.8 291.8 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.1 292.4 293.2 294.1 295.7 297.2 299.5 301.6 304.3 307.0 309.8 312.8 315.8 319.0 321.7 324.3 327.3 329.8 332.1 334.1 335.8 337.1 338.0 338.5 338.6 338.4 338.0 337.3 336.5 335.4 334.4 333.0 331.8 330.5 329.2 328.1 327.0 326.0 325.3 324.7 324.3 324.3 324.3 324.9 325.8 326.9 328.3 329.8 331.5 333.1 334.5 336.0 336.9 337.6 338.0 338.0 337.8 337.3 336.7 336.0 335.2 334.3 333.4 332.4 331.7 330.9 330.3 329.8 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6", - "input_type": "phoneme", - "offset": 48.918 + "f0_timestep": "0.005" }, { + "offset": 60.29, "text": "SP 紫 衫 飘 蒲 扇 摇 SP 无 须 一 分 多 与 少 SP 墨 色 烧 SP 釉 彩 膏 泥 光 色 烙 SP", "ph_seq": "SP z i0 sh an p iao p u sh an y ao SP w u x v y i f en d uo y v sh ao SP m o s e sh ao SP y ou c ai g ao n i g uang s e l ao SP", - "note_seq": "rest E4 E4 D4 D4 E4 E4 G3 G3 A3 A3 A3 A3 rest D4 D4 B3 B3 G4 G4 F#4 F#4 G4 G4 A4 A4 B4 B4 rest D4 D4 B3 B3 A3 A3 rest D4 D4 E4 E4 G4 G4 F#4 F#4 G4 G4 A4 A4 F#4 F#4 rest", - "note_dur_seq": "0.433 0.181 0.181 0.361 0.361 0.5419999 0.5419999 0.181 0.181 0.5420001 0.5420001 0.362 0.362 0.3610001 0.181 0.181 0.1799998 0.1799998 0.3620002 0.3620002 0.3609998 0.3609998 0.1810002 0.1810002 0.362 0.362 0.9029999 0.9029999 0.362 0.3610001 0.3610001 0.5419998 0.5419998 0.1810002 0.1810002 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.3609996 0.3609996 0.1810007 0.1810007 0.5419998 0.5419998 0.7229996 0.7229996 1.084001 1.084001 0.108", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.328 0.105 0.120999 0.060001 0.271004 0.089996 0.407005 0.134995 0.121002 0.059998 0.497002 0.044998 0.362 0.316002 0.044998 0.121002 0.059998 0.135002 0.044998 0.241997 0.120003 0.315994 0.045006 0.135994 0.045006 0.242005 0.119995 0.903 0.317002 0.044998 0.241005 0.119995 0.361763 0.180237 0.181 0.316002 0.044998 0.121002 0.059998 0.136002 0.044998 0.316001 0.044998 0.120996 0.060005 0.362 0.18 0.617996 0.105003 1.084001 0.108", - "f0_timestep": "0.005", + "ph_dur": "0.328 0.105 0.121 0.06 0.271 0.09 0.407 0.135 0.121 0.06 0.497 0.045 0.362 0.316 0.045 0.121 0.06 0.135 0.045 0.242 0.12 0.316 0.045 0.136 0.045 0.242 0.12 0.903 0.317 0.045 0.241 0.12 0.3618 0.1802 0.181 0.316 0.045 0.121 0.06 0.136 0.045 0.316 0.045 0.121 0.06 0.362 0.18 0.618 0.105 
1.084 0.108", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 1 2 2 2 2 2 2 2 1 1", + "note_seq": "rest E4 D4 E4 G3 A3 A3 rest D4 B3 G4 F#4 G4 A4 B4 rest D4 B3 A3 rest D4 E4 G4 F#4 G4 A4 F#4 rest", + "note_dur": "0.433 0.181 0.361 0.542 0.181 0.542 0.362 0.361 0.181 0.18 0.362 0.361 0.181 0.362 0.903 0.362 0.361 0.542 0.181 0.361 0.181 0.181 0.361 0.181 0.542 0.723 1.084 0.108", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 284.5 289.4 294.1 299.7 304.0 309.1 313.3 317.4 320.7 323.7 326.0 327.8 328.7 329.0 328.6 327.8 326.8 325.6 324.4 323.2 322.3 321.7 321.7 321.3 320.5 319.4 317.8 316.0 313.8 311.8 309.3 307.2 304.8 303.0 301.1 299.4 298.0 297.1 296.4 296.4 296.2 296.2 296.2 296.2 296.1 296.0 295.9 295.9 295.7 295.5 295.5 295.3 295.2 295.1 294.9 294.9 294.7 294.7 294.5 294.5 294.5 294.5 294.5 294.5 294.5 294.5 294.7 294.7 294.9 295.1 295.3 295.5 295.6 295.8 296.2 296.4 296.6 296.8 296.9 297.1 297.3 297.5 297.7 297.8 297.8 297.8 297.7 296.5 294.2 290.8 286.8 281.7 277.2 271.7 267.4 263.4 260.2 257.9 256.7 256.9 258.8 262.6 267.6 274.5 280.4 286.9 291.6 293.7 294.1 295.1 296.6 298.8 301.3 304.4 307.2 311.0 314.3 318.0 321.4 324.5 327.5 329.8 331.9 333.1 333.7 333.7 333.5 333.3 332.9 332.5 332.0 331.4 330.7 330.2 329.6 329.1 328.6 328.2 327.9 327.9 327.9 327.9 328.1 328.3 328.5 328.9 329.3 329.7 330.2 330.6 331.0 331.4 331.8 332.3 332.5 332.9 333.1 333.1 333.3 333.0 332.8 332.3 331.8 331.3 330.7 330.1 329.6 329.2 329.1 329.1 329.1 329.3 329.7 330.1 330.5 331.0 331.3 331.8 332.2 332.4 332.6 332.3 331.2 328.9 325.7 321.2 316.6 310.4 304.7 298.1 291.8 285.7 279.5 274.3 269.0 265.1 261.4 258.9 257.1 256.2 206.9 203.0 199.2 195.5 191.9 188.3 184.8 181.4 178.0 196.9 196.9 196.9 196.9 196.9 197.0 197.0 197.0 197.1 197.2 197.3 197.4 197.4 197.5 197.6 197.6 197.7 197.8 197.8 197.9 197.9 198.0 198.0 198.0 198.0 198.0 198.2 197.8 196.6 195.1 192.9 190.6 188.2 186.0 184.3 182.8 182.1 182.3 183.9 187.3 191.6 197.6 202.9 209.0 213.5 216.8 218.3 218.5 218.5 218.5 218.5 218.5 218.6 218.7 218.7 218.8 218.9 218.9 219.0 219.0 219.1 219.1 219.1 219.1 219.2 219.2 219.2 219.4 219.4 219.5 219.7 219.9 220.2 220.3 220.6 220.7 221.0 221.1 221.4 221.6 221.7 221.9 221.9 221.9 222.0 221.9 221.9 221.7 221.6 221.4 221.2 220.9 220.6 220.3 220.1 219.7 219.4 219.1 218.8 218.6 218.4 218.2 218.0 218.0 217.9 217.9 217.9 217.9 217.9 218.0 218.1 218.3 218.4 218.6 218.8 219.0 219.1 219.4 219.6 219.8 220.0 220.2 220.3 220.5 220.6 220.7 220.9 220.9 220.9 221.0 221.0 221.1 221.1 221.2 221.4 221.7 221.8 221.9 222.1 222.2 222.3 222.3 222.4 221.6 220.0 217.3 214.5 211.6 209.4 207.6 207.0 207.1 207.8 209.3 211.0 213.2 215.6 218.1 220.7 222.9 225.0 226.4 227.4 227.9 227.6 226.9 226.1 224.8 223.3 221.6 219.9 218.3 216.6 215.3 214.4 213.5 213.1 213.0 213.2 213.6 214.2 215.0 216.0 217.0 218.2 219.2 220.3 221.3 222.0 222.7 223.1 223.3 223.3 223.0 222.3 221.5 220.3 219.2 218.1 217.1 216.1 215.3 214.9 214.8 214.7 215.1 215.8 216.6 217.6 218.7 219.7 220.8 221.5 222.3 222.7 222.8 222.7 222.6 222.4 222.1 
221.9 221.6 221.1 220.8 220.5 220.1 219.7 219.3 219.2 220.3 221.3 222.4 223.5 224.5 225.6 226.7 227.8 228.9 230.0 231.1 232.2 233.4 234.5 235.6 236.7 237.9 239.0 240.2 241.3 242.5 243.7 244.9 246.0 247.2 248.4 249.6 250.8 252.0 253.2 254.5 255.7 256.9 258.2 259.4 260.7 261.9 263.2 264.5 265.7 267.0 268.3 269.6 270.9 272.2 273.5 274.8 276.2 277.5 278.8 280.2 281.5 282.9 284.3 285.6 287.0 288.4 289.8 291.2 291.7 292.0 292.3 292.6 292.9 293.1 293.3 293.5 293.6 293.5 293.7 293.9 294.1 294.3 294.3 294.5 294.7 294.9 295.0 295.1 295.3 295.4 295.4 295.4 295.5 295.0 293.7 291.1 287.9 283.8 279.4 274.6 269.6 265.2 260.3 256.8 253.7 250.9 249.2 248.3 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1 247.4 245.7 242.5 239.5 236.6 234.6 233.8 235.5 243.0 253.3 268.6 286.2 305.4 325.6 342.0 356.6 363.2 364.7 364.9 365.6 366.7 368.0 370.0 371.7 374.0 376.1 378.6 381.1 383.4 385.7 387.6 389.5 390.9 391.9 392.6 393.1 392.9 392.7 392.4 391.9 391.4 390.8 390.2 389.6 389.0 388.7 388.3 388.2 388.2 388.5 389.2 390.4 391.4 392.2 392.9 393.0 392.3 390.5 387.8 384.4 380.1 375.6 370.2 365.3 359.8 355.1 350.3 346.7 343.1 340.3 338.3 337.1 337.0 337.8 339.6 342.4 346.2 350.1 354.9 359.0 363.7 367.4 370.7 372.8 373.8 373.9 373.9 373.9 373.9 373.9 373.8 373.6 373.6 373.5 373.4 373.3 373.2 373.2 373.0 373.0 372.9 372.8 372.8 372.8 372.8 372.8 372.7 372.6 372.4 372.1 371.9 371.6 371.2 370.9 370.5 370.2 369.7 369.4 369.0 368.7 368.5 368.2 368.1 368.1 368.1 368.1 368.2 368.5 368.7 368.9 369.4 369.7 370.1 370.4 370.8 371.0 371.2 371.5 371.5 371.6 371.0 369.2 367.0 363.8 360.3 355.9 352.0 347.4 344.0 340.3 337.8 335.8 334.7 334.6 335.8 338.6 342.8 348.3 354.3 361.8 368.1 375.1 380.5 385.2 387.9 389.3 389.3 389.3 389.3 389.6 389.7 389.9 390.1 390.4 390.6 390.9 390.9 391.1 391.1 391.3 391.0 390.5 389.6 388.6 387.5 386.4 385.6 385.0 385.0 386.2 389.1 394.1 399.6 406.6 414.3 422.3 429.0 435.4 439.2 441.7 441.9 441.8 441.6 441.3 441.0 440.7 440.2 439.8 439.4 439.1 438.8 438.5 438.5 438.5 438.5 438.9 439.4 440.1 440.9 441.9 443.1 443.9 445.1 445.9 446.5 447.0 447.4 447.6 446.9 445.5 443.6 440.9 437.5 433.9 429.5 425.7 421.1 417.3 413.2 409.8 406.9 404.7 402.9 401.8 401.4 401.6 403.4 405.8 409.4 413.4 417.9 422.9 427.4 432.2 435.7 438.9 440.8 442.0 442.0 442.0 442.0 442.1 442.4 442.5 442.7 443.0 443.2 443.5 443.8 444.1 444.3 444.7 444.9 445.2 445.5 445.8 446.1 446.3 446.4 446.7 446.7 446.7 446.8 446.9 447.7 448.7 450.7 453.0 455.8 458.8 462.0 465.1 468.9 472.3 476.0 479.0 482.3 484.8 487.2 488.8 489.8 490.4 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.6 490.8 490.8 490.8 491.0 491.0 491.0 491.0 491.0 491.0 491.0 491.3 491.3 491.3 491.6 492.0 492.6 493.3 493.9 494.7 495.5 496.3 497.3 498.2 499.2 499.8 500.8 501.6 502.3 503.0 503.3 503.7 504.0 504.0 504.1 503.8 503.5 502.9 501.9 501.1 500.0 498.8 497.4 496.0 494.6 493.3 492.3 491.0 490.2 489.2 488.7 488.4 488.2 488.2 488.4 488.9 489.6 490.5 491.3 492.7 494.0 495.6 497.2 498.8 500.2 501.6 503.1 504.6 505.6 506.5 507.4 507.9 508.1 508.2 507.9 507.2 506.2 505.1 503.7 502.2 500.4 498.6 496.6 495.0 493.3 491.6 490.2 489.2 488.2 487.6 487.4 487.4 487.6 488.1 488.8 489.6 491.0 492.2 493.5 494.9 496.5 497.9 499.1 500.5 501.4 502.1 502.9 503.1 503.4 503.1 503.0 501.5 501.2 500.7 500.2 499.2 498.1 496.7 495.2 493.6 491.9 489.8 487.6 485.5 483.3 481.0 478.4 475.9 473.6 471.2 468.7 466.2 463.7 461.5 459.3 457.1 455.0 453.1 451.3 449.8 447.6 443.2 438.8 434.5 430.2 426.0 
421.8 417.6 413.5 409.5 405.4 401.4 397.5 393.6 389.7 385.9 382.1 378.3 374.6 370.9 367.3 363.7 360.1 356.5 353.0 349.5 346.1 342.7 339.3 336.0 332.7 329.4 326.2 323.0 319.8 316.6 313.5 310.4 307.4 304.4 301.4 298.4 295.5 292.6 289.7 286.8 284.0 281.2 278.4 275.7 273.0 270.3 267.6 265.0 262.4 259.8 257.3 254.7 252.2 249.7 247.3 244.8 243.4 243.5 243.5 243.7 243.7 243.8 243.8 243.9 244.0 244.0 244.0 244.0 244.0 244.2 244.8 245.7 247.3 249.0 251.3 253.7 256.5 259.6 262.7 266.2 269.3 272.8 275.8 278.9 281.6 284.0 285.8 287.5 288.6 289.3 289.5 289.5 289.6 289.7 289.9 290.1 290.3 290.7 291.0 291.3 291.6 291.9 292.2 292.4 292.6 292.8 293.0 293.0 293.2 292.9 292.5 291.6 290.7 289.5 287.9 286.0 284.2 281.9 279.9 277.2 274.9 272.3 269.8 267.3 264.6 262.4 259.8 257.9 255.7 253.9 252.2 250.8 249.5 248.4 247.8 247.2 246.9 246.9 246.8 246.8 246.7 246.5 246.4 246.2 246.1 245.9 245.6 245.4 245.2 245.0 244.8 244.7 244.7 244.5 244.5 244.5 244.5 244.5 244.6 244.7 244.8 244.9 245.1 245.2 245.4 245.5 245.8 246.0 246.1 246.4 246.6 246.8 246.9 247.1 247.2 247.4 247.4 247.5 247.2 246.9 246.5 245.9 245.1 244.1 242.9 241.7 240.2 238.8 237.2 235.6 233.9 232.3 230.5 229.1 227.4 226.0 224.5 223.3 222.3 221.2 220.5 219.9 219.5 219.2 219.5 219.5 219.5 219.5 219.5 219.5 219.5 219.5 219.4 219.4 219.4 219.4 219.4 219.4 219.4 219.4 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.1 219.1 219.1 219.0 219.0 218.9 218.9 218.7 218.6 218.6 218.5 218.4 218.4 218.3 218.2 218.2 218.2 218.2 218.1 218.1 218.1 218.1 218.0 218.0 217.9 217.9 217.8 217.7 217.6 217.6 217.6 217.5 217.5 217.5 217.5 217.5 218.2 218.2 218.2 218.2 218.7 220.9 224.0 228.7 234.1 240.2 247.2 253.7 261.0 263.0 263.4 263.7 264.1 264.4 264.8 265.1 265.5 265.8 266.2 266.6 266.9 267.3 267.6 268.0 268.3 268.7 269.0 269.4 269.7 270.1 270.5 270.8 271.2 271.5 271.9 272.3 272.6 273.0 273.3 273.7 274.1 274.4 274.8 275.2 275.5 275.9 276.3 276.6 277.0 277.3 277.7 278.1 278.5 278.8 279.2 279.6 279.9 280.3 280.7 281.0 281.4 281.8 282.2 282.5 282.9 283.3 283.7 284.0 284.4 284.8 285.2 285.6 285.6 285.8 286.0 286.3 286.5 286.8 287.2 287.7 288.1 288.6 289.2 289.7 290.3 290.7 291.3 291.8 292.4 292.8 293.3 293.7 294.1 294.4 294.8 295.0 295.2 295.3 295.4 294.7 289.0 280.4 274.7 273.9 275.3 278.1 281.7 287.1 292.5 298.8 305.2 311.4 316.7 322.0 325.7 328.7 329.8 329.9 329.8 329.8 329.8 329.8 329.8 329.8 329.8 329.8 329.7 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.2 327.5 324.6 320.2 315.8 310.8 307.2 304.7 303.7 304.7 308.7 313.7 319.9 324.9 328.0 329.7 330.0 330.6 331.5 332.8 334.4 336.2 338.2 340.6 343.0 345.7 348.6 351.7 354.8 358.0 361.1 364.4 367.7 370.7 373.7 376.6 379.3 381.7 383.9 385.8 387.4 388.8 389.7 390.3 390.6 390.0 390.0 390.0 390.0 390.1 390.8 391.5 392.3 393.5 394.7 396.1 397.0 398.1 398.7 399.2 399.4 398.9 397.9 396.4 394.4 391.9 389.3 386.4 383.7 380.6 378.1 375.7 373.6 372.0 370.9 370.5 370.3 370.1 369.8 369.4 368.9 368.5 368.0 367.5 367.1 367.0 367.0 367.0 367.1 367.2 367.3 367.7 368.0 368.2 368.6 368.9 369.4 369.6 369.8 370.1 370.2 370.2 370.4 370.0 368.6 366.6 363.9 360.8 357.3 353.5 350.1 346.4 343.8 341.1 339.4 338.3 338.3 339.6 343.3 348.7 355.7 363.9 372.1 381.1 387.7 393.9 397.2 398.8 398.6 398.6 398.3 397.7 397.2 396.7 396.0 395.5 394.7 394.3 393.5 393.0 392.6 392.3 392.1 392.0 392.0 392.0 392.2 392.4 392.7 393.1 393.4 393.6 393.9 394.1 394.3 394.5 394.5 394.5 394.5 394.6 394.8 395.0 395.1 395.4 395.6 395.9 396.1 396.4 396.6 396.8 396.9 397.2 397.2 397.5 397.5 397.5 397.7 397.5 397.5 397.4 397.2 397.1 397.0 
396.9 396.6 396.4 396.1 396.1 395.9 395.8 395.6 395.6 395.6 395.3 394.1 391.5 388.6 384.2 379.7 374.6 369.4 364.2 359.8 355.1 351.3 347.8 345.6 344.1 343.6 343.8 344.6 343.6 343.8 344.0 344.2 344.8 345.3 346.0 346.7 347.4 348.3 349.1 349.9 350.6 351.3 351.9 352.4 353.0 353.2 353.4 353.6 354.5 357.3 363.2 370.4 381.0 391.4 403.2 414.6 424.9 433.7 439.3 442.5 442.6 442.3 442.0 441.5 440.7 440.1 439.2 438.2 437.3 436.5 435.8 435.1 434.7 434.2 434.2 437.0 437.3 437.6 437.9 438.2 438.6 439.0 439.2 439.5 439.8 440.1 440.4 440.7 440.8 441.0 441.0 441.0 441.1 440.8 440.0 438.5 435.9 432.9 429.3 425.4 421.1 416.1 411.4 405.8 401.4 395.9 391.1 386.3 382.0 378.2 374.6 371.8 368.9 367.2 365.8 365.3 365.1 365.1 365.1 365.1 365.1 365.1 365.3 365.3 365.4 365.5 365.6 365.7 365.9 366.0 366.1 366.4 366.4 366.6 366.6 366.9 367.0 367.1 367.4 367.4 367.7 367.7 367.9 367.9 368.1 368.2 368.3 368.3 368.4 368.5 368.5 368.5 368.5 368.6 368.6 368.5 368.5 368.5 368.5 368.3 368.3 368.2 368.1 367.9 367.9 367.7 367.7 367.4 367.4 367.1 367.0 366.9 366.8 366.6 366.6 366.6 366.4 366.4 366.4 366.4 366.4 366.4 367.0 368.5 370.8 373.8 376.9 380.8 383.9 387.5 390.0 392.1 393.2 393.8 393.4 392.9 392.3 391.3 390.0 388.8 387.0 385.7 383.8 382.2 380.2 378.6 377.1 375.6 374.5 373.2 372.6 372.1 371.7 371.7 371.7 371.8 372.1 372.3 372.6 372.8 373.2 373.5 373.9 374.3 374.6 374.9 375.1 375.2 375.2 375.3 375.2 375.2 375.0 374.9 374.7 374.5 374.5 374.2 373.9 373.5 373.3 372.9 372.6 372.4 371.9 371.7 371.2 371.0 370.6 370.3 370.0 369.6 369.4 369.1 368.9 368.7 368.4 368.3 368.2 368.1 368.1 368.1 368.1 368.1 368.2 368.5 368.9 369.4 369.7 370.4 370.8 371.4 372.0 372.5 372.9 373.4 373.9 374.1 374.3 374.5 374.7 374.5 374.3 373.8 373.3 372.5 371.6 370.6 369.4 368.1 366.6 365.3 363.6 362.0 360.3 358.9 357.3 355.7 354.2 352.8 351.3 350.0 348.8 347.8 346.9 346.1 345.5 344.9 344.7 344.6 345.1 346.7 349.8 353.7 359.0 363.9 368.8 372.8 375.5 376.8 377.0 376.5 376.0 375.2 374.0 372.7 371.1 369.5 367.7 365.7 364.0 362.0 360.1 358.3 356.6 355.0 353.7 352.5 351.5 350.8 350.2 350.2 350.2 350.8 351.9 353.1 354.9 356.9 359.3 361.9 364.5 367.3 369.9 372.6 374.7 376.8 378.3 379.7 380.4 380.8 380.4 379.5 378.3 376.3 374.0 371.4 368.3 365.4 362.0 359.3 356.6 353.4 351.1 349.0 347.3 346.0 345.3 345.3 345.6 346.5 348.0 349.9 352.5 355.0 358.2 361.1 364.4 367.3 370.3 372.7 374.7 376.4 377.2 377.8 377.5 376.8 375.7 374.4 372.6 370.4 367.9 365.1 362.2 359.0 355.9 352.9 349.7 346.6 343.8 341.2 338.7 336.6 334.7 333.3 332.2 331.5 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2", - "input_type": "phoneme", - "offset": 60.29 + "f0_timestep": "0.005" }, { + "offset": 70.988, "text": "AP 你 似 携 SP 月 而 来 的 仙 瑶 AP 填 补 凡 尘 SP 缺 失 的 心 跳 SP", "ph_seq": "AP n i s i0 x ie SP y ve er l ai d e x ian y ao AP t ian b u f an ch en SP q ve sh ir d e x in t iao SP", - "note_seq": "rest A4 A4 G4 G4 E4 E4 rest B4 B4 A4 B4 B4 D5 D5 F#4 F#4 G4 G4 rest A4 A4 G4 G4 E4 E4 B4 B4 rest A4 A4 D5 D5 E5 E5 A4 A4 B4 B4 rest", - "note_dur_seq": "0.578 0.362 0.362 0.361 0.361 0.362 0.362 0.3609999 0.723 0.723 0.3610001 0.181 0.181 0.5420001 0.5420001 0.7229998 0.7229998 1.085 1.085 0.7220001 0.362 0.362 0.3610001 0.3610001 0.723 0.723 0.3619995 0.3619995 0.3610001 0.3620005 0.3620005 0.3610001 0.3610001 0.5419998 0.5419998 0.5419998 0.5419998 1.085 1.085 0.144", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.533002 0.044998 0.241997 0.120003 0.240997 0.120003 0.362 
0.316002 0.044998 0.677994 0.316002 0.090004 0.136002 0.044998 0.362 0.18 0.678002 0.044998 1.085 0.601997 0.120003 0.317002 0.044998 0.240997 0.120003 0.557999 0.165001 0.362 0.211006 0.149994 0.242005 0.119995 0.316002 0.044998 0.362007 0.179993 0.392006 0.149994 1.085 0.144", - "f0_timestep": "0.005", + "ph_dur": "0.533 0.045 0.242 0.12 0.241 0.12 0.362 0.316 0.045 0.678 0.316 0.09 0.136 0.045 0.362 0.18 0.678 0.045 1.085 0.602 0.12 0.317 0.045 0.241 0.12 0.558 0.165 0.362 0.211 0.15 0.242 0.12 0.316 0.045 0.362 0.18 0.392 0.15 1.085 0.144", + "ph_num": "2 2 2 1 2 1 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest A4 G4 E4 rest B4 A4 B4 D5 F#4 G4 rest A4 G4 E4 B4 rest A4 D5 E5 A4 B4 rest", + "note_dur": "0.578 0.362 0.361 0.362 0.361 0.723 0.361 0.181 0.542 0.723 1.085 0.722 0.362 0.361 0.723 0.362 0.361 0.362 0.361 0.542 0.542 1.085 0.144", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 342.8 348.0 355.6 363.2 371.0 378.8 386.4 393.8 401.2 408.2 414.5 420.5 425.9 430.7 434.9 438.5 441.0 442.7 443.7 444.0 443.8 443.8 443.6 443.4 443.2 443.0 442.5 442.3 442.0 441.7 441.4 441.2 440.8 440.8 440.6 440.5 440.5 440.5 440.6 440.9 441.4 442.0 442.7 443.3 444.0 444.5 444.9 445.4 445.4 445.3 444.2 442.3 438.9 434.9 430.2 425.1 419.8 414.3 409.4 404.8 400.7 397.3 394.9 393.4 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.1 393.2 393.4 393.4 393.6 393.8 394.0 394.2 394.4 394.8 395.0 395.1 395.3 395.6 395.9 396.1 396.3 396.5 396.7 396.9 397.1 397.3 397.5 397.5 397.5 397.5 397.6 396.9 395.3 392.8 388.6 383.4 377.6 371.2 364.6 358.2 351.9 346.3 341.5 337.5 334.6 332.8 332.1 332.0 331.8 331.6 331.4 331.1 330.8 330.5 330.1 329.7 329.3 329.0 328.7 328.4 328.2 328.0 327.9 327.9 327.8 327.7 327.7 327.7 327.7 327.7 327.7 327.6 327.5 327.5 327.5 327.4 327.4 327.4 327.4 327.4 327.4 327.4 327.4 327.4 327.4 327.4 327.5 327.7 327.9 328.0 328.2 328.6 328.9 329.2 329.5 329.8 330.1 330.3 330.6 330.9 331.2 331.5 331.8 332.0 332.1 332.3 332.4 332.5 332.5 332.7 332.5 332.4 332.1 331.8 331.5 331.2 330.7 330.3 330.0 329.8 329.6 329.6 329.3 328.9 328.1 327.1 325.9 324.3 322.6 320.8 318.9 316.9 314.9 312.9 311.0 309.2 307.5 305.9 304.5 303.5 302.6 304.2 306.3 308.5 310.7 312.9 315.1 317.3 319.6 321.8 324.1 326.4 328.7 331.1 333.4 335.8 338.1 340.5 343.0 345.4 347.8 350.3 352.8 355.3 357.8 360.3 362.9 365.5 368.1 370.7 373.3 375.9 378.6 381.3 384.0 386.7 389.4 392.2 395.0 397.8 400.6 403.4 406.3 409.2 412.1 415.0 417.9 420.9 423.9 426.9 429.9 433.0 436.0 439.1 442.2 445.4 448.5 451.7 454.9 458.1 461.4 464.3 467.9 472.7 478.2 483.7 488.9 493.1 495.9 497.4 497.4 497.3 497.2 497.0 496.7 496.5 496.3 496.0 495.8 495.6 495.3 495.3 495.3 495.3 495.3 495.3 495.3 495.3 495.3 495.3 495.6 495.6 495.7 495.9 495.9 496.1 496.2 496.3 496.5 496.5 496.7 
497.0 497.0 497.0 497.1 497.3 497.3 497.3 497.3 497.3 497.5 497.4 497.3 497.2 497.0 497.0 496.8 496.4 496.0 495.8 495.5 495.0 494.5 494.3 493.9 493.4 493.0 492.7 492.3 492.0 491.6 491.2 491.0 490.8 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.7 490.9 491.2 491.4 491.7 492.2 492.4 492.8 493.3 493.7 494.0 494.5 494.7 494.9 495.3 495.6 495.6 495.9 495.9 496.1 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.3 496.5 496.5 496.5 496.5 496.5 496.6 496.7 496.7 496.7 496.7 496.7 496.7 496.7 496.7 496.7 496.9 496.6 495.9 494.7 493.3 491.6 489.6 487.5 485.1 482.3 479.5 476.6 473.6 470.4 467.2 464.3 461.3 458.3 455.5 452.9 450.3 448.1 446.1 444.5 443.1 441.9 441.0 440.6 440.5 440.5 440.5 440.5 440.6 440.8 441.0 441.2 441.4 441.7 442.1 442.3 442.7 443.0 443.3 443.7 443.9 444.1 444.5 444.8 445.0 445.1 445.2 445.4 445.4 445.4 445.6 445.4 445.4 445.2 445.0 444.8 444.5 444.1 443.9 443.5 443.1 442.6 442.2 441.8 441.4 441.1 440.8 440.3 440.0 439.7 439.5 439.3 439.2 439.2 439.2 439.4 440.1 441.4 443.2 445.4 448.2 451.3 454.7 458.3 462.1 466.2 470.0 473.7 477.2 480.5 483.3 485.7 487.6 488.9 489.6 489.9 489.9 489.9 489.9 489.9 490.0 490.2 490.2 490.5 490.7 490.8 490.8 491.1 491.3 491.3 491.5 491.6 491.7 491.9 491.9 491.9 491.9 492.1 493.3 496.7 502.3 496.2 496.3 496.7 497.7 498.7 499.9 501.4 502.8 504.0 505.4 506.7 507.8 508.6 509.3 509.5 509.8 507.7 508.2 509.2 510.8 512.8 515.0 517.8 521.0 524.4 528.0 532.1 536.2 540.6 545.0 549.5 554.0 558.5 562.2 566.2 570.1 573.6 576.9 579.7 582.0 584.1 585.7 586.7 587.0 587.1 587.0 586.9 586.6 586.3 585.7 585.4 585.0 584.4 583.9 583.6 583.0 582.7 582.6 582.6 582.6 585.3 585.3 585.3 585.3 585.3 585.3 585.7 586.3 587.0 587.9 589.0 590.2 591.3 592.4 593.5 594.3 594.8 595.4 595.8 595.4 594.0 591.8 588.7 584.8 580.1 574.5 568.2 561.1 553.9 546.3 538.7 531.2 523.6 516.3 509.7 503.3 497.6 492.6 488.3 484.6 482.0 480.2 479.3 502.2 498.3 494.1 489.5 484.6 480.2 475.1 469.6 463.9 457.9 452.0 446.0 440.0 434.1 428.3 422.6 417.1 411.7 406.5 401.6 397.0 392.7 388.7 385.1 381.7 378.6 376.0 373.9 372.1 370.6 369.6 369.0 368.7 368.7 368.7 368.7 368.7 368.7 368.9 369.1 369.1 369.2 369.4 369.6 369.7 369.9 370.0 370.1 370.2 370.4 370.6 370.6 370.7 370.9 370.9 370.9 370.9 371.0 370.9 370.9 370.9 370.9 370.9 370.9 370.9 370.9 370.9 370.9 370.9 370.9 370.7 370.6 370.6 370.6 370.6 370.6 370.5 370.4 370.4 370.4 370.4 370.4 370.3 370.2 370.2 370.2 370.2 370.2 370.1 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.2 370.2 370.3 370.5 370.7 370.9 371.0 371.4 371.6 371.8 371.9 371.9 372.1 372.1 372.1 372.3 372.1 372.1 372.1 372.1 372.1 372.1 371.9 371.9 371.9 371.8 371.7 371.7 371.5 371.5 371.4 371.3 371.3 371.3 371.1 371.1 371.1 371.1 371.1 371.1 371.0 370.6 370.1 369.3 368.2 366.9 365.5 364.1 362.5 360.9 359.7 358.6 357.4 356.7 356.2 355.9 356.4 357.5 359.4 362.0 365.1 368.8 372.7 376.6 380.2 383.6 386.4 388.4 389.4 389.9 389.7 389.7 389.7 389.7 389.7 389.7 389.5 389.5 389.4 389.3 389.2 389.1 389.1 388.9 388.7 388.6 388.5 388.4 388.2 388.2 388.0 387.8 387.7 387.7 387.5 387.5 387.3 387.2 387.0 387.0 387.0 386.9 386.8 386.8 386.8 386.8 386.8 386.8 386.8 386.8 386.8 386.9 387.1 387.3 387.5 387.6 387.8 388.0 388.2 388.4 388.6 388.6 388.6 389.9 389.7 389.7 389.7 389.6 389.5 389.5 389.3 389.1 388.9 388.7 388.5 388.4 388.2 387.8 387.6 387.4 387.2 387.0 386.8 386.6 386.6 386.5 386.4 386.4 386.4 386.4 386.4 386.6 387.0 387.6 388.2 388.9 389.8 390.8 391.6 392.4 393.1 393.7 394.1 394.3 395.4 395.5 395.2 394.8 394.3 393.6 392.8 391.9 390.9 389.7 388.6 387.5 
386.5 385.5 384.4 383.5 382.6 381.9 381.3 380.7 380.3 380.2 380.2 380.5 381.1 382.0 383.2 384.6 386.2 387.9 389.7 391.6 393.4 395.0 396.5 397.9 398.8 399.5 399.9 400.2 400.0 399.6 399.1 398.4 397.5 396.6 395.6 394.5 393.4 392.1 390.9 389.7 388.6 387.7 386.9 386.2 385.6 385.2 385.0 385.0 385.1 385.5 386.1 386.8 387.7 388.8 390.0 391.1 392.4 393.6 394.8 396.1 397.0 397.8 398.5 399.1 399.4 399.5 399.3 398.9 398.4 397.8 397.1 396.1 395.2 394.2 393.1 392.1 391.2 390.3 389.5 389.0 388.5 388.3 388.2 388.2 388.2 388.2 388.4 388.4 388.6 388.9 389.2 389.4 389.6 390.0 390.2 390.3 390.6 390.9 391.1 391.3 391.5 391.5 391.2 390.9 390.6 390.3 390.0 389.7 389.4 389.0 388.7 388.4 388.1 387.8 387.5 387.2 386.9 386.6 386.3 385.9 385.6 385.3 385.0 384.7 384.4 384.1 383.8 383.5 383.2 382.9 382.6 382.3 382.0 381.6 381.3 381.0 380.7 380.4 380.1 379.8 379.5 379.2 378.9 378.6 378.3 378.0 377.7 377.4 377.1 376.8 376.5 376.2 375.9 375.6 375.3 375.0 374.7 374.4 374.1 373.8 373.5 373.2 372.9 372.6 372.3 372.0 371.7 371.4 371.1 370.8 370.5 370.2 369.9 369.6 369.3 369.0 368.7 368.4 368.2 367.9 367.6 367.3 367.0 366.7 366.4 366.1 365.8 365.5 365.2 364.9 364.6 364.3 364.1 363.8 363.5 363.2 362.9 362.6 362.3 362.0 361.7 361.4 361.2 360.9 360.6 360.3 360.0 359.7 359.4 359.1 358.8 358.6 358.3 358.0 357.7 357.4 357.1 356.8 356.6 356.3 356.0 355.7 355.4 355.1 354.9 354.6 354.3 354.0 353.7 353.4 353.2 352.9 352.6 352.3 352.0 351.7 351.5 351.2 350.9 350.6 350.3 350.1 350.1 351.5 353.5 355.8 358.5 361.6 365.0 368.7 372.2 376.2 380.8 385.6 390.4 395.1 400.1 405.0 409.8 414.5 418.9 423.0 426.7 430.2 433.2 435.6 437.7 439.2 440.0 440.5 440.5 440.7 440.9 441.1 441.4 441.8 442.0 442.4 442.5 442.7 442.8 442.8 442.8 443.0 443.2 443.4 443.7 443.8 443.8 444.0 443.6 442.4 440.5 437.8 434.4 430.5 426.2 421.8 417.4 413.0 408.9 405.4 402.3 399.9 398.2 397.2 397.0 396.8 396.8 396.6 396.4 396.2 395.8 395.6 395.3 394.9 394.5 394.0 393.5 393.1 392.8 392.5 392.2 391.9 391.7 391.5 391.4 391.3 391.3 391.3 391.3 391.3 391.3 391.4 391.5 391.5 391.7 391.8 391.8 392.0 392.0 392.2 392.2 392.2 392.2 392.2 392.2 392.3 391.8 390.9 389.6 387.8 385.6 382.9 380.0 376.7 373.1 369.6 366.0 362.3 358.8 355.4 352.1 349.1 346.4 344.0 341.8 340.4 339.2 338.4 338.1 338.0 337.7 337.3 336.8 336.2 335.4 334.6 333.7 332.8 331.9 331.0 330.2 329.5 328.9 328.4 328.1 327.9 327.9 327.9 328.1 328.4 328.7 329.2 329.6 330.1 330.6 331.1 331.4 331.7 331.9 331.9 332.0 331.9 331.9 331.9 331.9 331.9 331.9 331.7 331.7 331.6 331.5 331.4 331.3 331.3 331.2 331.2 331.0 330.9 330.7 330.6 330.6 330.4 330.3 330.2 330.1 330.0 329.8 329.8 329.7 329.5 329.4 329.4 329.2 329.2 329.2 329.2 329.1 329.1 329.1 329.1 329.1 329.1 329.1 329.1 329.1 329.1 329.2 329.4 329.5 329.7 329.8 330.0 330.2 330.2 330.3 330.4 330.4 330.4 330.5 330.4 330.4 330.4 330.4 330.3 330.2 330.1 330.0 330.0 329.8 329.8 329.7 329.6 329.6 329.6 329.6 329.6 329.6 329.6 329.7 329.8 330.0 330.3 330.5 330.8 331.1 331.4 331.5 331.7 331.9 331.9 332.0 331.9 331.5 330.9 329.8 328.6 327.0 325.4 323.7 321.5 319.4 317.0 314.7 312.3 309.9 307.6 305.5 303.4 301.6 300.0 298.6 297.4 296.4 295.9 295.6 296.5 299.9 307.4 319.6 335.1 353.7 374.7 397.4 420.6 442.0 460.5 474.7 483.4 486.8 486.8 487.5 488.6 489.9 491.4 493.1 494.7 496.6 498.4 499.9 501.4 502.7 503.4 503.9 504.2 504.0 503.2 502.1 500.9 499.5 497.6 495.7 493.8 491.7 489.7 487.8 486.0 484.6 483.3 482.5 482.0 482.1 482.3 483.3 484.6 486.4 488.5 490.7 493.3 495.7 497.9 499.8 501.3 502.4 503.1 503.2 502.6 501.6 500.0 498.1 496.0 493.8 491.5 489.6 487.7 486.4 485.5 485.1 485.1 485.3 485.5 
485.8 486.5 487.0 487.8 488.6 489.4 490.4 491.3 492.1 493.0 494.0 494.9 495.6 496.3 496.0 495.3 494.5 493.8 493.1 492.3 491.6 490.9 490.2 489.4 488.7 488.0 487.3 486.5 485.8 485.1 484.4 483.6 482.9 482.2 481.5 480.8 480.1 479.4 478.6 477.9 477.2 476.5 475.8 475.1 474.4 473.7 473.0 472.3 471.6 470.9 470.2 469.5 468.8 468.1 467.4 466.7 466.0 465.3 464.6 463.9 463.3 462.6 461.9 461.2 460.5 459.8 459.1 458.5 457.8 457.1 456.4 455.7 455.1 454.4 453.7 453.0 452.4 451.7 451.0 450.4 449.7 449.0 448.4 447.7 445.2 443.7 442.4 440.5 439.9 439.2 438.6 438.3 442.8 442.8 442.8 442.8 443.0 443.2 443.4 443.7 443.9 444.3 444.6 444.8 445.1 445.5 445.7 445.9 446.3 446.5 446.7 447.0 447.2 447.4 447.4 447.4 447.5 447.9 448.7 452.0 456.8 463.3 471.4 480.8 491.3 503.3 515.9 529.0 542.3 555.6 568.6 580.5 591.1 600.2 607.5 612.5 615.5 577.6 578.6 579.6 580.6 581.6 582.6 583.6 584.6 585.6 586.6 587.6 588.6 589.6 590.6 591.6 592.7 591.0 590.7 590.4 589.9 589.4 588.9 588.0 586.9 585.7 584.8 583.8 582.8 581.9 580.8 580.1 579.6 579.1 578.9 578.9 578.9 578.9 579.1 579.5 580.0 580.2 580.8 581.4 581.9 582.6 583.4 583.9 584.5 585.1 585.6 586.1 586.4 586.7 586.9 587.0 587.3 587.3 587.3 587.3 587.4 587.7 588.0 588.3 588.6 588.9 589.1 589.4 589.7 590.0 590.3 590.5 590.8 591.1 591.4 591.4 591.4 591.4 591.8 590.6 587.9 583.8 578.7 573.1 567.3 561.9 557.6 554.5 552.8 565.6 604.3 665.4 678.4 678.1 677.8 677.1 676.2 675.2 674.1 672.8 671.6 670.3 669.0 667.7 666.5 665.6 664.6 664.0 663.6 663.5 663.3 663.1 663.1 662.8 662.4 662.1 661.8 661.4 660.8 660.1 659.5 658.9 658.3 657.4 656.7 655.8 654.7 653.6 652.6 651.3 650.4 649.2 647.9 646.6 645.1 643.8 642.3 641.0 639.6 638.0 636.5 635.0 633.4 631.9 630.4 628.9 627.4 625.9 624.4 622.9 621.2 619.5 618.0 616.5 615.0 613.6 612.1 610.7 609.5 608.0 606.6 605.4 604.0 602.7 601.5 600.4 599.2 598.3 597.3 596.2 595.4 594.5 593.8 593.0 592.2 591.6 591.1 590.5 589.9 589.4 588.9 588.6 588.3 588.1 588.0 588.0 588.0 609.8 607.7 605.1 602.8 600.2 597.6 595.1 592.8 590.4 588.0 586.0 587.1 590.7 437.7 437.5 437.5 437.5 437.5 437.5 437.5 437.5 437.5 437.5 437.4 437.2 437.2 437.2 437.2 437.1 437.0 437.0 437.0 436.8 436.7 436.7 436.7 436.7 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.5 436.6 436.7 436.7 436.7 436.9 437.0 437.1 437.2 437.2 437.4 437.6 437.7 437.8 438.0 438.2 438.2 438.4 438.6 438.7 438.8 439.0 439.0 439.2 439.2 439.3 439.5 439.5 439.7 439.7 439.7 439.7 439.7 439.7 439.7 439.9 439.3 437.8 435.4 432.1 428.2 423.3 418.3 413.0 407.8 402.8 398.2 394.2 390.9 388.5 387.0 386.4 388.1 392.4 399.4 408.8 420.4 433.9 446.8 461.6 477.4 492.7 506.9 518.5 527.1 532.4 534.8 534.2 532.7 530.4 527.5 524.2 520.3 516.0 511.8 507.5 503.2 499.3 495.9 493.0 490.7 489.1 488.4 488.2 488.2 488.2 488.2 488.2 488.2 488.3 488.5 488.5 488.7 488.8 488.9 489.1 489.3 489.6 489.6 489.8 490.0 490.2 490.5 490.7 490.9 491.0 491.1 491.4 491.6 491.8 492.1 492.3 492.5 492.5 492.7 493.0 493.0 493.2 493.3 493.4 493.6 493.6 493.6 493.6 494.4 494.2 494.2 494.2 494.2 494.0 493.9 493.8 493.6 493.6 493.5 493.2 493.0 493.0 492.8 492.6 492.3 492.1 491.8 491.6 491.6 491.4 491.2 491.0 491.0 490.8 490.8 490.6 490.5 490.5 490.5 490.5 490.5 490.3 489.9 489.3 488.6 487.9 486.8 485.4 483.9 482.2 480.6 478.7 476.6 474.6 472.5 470.4 468.2 466.2 463.9 461.9 459.9 458.0 456.2 454.5 452.9 451.6 450.5 449.5 448.5 447.8 447.3 447.0 446.9 447.9 448.9 450.0 452.2 455.3 459.4 464.2 469.4 474.9 480.3 485.7 490.4 494.2 497.2 499.1 500.2 500.0 499.5 499.0 498.3 497.3 496.1 494.7 493.3 491.9 490.4 488.8 487.4 
486.0 484.7 483.6 482.7 482.0 481.4 481.2 481.4 482.1 483.7 486.3 489.4 492.9 496.5 500.0 503.1 505.2 506.9 508.1 508.2 507.8 507.0 506.3 505.1 503.5 501.8 500.0 498.1 496.2 494.3 492.4 490.5 488.7 487.1 485.7 484.5 483.7 483.2 482.9 482.9 483.0 483.7 484.7 486.0 487.5 489.2 491.2 493.3 495.5 497.6 499.6 501.5 503.1 504.6 505.8 506.6 507.0 507.5 507.2 506.8 506.0 504.9 503.7 502.5 500.8 499.3 497.7 496.2 494.7 493.3 492.1 491.1 490.5 490.1 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9 489.9", - "input_type": "phoneme", - "offset": 70.988 + "f0_timestep": "0.005" }, { + "offset": 82.555, "text": "AP 落 于 人 间 一 笑 AP 勾 画 醉 意 袅 袅 AP 似 梦 绕 SP 画 里 画 外 扫 寂 寥 SP", "ph_seq": "AP l uo y v r en j ian y i x iao AP g ou h ua z ui y i n iao n iao AP s i0 m eng r ao SP h ua l i h ua w ai s ao j i l iao SP", - "note_seq": "rest A4 A4 G4 G4 E4 E4 D5 D5 G4 G4 A4 A4 rest B3 B3 B4 B4 A4 A4 A4 A4 B4 B4 E4 E4 rest A4 A4 G4 G4 B3 B3 rest B4 B4 A4 A4 D5 D5 B4 B4 G4 G4 F#4 F#4 E4 E4 rest", - "note_dur_seq": "0.578 0.361 0.361 0.3609999 0.3609999 0.5430001 0.5430001 0.5419999 0.5419999 0.3610001 0.3610001 0.362 0.362 0.723 0.3609998 0.3609998 0.3610001 0.3610001 0.362 0.362 0.3610001 0.3610001 0.723 0.723 0.362 0.362 0.723 0.1799998 0.1799998 0.362 0.362 0.3610001 0.3610001 0.1810002 0.3610001 0.3610001 0.3619995 0.3619995 0.3610001 0.3610001 0.3620005 0.3620005 0.1799994 0.1799994 0.5430002 0.5430002 1.445 1.445 0.144", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.533002 0.044998 0.316002 0.044998 0.316002 0.044998 0.392999 0.150002 0.496994 0.045006 0.240997 0.120003 0.362 0.633004 0.089996 0.241005 0.119995 0.240997 0.120003 0.316994 0.045006 0.270996 0.090004 0.618004 0.104996 0.362 0.557999 0.165001 0.120002 0.059998 0.317002 0.044998 0.361 0.045998 0.135002 0.300995 0.060005 0.241997 0.120003 0.315994 0.045006 0.241998 0.120003 0.119994 0.060005 0.453004 0.089996 1.445 0.144", - "f0_timestep": "0.005", + "ph_dur": "0.533 0.045 0.316 0.045 0.316 0.045 0.393 0.15 0.497 0.045 0.241 0.12 0.362 0.633 0.09 0.241 0.12 0.241 0.12 0.317 0.045 0.271 0.09 0.618 0.105 0.362 0.558 0.165 0.12 0.06 0.317 0.045 0.361 0.046 0.135 0.301 0.06 0.242 0.12 0.316 0.045 0.242 0.12 0.12 0.06 0.453 0.09 1.445 0.144", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 1 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A4 G4 E4 D5 G4 A4 rest B3 B4 A4 A4 B4 E4 rest A4 G4 B3 rest B4 A4 D5 B4 G4 F#4 E4 rest", + "note_dur": "0.578 0.361 0.361 0.543 0.542 0.361 0.362 0.723 0.361 0.361 0.362 0.361 0.723 0.362 0.723 0.18 0.362 0.361 0.181 0.361 0.362 0.361 0.362 0.18 0.543 1.445 0.144", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 412.4 413.4 416.4 419.7 
422.9 426.0 429.1 431.9 434.3 436.5 438.5 440.0 441.3 442.0 442.5 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8 443.0 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.1 442.6 441.8 440.3 438.5 436.2 433.6 430.8 427.5 424.0 420.5 416.9 413.2 409.6 406.1 403.0 400.0 397.0 394.2 391.8 389.7 387.9 386.6 385.6 385.0 385.0 385.1 385.3 385.6 386.2 386.7 387.3 387.8 388.4 389.0 389.5 390.1 390.6 390.9 391.1 391.3 391.3 391.3 391.4 391.5 391.9 392.1 392.4 392.8 393.0 393.4 393.5 393.6 393.7 394.0 394.6 396.3 398.8 402.1 405.8 409.9 414.3 418.7 422.7 426.5 429.6 431.9 433.6 434.2 433.6 432.1 429.8 426.8 423.2 419.2 414.9 410.6 406.5 402.8 399.5 396.8 395.1 394.1 393.7 392.9 391.2 388.5 385.0 381.0 376.3 371.1 365.4 359.8 354.3 348.9 343.7 338.9 334.8 331.5 328.7 326.6 325.4 324.9 324.9 325.0 325.3 325.7 326.1 326.6 327.1 327.7 328.3 328.9 329.3 329.8 330.2 330.6 330.8 331.0 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.2 331.3 331.3 331.3 331.4 331.5 331.5 331.5 331.6 331.7 331.8 331.9 331.9 331.9 332.0 332.1 332.1 332.3 332.3 332.3 332.4 332.5 332.5 332.5 332.5 332.5 332.5 332.5 332.6 332.7 332.7 332.7 332.7 332.7 332.8 333.0 333.1 333.1 333.3 333.4 333.6 333.7 333.9 334.0 334.2 334.4 334.4 334.5 334.6 334.8 335.0 335.0 335.1 335.2 335.2 335.2 335.2 335.1 333.8 330.5 325.9 320.7 315.4 310.5 306.5 304.3 303.9 304.9 308.1 313.1 319.8 328.2 338.5 350.2 364.2 379.5 396.3 414.4 433.6 453.7 474.2 494.7 515.0 534.5 552.6 569.3 582.9 593.8 601.8 606.4 608.4 608.1 607.6 607.0 606.2 605.0 603.8 602.1 600.6 599.0 597.3 595.5 593.8 592.1 590.6 589.2 587.9 586.8 586.0 585.4 585.3 585.2 585.0 585.0 584.7 584.5 584.1 583.5 582.9 582.1 581.5 580.8 579.8 578.7 577.9 576.8 575.5 574.2 573.0 571.7 570.3 568.6 567.0 565.5 564.1 562.4 560.5 558.9 557.0 555.3 553.6 551.8 549.9 548.1 546.2 544.4 542.5 540.7 538.6 536.8 535.0 533.2 531.4 529.6 527.8 526.3 524.6 522.9 521.4 519.9 518.1 516.9 515.5 514.0 512.7 511.6 510.4 509.3 508.2 507.1 506.2 505.4 504.5 503.7 503.1 502.5 501.8 501.4 500.9 500.6 500.3 500.2 500.2 500.2 497.3 490.7 480.5 467.8 453.5 438.7 424.4 412.0 401.7 394.3 389.9 388.8 388.8 388.8 388.8 389.0 389.1 389.1 389.3 389.5 389.7 389.9 390.1 390.5 390.6 390.8 391.2 391.5 391.8 392.1 392.4 392.8 393.1 393.4 393.7 393.9 394.3 394.6 394.9 395.1 395.3 395.7 395.9 396.1 396.2 396.4 396.6 396.6 396.8 396.8 396.8 396.9 397.5 399.2 403.2 408.8 416.1 425.5 436.3 448.5 460.9 473.8 486.1 497.4 507.3 515.5 520.7 523.2 522.9 520.2 515.3 508.6 499.9 490.4 480.5 470.8 461.5 452.9 446.0 440.8 437.5 435.8 435.7 436.1 436.9 438.0 439.2 440.7 442.4 444.1 445.9 447.8 449.5 451.2 452.8 454.1 455.0 455.6 456.0 456.1 455.6 455.0 453.7 452.0 450.2 448.2 446.1 443.8 441.4 439.1 436.9 434.9 433.2 431.9 430.9 430.3 430.2 430.4 431.1 432.0 433.3 434.9 436.9 439.0 441.1 443.4 445.6 447.8 449.9 451.8 453.2 454.3 455.3 455.9 456.3 455.6 454.3 452.4 449.9 447.0 443.8 440.8 438.1 435.8 434.2 433.1 432.9 433.0 433.2 433.8 434.4 435.2 436.0 436.9 438.0 438.9 439.9 440.8 441.7 442.5 443.2 443.8 444.3 444.6 444.9 443.0 441.1 439.2 437.3 435.5 433.6 431.8 430.0 428.1 426.3 424.5 422.7 420.9 419.1 417.3 415.6 413.8 412.0 410.3 408.5 406.8 405.1 403.4 401.7 399.9 398.2 396.6 394.9 393.2 391.5 389.9 388.2 386.6 384.9 383.3 381.6 380.0 378.4 376.8 375.2 373.6 372.0 370.4 368.9 367.3 365.7 364.2 362.6 361.1 359.6 358.0 356.5 355.0 353.5 352.0 350.5 349.0 347.5 346.1 344.6 343.1 341.7 340.2 338.8 337.3 335.9 334.5 333.0 331.6 330.2 328.8 327.4 326.0 324.6 323.3 321.9 320.5 319.2 317.8 316.5 315.1 313.8 312.4 311.1 309.8 
308.5 307.2 305.9 304.6 303.3 302.0 300.7 299.4 298.2 296.9 295.6 294.4 293.1 291.9 290.6 289.4 288.2 286.9 285.7 284.5 283.3 282.1 280.9 279.7 278.5 277.3 276.2 275.0 273.8 272.7 271.5 270.3 269.2 268.1 266.9 265.8 264.7 263.5 262.4 261.3 260.2 259.1 258.0 256.9 255.8 254.7 253.6 252.5 251.5 250.4 249.3 248.3 247.2 247.5 247.5 247.6 247.7 247.8 247.9 247.9 247.9 247.9 247.9 248.1 248.1 248.1 248.1 248.2 248.2 248.3 248.4 248.5 248.5 248.6 248.7 248.9 249.0 249.1 249.1 249.2 249.3 249.4 249.4 249.5 249.5 249.5 249.6 249.8 250.4 251.7 253.8 256.5 260.0 264.1 268.8 274.0 280.0 286.5 293.3 300.5 308.1 316.0 323.9 331.8 339.7 347.0 353.5 360.4 366.8 372.6 377.6 381.7 384.8 386.9 387.9 389.9 395.0 404.8 418.7 437.0 457.5 479.3 500.9 520.8 537.3 547.5 552.4 550.6 546.4 540.1 532.5 524.5 513.7 502.8 492.0 481.8 472.4 464.6 458.8 455.0 453.1 453.1 453.8 455.2 457.0 459.2 462.1 465.2 468.6 472.2 475.9 479.8 483.5 487.1 490.3 493.1 495.3 497.2 498.8 499.6 500.2 499.9 499.7 499.2 498.8 498.2 497.5 496.7 496.0 495.1 494.2 493.3 492.6 491.9 491.2 490.7 490.3 490.1 489.9 489.8 489.3 488.2 486.6 484.4 481.8 478.9 475.4 472.0 468.4 464.7 461.0 457.5 454.5 451.6 449.2 447.0 445.5 444.5 444.1 444.0 443.8 443.8 443.8 443.6 443.4 443.3 443.2 443.0 442.8 442.6 442.4 442.0 441.7 441.5 441.3 441.1 441.0 440.9 440.7 440.5 440.5 440.5 440.5 440.4 440.3 440.3 440.3 440.3 440.3 440.3 440.3 440.3 440.3 440.3 440.3 440.2 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 439.9 439.7 439.7 439.5 439.5 439.4 439.2 439.0 438.8 438.6 438.4 438.1 437.9 437.7 437.7 437.7 437.7 437.7 437.7 437.9 438.1 438.4 438.6 439.0 439.2 439.7 440.1 440.5 440.9 441.4 441.8 442.1 442.4 442.7 443.1 443.3 443.5 443.7 443.8 443.9 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.1 444.2 447.6 447.4 447.4 447.4 447.4 447.4 447.4 447.2 447.2 447.2 447.0 446.9 446.9 446.9 446.7 446.7 446.5 446.4 446.4 446.4 446.4 446.4 446.4 446.4 446.3 446.1 446.1 445.7 445.5 445.1 444.7 444.2 443.6 443.1 442.7 442.3 441.9 441.4 441.0 440.8 440.6 440.5 440.5 440.7 441.4 442.5 444.1 446.1 448.5 451.5 454.6 457.9 461.5 465.4 469.4 473.5 477.4 481.4 485.2 488.9 492.2 495.3 497.8 500.1 501.8 503.0 504.0 504.3 504.0 503.8 503.5 503.1 502.4 501.9 501.2 500.5 499.8 499.0 498.3 497.6 497.1 496.6 496.3 496.2 496.2 496.2 496.2 496.2 496.2 496.2 496.4 496.5 496.6 496.8 497.1 497.3 497.3 497.5 497.7 498.0 498.2 498.5 498.7 498.8 498.9 499.1 499.4 499.6 499.6 499.8 499.9 499.9 499.9 499.9 500.2 499.8 499.2 498.3 496.9 495.0 492.9 490.2 487.2 483.9 480.1 476.2 472.5 468.3 463.7 458.7 453.7 448.3 442.9 437.5 431.9 426.1 420.4 414.7 409.2 403.6 398.0 392.5 387.3 381.9 376.9 372.0 367.3 362.7 358.2 354.1 350.2 346.5 343.1 339.8 336.7 334.2 331.7 329.5 327.6 325.9 324.5 323.4 322.6 322.0 321.7 323.3 323.7 324.3 325.3 326.6 328.0 329.6 331.2 332.9 334.6 336.2 337.3 338.3 339.2 339.9 340.1 339.9 339.2 338.0 336.4 334.4 332.2 329.9 327.6 325.5 323.7 322.1 321.0 320.4 320.3 320.4 320.9 321.7 322.8 324.2 325.8 327.4 329.2 331.2 333.1 334.9 336.7 338.3 339.8 341.0 341.9 342.6 342.9 342.8 342.4 341.6 340.5 339.0 337.3 335.5 333.7 332.0 330.6 329.3 328.3 327.6 327.4 327.4 327.4 327.6 327.7 328.0 328.5 329.0 329.4 329.7 330.1 330.5 330.8 331.2 331.5 331.9 332.2 332.6 332.9 333.3 333.6 334.0 334.3 334.7 335.0 335.4 335.7 336.1 336.4 336.8 337.2 337.5 337.9 338.2 338.6 338.9 339.3 
339.6 340.0 340.4 340.7 341.1 341.4 341.8 342.1 342.5 342.9 343.2 343.6 344.0 344.3 344.7 345.0 345.4 345.8 346.1 346.5 346.9 347.2 347.6 347.9 348.3 348.7 349.0 349.4 349.8 350.1 350.5 350.9 351.3 351.6 352.0 352.4 352.7 353.1 353.5 353.8 354.2 354.6 355.0 355.3 355.7 356.1 356.5 356.8 357.2 357.6 358.0 358.3 358.7 359.1 359.5 359.8 360.2 360.6 361.0 361.4 361.7 362.1 362.5 362.9 363.3 363.6 364.0 364.4 364.8 365.2 365.6 365.9 366.3 366.7 367.1 367.5 367.9 368.2 368.6 369.0 369.4 369.8 370.2 370.6 371.0 371.4 371.7 372.1 372.5 372.9 373.3 373.7 374.1 374.5 374.9 375.3 375.7 376.1 376.5 376.9 377.3 377.6 378.0 378.4 378.8 379.2 379.6 380.0 380.4 380.8 381.2 381.6 382.0 382.4 385.3 390.0 395.1 400.7 406.3 412.2 418.0 423.4 428.4 432.9 436.5 439.2 441.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.2 442.3 442.3 442.3 442.3 442.5 442.5 442.5 442.5 442.5 442.5 442.5 442.8 442.4 441.4 439.9 437.7 435.1 432.2 428.9 425.3 421.3 417.3 413.3 409.4 405.4 401.9 398.6 395.9 393.6 391.3 389.5 388.3 387.6 387.5 387.5 387.7 388.2 388.6 389.1 389.9 390.6 391.6 392.4 393.2 394.1 394.9 395.6 396.2 396.8 397.1 397.3 397.5 397.2 396.1 394.1 391.1 387.0 382.0 376.2 369.8 362.8 355.3 347.0 338.6 329.9 321.1 312.2 303.1 294.2 285.5 276.9 268.5 260.5 252.9 245.5 238.7 232.3 226.7 221.9 216.7 212.2 208.1 204.4 201.2 198.5 196.4 194.8 193.7 193.1 193.8 194.1 195.2 196.9 199.0 201.7 204.8 208.3 212.2 216.4 220.7 225.1 229.5 233.8 237.6 241.2 244.2 246.8 248.6 249.7 250.1 249.7 249.1 248.0 246.6 244.8 242.8 240.5 238.2 235.8 233.4 231.2 228.9 227.1 225.5 224.2 223.2 222.6 222.4 223.3 225.7 229.3 233.8 238.8 243.8 248.1 251.0 252.5 252.6 251.8 250.4 248.4 245.9 242.9 239.9 236.9 233.9 231.1 228.6 226.7 225.3 224.5 224.3 224.8 226.4 228.6 231.5 234.7 237.9 241.2 244.0 246.1 247.4 248.9 249.5 273.2 278.8 284.4 290.2 296.2 302.2 308.3 314.6 321.0 327.6 334.3 341.1 348.0 355.1 362.3 369.7 377.3 384.9 392.8 400.8 408.9 417.3 425.8 434.5 442.0 441.8 441.8 441.6 441.4 441.1 440.8 440.4 440.0 439.6 439.2 438.7 438.3 437.9 437.5 437.0 436.6 436.2 435.9 435.6 435.2 435.2 435.0 434.9 434.9 435.4 437.0 439.6 443.2 447.7 453.1 459.1 465.4 471.7 478.0 484.0 489.2 493.6 496.9 498.9 499.9 500.0 499.8 499.5 499.2 498.5 498.0 497.3 496.5 495.6 494.8 493.9 493.2 492.3 491.5 490.8 490.3 489.6 489.4 489.2 489.1 488.8 487.9 486.2 483.8 480.7 476.8 472.6 468.1 463.3 458.9 454.9 451.3 448.3 445.9 444.6 444.1 443.9 443.8 443.8 443.8 443.8 443.6 443.6 443.4 443.2 443.1 443.0 442.8 442.6 442.4 442.3 442.3 442.0 441.8 441.6 441.4 441.3 441.2 441.0 440.8 440.8 440.6 440.5 440.5 440.5 440.5 440.5 440.4 440.3 440.3 440.3 440.0 440.0 439.9 439.7 439.7 439.5 439.5 439.3 439.2 439.2 439.2 439.2 438.7 437.1 434.2 429.4 423.7 417.6 411.4 405.5 400.2 396.2 393.4 392.2 392.4 393.8 396.4 400.5 405.9 412.5 420.3 429.0 438.8 449.5 461.1 473.2 485.5 498.1 510.8 523.3 535.0 545.9 555.8 564.5 571.8 577.6 580.9 582.7 583.3 583.3 583.3 583.3 583.3 583.4 583.6 583.7 583.9 584.2 584.3 584.5 584.6 585.3 585.3 585.3 585.3 585.4 585.6 585.6 585.9 586.2 586.5 586.8 587.0 587.3 587.6 587.9 588.2 588.4 588.7 589.0 589.3 589.4 589.5 589.7 589.7 589.7 589.9 589.7 589.0 588.1 586.7 584.7 582.3 579.6 576.6 573.1 569.3 565.2 560.9 556.5 551.8 547.0 542.3 537.7 533.0 528.4 524.0 519.7 515.5 511.5 508.0 504.6 501.4 498.6 496.2 494.3 492.6 491.3 490.5 489.9 489.9 489.9 490.4 490.8 491.5 492.2 492.9 493.6 494.3 495.0 495.6 495.9 496.0 494.6 494.2 493.7 493.2 492.6 491.9 491.2 490.8 490.3 490.0 489.9 490.0 490.2 490.9 491.8 493.0 494.5 495.8 497.0 497.9 498.6 499.2 498.6 
496.6 492.8 487.6 480.7 472.7 464.0 454.9 445.6 436.5 427.7 419.8 412.8 406.8 402.1 398.6 396.9 396.3 396.1 396.1 395.9 395.7 395.6 395.4 395.2 395.0 394.7 394.3 394.0 393.8 393.4 393.2 392.9 392.6 392.4 392.2 392.0 392.0 392.0 392.0 392.0 392.0 392.1 392.3 392.7 392.9 393.2 393.5 393.7 394.1 394.3 394.3 394.4 393.5 390.7 385.9 379.3 371.0 362.3 353.6 345.3 338.1 332.3 328.6 326.9 326.4 326.5 326.7 327.0 327.4 327.9 328.4 328.9 329.4 330.1 330.7 331.3 331.9 332.4 332.9 333.2 333.5 333.7 333.8 334.0 334.4 335.3 336.5 338.1 339.9 342.1 344.5 347.1 350.0 352.9 355.8 358.7 361.5 364.2 366.7 369.1 371.1 372.8 374.1 375.0 375.6 375.8 375.6 375.6 375.4 375.2 374.8 374.4 374.1 373.7 373.4 372.9 372.4 372.0 371.5 371.1 370.8 370.5 370.4 370.2 370.0 370.0 369.9 369.4 368.3 366.9 364.9 362.6 360.0 357.2 354.1 350.9 347.7 344.6 341.6 338.8 336.2 334.1 332.2 330.8 329.8 329.2 329.1 329.1 329.1 329.1 329.1 329.1 329.3 329.4 329.4 329.6 329.7 329.9 330.0 330.2 330.4 330.5 330.8 331.0 331.2 331.3 331.5 331.8 332.0 332.2 332.3 332.5 332.8 333.0 333.2 333.3 333.5 333.7 333.8 333.8 333.9 334.0 334.1 334.2 334.2 334.2 334.2 334.4 334.3 334.2 334.1 333.9 333.6 333.5 333.1 332.7 332.3 331.9 331.4 331.0 330.6 330.2 329.7 329.2 328.8 328.3 328.0 327.7 327.4 327.0 326.8 326.6 326.4 326.4 326.4 326.4 326.4 326.4 326.6 326.6 326.7 327.0 327.2 327.4 327.7 327.9 328.2 328.5 328.8 329.1 329.4 329.8 330.1 330.4 330.6 330.8 331.2 331.5 331.7 331.8 332.0 332.1 332.3 332.5 332.5 332.5 332.7 332.5 332.5 332.5 332.5 332.5 332.3 332.3 332.3 332.2 332.0 331.9 331.9 331.8 331.6 331.5 331.5 331.3 328.5 328.3 328.3 328.2 328.0 327.9 327.7 327.4 327.0 326.7 326.4 326.0 325.6 325.1 324.6 324.2 323.7 323.2 322.5 321.9 321.3 320.8 320.2 319.6 319.0 318.4 317.6 317.0 316.4 315.8 315.3 314.7 314.2 313.7 313.2 312.7 312.2 311.8 311.3 310.9 310.5 310.2 309.9 309.6 309.3 309.1 308.9 308.8 308.6 308.6 308.6 308.8 309.5 310.8 312.6 315.1 317.8 320.7 323.7 326.6 329.2 331.3 332.9 333.8 334.4 334.1 333.7 332.9 331.8 330.5 329.1 327.6 326.0 324.4 323.0 321.7 320.4 319.5 319.0 318.7 318.6 318.9 319.3 320.2 321.3 322.6 324.0 325.6 327.4 329.1 330.7 332.3 333.8 335.0 336.0 336.8 337.2 337.5 337.2 336.5 335.4 333.9 332.1 330.0 327.6 325.2 322.6 320.1 318.0 315.6 313.4 311.5 309.9 308.6 307.6 307.1 307.1 307.2 307.8 308.9 310.1 311.7 313.5 315.5 317.8 320.2 322.6 324.9 327.3 329.6 331.5 333.3 334.8 336.0 336.8 337.3 337.3 336.7 335.6 333.8 331.5 328.9 325.9 322.8 319.5 316.4 313.5 310.9 308.6 306.8 305.4 304.7 304.6 304.9 305.6 307.0 308.7 310.6 312.6 315.3 318.0 320.9 323.8 326.8 329.4 331.8 333.8 335.4 336.5 337.2 337.4 337.0 336.4 335.4 334.0 332.4 330.6 328.6 326.6 324.6 322.4 320.4 318.7 317.1 315.6 314.2 313.2 312.5 312.0 312.1 312.4 313.1 314.3 315.9 317.7 319.6 321.5 323.4 325.4 327.1 328.5 329.6 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.2", - "input_type": "phoneme", - "offset": 82.555 + "f0_timestep": "0.005" }, { + "offset": 94.41, "text": "SP 你 是 不 食 烟 火 的 仙 瑶 AP 舞 凤 翔 鸾 相 交 的 SP 窈 窕 SP", "ph_seq": "SP n i sh ir b u sh ir y En h uo d e x ian y ao AP w u f eng x iang l uan x iang j iao d e SP y ao t iao SP", - "note_seq": "rest A4 A4 G4 G4 E4 E4 B4 B4 A4 A4 D5 D5 D5 D5 F#4 F#4 G4 G4 rest A4 A4 G4 G4 E4 E4 B4 B4 A4 A4 D5 D5 E5 E5 rest A4 A4 B4 B4 rest", - "note_dur_seq": "0.289 0.361 0.361 0.362 0.362 0.723 0.723 0.722 0.722 0.362 0.362 0.181 0.181 0.5420001 0.5420001 0.3610001 0.3610001 1.446 1.446 0.723 0.3610001 
0.3610001 0.362 0.362 0.723 0.723 0.723 0.723 0.3610001 0.3610001 0.1809998 0.1809998 0.7229996 0.7229996 0.1800003 0.5430002 0.5430002 1.265 1.265 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.244002 0.044998 0.240997 0.120003 0.286995 0.075005 0.528 0.195 0.677002 0.044998 0.257004 0.104996 0.136002 0.044998 0.362 0.18 0.316002 0.044998 1.446 0.678002 0.044998 0.255997 0.105003 0.241997 0.120003 0.602997 0.120003 0.573006 0.149994 0.271004 0.089996 0.136002 0.044998 0.723 0.135002 0.044998 0.407998 0.135002 1.265 0.072", - "f0_timestep": "0.005", + "ph_dur": "0.244 0.045 0.241 0.12 0.287 0.075 0.528 0.195 0.677 0.045 0.257 0.105 0.136 0.045 0.362 0.18 0.316 0.045 1.446 0.678 0.045 0.256 0.105 0.242 0.12 0.603 0.12 0.573 0.15 0.271 0.09 0.136 0.045 0.723 0.135 0.045 0.408 0.135 1.265 0.072", + "ph_num": "2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 1 1", + "note_seq": "rest A4 G4 E4 B4 A4 D5 D5 F#4 G4 rest A4 G4 E4 B4 A4 D5 E5 rest A4 B4 rest", + "note_dur": "0.289 0.361 0.362 0.723 0.722 0.362 0.181 0.542 0.361 1.446 0.723 0.361 0.362 0.723 0.723 0.361 0.181 0.723 0.18 0.543 1.265 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 262.0 266.9 273.1 280.0 287.6 295.9 304.7 314.0 323.8 334.1 344.6 355.1 365.7 376.4 386.8 396.6 405.9 414.5 422.2 429.1 434.7 438.9 441.8 443.4 444.0 443.8 443.8 443.4 443.1 442.8 442.4 441.9 441.3 440.8 440.3 439.6 439.0 438.4 437.7 437.3 436.7 436.2 435.8 435.4 435.2 435.0 434.9 434.9 434.3 432.9 430.7 427.5 423.8 419.1 414.3 409.5 404.6 400.0 395.6 392.2 389.3 387.3 386.2 385.8 386.4 388.4 391.5 395.5 400.3 405.8 411.5 417.7 423.5 429.0 433.9 437.8 441.0 442.7 443.2 442.5 441.1 438.9 436.1 432.7 428.8 424.5 420.0 415.3 410.5 406.0 401.7 397.7 394.3 391.3 389.0 387.3 386.2 385.8 385.7 385.8 386.0 386.2 386.6 387.0 387.5 388.2 388.8 389.4 390.2 390.9 391.5 392.2 393.0 393.7 394.3 394.8 395.4 395.9 396.2 396.6 396.8 396.8 396.9 396.1 394.0 391.0 387.0 382.2 376.5 370.2 363.5 356.4 349.0 341.6 334.4 327.4 320.6 314.4 308.7 303.5 298.9 295.2 292.2 290.1 288.6 288.0 288.0 288.5 289.4 290.6 292.3 294.5 296.8 299.4 302.2 305.3 308.4 311.5 314.5 317.4 320.2 322.7 324.8 326.7 328.0 328.9 329.5 329.5 329.4 329.4 329.2 329.1 328.9 328.8 328.6 328.3 328.1 327.9 327.7 327.4 327.3 327.1 327.0 327.0 327.0 327.0 327.0 327.0 327.1 327.3 327.4 327.6 327.7 328.0 328.4 328.7 329.0 329.3 329.6 329.9 330.3 330.5 330.7 331.0 331.3 331.5 331.7 331.8 331.9 331.9 331.9 332.1 332.1 332.1 332.1 332.1 332.2 332.3 332.4 332.5 332.7 332.7 332.8 333.0 333.1 333.3 333.5 333.6 333.8 333.9 334.2 334.5 334.6 334.8 334.9 335.1 335.3 335.4 335.6 335.7 335.9 336.1 336.2 336.4 336.4 336.5 336.6 336.6 336.7 336.7 336.7 336.7 336.9 336.6 335.6 333.9 331.6 328.9 325.7 322.3 318.8 315.2 312.0 309.0 306.5 304.3 302.8 302.0 301.9 302.4 303.8 306.1 309.5 313.8 319.0 325.2 332.2 340.1 348.6 357.9 367.7 378.1 388.8 399.7 410.7 421.8 432.7 443.0 452.7 461.7 469.9 476.8 482.6 486.8 489.5 491.0 491.2 491.0 491.0 491.0 491.0 491.0 491.0 491.0 491.0 491.0 491.0 491.0 490.9 490.8 490.8 490.8 490.8 490.8 490.8 490.7 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.5 490.4 490.2 490.2 490.0 489.8 489.5 489.3 
489.1 488.6 488.3 487.9 487.6 487.3 486.8 486.6 486.3 486.1 486.0 486.0 486.0 485.7 485.0 484.0 482.6 480.7 478.3 475.7 472.9 469.9 466.7 463.6 460.5 457.5 454.7 452.2 449.8 448.0 446.7 445.6 445.0 445.0 445.6 447.8 451.1 455.4 460.6 466.4 472.5 478.5 484.1 489.1 493.0 495.9 496.8 496.8 496.3 495.5 494.5 493.3 492.1 490.9 489.9 489.1 488.4 488.2 488.2 488.4 488.6 488.8 489.0 489.4 490.0 490.7 491.1 491.6 492.3 492.8 493.2 493.5 493.9 494.2 494.2 494.3 493.6 491.9 489.5 486.4 482.5 478.0 472.9 467.3 461.2 454.9 448.5 442.0 435.4 429.2 423.2 417.6 412.3 407.3 403.0 399.3 396.3 393.9 392.1 391.0 390.6 390.8 391.8 393.4 395.4 397.9 400.9 404.4 408.1 412.1 416.0 419.6 423.9 427.9 431.7 435.3 438.5 441.0 443.1 444.6 445.4 445.6 445.6 445.6 445.6 445.6 445.6 445.6 445.6 445.6 445.6 445.6 445.8 445.9 445.9 445.9 445.9 445.9 446.0 446.1 446.1 446.1 446.1 446.1 446.1 446.1 446.1 446.1 446.1 446.3 445.9 444.6 442.4 439.2 435.0 430.2 425.0 419.6 413.9 408.2 403.6 399.0 394.5 390.7 387.7 385.7 384.7 384.6 385.4 387.0 390.0 394.0 398.7 404.3 410.8 418.2 426.5 435.5 445.1 455.3 466.2 477.3 488.5 500.1 511.6 522.2 531.8 542.4 552.3 561.5 569.6 576.8 582.9 587.4 590.4 592.1 592.7 592.3 591.8 591.2 590.2 589.0 587.8 586.4 585.0 583.3 581.8 580.4 579.0 577.7 576.6 575.7 574.9 574.4 574.2 574.4 574.9 576.6 579.1 582.0 585.5 589.1 592.8 596.6 600.0 602.8 605.1 606.5 607.3 607.0 606.8 606.5 606.1 605.4 604.5 603.8 602.8 601.6 600.5 599.3 597.9 596.4 595.0 593.6 592.2 590.7 589.3 588.1 586.8 585.5 584.4 583.3 582.2 581.3 580.6 580.0 579.5 578.9 578.9 578.9 578.7 578.2 577.5 576.3 574.9 573.0 571.0 568.6 565.9 563.1 559.8 556.3 552.7 548.6 544.4 540.2 535.8 530.9 526.1 521.1 516.0 510.9 505.7 500.4 494.9 489.6 484.4 479.0 473.5 468.3 462.9 457.8 452.7 447.7 442.8 437.9 433.3 428.8 424.4 420.0 416.0 412.0 408.4 404.9 401.5 398.2 395.2 392.3 389.7 387.3 385.2 383.5 381.6 380.0 378.5 377.3 376.3 375.6 375.1 374.8 374.7 374.5 374.5 374.4 374.2 374.0 373.9 373.5 373.3 373.0 372.6 372.3 371.9 371.7 371.4 371.2 370.9 370.6 370.4 370.4 370.4 370.4 370.4 370.4 370.4 370.5 370.6 370.6 370.8 371.0 371.2 371.4 371.5 371.7 371.9 372.1 372.2 372.4 372.6 372.8 372.9 373.1 373.4 373.6 373.6 373.8 374.0 374.2 374.3 374.3 374.5 374.5 374.5 374.6 374.5 373.8 372.6 370.6 368.1 365.3 362.4 359.5 356.8 354.2 352.2 350.7 349.9 349.9 351.3 355.3 361.3 368.6 377.0 385.8 394.3 401.4 406.5 409.5 410.5 410.2 409.6 408.8 407.7 406.5 405.2 403.6 402.1 400.3 398.6 397.0 395.6 394.3 393.1 392.0 391.1 390.4 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.0 390.2 390.3 390.5 390.7 390.9 391.1 391.3 391.6 391.9 392.1 392.5 392.7 393.1 393.4 393.7 394.0 394.3 394.5 394.9 395.1 395.4 395.7 395.9 396.1 396.3 396.5 396.7 396.8 396.8 396.8 396.8 396.9 396.8 396.8 396.6 396.4 396.1 395.7 395.3 395.0 394.4 393.8 393.2 392.6 391.8 391.1 390.3 389.6 388.8 388.0 387.1 386.4 385.4 384.5 383.7 382.9 382.0 381.1 380.3 379.5 378.7 377.8 377.1 376.4 375.7 374.9 374.2 373.5 373.0 372.5 371.9 371.5 371.1 370.8 370.4 370.2 370.1 370.0 370.0 370.0 370.2 370.7 371.7 373.0 374.6 376.4 378.2 380.4 382.6 384.8 387.0 389.0 390.9 392.6 394.0 395.1 395.8 396.2 396.1 395.6 394.7 393.4 391.7 389.7 387.4 384.9 382.3 379.5 377.2 374.5 371.8 369.3 366.8 364.7 362.9 361.3 360.2 359.4 358.8 359.0 359.5 360.7 362.6 365.1 368.1 371.4 375.1 378.9 382.7 386.4 390.0 393.1 395.8 397.9 399.3 400.0 400.0 399.5 398.6 397.2 
395.3 393.1 390.8 388.2 385.4 382.6 379.7 376.9 374.3 371.9 369.6 367.4 365.9 364.7 363.8 363.4 363.6 364.1 365.3 367.3 369.8 372.8 376.2 379.8 383.7 387.6 391.4 395.0 398.2 400.9 403.0 404.4 405.1 405.0 404.0 402.3 399.8 396.7 393.1 389.2 385.2 381.2 377.3 374.2 371.3 368.6 366.5 365.1 364.8 365.1 366.3 368.4 371.3 374.7 378.7 383.0 387.3 391.2 394.6 397.7 399.4 400.4 400.6 400.4 400.2 399.8 399.2 398.5 397.6 396.6 395.6 394.5 393.2 391.8 390.4 388.9 387.4 385.7 384.0 382.4 380.6 378.9 377.2 375.6 373.8 372.1 370.5 368.9 367.4 366.0 364.5 363.3 362.1 361.0 360.1 359.2 358.4 357.5 356.9 356.2 355.5 354.9 354.2 353.6 352.9 352.2 351.6 350.9 350.3 349.6 349.0 348.3 347.7 347.0 346.4 345.8 345.1 344.5 343.8 343.2 342.6 341.9 341.3 340.6 340.0 339.4 338.7 338.1 337.5 336.9 336.2 335.6 335.0 334.4 333.7 333.1 332.5 331.9 331.3 330.7 330.0 329.4 328.8 328.2 327.6 327.0 326.4 325.8 325.2 324.6 324.0 323.4 322.8 322.2 321.6 321.0 320.4 319.8 319.2 318.6 318.0 317.4 316.8 316.2 315.6 315.0 314.5 313.9 313.3 312.7 312.1 311.5 311.0 310.4 309.8 309.2 308.7 308.1 307.5 306.9 306.4 305.8 305.2 304.7 304.1 303.5 303.0 302.4 301.9 301.3 300.7 300.2 299.6 299.1 298.5 297.9 297.4 296.8 296.3 295.7 295.2 294.6 294.1 293.5 293.0 292.5 291.9 291.4 290.8 290.3 289.7 289.2 288.7 288.1 287.6 287.1 286.5 286.0 285.5 284.9 284.4 283.9 283.4 282.8 282.3 281.8 281.3 280.7 280.2 279.7 279.9 282.8 286.6 291.3 296.8 303.2 310.4 318.4 327.2 336.6 346.4 356.6 367.0 376.1 385.9 396.2 406.2 415.5 424.0 431.5 437.8 442.9 446.6 449.0 449.7 449.5 449.3 448.7 448.1 447.3 446.5 445.6 444.6 443.5 442.4 441.4 440.6 439.7 439.2 438.6 438.1 437.7 437.7 437.7 437.8 438.1 438.6 439.0 439.4 439.7 439.9 439.5 438.1 435.4 431.7 426.9 421.3 414.9 408.2 401.1 394.0 387.1 380.4 374.2 368.6 363.7 359.7 356.6 354.4 353.4 354.6 359.6 369.6 381.3 390.0 394.5 394.3 394.3 394.3 394.3 394.2 394.0 394.0 394.0 393.9 393.8 393.8 393.8 393.8 393.8 393.8 394.5 396.5 400.1 405.1 411.2 417.5 423.4 428.4 432.1 433.9 433.6 431.9 428.9 424.8 419.5 413.9 408.4 403.1 398.3 394.5 392.0 390.9 390.5 390.2 389.6 388.6 387.5 386.2 384.7 383.0 381.1 379.1 376.9 374.9 372.8 370.6 368.5 366.6 364.8 363.2 361.9 360.7 359.7 358.8 358.3 358.2 356.2 351.0 343.2 330.1 316.0 302.3 290.1 280.5 274.3 272.8 273.1 274.0 275.4 277.3 279.7 282.5 285.8 289.3 293.1 297.1 301.2 305.4 309.5 313.4 317.1 320.5 323.4 325.7 327.7 329.2 330.1 330.5 330.4 330.4 330.3 330.1 329.9 329.8 329.4 329.3 329.0 328.7 328.4 328.2 327.9 327.8 327.6 327.4 327.4 327.4 327.4 327.4 327.4 327.4 327.6 327.7 327.9 328.2 328.4 328.6 328.9 329.2 329.6 329.9 330.2 330.5 330.8 331.2 331.5 331.8 332.1 332.4 332.5 332.7 332.8 333.0 333.1 333.1 333.2 333.0 332.9 332.6 332.2 331.9 331.5 330.9 330.4 329.9 329.4 329.0 328.5 328.0 327.7 327.6 327.4 327.4 327.4 327.4 327.4 327.5 327.5 327.7 327.9 328.2 328.4 328.7 328.8 329.1 329.3 329.6 329.9 330.2 330.4 330.6 330.9 331.0 331.2 331.3 331.5 331.5 331.5 331.7 331.2 330.1 328.4 326.0 323.0 319.6 316.1 312.7 309.3 306.1 303.3 301.3 299.8 299.1 298.9 299.4 300.5 302.6 305.2 308.4 312.1 316.5 321.5 327.2 333.4 340.2 347.5 355.3 363.4 370.9 379.2 388.3 397.7 407.2 416.7 426.2 435.6 444.8 453.5 461.9 469.7 476.8 483.2 488.8 493.6 497.6 500.0 501.6 502.5 502.3 502.0 501.6 501.2 500.6 499.9 498.9 498.2 497.3 496.5 495.7 495.0 494.3 493.8 493.3 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.0 493.2 493.3 493.3 493.3 493.3 493.3 493.3 493.4 493.6 493.6 493.6 493.6 493.6 493.6 493.6 493.6 493.6 493.6 493.6 493.8 493.7 493.5 493.2 492.9 492.5 492.0 
491.3 490.6 489.8 488.9 487.9 486.8 485.6 484.3 482.9 481.5 480.1 478.5 476.9 475.1 473.3 471.5 469.7 467.9 465.9 463.9 461.9 459.9 457.9 455.7 453.7 451.6 449.5 447.7 445.5 443.6 441.5 439.4 437.3 435.2 433.1 431.2 429.3 427.4 425.4 423.5 421.7 420.0 418.4 416.8 415.2 413.6 412.2 410.9 409.5 408.2 407.0 405.8 404.6 403.6 402.8 402.0 401.2 400.5 399.9 399.5 399.1 398.7 398.3 398.2 398.2 398.2 398.2 398.7 399.3 400.2 401.4 402.6 404.2 405.9 407.7 409.7 411.8 413.6 415.8 418.2 420.5 422.8 425.2 427.2 429.5 431.4 433.2 434.8 436.2 437.5 438.5 439.2 439.6 439.9 439.8 439.7 439.7 439.7 439.7 439.7 439.6 439.5 439.4 439.2 439.2 439.0 438.8 438.7 438.6 438.5 438.5 438.3 438.2 438.1 438.0 437.9 437.7 437.7 437.7 437.7 437.7 437.7 437.2 435.7 432.9 429.2 425.2 420.4 415.1 410.0 405.1 401.0 397.9 395.9 395.0 396.8 402.1 411.2 424.0 440.0 459.3 480.5 502.5 524.5 545.4 564.1 577.5 585.9 590.0 589.8 589.7 589.5 589.3 588.9 588.3 588.1 587.6 587.0 586.4 585.9 585.3 584.7 584.2 583.6 583.2 582.8 582.3 582.0 581.7 581.6 581.6 581.4 580.6 578.6 575.6 571.8 567.7 563.7 559.1 554.6 550.5 547.2 544.5 543.0 546.2 564.1 582.5 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.3 586.6 587.8 589.7 592.2 595.3 599.3 603.7 608.5 613.7 619.1 624.8 630.2 636.0 641.4 646.4 651.1 655.1 658.9 661.4 663.5 664.6 665.4 665.1 664.7 664.2 663.5 662.5 661.5 660.6 659.6 658.5 657.4 656.4 655.5 654.8 654.2 653.9 653.9 653.9 653.9 654.1 654.3 654.3 654.6 655.2 655.6 656.0 656.3 657.0 657.3 657.9 658.5 659.0 659.4 660.0 660.3 660.7 661.0 661.3 661.6 661.9 661.9 661.9 662.2 661.9 661.0 660.0 658.5 656.5 654.2 651.7 648.6 645.2 641.4 637.3 632.9 628.4 623.3 618.0 612.4 606.8 600.9 594.8 588.6 582.2 575.6 569.2 563.4 556.7 549.8 543.0 536.1 529.2 522.3 515.6 508.7 502.1 495.5 488.9 482.3 476.1 469.9 463.9 458.0 452.3 446.7 441.3 436.0 430.9 426.1 421.4 417.0 412.8 408.8 405.0 401.4 398.0 394.7 391.9 389.2 387.0 385.1 383.3 381.4 379.5 377.7 375.8 374.0 372.2 370.3 368.5 366.7 364.9 363.2 361.4 359.6 357.9 356.1 354.4 352.6 350.9 349.2 347.5 345.8 344.1 342.4 347.8 353.4 359.4 365.5 371.9 378.2 384.8 391.4 397.8 404.1 410.3 416.0 421.5 426.5 431.1 434.9 438.0 440.9 443.1 444.5 445.4 445.5 445.4 445.3 445.1 444.9 444.7 444.5 444.3 444.0 443.6 443.1 442.7 442.3 441.9 441.4 441.0 440.6 440.2 439.9 439.6 439.2 438.8 438.6 438.4 438.1 437.9 437.7 437.7 437.7 437.7 437.7 437.7 437.7 437.7 437.8 438.0 438.0 438.2 438.4 438.6 438.7 438.8 439.0 439.2 439.4 439.6 439.7 439.8 440.0 440.0 440.2 440.3 440.3 440.3 440.3 440.3 439.8 438.7 436.8 434.6 431.8 428.6 425.2 421.3 417.3 413.3 409.1 405.0 401.1 397.2 393.6 390.5 387.6 385.1 383.2 381.5 380.6 380.2 384.0 385.7 390.9 398.7 408.8 420.8 434.4 449.2 464.2 478.5 491.5 502.2 510.2 514.9 515.6 515.0 514.0 512.2 510.1 507.8 505.3 502.9 500.2 498.0 496.1 494.6 493.6 493.0 492.9 492.7 492.7 492.7 492.7 492.7 492.7 492.7 492.7 492.5 492.5 492.5 492.5 492.4 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.2 492.4 492.5 492.5 492.5 492.7 492.7 492.7 492.9 493.0 493.1 493.3 493.3 493.3 493.5 493.6 493.6 493.6 493.6 493.6 493.6 493.6 493.7 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.8 493.5 492.7 492.0 490.9 489.5 487.7 485.8 483.7 481.7 479.5 477.1 474.3 471.6 468.9 466.0 463.1 460.3 457.6 455.0 452.4 450.1 447.9 445.9 444.0 
442.3 440.8 439.7 438.9 438.2 437.7 437.7 437.7 437.9 438.2 438.7 439.4 440.0 440.6 441.4 442.2 442.8 443.4 443.8 444.3 444.5 444.8 445.0 445.4 446.1 447.4 449.2 451.1 453.4 456.1 459.0 462.1 465.7 469.3 472.9 476.6 480.3 484.0 487.7 491.3 494.6 497.6 500.4 502.8 504.7 506.5 507.8 508.5 508.8 508.6 507.8 506.5 504.6 502.2 499.6 496.6 493.3 490.0 486.7 483.4 480.2 477.3 475.0 473.0 471.4 470.2 469.8 469.8 470.1 471.0 472.4 474.1 476.2 478.6 481.2 484.0 487.0 490.1 493.0 495.8 498.3 500.5 502.4 504.0 505.0 505.5 505.6 505.3 504.6 503.3 501.9 500.2 498.2 496.0 493.6 491.0 488.4 485.7 483.0 480.4 477.9 475.6 473.5 471.6 469.9 468.6 467.5 466.8 466.7 466.8 467.4 468.7 470.5 472.8 475.6 478.7 482.1 485.8 489.3 493.1 496.6 499.8 502.6 504.9 506.9 508.1 508.7 508.8 508.4 507.6 506.6 505.1 503.5 501.6 499.7 497.9 495.7 493.4 491.2 489.1 487.0 485.1 483.4 483.4 483.4 483.4 483.4 483.4 483.4 483.4 483.4 483.4 483.4 483.4 483.4", - "input_type": "phoneme", - "offset": 94.41 + "f0_timestep": "0.005" }, { + "offset": 105.977, "text": "SP 题 画 诗 也 难 描 SP 千 古 词 不 敢 扰 AP 华 灯 照 墨 香 里 印 色 SP 正 妙 SP", "ph_seq": "SP t i h ua sh ir y E n an m iao SP q ian g u c i0 b u g an r ao AP h ua d eng zh ao m o x iang l i y in s e SP zh eng m iao SP", - "note_seq": "rest A4 A4 B4 B4 E4 E4 D5 D5 G4 G4 A4 A4 rest B3 B3 A4 A4 G4 G4 B4 B4 G4 G4 E4 E4 rest A4 A4 G4 G4 B3 B3 A4 A4 B4 B4 A4 A4 D5 D5 D5 D5 rest D4 D4 E4 E4 rest", - "note_dur_seq": "0.288 0.362 0.362 0.3610001 0.3610001 0.5419999 0.5419999 0.5420001 0.5420001 0.362 0.362 0.723 0.723 0.3609998 0.3620002 0.3620002 0.3609998 0.3609998 0.1810002 0.1810002 0.5419998 0.5419998 0.3610001 0.3610001 0.5419998 0.5419998 0.9040003 0.1809998 0.1809998 0.3610001 0.3610001 0.5419998 0.5419998 0.1810002 0.1810002 0.1809998 0.1809998 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.5430002 0.5419998 0.5419998 2.168 2.168 0.5", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.182997 0.105003 0.271996 0.090004 0.240997 0.120003 0.497002 0.044998 0.437004 0.104996 0.272004 0.089996 0.723 0.195999 0.165001 0.286996 0.075005 0.240997 0.120003 0.120995 0.060005 0.481995 0.060005 0.315994 0.045006 0.542 0.814004 0.089996 0.136002 0.044998 0.241005 0.119995 0.392006 0.149994 0.120995 0.060005 0.136002 0.044998 0.316002 0.044998 0.121002 0.059998 0.361 0.407998 0.135002 0.376999 0.165001 2.168 0.5", - "f0_timestep": "0.005", + "ph_dur": "0.183 0.105 0.272 0.09 0.241 0.12 0.497 0.045 0.437 0.105 0.272 0.09 0.723 0.196 0.165 0.287 0.075 0.241 0.12 0.121 0.06 0.482 0.06 0.316 0.045 0.542 0.814 0.09 0.136 0.045 0.241 0.12 0.392 0.15 0.121 0.06 0.136 0.045 0.316 0.045 0.121 0.06 0.361 0.408 0.135 0.377 0.165 2.168 0.5", + "ph_num": "2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 1 1", + "note_seq": "rest A4 B4 E4 D5 G4 A4 rest B3 A4 G4 B4 G4 E4 rest A4 G4 B3 A4 B4 A4 D5 D5 rest D4 E4 rest", + "note_dur": "0.288 0.362 0.361 0.542 0.542 0.362 0.723 0.361 0.362 0.361 0.181 0.542 0.361 0.542 0.904 0.181 0.361 0.542 0.181 0.181 0.361 0.181 0.361 0.543 0.542 2.168 0.5", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.2 398.5 
399.7 402.0 405.4 409.7 414.5 419.6 424.8 429.9 434.7 438.5 441.4 443.2 444.0 443.8 443.6 443.4 443.1 442.6 442.2 441.8 441.1 440.6 440.0 439.4 439.0 438.4 437.9 437.6 437.3 437.0 437.0 437.0 437.0 437.0 437.0 437.0 437.0 437.0 437.0 437.0 437.1 437.2 437.2 437.2 437.2 437.4 437.5 437.5 437.5 437.5 437.5 437.5 437.5 437.5 437.6 438.2 439.7 442.6 446.7 452.3 458.7 465.8 473.3 481.1 488.8 495.6 501.6 506.6 510.2 512.4 512.9 512.5 511.5 510.3 508.6 506.6 504.4 501.9 499.8 497.6 495.7 494.2 492.9 492.2 492.1 491.9 491.4 490.6 489.5 488.1 486.4 484.6 482.5 480.2 477.6 475.0 472.7 469.5 466.2 462.8 459.4 455.9 452.4 448.7 445.0 441.4 437.8 434.2 430.7 427.2 423.9 420.6 417.4 414.3 411.5 408.7 406.1 403.7 401.5 399.4 397.7 396.2 394.8 393.6 392.6 391.9 391.5 391.4 390.4 387.4 381.3 372.0 360.8 348.4 335.7 323.3 311.7 301.8 294.0 288.4 285.0 283.9 284.2 285.0 286.2 287.9 289.9 292.3 295.0 298.2 301.5 304.9 308.4 311.8 315.5 318.8 321.9 324.7 327.3 329.4 331.2 332.3 332.9 333.2 333.1 333.1 332.9 332.8 332.6 332.5 332.3 332.0 331.7 331.3 331.0 330.7 330.2 329.9 329.4 329.1 328.7 328.2 327.9 327.6 327.2 326.8 326.5 326.2 325.8 325.7 325.4 325.2 325.0 324.9 324.9 324.9 324.9 324.9 325.0 325.3 325.7 326.1 326.6 327.2 328.0 328.7 329.4 330.1 330.8 331.6 332.2 332.8 333.3 333.7 334.0 334.2 334.4 334.7 335.6 337.3 339.9 343.3 347.8 352.9 358.8 365.5 373.0 381.3 390.5 400.2 410.5 421.4 432.9 445.0 457.2 469.7 482.3 494.9 507.5 519.8 531.7 543.0 553.6 563.5 572.3 580.0 585.6 590.5 594.7 597.3 598.4 598.4 598.3 598.3 598.2 597.9 597.7 597.6 597.4 597.1 596.8 596.2 595.9 595.6 595.2 594.7 594.4 593.8 593.5 593.2 592.8 592.3 592.0 591.8 591.5 591.2 590.9 590.6 590.3 590.0 590.0 590.0 590.0 590.0 589.8 592.8 592.5 592.0 591.1 589.8 588.3 586.3 584.1 581.6 578.9 576.1 573.3 569.4 565.6 561.5 557.1 552.8 548.0 543.0 538.1 532.9 527.5 522.1 516.6 511.0 505.3 499.6 493.9 488.2 482.6 476.8 471.3 465.8 460.2 454.7 449.2 444.1 439.0 433.9 429.0 424.3 419.6 415.2 410.9 406.6 402.7 399.0 395.4 392.0 388.8 385.7 382.8 380.1 377.8 375.4 373.4 371.6 370.1 368.7 367.4 366.4 365.7 365.2 364.9 364.7 367.3 369.2 371.7 374.6 377.8 380.9 383.9 386.7 389.3 391.2 392.3 393.0 393.0 392.9 392.8 392.6 392.2 392.0 391.7 391.3 390.9 390.4 390.0 389.5 389.1 388.7 388.3 387.9 387.6 387.3 387.2 387.0 386.8 386.8 386.8 386.8 386.8 386.9 387.0 387.2 387.4 387.6 387.8 388.0 388.2 388.5 388.9 389.1 389.4 389.7 390.1 390.5 390.8 391.0 391.4 391.8 392.1 392.4 392.7 393.1 393.4 393.6 393.8 394.0 394.2 394.3 394.5 394.7 394.7 394.7 394.8 394.9 394.7 394.7 394.7 394.7 394.6 394.5 394.5 394.3 394.3 394.1 393.9 393.7 393.6 393.6 393.4 393.2 393.0 392.9 392.9 392.7 392.5 392.4 392.3 392.2 392.2 392.0 392.0 392.0 392.0 392.0 391.9 391.8 391.8 391.8 391.8 391.7 391.5 391.5 391.3 391.3 391.2 391.1 391.0 390.8 390.6 390.5 390.4 390.3 390.1 390.0 390.0 389.8 389.6 389.5 389.5 389.3 389.3 389.3 389.3 389.3 389.3 389.4 389.6 391.5 392.0 393.0 394.4 396.1 398.4 400.8 403.7 406.9 410.3 413.9 417.7 421.5 425.4 429.3 433.2 437.0 440.3 443.6 446.5 449.2 451.5 453.4 454.7 455.7 456.2 456.0 455.3 453.9 452.1 449.5 446.6 443.4 440.3 437.0 433.7 430.8 428.1 425.7 423.9 422.6 421.8 421.9 422.2 423.1 424.5 426.4 428.6 430.9 433.5 436.2 438.9 441.5 444.0 446.1 447.9 449.2 450.1 450.5 450.2 449.6 448.8 447.7 446.4 444.7 442.6 440.5 438.3 436.0 433.9 431.7 429.7 427.8 426.2 424.8 423.8 423.0 422.6 422.6 422.6 423.2 424.1 425.3 426.7 428.3 430.0 431.8 433.5 435.2 436.9 438.5 439.7 440.8 441.5 439.6 435.9 432.2 428.6 425.0 421.5 417.9 414.4 411.0 407.5 404.1 400.7 397.4 
394.0 390.7 387.5 384.2 381.0 377.8 374.6 371.5 368.4 365.3 362.2 359.2 356.2 353.2 350.3 347.3 344.4 341.5 338.7 335.8 333.0 330.2 327.5 324.7 322.0 319.3 316.6 314.0 311.4 308.7 306.2 303.6 301.1 298.5 296.0 293.5 291.1 288.7 286.2 283.8 281.5 279.1 276.8 274.4 272.1 269.9 267.6 265.4 263.1 260.9 258.8 256.6 254.4 252.3 250.2 248.5 248.5 248.6 248.7 248.7 248.8 248.8 248.9 248.9 249.0 249.1 249.1 249.2 249.3 249.5 249.5 249.5 249.7 249.8 249.8 249.9 250.0 250.0 250.1 250.1 250.2 250.2 250.3 250.4 250.4 250.4 250.4 250.4 250.4 250.6 251.1 252.6 254.9 258.0 261.9 266.6 271.9 278.2 285.0 292.3 300.1 308.4 316.9 325.6 333.2 341.1 349.4 357.4 364.5 371.1 376.7 381.3 384.8 387.0 388.0 388.2 388.2 388.2 388.4 388.6 389.0 389.4 389.7 390.1 390.5 390.9 391.2 391.6 392.0 392.4 392.6 393.0 393.7 394.8 396.1 397.8 399.8 402.1 404.5 407.1 409.8 412.8 415.8 418.7 421.6 424.6 427.2 429.9 432.3 434.4 436.5 438.2 439.5 440.3 440.8 441.2 442.5 443.2 443.8 444.3 444.9 445.5 445.9 446.1 446.5 446.7 446.7 446.8 446.1 445.1 443.1 440.6 437.6 434.2 430.2 426.1 421.8 417.5 413.4 409.8 405.9 402.3 399.2 396.5 394.2 392.7 391.7 391.3 391.1 391.1 391.1 391.1 391.1 391.1 391.1 391.1 391.1 391.0 390.9 390.9 390.9 390.9 390.9 390.8 390.6 390.6 390.6 390.6 390.6 390.6 390.6 390.6 390.6 390.6 390.6 390.7 391.1 391.5 392.0 392.6 393.1 393.4 393.6 394.9 398.5 405.8 416.3 427.1 436.2 442.3 444.8 444.6 444.4 444.0 443.4 442.8 442.2 441.5 440.9 440.3 439.6 439.1 438.7 438.5 438.5 438.6 439.2 440.4 442.2 444.1 446.7 449.6 452.8 455.9 458.9 462.7 466.5 470.2 473.8 477.1 480.1 482.6 485.0 486.8 488.0 488.8 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.1 489.3 489.3 489.5 489.6 489.6 489.6 489.6 489.9 489.9 490.1 490.6 491.0 491.6 492.3 492.7 493.7 494.3 494.9 495.5 496.0 496.5 496.7 496.9 496.6 495.4 493.2 489.9 485.7 480.7 475.1 468.9 462.1 455.3 448.3 441.3 434.4 427.9 421.7 416.0 410.9 406.5 402.9 400.0 397.9 396.6 396.3 396.3 396.3 396.3 396.3 396.3 396.3 396.5 396.6 396.6 396.6 396.8 396.8 396.8 396.8 396.8 396.9 396.9 396.8 396.8 396.6 396.4 396.2 396.0 395.8 395.6 395.3 395.0 394.7 394.4 394.2 393.8 393.6 393.4 393.2 393.1 393.1 393.1 393.0 392.4 391.3 389.8 387.6 384.9 381.9 378.5 374.7 370.4 366.0 361.3 356.4 351.5 346.4 341.4 336.4 331.5 326.8 322.1 317.7 313.6 310.1 306.7 303.2 300.2 297.6 295.4 293.4 291.9 290.8 290.0 289.8 290.3 291.9 294.5 297.9 302.3 307.3 312.4 317.7 322.5 326.7 330.2 332.3 333.3 333.1 332.7 332.2 331.6 330.8 329.7 328.4 327.1 325.6 324.1 322.7 321.0 319.5 318.0 316.6 315.2 314.0 313.0 312.1 311.5 310.9 310.6 310.6 310.6 310.9 311.6 312.4 313.5 314.7 316.1 317.6 319.2 321.0 322.8 324.5 326.2 327.9 329.3 330.7 332.0 333.1 333.9 334.4 334.7 334.7 334.3 333.7 332.5 331.2 329.5 327.8 326.2 324.0 321.9 319.7 317.6 315.7 313.8 312.2 310.9 309.9 309.1 308.6 308.7 308.9 309.3 310.1 311.1 312.2 313.5 314.9 316.6 318.2 319.9 321.5 323.1 324.5 325.8 327.0 328.1 328.9 329.5 329.8 330.2 330.9 332.7 335.7 339.6 344.3 350.0 356.6 364.1 372.4 381.5 391.2 401.5 412.5 423.8 435.4 447.1 451.3 450.3 449.4 448.5 447.5 446.6 445.7 444.8 443.8 442.9 442.0 441.1 440.2 439.3 438.3 437.4 436.5 435.6 434.7 433.8 432.9 432.0 431.1 430.2 429.3 428.4 427.5 426.7 425.8 424.9 424.0 423.1 422.3 421.4 420.5 419.6 418.8 417.9 417.0 416.2 415.3 414.4 413.6 412.7 411.9 411.0 410.1 409.3 408.4 407.6 406.7 405.9 405.1 404.2 403.4 402.5 401.7 400.9 400.0 399.2 398.4 397.6 396.7 395.9 
395.1 394.3 393.4 392.6 391.8 391.0 390.2 389.4 388.6 387.8 387.0 386.2 385.4 384.6 383.8 383.0 382.2 381.4 380.6 379.8 379.0 378.2 377.4 376.6 375.9 375.1 374.3 373.5 372.8 372.0 371.2 370.4 369.7 368.9 368.1 367.4 366.6 365.8 365.1 364.3 363.6 362.8 362.1 361.3 360.6 359.8 359.1 358.3 357.6 356.8 356.1 355.4 354.6 353.9 353.1 352.4 351.7 350.9 350.2 349.5 348.8 348.0 347.3 346.6 345.9 345.2 344.4 343.7 343.0 342.3 341.6 340.9 340.2 339.5 338.8 338.1 337.4 336.7 336.0 335.3 334.6 333.9 333.2 332.5 331.8 331.1 330.4 329.7 329.1 328.4 327.7 327.0 326.3 325.7 325.0 324.3 323.6 323.0 322.3 321.6 320.9 320.3 319.6 319.0 318.3 317.6 317.0 316.3 315.7 315.0 314.3 313.7 313.0 312.4 311.7 312.9 316.0 320.2 325.7 332.3 340.0 348.5 357.7 367.5 377.8 387.9 397.7 407.0 415.5 423.1 429.2 433.6 436.3 437.0 436.5 436.1 435.6 435.0 434.7 434.4 434.2 434.2 433.3 430.9 426.9 421.3 414.5 407.6 401.2 395.5 391.0 388.2 387.5 387.5 387.5 387.6 387.7 387.8 387.9 388.1 388.5 388.7 388.9 389.1 389.5 389.7 390.0 390.3 390.6 390.9 391.1 391.3 391.5 391.7 391.8 391.8 391.8 392.0 391.8 391.5 391.0 390.3 389.4 388.6 387.5 386.4 385.4 384.4 383.5 382.6 382.0 381.6 381.4 381.3 380.2 377.3 372.9 367.0 359.9 351.7 342.3 332.6 322.6 312.5 302.6 293.0 284.0 275.7 268.4 261.9 256.4 252.0 248.9 246.8 245.8 245.7 245.7 245.7 245.8 245.9 246.0 246.2 246.4 246.6 246.8 247.0 247.3 247.5 247.8 248.0 248.2 248.5 248.7 248.9 249.0 249.3 249.4 249.5 249.5 249.5 249.6 249.3 248.5 247.3 245.5 243.4 241.0 238.5 236.3 234.2 232.4 230.9 230.1 229.8 229.9 230.5 231.5 232.9 234.7 236.7 239.0 241.3 243.6 245.9 248.1 249.8 251.2 252.1 252.7 252.7 252.2 251.3 249.8 248.0 245.9 243.6 241.3 238.9 236.8 234.9 233.4 232.2 231.4 231.2 231.4 232.0 233.0 234.4 236.0 237.8 239.9 241.9 243.9 245.8 247.5 248.8 249.8 250.2 250.5 250.2 249.6 248.8 247.7 246.4 245.1 243.5 242.0 240.6 239.4 238.3 237.4 236.7 236.5 236.9 238.3 241.4 246.7 253.7 262.2 272.3 283.9 297.1 311.4 326.6 342.3 358.4 374.3 389.5 403.1 414.7 424.2 431.1 435.3 436.5 436.5 436.5 436.7 436.9 437.0 437.3 437.7 437.9 438.3 438.7 439.2 439.5 439.7 440.2 440.4 440.6 440.8 441.0 441.0 441.2 439.6 435.7 430.3 424.6 420.4 419.4 420.4 423.8 428.2 433.8 441.1 449.5 458.7 467.9 476.6 484.6 491.2 496.5 499.1 500.2 499.9 499.9 499.5 499.2 498.9 498.4 498.2 497.7 497.2 496.9 496.6 496.2 496.2 492.1 491.9 491.9 491.9 491.7 491.6 491.4 494.1 493.0 491.3 489.1 486.2 482.7 479.1 475.0 470.8 466.7 462.4 458.4 454.8 451.6 448.7 446.3 444.6 443.7 443.4 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.1 443.0 442.8 442.8 442.8 442.8 442.7 442.5 442.5 442.5 442.5 442.4 442.3 442.3 442.3 442.3 442.1 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.0 442.1 442.3 442.3 442.5 442.5 442.6 442.8 442.8 443.0 443.1 443.1 443.1 443.1 443.1 439.2 441.3 445.0 450.3 457.1 465.5 475.0 485.5 496.8 508.7 520.8 533.0 544.6 555.5 565.4 574.0 580.9 585.4 587.9 588.9 588.7 588.7 588.7 588.5 588.3 588.3 588.0 587.7 587.7 587.5 587.2 586.9 586.7 586.7 586.4 586.3 586.3 586.3 586.3 586.3 586.5 586.9 587.4 588.0 588.6 589.5 590.4 591.3 592.4 593.3 594.4 595.4 596.2 597.1 597.9 598.5 598.9 599.2 599.5 599.3 598.5 597.3 595.5 593.2 590.7 587.8 584.8 582.0 579.2 577.0 575.2 573.8 572.8 572.3 572.7 574.1 576.4 579.4 583.3 587.8 592.1 596.4 600.4 603.7 606.3 607.7 608.3 607.7 606.2 604.1 601.2 597.8 594.0 590.0 586.1 582.4 578.8 575.3 572.3 569.8 568.0 567.0 566.7 567.0 567.8 569.1 570.8 572.8 575.1 577.6 580.4 582.9 585.5 587.9 590.0 591.8 593.2 594.2 594.7 591.8 588.3 583.1 578.0 572.9 567.8 
562.8 557.8 552.9 548.0 543.2 538.4 533.6 528.9 524.2 519.6 515.0 510.4 505.9 501.4 497.0 492.6 488.3 484.0 479.7 475.4 471.2 467.1 462.9 458.8 454.8 450.8 446.8 442.8 438.9 435.0 431.2 427.4 423.6 419.9 416.1 412.5 408.8 405.2 401.6 398.1 394.6 391.1 387.6 384.2 380.8 377.4 374.1 370.8 367.5 364.3 361.0 357.8 354.7 351.5 348.4 345.4 342.3 339.3 336.3 333.3 330.4 327.4 324.5 321.7 318.8 316.0 313.2 310.5 307.7 305.0 302.3 299.6 297.0 294.3 291.7 289.2 286.6 284.1 281.6 279.1 276.6 274.2 271.7 269.3 267.0 264.6 262.3 259.9 257.6 255.4 253.1 250.9 248.7 246.5 244.3 242.1 240.0 237.9 235.8 233.7 231.6 234.2 237.0 240.1 243.3 246.8 250.4 254.1 257.9 261.8 265.6 269.3 273.1 276.6 279.9 283.2 285.9 288.5 290.7 292.5 293.8 294.8 295.5 295.9 295.7 295.7 295.6 295.5 295.5 295.4 295.2 295.0 294.8 294.6 294.3 294.0 293.9 293.6 293.3 293.0 292.8 292.5 292.2 291.9 291.6 291.4 291.2 291.0 290.8 290.7 290.5 290.4 290.3 290.3 290.3 290.3 290.3 290.3 290.3 290.3 290.5 290.5 290.6 290.7 290.9 291.0 291.1 291.3 291.4 291.6 291.9 292.0 292.1 292.4 292.6 292.7 292.9 293.0 293.2 293.3 293.4 293.6 293.7 293.7 293.8 293.8 293.8 293.9 293.9 293.8 293.8 293.8 293.7 293.6 293.4 293.3 293.1 293.0 292.8 292.6 292.4 292.2 291.9 291.8 291.5 291.4 291.1 290.9 290.7 290.5 290.3 290.2 290.0 289.9 289.8 289.6 289.5 289.5 289.5 289.5 289.5 289.5 289.5 289.5 289.5 289.5 289.5 289.5 289.6 289.6 289.6 289.6 289.8 289.8 289.8 289.9 290.0 290.0 290.1 290.1 290.2 290.3 290.3 290.5 290.5 290.6 290.6 290.7 290.8 290.8 291.0 291.0 291.1 291.1 291.2 291.3 291.3 291.3 291.4 291.5 291.5 291.5 291.5 291.5 291.5 291.5 291.6 291.7 292.1 292.6 293.2 294.1 295.4 296.7 298.3 300.0 301.9 303.8 306.0 308.2 310.4 312.6 314.7 317.0 319.1 321.0 322.8 324.5 326.0 327.3 328.3 329.1 329.6 329.9 329.9 329.8 329.8 329.8 329.7 329.6 329.5 329.4 329.2 329.1 328.9 328.6 328.4 328.2 328.0 327.7 327.6 327.4 327.2 326.9 326.8 326.6 326.3 326.1 325.9 325.8 325.7 325.5 325.5 325.4 325.3 325.3 325.3 325.3 325.2 325.1 325.0 324.9 324.7 324.4 324.1 323.8 323.4 323.0 322.7 322.2 321.6 321.1 320.6 319.9 319.3 318.7 318.1 317.5 316.9 316.2 315.5 314.9 314.3 313.7 313.0 312.4 311.8 311.2 310.8 310.2 309.7 309.3 309.0 308.5 308.2 307.9 307.6 307.3 307.2 307.1 307.0 307.0 307.0 307.0 307.5 308.1 309.0 310.0 311.2 312.7 314.3 316.0 317.8 319.8 321.8 323.8 325.8 327.7 329.6 331.5 333.2 334.6 336.1 337.3 338.2 337.5 337.4 337.1 336.7 336.1 335.4 334.6 333.7 332.7 331.6 330.5 329.4 328.3 327.2 326.1 325.2 324.4 323.7 323.0 322.6 322.3 322.3 322.3 322.5 322.8 323.0 323.7 324.3 324.9 325.7 326.6 327.4 328.3 329.1 329.9 330.6 331.3 331.9 332.4 332.8 333.0 333.2 333.1 332.9 332.5 332.0 331.3 330.5 329.6 328.8 327.9 326.8 326.0 325.1 324.3 323.6 323.0 322.6 322.3 322.3 322.3 322.4 322.6 322.9 323.4 324.0 324.7 325.4 326.2 327.0 327.9 328.7 329.6 330.5 331.2 331.9 332.7 333.4 333.9 334.4 334.8 335.0 335.2 335.4 335.2 334.7 334.2 333.6 332.7 331.7 330.8 329.7 328.6 327.5 326.6 325.7 324.9 324.4 324.0 323.8 323.8 323.8 324.2 324.6 325.2 325.9 326.8 327.7 328.5 329.6 330.5 331.3 332.2 333.0 333.7 334.1 334.5 334.7 334.2 333.9 333.7 333.5 333.2 333.0 332.7 332.4 332.0 331.8 331.6 331.3 331.0 330.6 330.5 330.3 330.1 330.0 329.8 329.7 329.6 330.5 330.4 330.4 330.4 330.2 330.0 329.9 329.7 329.5 329.2 329.1 328.7 328.4 328.1 327.8 327.4 327.0 326.5 326.0 325.6 325.1 324.6 324.2 323.5 323.0 322.5 321.9 321.2 320.6 320.0 319.3 318.6 317.9 317.1 316.4 315.6 314.9 314.3 313.5 312.7 312.0 311.1 310.3 309.5 308.7 307.9 307.0 306.1 305.3 304.4 303.5 302.8 301.9 301.1 300.1 299.1 298.3 297.4 
296.6 295.7 294.9 294.0 293.2 292.3 291.5 290.6 289.7 288.8 288.0 287.1 286.4 285.6 284.8 284.0 283.2 282.4 281.7 280.9 280.2 279.5 278.8 278.0 277.3 276.6 276.0 275.4 274.8 274.3 273.6 273.1 272.5 271.8 271.3 270.8 270.3 269.8 269.3 268.9 268.4 268.0 267.6 267.2 266.8 266.5 266.2 265.8 265.5 265.3 265.0 264.8 264.5 264.4 264.3 264.1 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7", - "input_type": "phoneme", - "offset": 105.977 + "f0_timestep": "0.005" } ] \ No newline at end of file diff --git "a/samples/\345\267\246\346\211\213\346\214\207\346\234\210\346\224\271.ds" "b/samples/\345\267\246\346\211\213\346\214\207\346\234\210\346\224\271.ds" deleted file mode 100644 index 95de7e013..000000000 --- "a/samples/\345\267\246\346\211\213\346\214\207\346\234\210\346\224\271.ds" +++ /dev/null @@ -1,268 +0,0 @@ -[ - { - "text": "AP", - "ph_seq": "z uo sh ou w o d a d i SP", - "note_seq": "D#3 D#3 A#3 A#3 A#3 A#3 C#4 C#4 D#4 D#4 rest", - "note_dur_seq": "0.131849 0.222603 0.188356 0.299658 0.111301 0.556507 0.111301 0.145548 0.111301 0.642123 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.131849 0.222603 0.188356 0.299658 0.111301 0.556507 0.111301 0.145548 0.111301 0.642123 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "176.5 176.1 176.1 176.1 176.1 176.1 176.1 176.1 175.1 171.9 170.3 166.0 163.7 161.7 158.6 157.4 156.5 155.9 156.3 156.7 157.7 158.1 158.1 158.1 157.8 157.3 156.9 156.4 155.7 155.7 155.7 155.7 155.7 155.7 156.6 157.4 157.9 158.6 159.0 159.0 158.9 158.8 158.6 159.6 157.6 156.8 169.0 185.2 235.4 237.4 238.8 238.1 236.2 234.6 232.9 232.7 232.7 232.7 233.1 234.3 235.0 235.5 237.2 237.6 237.7 237.7 237.7 237.7 237.4 236.9 235.8 235.4 235.1 233.9 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 232.9 233.2 234.4 235.0 235.2 235.2 235.2 234.7 232.7 232.3 232.3 232.3 232.3 232.3 232.5 233.1 234.0 234.6 235.4 235.9 235.9 235.9 235.4 235.0 234.2 232.5 232.0 231.2 231.1 231.1 231.1 231.2 231.5 232.8 233.5 234.2 234.8 235.1 235.2 235.2 235.2 235.2 235.2 234.3 232.4 232.5 232.7 232.7 233.2 232.9 235.6 238.3 241.7 247.8 250.0 258.0 260.9 263.6 270.1 272.7 274.2 277.0 278.1 278.8 279.8 279.8 279.4 281.4 282.8 284.8 313.3 307.4 314.9 317.1 317.5 314.7 313.5 312.7 311.5 310.8 310.8 309.5 309.2 310.1 313.3 314.0 314.0 314.0 314.0 313.3 312.2 311.1 309.0 307.9 307.6 307.6 307.6 308.3 310.6 311.7 313.8 314.6 315.5 315.8 315.8 315.8 315.5 315.1 314.0 311.1 309.7 309.2 309.2 309.7 311.3 312.2 312.9 314.9 315.7 316.9 317.5 317.5 317.3 316.9 316.6 315.8 313.7 312.6 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 1.7174657534246576 - }, - { - "text": "AP", - "ph_seq": "y ou sh ou SP", - "note_seq": "D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.063356 0.347603 0.089041 0.359589 0.100000", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.063356 0.347603 0.089041 0.359589 0.100000", - "f0_timestep": "0.011609977722167968", - 
"f0_seq": "267.7 275.0 282.2 288.5 288.8 289.6 295.2 301.9 306.5 312.0 312.2 313.1 313.3 313.1 310.8 309.0 309.0 311.3 312.7 312.0 312.8 314.8 318.4 325.2 328.9 339.2 343.5 346.2 348.2 348.2 347.6 340.9 336.6 332.1 323.6 319.3 316.2 296.7 307.9 305.4 313.8 312.4 310.9 310.4 310.2 310.2 310.2 310.2 309.7 309.3 308.3 304.7 303.0 300.6 296.5 293.5 287.4 283.9 281.1 279.4 278.6 278.0 277.0 278.0 279.4 280.7 279.6 277.7 274.2 273.1 271.0 269.6 270.2 276.2 280.9 280.9 280.9 280.9 280.9 280.9 280.9 280.9", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 4.328767123287671 - }, - { - "text": "AP", - "ph_seq": "w o SP", - "note_seq": "A#3 A#3 rest", - "note_dur_seq": "0.061644 0.472603 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.061644 0.472603 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "257.4 242.6 234.4 233.1 230.3 229.1 231.2 234.0 236.2 237.3 237.0 234.7 233.5 232.3 230.5 230.9 231.9 234.3 235.9 237.0 237.0 235.9 234.7 232.3 231.1 231.2 232.3 233.4 235.7 236.1 236.1 234.6 233.1 231.5 229.1 228.8 230.7 235.5 236.5 236.3 230.3 224.2 217.9 218.0 220.1 226.7 230.3 230.3 230.3 230.3 230.3 230.3 230.3 230.3", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 5.193493150684931 - }, - { - "text": "AP", - "ph_seq": "zh e SP", - "note_seq": "C#4 C#4 rest", - "note_dur_seq": "0.097603 0.078767 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.097603 0.078767 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.5 293.5 293.5 293.5 293.5 293.0 290.6 288.6 283.0 267.6 266.5 268.5 272.1 275.6 276.5 270.7 270.7 270.7 270.7 270.7 270.7 270.7 270.7", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 5.808219178082191 - }, - { - "text": "AP", - "ph_seq": "t ian SP", - "note_seq": "A#3 A#3 rest", - "note_dur_seq": "0.090753 0.667808 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.090753 0.667808 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "260.0 242.8 233.1 233.1 233.1 233.1 233.1 233.1 233.1 232.5 232.0 233.2 233.9 234.6 236.1 236.7 235.9 232.9 232.9 232.9 232.9 232.9 233.5 235.9 236.7 236.7 236.5 235.8 234.3 233.6 232.8 231.7 231.7 231.7 232.4 232.8 233.4 233.9 234.3 234.3 234.3 234.3 234.2 233.6 233.2 232.4 231.2 230.3 229.3 229.3 229.3 229.6 230.3 231.1 232.4 233.2 233.5 233.1 232.7 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 6.022260273972603 - }, - { - "text": "AP", - "ph_seq": "zh ang w en l ie ch u l e SP", - "note_seq": "G#3 G#3 A#3 A#3 B3 B3 C#4 C#4 D#4 D#4 rest", - "note_dur_seq": "0.121575 0.351027 0.111301 0.243151 0.116438 0.356164 0.157534 0.207192 0.101027 0.359589 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.121575 0.351027 0.111301 0.243151 0.116438 0.356164 0.157534 0.207192 0.101027 0.359589 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "223.3 223.3 223.3 223.3 223.3 223.3 222.9 222.6 222.2 222.0 221.1 219.4 217.6 215.6 210.6 207.9 207.4 207.4 207.4 207.4 207.4 208.1 208.7 210.1 210.2 209.7 209.2 208.6 207.5 207.1 206.9 206.9 206.9 207.4 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.0 208.3 198.4 208.8 215.0 223.5 227.5 230.6 233.1 233.1 232.3 233.2 233.4 233.4 233.4 233.4 230.8 229.3 229.1 229.6 230.1 230.8 233.1 233.4 234.6 234.6 234.6 234.6 234.3 234.0 232.3 232.1 233.1 233.8 235.2 237.2 241.8 243.9 246.8 248.1 249.1 249.2 
249.2 248.7 247.4 246.8 246.1 244.8 244.7 244.7 245.2 245.8 246.9 249.5 250.4 250.5 250.0 246.2 244.7 244.7 244.7 245.0 245.4 245.8 246.5 247.1 247.8 248.5 248.5 248.5 248.5 248.5 247.4 246.7 246.1 244.8 245.0 246.6 251.5 255.7 260.6 270.5 273.5 275.9 277.2 278.3 280.2 280.4 280.4 277.5 275.9 274.3 271.9 271.8 271.8 272.4 273.7 274.3 274.6 275.3 276.9 276.9 276.9 278.0 280.4 282.6 292.5 298.3 303.6 315.9 317.3 317.5 313.1 309.5 302.8 299.5 296.9 295.7 295.7 295.7 299.7 301.9 304.2 308.6 310.4 312.0 312.6 312.7 312.0 309.0 305.8 299.5 295.0 293.0 285.8 283.8 283.5 287.0 287.0 287.0 287.0 287.0 287.0 287.0 287.0", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 8.405821917808218 - }, - { - "text": "AP", - "ph_seq": "sh ir f ang d e sh an SP", - "note_seq": "C#4 C#4 D#4 D#4 D#4 D#4 F4 F4 rest", - "note_dur_seq": "0.095890 0.116438 0.089041 0.255137 0.104452 0.303082 0.159247 0.719178 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0", - "ph_dur": "0.095890 0.116438 0.089041 0.255137 0.104452 0.303082 0.159247 0.719178 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "279.4 278.5 276.7 273.1 272.7 272.7 270.9 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 270.5 271.3 273.1 274.0 275.3 278.1 279.1 279.8 283.2 287.8 300.7 306.5 309.9 312.7 313.5 313.8 313.7 313.5 313.5 312.7 311.8 310.4 307.7 307.0 306.8 306.8 306.8 308.3 309.2 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.3 309.9 310.4 311.8 312.4 313.3 313.3 313.3 313.3 312.7 311.3 308.1 307.7 307.7 309.0 312.0 315.9 325.1 331.3 346.6 352.4 356.7 361.8 364.1 365.7 366.8 368.3 368.7 370.6 370.6 372.1 372.6 375.8 376.7 376.7 376.7 376.7 376.7 376.7 376.7 375.2 374.3 371.9 370.3 368.8 363.6 360.2 352.6 342.7 343.4 343.6 344.2 345.2 347.8 349.2 350.6 352.5 353.5 353.7 351.3 348.2 345.2 341.6 341.6 343.0 345.0 346.8 349.6 350.6 351.7 353.1 353.7 353.7 352.9 349.0 344.4 341.3 341.6 344.4 346.0 347.6 348.0 347.6 348.2 339.9 334.2 323.2 314.3 312.1 310.2 309.5 310.8 310.9 310.9 310.9 310.9 310.9 310.9 308.8 307.2 304.9 297.2 292.5 287.8 279.8 277.5 275.6 272.3 271.3 271.3 271.3 271.3 271.3 271.3 271.3 271.3", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 10.743150684931507 - }, - { - "text": "AP", - "ph_seq": "d ian SP", - "note_seq": "F4 F4 rest", - "note_dur_seq": "0.083904 1.078767 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.083904 1.078767 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.0 349.8 349.6 347.4 345.2 344.4 345.8 347.6 349.6 354.5 354.5 354.5 351.7 349.6 348.2 345.4 344.2 343.6 343.8 345.0 350.0 351.5 352.9 353.7 353.7 353.7 353.3 351.7 350.0 346.6 345.0 343.4 341.4 340.9 341.1 342.0 343.8 347.4 349.4 351.0 353.7 353.7 353.7 353.7 352.5 347.6 345.2 344.4 344.4 344.4 344.4 345.8 347.6 349.4 353.7 353.7 353.7 353.5 352.3 349.8 342.4 341.8 343.0 344.0 346.0 350.0 352.3 354.7 355.5 355.7 356.4 356.4 355.7 352.3 346.0 345.4 345.4 345.4 345.6 348.0 349.6 351.0 352.7 353.1 354.5 355.5 355.5 355.1 352.1 350.6 350.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 12.60445205479452 - }, - { - "text": "AP", - "ph_seq": "AP b a sh ir g uang SP", - "note_seq": "rest D#4 D#4 F#4 F#4 G#4 G#4 rest", - "note_dur_seq": "0.275685 0.113014 0.315068 0.095890 0.287671 0.123288 1.541096 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0", - "ph_dur": "0.275685 0.113014 0.315068 0.095890 0.287671 0.123288 1.541096 
0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 316.0 318.4 316.9 313.8 312.4 311.3 310.6 316.7 319.7 315.3 313.8 310.9 308.6 308.6 308.6 308.6 308.6 312.7 312.6 312.4 312.2 312.0 311.8 312.4 312.6 313.1 313.5 313.5 313.5 312.2 311.8 309.3 307.7 305.1 304.4 310.9 312.2 314.0 314.9 315.7 317.6 318.1 319.2 325.7 330.1 337.3 340.1 345.3 353.1 356.2 360.3 366.0 368.5 369.6 370.9 370.9 370.9 370.4 369.8 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.9 368.3 366.2 358.4 364.5 370.0 370.0 370.0 370.0 371.8 375.8 388.9 396.6 403.9 413.9 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 416.7 416.7 416.7 415.8 415.3 415.3 415.1 415.1 415.1 416.0 417.7 421.1 421.8 421.8 421.8 420.4 416.0 414.1 412.7 410.8 410.8 410.8 412.4 415.1 418.0 422.3 422.8 422.8 421.1 419.9 417.2 415.1 414.1 414.1 414.1 414.1 414.8 415.8 417.0 419.2 419.9 420.1 421.3 422.1 422.8 422.8 422.8 422.8 422.8 421.8 418.4 417.0 415.5 413.4 412.2 411.2 410.8 410.8 414.3 414.8 416.5 417.5 417.5 417.5 417.5 417.5 417.5 417.0 416.0 415.3 414.1 414.1 414.1 414.1 414.1 417.2 419.4 419.6 418.2 415.5 415.5 415.5 415.5 415.5 415.5 415.5 415.5", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 13.892123287671232 - }, - { - "text": "AP", - "ph_seq": "AP c ong c ong d ui h uan ch eng l e n ian SP", - "note_seq": "rest G#4 G#4 A#4 A#4 G#4 G#4 D#4 D#4 D#4 D#4 F4 F4 F4 F4 rest", - "note_dur_seq": "0.303082 0.159247 0.119863 0.085616 0.145548 0.059932 0.123288 0.082192 0.099315 0.157534 0.462329 0.102740 0.118151 0.087329 1.078767 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.303082 0.159247 0.119863 0.085616 0.145548 0.059932 0.123288 0.082192 0.099315 0.157534 0.462329 0.102740 0.118151 0.087329 1.078767 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.5 415.8 424.2 431.6 440.0 456.3 462.3 465.6 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 465.9 462.3 456.3 448.6 431.6 424.2 418.8 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 414.1 406.7 393.8 359.5 342.5 328.2 312.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 314.0 315.3 316.7 316.7 317.0 316.0 308.1 307.3 313.5 327.5 340.1 370.0 368.9 372.8 380.6 381.7 381.7 376.5 373.9 366.0 367.9 372.1 378.6 379.7 379.7 373.9 370.9 369.8 367.9 367.9 367.9 367.9 367.9 368.9 370.9 371.9 372.4 370.0 370.0 369.9 369.8 368.4 363.0 359.5 356.0 350.7 349.4 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.0 350.2 352.5 353.5 354.7 354.7 354.7 353.1 349.8 348.2 347.2 346.4 346.4 346.4 347.3 349.0 350.1 351.9 354.7 359.4 364.8 369.3 374.7 375.8 376.7 376.7 373.2 370.0 363.2 362.2 362.2 369.8 370.6 372.1 
373.0 373.9 374.7 374.7 374.7 374.7 374.5 374.1 371.9 371.9 369.6 365.5 364.1 363.2 363.2 364.5 368.9 369.8 372.8 373.9 373.9 373.9 373.9 373.4 372.8 367.7 365.1 362.2 362.2 364.9 370.0 372.8 374.7 375.8 375.8 375.8 370.4 367.9 365.1 363.2 363.2 367.0 370.0 372.8 376.7 376.7 376.7 377.8 377.8 375.8 371.9 367.0 365.1 362.2 362.2 362.2 362.2 362.2 362.2 362.2 362.2 362.2", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 17.054794520547944 - }, - { - "text": "AP", - "ph_seq": "AP s an q ian sh ir SP", - "note_seq": "rest D#4 D#4 F#4 F#4 G#4 G#4 rest", - "note_dur_seq": "0.226027 0.184932 0.241438 0.169521 0.222603 0.188356 1.541096 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0", - "ph_dur": "0.226027 0.184932 0.241438 0.169521 0.222603 0.188356 1.541096 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.7 315.1 329.6 339.3 349.3 365.4 369.3 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.2 373.1 378.0 384.5 399.7 406.5 411.8 415.3 418.4 418.4 418.4 418.4 418.4 418.4 417.5 415.3 414.1 413.2 412.0 412.0 412.0 412.0 412.0 412.0 414.1 415.3 417.5 419.6 420.6 420.6 420.6 420.6 420.6 420.6 420.4 419.6 418.4 418.4 417.5 416.3 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 416.7 418.4 418.4 418.4 418.4 420.6 420.6 420.6 420.6 420.6 419.2 417.7 416.3 414.1 414.1 414.1 414.1 414.1 414.1 416.3 417.5 421.1 421.8 421.8 420.6 419.6 418.7 415.1 414.1 412.0 410.8 410.8 410.8 415.8 418.2 421.8 422.8 422.8 422.8 421.8 420.6 417.2 415.1 413.9 412.0 410.8 410.3 409.8 412.0 415.3 416.3 417.5 420.4 421.1 421.8 421.8 421.8 420.6 417.5 415.1 412.9 410.8 410.8 410.8 412.0 412.9 415.1 415.8 416.3 418.4 418.4 419.6 419.6 419.6 418.0 414.6 412.9 412.0 412.0 412.0 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1 415.1", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 20.445205479452056 - }, - { - "text": "AP", - "ph_seq": "AP r u s uo b u j ian AP z uo sh ou n ian zh e h ua y ou sh ou w u zh e j ian SP", - "note_seq": "rest F#4 F#4 G#4 G#4 B4 B4 A#4 A#4 rest D#4 D#4 A#4 A#4 A#4 A#4 C#5 C#5 D#5 D#5 D#5 D#5 D#5 D#5 A#4 A#4 C#5 C#5 A#4 A#4 rest", - "note_dur_seq": "0.306507 0.104452 0.133562 0.123288 0.297945 0.113014 0.277397 0.133562 2.722603 0.381849 0.131849 0.222603 0.188356 0.246575 0.164384 0.494863 0.121575 0.123288 0.082192 0.748288 0.073630 0.273973 0.136986 0.393836 0.017123 0.494863 0.121575 0.133562 0.071918 0.821918 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.306507 0.104452 0.133562 0.123288 0.297945 0.113014 0.277397 0.133562 2.722603 0.381849 0.131849 0.222603 0.188356 0.246575 0.164384 0.494863 0.121575 0.123288 0.082192 0.748288 0.073630 0.273973 0.136986 0.393836 0.017123 0.494863 0.121575 0.133562 0.071918 0.821918 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "370.0 617.2 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 
370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.0 370.2 370.4 378.0 384.5 392.0 406.5 411.8 414.8 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 353.5 303.8 345.9 407.2 418.5 437.6 458.6 463.5 471.0 483.4 487.4 492.5 493.6 491.0 491.9 492.5 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.8 493.6 488.6 484.5 475.2 471.2 468.1 466.2 466.2 466.4 467.0 467.2 467.0 466.7 466.4 466.2 465.9 467.2 469.7 471.0 471.0 471.0 470.2 469.4 468.6 466.7 464.8 464.8 464.8 464.8 464.8 464.8 464.8 466.4 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 467.2 464.8 464.8 464.8 464.8 464.8 464.8 464.8 464.8 464.8 464.8 464.8 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.1 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.4 465.6 465.6 465.6 465.6 465.6 465.6 465.6 465.6 465.6 465.6 465.6 468.6 468.6 468.6 465.9 465.9 465.9 467.2 468.3 471.0 471.0 471.0 471.0 469.7 468.3 465.9 465.9 465.9 465.9 465.9 465.9 465.9 467.2 469.7 471.0 471.0 471.0 469.7 468.6 467.2 467.2 471.0 472.1 472.1 472.1 472.1 472.1 464.8 462.7 460.8 457.6 457.6 457.6 461.6 464.3 467.2 471.0 472.1 473.5 473.5 473.5 473.5 471.6 469.7 461.9 457.9 453.9 450.3 449.2 452.9 460.0 468.6 469.9 472.1 472.1 472.1 472.1 472.1 471.0 467.8 464.3 460.0 460.0 460.0 461.9 463.5 463.5 468.6 469.9 472.1 472.1 472.1 472.1 470.5 468.9 465.9 464.8 462.4 460.8 460.0 460.0 460.0 462.1 466.4 468.6 468.9 469.4 469.7 472.1 472.1 472.1 472.1 469.7 466.2 462.4 453.3 439.5 417.9 362.2 499.3 443.6 284.9 278.9 269.9 265.4 261.0 240.9 231.5 225.1 294.3 291.6 311.1 622.3 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 311.1 315.3 313.3 310.4 309.5 308.8 308.8 308.8 308.8 308.8 309.9 310.9 312.6 313.1 313.7 313.7 313.7 311.5 310.4 307.9 306.3 306.5 306.5 306.8 307.0 307.0 308.8 308.8 310.1 312.6 313.7 316.2 316.8 315.0 335.2 355.9 380.8 432.7 471.3 468.8 466.2 465.1 464.3 462.7 462.7 462.7 465.6 468.9 471.3 471.3 471.3 471.3 469.4 467.2 463.2 461.3 460.3 458.9 458.9 458.9 458.9 458.9 459.7 460.5 461.3 464.8 466.4 466.4 466.4 466.2 466.2 466.2 466.2 467.5 469.7 471.9 476.2 476.2 474.9 466.7 462.7 460.3 458.9 458.9 462.7 469.9 472.4 474.3 476.2 476.2 474.9 469.4 464.3 456.6 456.6 456.6 457.1 457.9 461.6 468.9 471.3 472.7 473.8 467.5 457.9 455.5 455.5 455.5 457.9 460.3 465.1 468.9 470.5 472.4 472.4 472.4 472.4 470.2 466.2 464.3 462.1 459.0 458.9 481.3 508.4 523.3 536.9 553.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.7 555.0 559.0 576.1 587.3 598.8 622.1 625.8 628.8 630.9 624.4 618.7 615.8 613.0 613.0 613.0 613.0 613.0 613.0 613.0 616.9 619.4 621.9 625.9 624.4 629.1 631.7 634.2 634.2 634.2 629.1 619.4 616.2 614.4 614.4 616.2 623.7 638.3 646.1 652.8 650.9 642.0 624.8 616.5 608.0 598.6 595.5 595.5 609.8 619.4 629.5 646.1 649.4 640.1 630.9 621.9 606.6 601.8 596.9 593.8 602.8 612.3 631.3 640.9 646.8 652.8 
646.1 639.0 629.1 628.4 624.4 624.4 624.4 624.4 624.4 616.2 627.3 632.8 630.9 619.7 616.5 614.8 614.8 614.8 618.0 621.2 624.4 629.5 630.9 631.3 627.5 627.3 629.3 656.8 679.4 695.8 712.3 706.2 694.4 688.4 684.9 686.5 692.4 698.5 710.7 712.3 712.3 705.3 697.4 686.9 660.0 651.5 622.5 624.4 624.1 624.4 626.2 626.2 629.5 629.5 629.5 625.5 621.9 618.0 617.3 616.5 623.0 613.2 602.1 570.2 557.8 546.1 549.6 554.0 558.9 568.3 568.3 568.3 557.6 552.1 550.5 549.3 548.8 553.5 556.5 523.3 493.8 481.3 462.6 458.9 458.9 458.9 466.7 473.5 480.1 487.6 486.5 479.0 473.2 467.2 457.9 455.5 454.2 451.8 454.2 460.3 472.4 478.7 481.2 485.1 485.1 469.9 462.7 455.5 451.8 451.3 451.1 452.9 455.5 457.9 467.0 471.9 476.5 483.7 485.1 483.7 479.5 475.4 464.0 456.6 451.8 447.2 447.2 447.2 450.5 454.0 457.9 470.0 493.8 508.4 536.9 547.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 553.9 547.4 536.9 508.4 493.8 481.3 467.0 466.2 466.2 466.2 465.4 467.0 469.9 471.6 471.6 467.5 465.4 464.3 461.9 461.1 459.2 459.2 460.5 472.7 476.5 480.4 481.5 481.5 477.6 470.2 463.5 454.5 453.9 453.4 453.9 454.5 460.5 472.7 475.4 477.9 477.9 477.9 477.9 467.8 462.9 455.8 455.8 455.8 459.2 462.4 465.9 472.7 475.4 477.9 481.5 477.9 476.5 465.4 459.7 454.5 452.1 452.9 456.3 459.2 461.9 467.8 471.6 474.0 479.0 480.4 479.8 476.5 472.7 469.1 459.2 459.2 459.2 459.2 459.2 459.2 459.2 459.2", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 23.527397260273972 - }, - { - "text": "AP", - "ph_seq": "m ei j ian l uo x ia l e SP", - "note_seq": "G#4 G#4 A#4 A#4 B4 B4 C#5 C#5 D#5 D#5 rest", - "note_dur_seq": "0.157534 0.304795 0.106164 0.285959 0.125000 0.422945 0.193493 0.172945 0.083904 0.462329 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.157534 0.304795 0.106164 0.285959 0.125000 0.422945 0.193493 0.172945 0.083904 0.462329 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 415.3 416.7 418.9 419.9 420.4 423.5 442.8 456.6 472.9 507.5 525.7 523.9 494.5 487.6 483.4 481.5 479.5 477.0 474.7 473.8 473.5 473.3 473.0 471.7 471.6 471.9 471.6 471.3 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.2 466.7 467.3 469.8 477.9 482.7 487.1 493.1 493.9 493.9 493.9 493.9 493.9 493.9 494.2 496.2 503.7 507.2 508.6 508.6 507.5 501.6 497.0 492.5 484.8 483.7 483.7 487.1 492.5 497.9 508.1 512.2 514.6 514.6 514.3 505.4 500.2 494.7 486.8 486.2 486.2 491.3 495.3 499.6 508.1 511.3 513.4 515.2 515.2 505.4 501.1 497.9 493.9 493.9 494.9 496.3 501.6 509.5 529.4 539.2 547.2 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 555.6 554.4 563.1 571.9 604.3 624.0 633.3 636.8 628.4 616.2 606.3 608.4 615.8 637.9 644.2 644.9 635.0 624.8 608.4 605.6 604.9 606.6 613.0 620.8 637.5 644.9 647.2 642.7 635.3 626.6 610.5 606.3 603.1 603.1 606.3 617.2 622.6 628.4 633.5 632.8 629.8 620.8 615.5 610.2 606.3 606.3 606.3 606.3 606.3 606.3 606.3 606.3", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 34.619863013698634 - }, - { - "text": "AP", - "ph_seq": "AP y i w an n ian d e SP", - "note_seq": "rest C#5 C#5 D#5 D#5 D#5 D#5 F5 F5 rest", - "note_dur_seq": "0.126712 0.046233 0.109589 0.051370 0.246575 0.164384 0.342466 0.068493 0.744863 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 
0 0 0", - "ph_dur": "0.126712 0.046233 0.109589 0.051370 0.246575 0.164384 0.342466 0.068493 0.744863 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "611.2 554.4 554.4 554.4 554.4 554.4 554.4 554.4 554.4 486.5 486.5 489.1 494.7 500.2 516.6 529.0 541.7 559.2 559.2 559.2 559.2 557.6 556.3 556.3 554.7 550.8 547.4 547.5 571.1 596.9 624.2 628.6 617.9 623.3 632.8 637.5 637.5 636.1 632.0 628.0 620.1 616.2 614.4 614.4 614.4 621.5 625.5 629.1 634.2 636.1 637.5 637.5 636.1 632.8 625.9 622.6 621.2 618.0 617.2 616.2 616.2 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 623.4 630.2 642.5 678.6 698.6 716.7 738.6 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 739.8 739.5 736.9 725.9 718.9 705.9 683.0 699.3 706.6 702.5 698.1 692.0 690.4 690.4 691.2 692.0 693.6 697.2 699.5 703.8 708.8 717.8 739.0 741.1 745.0 760.8 772.3 783.5 803.7 807.9 807.0 799.1 792.2 779.0 766.1 753.4 731.5 732.2 730.5 722.3 717.0 712.5 701.3 695.6 689.6 694.4 698.9 704.9 706.6 702.5 679.1 662.0 646.2 628.2 622.9 618.2 615.5 616.5 623.0 628.4 631.3 631.3 629.5 626.2 624.8 633.1 643.8 665.4 671.6 671.6 671.6 671.6 671.6 671.6 671.6 671.6", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 36.96061643835616 - }, - { - "text": "AP", - "ph_seq": "x ve SP", - "note_seq": "D#5 D#5 rest", - "note_dur_seq": "0.113014 1.027397 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.113014 1.027397 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "633.1 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 633.5 630.9 624.4 615.5 615.9 620.4 640.8 649.5 663.1 688.3 695.1 699.0 702.9 704.5 704.9 702.1 701.3 701.3 699.7 698.1 700.1 700.9 706.6 714.4 714.4 712.7 700.1 693.2 689.2 684.9 687.6 698.5 704.1 709.4 712.3 705.8 697.2 690.8 690.4 693.2 703.7 709.4 715.2 723.1 723.5 709.0 701.3 693.6 688.4 689.2 690.4 703.3 707.8 710.3 709.0 700.5 690.4 681.7 681.3 684.1 690.4 697.2 711.1 715.2 717.7 716.8 711.1 704.9 693.2 684.9 681.3 679.4 686.5 701.7 706.6 711.5 721.0 723.1 722.3 706.2 697.2 695.6 697.7 703.3 704.9 699.3 690.4 675.4 675.4 675.4 675.4 675.4 675.4 675.4 675.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 38.87671232876712 - }, - { - "text": "AP", - "ph_seq": "AP y i d i l ei SP", - "note_seq": "rest D#5 D#5 F#5 F#5 G#5 G#5 rest", - "note_dur_seq": "0.291096 0.017123 0.299658 0.111301 0.285959 0.125000 2.054795 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0", - "ph_dur": "0.291096 0.017123 0.299658 0.111301 0.285959 0.125000 2.054795 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 563.4 564.1 565.0 570.9 573.9 576.9 583.3 587.7 591.8 622.6 624.4 625.9 627.7 627.7 630.9 630.9 630.9 630.9 630.9 630.9 630.9 630.9 630.2 629.5 628.8 628.0 624.4 624.4 624.4 624.4 624.4 624.4 625.9 627.3 632.4 638.2 649.6 666.1 675.5 682.8 724.7 736.6 740.4 746.4 747.3 748.2 748.2 748.2 738.7 738.7 738.7 738.7 738.7 738.7 739.1 739.1 739.1 739.1 739.1 739.1 739.1 739.1 739.1 739.1 739.1 739.1 739.6 739.6 739.6 740.0 740.5 755.5 768.5 783.5 812.6 823.2 830.1 830.6 830.6 830.6 830.6 830.6 830.6 824.9 824.9 824.9 827.7 830.6 833.5 833.5 833.5 833.5 833.5 830.6 829.2 829.2 824.9 824.9 824.9 824.9 824.9 824.9 824.9 825.3 826.8 829.2 829.2 829.2 829.2 831.6 833.5 833.5 833.5 835.4 835.4 837.8 839.8 839.8 842.2 842.2 842.2 840.3 
837.8 837.8 835.4 835.4 835.4 835.4 835.4 835.4 835.4 835.4 835.4 835.4 835.4 835.4 833.5 831.1 831.1 831.1 829.2 829.2 832.5 835.4 827.3 832.5 837.8 842.2 842.2 837.4 832.0 827.3 820.6 820.6 820.6 823.0 828.2 833.5 844.6 848.1 851.5 853.5 853.5 851.5 844.2 837.4 823.0 817.3 812.1 805.6 803.7 804.6 813.5 822.0 830.6 847.1 855.9 862.4 862.4 862.4 843.7 834.5 825.3 809.8 803.7 799.5 797.2 797.2 798.6 803.7 807.9 823.4 831.1 839.3 853.0 859.4 865.9 870.9 869.4 864.9 847.1 837.8 829.2 816.3 809.8 805.6 799.5 797.2 799.5 808.8 817.8 836.9 846.1 855.9 864.9 864.9 864.9 863.9 862.4 854.5 839.3 832.0 817.3 809.8 803.7 797.2 794.9 794.9 796.3 798.2 799.5 808.8 813.5 818.2 833.5 842.2 849.0 852.5 855.9 860.4 862.4 864.9 866.9 871.9 869.4 860.4 854.9 849.0 830.6 812.1 812.1 812.1 812.1 812.1 812.1 812.1 812.1", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 40.273972602739725 - }, - { - "text": "AP", - "ph_seq": "AP h a a a SP", - "note_seq": "rest G#5 G#5 F#5 F#5 rest", - "note_dur_seq": "0.154110 0.102740 0.821918 0.821918 1.232877 0.100000", - "is_slur_seq": "0 0 0 0 0 0", - "ph_dur": "0.154110 0.102740 0.821918 0.821918 1.232877 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 833.5 833.5 835.4 835.4 835.4 835.4 826.8 822.5 818.2 818.2 818.2 819.2 825.3 831.1 837.6 848.5 863.1 897.2 912.7 942.9 958.0 958.0 956.3 941.5 928.0 920.6 919.5 920.6 938.3 947.5 953.0 957.9 942.9 932.9 883.1 871.2 853.4 833.5 826.3 820.1 820.1 820.1 820.1 822.5 824.7 826.6 814.8 794.8 712.0 684.2 664.8 638.6 632.4 622.6 623.3 625.5 627.7 627.7 624.4 609.8 610.5 611.2 628.8 644.1 649.6 678.6 698.6 716.7 747.7 758.2 758.2 743.4 729.0 719.3 717.3 715.6 715.6 715.6 728.1 747.3 754.2 759.0 766.1 765.2 764.3 747.3 739.1 726.9 723.1 721.4 719.3 720.6 721.4 739.1 748.2 754.2 758.2 760.4 759.0 756.4 747.7 731.5 723.1 721.4 721.4 725.2 734.0 752.1 758.2 762.1 759.0 754.2 754.2 745.1 742.6 734.9 732.6 728.8 722.7 719.4 715.8 708.2 704.9 702.1 696.4 693.6 691.6 689.6 689.6 689.6 689.6 689.6 701.1 707.4 715.7 723.2 726.0 728.9 734.8 741.7 746.4 750.3 750.3 750.3 750.3 748.2 734.9 731.5 728.5 723.9 723.1 723.1 726.9 730.6 734.9 742.6 746.4 749.5 752.9 754.2 754.2 754.2 752.1 742.6 738.3 733.6 728.1 726.9 726.9 726.9 730.2 734.0 740.8 744.7 750.3 752.1 752.9 754.2 754.2 754.2 747.7 741.7 735.3 725.2 723.9 723.1 723.1 723.1 731.5 736.2 740.4 752.1 756.4 756.4 756.4 754.2 749.5 740.4 732.8 725.2 715.6 715.6 716.4 721.8 726.0 734.5 738.7 740.4 751.6 754.7 758.2 758.2 758.2 758.2 754.2 743.4 732.8 711.9 711.9 713.1 714.0 718.9 728.5 733.2 738.3 748.2 751.6 754.7 760.4 752.5 744.7 729.0 721.4 717.7 722.3 727.3 736.6 738.7 746.4 750.3 750.3 750.3 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 43.613013698630134 - }, - { - "text": "AP", - "ph_seq": "AP n a sh ir w o SP", - "note_seq": "rest D#5 D#5 F#5 F#5 F#5 F#5 rest", - "note_dur_seq": "0.205479 0.154110 0.222603 0.188356 0.299658 0.111301 1.592466 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0", - "ph_dur": "0.205479 0.154110 0.222603 0.188356 0.299658 0.111301 1.592466 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 
622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 622.3 624.8 625.9 625.9 625.9 627.3 629.1 629.1 628.8 628.4 627.7 626.6 625.9 620.5 618.0 612.3 609.8 609.8 609.8 609.8 609.8 611.2 613.5 616.2 620.9 659.2 678.6 716.7 730.7 740.0 740.0 740.0 740.0 740.0 740.0 738.7 741.7 744.3 746.4 747.3 748.2 749.5 750.3 752.5 752.5 752.9 753.4 753.8 753.8 754.2 754.2 754.2 754.2 754.2 754.2 754.2 752.1 750.3 746.4 744.3 740.8 738.7 739.1 740.4 743.4 746.0 748.2 748.2 746.4 743.4 741.7 740.4 746.2 755.9 784.0 799.3 836.0 840.2 837.4 833.5 818.2 814.0 812.1 830.6 840.3 847.1 847.1 838.8 822.0 814.0 814.0 816.3 825.8 834.9 849.0 853.5 853.5 846.6 840.3 833.5 820.6 818.2 818.2 818.2 820.6 832.0 837.4 843.2 853.5 855.9 857.9 857.9 857.9 857.9 835.9 829.2 825.3 818.2 816.3 812.1 812.1 814.0 826.8 833.5 840.3 851.5 852.5 853.5 853.5 845.6 838.3 823.0 818.2 812.6 809.8 809.8 810.7 812.1 817.8 828.7 834.5 840.3 842.2 842.2 842.2 837.8 833.5 824.9 820.6 817.3 814.0 814.0 816.3 825.8 830.6 834.9 844.6 846.1 847.1 847.1 838.8 823.0 818.2 816.3 816.3 818.2 820.6 824.9 827.3 831.1 839.3 843.2 847.1 851.5 851.5 843.2 835.4 827.3 816.3 812.1 821.1 839.8 849.0 855.9 864.9 864.9 855.9 831.6 818.2 805.6 784.9 776.3 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6 830.6", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 46.84931506849315 - }, - { - "text": "AP", - "ph_seq": "AP h a a a SP", - "note_seq": "rest F#5 F#5 F#5 A#5 rest", - "note_dur_seq": "0.256849 0.154110 0.315068 1.071918 2.517123 0.100000", - "is_slur_seq": "0 0 0 0 0 0", - "ph_dur": "0.256849 0.154110 0.315068 1.071918 2.517123 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.0 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 740.4 747.3 750.8 754.2 762.2 766.3 770.9 784.4 790.6 795.0 809.7 815.4 820.6 830.6 835.4 842.2 844.6 846.6 849.0 849.0 849.0 849.0 849.0 849.0 849.0 847.6 846.6 842.2 842.2 842.2 839.8 839.8 835.4 833.5 831.1 831.1 831.1 831.1 831.1 834.2 837.9 845.8 854.3 866.4 887.5 899.1 935.0 947.5 960.2 988.3 993.5 998.7 1003.9 1003.9 1003.9 1001.6 999.8 997.5 993.5 993.5 991.2 991.2 991.2 991.2 991.2 991.2 992.9 994.6 996.4 996.4 996.1 992.9 977.9 970.7 955.4 949.4 945.5 938.8 936.1 932.9 928.0 928.0 928.0 928.0 928.0 928.0 928.0 928.0 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3 931.3 934.5 937.7 939.4 940.4 942.6 945.3 945.3 945.3 943.7 942.1 940.4 937.7 935.6 930.7 930.7 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 925.9 928.0 929.6 933.4 935.6 935.6 940.4 940.4 940.5 945.7 946.1 946.4 946.7 946.6 945.7 945.0 944.2 942.7 942.1 941.8 940.2 939.4 938.4 937.5 938.9 940.1 939.2 936.3 935.5 932.4 930.3 926.1 924.4 923.2 923.0 924.1 925.9 931.0 933.9 936.6 940.4 941.0 940.7 937.1 934.1 927.2 923.8 921.1 918.3 918.6 920.1 926.2 930.3 934.6 942.6 945.0 946.1 944.1 941.1 932.2 927.3 922.6 915.8 914.4 914.6 919.7 924.2 929.6 940.7 945.4 948.8 950.6 948.6 939.8 933.8 927.4 916.1 912.3 910.2 912.0 915.8 921.2 934.8 941.7 947.8 955.0 955.4 953.3 943.1 935.9 920.6 914.0 909.0 905.5 907.4 911.6 925.4 
933.8 942.2 955.6 959.2 960.1 953.7 946.9 929.4 920.9 912.9 902.7 901.5 903.3 914.7 923.3 932.7 950.7 957.5 961.8 961.5 956.9 941.3 931.8 922.4 907.3 903.0 901.4 906.8 913.2 921.5 940.4 949.2 956.3 963.2 962.1 951.5 943.1 933.7 915.5 908.5 903.6 902.2 905.7 911.7 928.9 938.5 947.5 960.4 963.0 959.1 953.0 944.9 926.1 917.2 909.7 901.7 901.8 904.8 918.0 927.6 937.1 954.3 960.1 963.3 960.6 955.0 938.1 928.5 919.4 905.8 902.5 902.1 924.0 932.3 932.3 932.3 932.3 932.3 932.3 932.3 932.3", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 49.82876712328767 - } -] \ No newline at end of file diff --git "a/samples/\345\277\203\347\226\274giegie.ds" "b/samples/\345\277\203\347\226\274giegie.ds" deleted file mode 100644 index 1bd68714b..000000000 --- "a/samples/\345\277\203\347\226\274giegie.ds" +++ /dev/null @@ -1,194 +0,0 @@ -[ - { - "text": "哥 哥 SP", - "ph_seq": "g e g e SP", - "note_seq": "D#4 D#4 C4 C4 rest", - "note_dur_seq": "0.122222 0.170833 0.079167 0.166667 0.100000", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.122222 0.170833 0.079167 0.166667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 333.7 330.6 327.0 324.9 323.6 323.0 322.5 322.1 322.3 326.4 332.3 335.2 341.1 344.4 344.2 327.4 308.3 303.8 306.2 300.1 298.9 318.9 321.7 321.2 318.6 311.7 308.3 300.7 292.0 285.0 264.5 249.5 244.2 235.7 228.4 222.2 222.2 222.2 222.2 222.2 222.2 222.2 222.2 222.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 0.3361111111111111 - }, - { - "text": "哥 哥 SP", - "ph_seq": "g e g e SP", - "note_seq": "G4 G4 D4 D4 rest", - "note_dur_seq": "0.122222 0.113889 0.052778 0.208333 0.100000", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.122222 0.113889 0.052778 0.208333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "284.2 287.5 294.2 300.9 304.4 311.5 318.6 322.3 329.8 337.3 341.3 348.8 362.0 369.6 380.8 395.2 401.9 408.2 410.8 406.3 388.6 365.3 375.5 361.4 382.2 402.6 395.9 389.5 386.8 382.8 375.8 371.5 362.6 352.3 346.2 331.5 306.7 296.2 280.6 267.6 261.3 249.5 239.6 237.8 237.8 237.8 237.8 237.8 237.8 237.8 237.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 1.0444444444444443 - }, - { - "text": "你 给 我 买 这 个 你 女 朋 友 知 道 了 SP 不 会 生 气 吧 SP", - "ph_seq": "n i g ei w o m ai zh ei g e n i n v p eng y ou zh ir d ao l e SP b u h ui sh eng q i b a SP", - "note_seq": "D4 D4 F4 F4 F4 F4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 F4 F4 F4 F4 F4 F4 rest D4 D4 F4 F4 F4 F4 F4 F4 A3 A3 rest", - "note_dur_seq": "0.165278 0.056944 0.026389 0.040278 0.013889 0.077778 0.034722 0.170833 0.079167 0.113889 0.052778 0.070833 0.054167 0.070833 0.045833 0.050000 0.083333 0.111111 0.013889 0.084722 0.040278 0.090278 0.043056 0.077778 0.038889 0.113889 0.069444 0.025000 0.050000 0.033333 0.105556 0.102778 0.208333 0.166667 0.234722 0.098611 0.250000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.165278 0.056944 0.026389 0.040278 0.013889 0.077778 0.034722 0.170833 0.079167 0.113889 0.052778 0.070833 0.054167 0.070833 0.045833 0.050000 0.083333 0.111111 0.013889 0.084722 0.040278 0.090278 0.043056 0.077778 0.038889 0.113889 0.069444 0.025000 0.050000 0.033333 0.105556 0.102778 0.208333 0.166667 0.234722 0.098611 0.250000 0.100000", - "f0_timestep": 
"0.011609977722167968", - "f0_seq": "292.6 293.3 294.5 295.9 296.4 297.8 299.0 299.7 300.9 302.1 302.8 304.0 305.3 306.0 307.2 306.0 297.9 293.3 291.0 292.7 298.5 273.4 263.1 261.6 286.5 298.6 312.4 321.0 325.3 332.3 338.7 341.6 344.6 340.8 334.5 339.1 347.6 345.6 345.2 350.4 347.0 333.8 319.5 312.0 293.0 275.7 267.7 264.7 259.4 246.2 234.0 227.2 236.6 239.9 262.2 286.8 300.0 325.1 337.5 338.3 337.5 336.4 335.6 334.0 330.4 328.5 324.7 318.4 315.7 312.7 299.5 290.8 274.3 263.4 265.6 263.7 258.0 254.8 250.7 248.8 247.9 247.7 247.4 249.4 261.5 270.4 273.5 296.2 303.1 306.1 318.4 324.5 326.8 329.8 330.0 327.0 317.3 309.5 303.0 289.1 262.7 252.1 245.4 240.6 240.5 243.3 246.2 247.8 250.8 259.5 267.7 270.9 286.6 301.9 302.6 303.8 307.2 309.0 313.8 311.8 306.1 294.9 285.3 283.7 292.5 281.6 278.5 280.7 283.0 284.2 286.5 288.8 290.5 295.0 294.0 289.8 292.3 303.3 309.0 324.3 335.8 336.2 335.4 332.1 329.4 321.0 313.1 310.2 305.8 301.4 298.5 286.0 274.0 268.2 259.2 252.0 248.5 242.4 239.2 270.5 279.9 287.6 245.2 252.0 258.9 262.5 269.6 274.6 275.9 273.2 277.8 281.4 279.3 287.5 293.8 303.7 316.4 326.2 329.1 331.5 329.6 323.4 298.8 299.8 303.1 310.2 317.3 321.0 329.4 337.9 342.4 350.4 346.0 345.8 345.6 344.2 342.8 340.1 338.3 338.5 339.9 343.4 345.2 347.2 343.6 341.4 336.6 333.3 334.6 339.5 344.4 346.8 351.9 357.0 359.5 364.7 370.0 372.6 378.0 383.5 386.2 391.8 395.6 393.4 384.6 378.6 378.4 378.2 374.9 372.6 367.0 359.3 354.9 346.2 338.1 335.2 328.5 315.3 309.3 301.7 295.5 286.3 274.0 262.4 263.6 263.1 261.8 257.9 254.9 226.2 228.1 235.6 241.4 237.7 235.8 232.1 228.5 226.4 223.1 220.4 219.9 219.0 215.8 213.0 211.2 210.2 209.6 206.6 202.7 200.8 195.7 191.9 192.2 192.5 192.5 192.5 192.5 192.5 192.5 192.5 192.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 2.2513888888888887 - }, - { - "text": "真 好 吃 哥 你 尝 一 口 SP", - "ph_seq": "zh en h ao ch ir g e n i ch ang y i k ou SP", - "note_seq": "E4 E4 C4 C4 C4 C4 G4 G4 C4 C4 D4 D4 E4 E4 C4 C4 rest", - "note_dur_seq": "0.120833 0.075000 0.050000 0.098611 0.068056 0.086111 0.122222 0.095833 0.070833 0.069444 0.055556 0.141667 0.013889 0.050000 0.044444 0.125000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.120833 0.075000 0.050000 0.098611 0.068056 0.086111 0.122222 0.095833 0.070833 0.069444 0.055556 0.141667 0.013889 0.050000 0.044444 0.125000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "310.6 311.1 312.2 313.3 313.8 314.7 315.8 316.4 317.5 327.5 327.5 327.5 328.1 328.9 330.4 331.9 332.1 332.7 334.7 336.0 332.0 334.6 338.3 336.6 328.3 320.1 304.4 289.5 282.4 270.5 267.1 263.4 261.5 261.6 261.8 262.1 262.2 262.4 262.5 262.7 262.8 263.0 263.3 263.4 259.2 255.4 256.8 306.8 275.4 291.6 300.2 317.8 336.7 346.6 367.0 383.9 386.6 389.5 391.3 391.1 392.4 388.4 379.7 375.4 370.6 370.0 319.6 321.2 329.3 324.7 314.0 308.8 295.9 289.6 286.5 281.3 278.9 281.4 271.6 260.6 257.7 257.6 258.3 257.4 256.4 258.2 260.4 269.1 280.6 286.3 300.6 330.4 341.4 337.9 337.7 337.5 329.1 315.5 304.1 296.0 300.5 301.7 296.9 291.0 290.5 280.9 260.3 253.0 248.5 244.2 245.4 248.1 248.1 248.1 248.1 248.1 248.1 248.1 248.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 7.420833333333333 - }, - { - "text": "AP 哥 哥 SP", - "ph_seq": "AP g e g e SP", - "note_seq": "rest C5 C5 C5 C5 rest", - "note_dur_seq": "0.181944 0.119444 0.198611 0.093056 0.333333 0.100000", - 
"is_slur_seq": "0 0 0 0 0 0", - "ph_dur": "0.181944 0.119444 0.198611 0.093056 0.333333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 523.3 531.2 528.7 503.4 489.3 476.8 486.8 493.6 507.2 516.6 519.3 524.2 529.3 532.4 540.1 544.5 544.8 543.9 539.2 532.7 498.8 468.1 458.9 467.2 499.6 495.3 495.3 500.8 507.2 510.4 515.2 518.1 519.6 522.6 526.6 527.5 527.8 527.2 517.6 513.6 501.9 499.7 488.8 468.3 461.9 448.1 440.8 433.3 406.7 405.8 400.5 386.4 374.9 366.8 354.1 347.8 349.2 349.2 349.2 349.2 349.2 349.2 349.2 349.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 9.698611111111111 - }, - { - "text": "你 女 朋 友 要 是 知 道 我 俩 吃 一 同 一 个 棒 棒 糖 你 女 朋 友 不 会 吃 醋 吧 SP", - "ph_seq": "n i n v p eng y ou y ao sh ir zh ir d ao w o l ia ch ir y i t ong y i g e b ang b ang t ang n i n v p eng y ou b u h ui ch ir c u b a SP", - "note_seq": "C5 C5 C5 C5 C5 C5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 C5 C5 A#4 A#4 A4 A4 C5 C5 G#4 G#4 B4 B4 B4 B4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G4 G4 F4 F4 rest", - "note_dur_seq": "0.165278 0.070833 0.054167 0.097222 0.069444 0.068056 0.015278 0.068056 0.015278 0.043056 0.040278 0.084722 0.040278 0.088889 0.036111 0.059722 0.023611 0.112500 0.054167 0.098611 0.068056 0.111111 0.013889 0.095833 0.175000 0.152778 0.013889 0.084722 0.040278 0.087500 0.037500 0.044444 0.059722 0.026389 0.056944 0.047222 0.036111 0.095833 0.070833 0.097222 0.069444 0.048611 0.013889 0.118056 0.048611 0.050000 0.033333 0.112500 0.179167 0.179167 0.163889 0.229167 0.073611 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.165278 0.070833 0.054167 0.097222 0.069444 0.068056 0.015278 0.068056 0.015278 0.043056 0.040278 0.084722 0.040278 0.088889 0.036111 0.059722 0.023611 0.112500 0.054167 0.098611 0.068056 0.111111 0.013889 0.095833 0.175000 0.152778 0.013889 0.084722 0.040278 0.087500 0.037500 0.044444 0.059722 0.026389 0.056944 0.047222 0.036111 0.095833 0.070833 0.097222 0.069444 0.048611 0.013889 0.118056 0.048611 0.050000 0.033333 0.112500 0.179167 0.179167 0.163889 0.229167 0.073611 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "514.0 516.3 520.5 525.1 527.5 531.8 536.4 538.6 543.3 548.0 550.2 555.0 559.5 562.1 567.0 558.9 536.1 495.9 483.4 483.2 486.0 502.8 514.0 523.6 530.9 533.6 530.9 524.2 519.0 508.4 499.9 495.9 480.1 470.8 465.9 442.3 415.5 410.5 411.0 423.1 434.2 444.6 438.0 436.5 438.2 447.2 462.1 469.9 485.7 500.2 506.9 519.9 532.4 534.6 536.4 537.3 536.7 534.2 523.8 509.2 493.3 489.3 479.0 474.0 479.5 482.0 482.3 475.1 469.1 457.6 450.5 451.8 461.9 467.5 470.5 476.0 474.3 473.8 472.1 469.9 473.3 498.6 504.2 502.5 504.8 504.3 501.9 486.7 462.0 465.1 469.4 453.9 447.2 442.3 446.4 451.1 465.1 482.6 488.2 494.2 502.8 504.8 517.7 528.1 534.1 521.1 495.9 482.3 445.6 425.7 418.2 418.4 422.3 428.2 432.9 442.3 471.5 486.0 474.3 477.9 483.2 493.9 498.2 498.5 499.9 499.9 503.4 511.2 490.1 487.2 478.2 461.6 451.1 413.4 394.3 387.5 374.1 361.3 354.5 340.5 378.6 326.8 334.2 342.4 346.6 355.1 363.6 368.1 377.1 386.4 391.1 400.5 410.3 414.6 404.2 392.4 390.2 395.9 420.6 432.7 451.1 470.2 481.5 517.2 545.2 555.0 575.6 590.0 595.5 599.3 587.7 579.9 563.7 543.6 525.7 485.8 432.9 
429.2 418.0 424.8 432.9 434.4 437.0 434.4 422.8 404.4 414.3 420.1 453.2 460.0 459.2 456.6 453.7 451.8 443.6 437.0 434.9 429.7 416.7 413.2 406.8 400.5 398.2 394.7 403.2 411.2 420.1 423.3 419.2 405.1 370.4 355.3 348.6 351.5 353.3 363.4 375.4 381.9 403.2 427.2 436.0 447.9 458.2 461.3 466.2 470.2 471.3 473.5 472.9 469.7 457.4 439.5 431.4 420.9 397.9 388.4 380.6 373.2 368.9 348.4 311.5 316.6 321.0 337.7 378.6 384.4 402.3 408.4 409.6 415.8 420.1 418.2 414.3 411.5 409.6 385.3 366.6 368.5 374.1 380.2 366.6 358.2 361.5 367.4 380.0 392.9 399.1 403.9 406.8 408.6 405.6 405.6 405.6 405.6 405.6 405.4 404.9 405.1 405.4 405.4 406.1 408.2 409.4 410.8 411.2 412.7 414.1 414.6 416.0 417.2 418.0 419.4 420.6 421.1 418.4 415.1 414.1 415.1 417.2 418.2 419.9 420.1 419.4 418.0 415.5 413.6 418.4 424.8 428.5 430.4 434.2 438.2 440.3 444.1 448.2 450.0 454.2 458.2 460.3 464.3 468.6 470.8 474.9 479.0 479.2 476.0 487.5 492.2 496.5 482.0 468.9 445.4 442.5 443.1 443.6 441.3 438.7 433.9 428.7 425.3 414.3 402.3 396.6 384.8 376.2 378.9 371.2 355.7 351.7 355.7 355.7 356.1 344.3 334.1 325.5 312.2 303.7 296.7 291.2 281.2 279.9 281.2 276.2 274.0 263.3 257.7 259.1 256.8 252.7 248.7 246.7 235.0 229.7 231.8 230.7 224.2 220.4 215.0 212.3 210.7 202.1 202.1 202.1 202.1 202.1 202.1 202.1 202.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 11.251388888888888 - }, - { - "text": "哥 哥 你 骑 着 小 SP", - "ph_seq": "g e g e n i q i zh e x iao SP", - "note_seq": "D4 D4 C4 C4 C4 C4 E4 E4 F#4 F#4 C4 C4 rest", - "note_dur_seq": "0.122222 0.184722 0.086111 0.106944 0.080556 0.126389 0.098611 0.097222 0.052778 0.093056 0.094444 0.125000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.122222 0.184722 0.086111 0.106944 0.080556 0.126389 0.098611 0.097222 0.052778 0.093056 0.094444 0.125000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "282.4 283.5 285.5 287.5 288.5 290.6 292.6 293.7 295.7 297.9 299.0 304.0 306.7 306.1 303.0 300.0 298.5 295.5 293.3 293.0 293.0 293.8 294.7 297.9 301.4 300.9 295.2 289.6 289.5 289.5 287.7 289.2 282.7 284.0 286.7 292.0 295.2 295.9 293.0 287.5 284.5 278.5 272.7 269.9 264.4 259.7 256.2 254.0 250.5 248.7 247.4 248.9 252.9 254.9 258.6 261.8 263.7 263.6 262.5 260.6 253.0 253.0 254.6 258.2 261.8 263.4 267.1 270.7 275.5 282.8 302.3 296.0 286.0 287.5 288.1 292.0 297.9 300.0 312.6 315.0 322.7 356.2 372.3 374.1 379.1 379.5 378.4 376.0 361.3 351.9 331.3 324.3 320.8 314.0 307.4 299.2 287.6 273.0 273.2 277.3 265.4 262.4 259.1 256.4 255.5 253.7 252.1 251.5 250.7 251.1 251.1 251.1 251.1 251.1 251.1 251.1 251.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 16.377777777777776 - }, - { - "text": "小 电 动 车 带 着 我 你 女 朋 友 知 道 了 SP", - "ph_seq": "x iao d ian d ong ch e d ai zh e w o n i n v p eng y ou zh ir d ao l e SP", - "note_seq": "C4 C4 F4 F4 E4 E4 E4 E4 F#4 F#4 D4 D4 B3 B3 E4 E4 F#4 F#4 E4 E4 F#4 F#4 F4 F4 F4 F4 C#4 C#4 rest", - "note_dur_seq": "0.193056 0.088889 0.056944 0.162500 0.066667 0.134722 0.094444 0.162500 0.066667 0.170833 0.120833 0.044444 0.018056 0.065278 0.018056 0.090278 0.034722 0.120833 0.087500 0.048611 0.013889 0.122222 0.086111 0.069444 0.034722 0.154167 0.075000 0.125000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.193056 0.088889 0.056944 0.162500 0.066667 0.134722 0.094444 0.162500 0.066667 0.170833 0.120833 0.044444 0.018056 0.065278 0.018056 
0.090278 0.034722 0.120833 0.087500 0.048611 0.013889 0.122222 0.086111 0.069444 0.034722 0.154167 0.075000 0.125000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "262.1 262.4 262.8 263.4 263.7 264.4 265.0 265.3 265.7 266.2 266.5 267.1 267.7 268.1 268.7 269.1 269.4 267.9 264.5 262.8 259.7 255.9 253.7 255.2 271.2 279.4 296.9 332.9 341.0 351.6 371.3 376.9 382.6 385.3 383.7 379.1 371.1 360.9 348.0 340.3 336.4 327.0 318.2 315.1 307.0 297.9 297.2 313.5 346.6 366.0 366.4 367.9 365.1 360.7 352.3 344.0 340.5 331.5 320.1 314.2 298.3 289.1 291.5 297.8 304.4 307.7 314.6 321.4 324.9 332.1 339.3 343.0 350.0 352.3 352.5 347.0 338.9 335.2 332.7 333.1 334.0 339.7 349.0 354.5 365.5 376.9 386.0 411.3 410.0 407.7 410.3 412.9 413.4 413.2 409.8 407.5 401.4 390.6 384.2 371.7 359.5 353.5 340.3 325.5 318.2 308.1 302.6 299.8 288.8 288.8 289.8 290.5 291.3 290.3 289.5 285.1 277.3 267.9 259.9 262.5 259.4 253.4 251.1 249.7 247.8 252.9 251.5 253.2 268.1 275.9 290.5 321.2 329.4 356.2 370.5 383.5 392.9 393.6 398.6 400.2 396.1 393.4 383.9 368.5 360.9 345.8 326.6 318.4 308.4 299.7 298.8 297.2 293.6 288.4 288.3 292.6 293.3 296.9 316.0 326.1 331.7 337.5 341.6 350.8 359.7 361.8 361.5 359.0 356.6 350.6 349.2 348.8 347.8 347.0 346.6 345.8 340.9 340.3 339.4 332.3 334.2 336.2 340.1 341.1 339.9 343.0 352.3 357.0 365.3 364.5 364.7 364.9 363.6 362.6 360.5 357.0 354.9 350.6 344.8 339.3 328.5 320.6 316.7 309.3 297.7 284.1 281.3 294.2 292.1 283.8 273.4 270.2 266.8 263.9 262.4 253.7 246.4 243.8 241.4 241.4 241.4 241.4 241.4 241.4 241.4 241.4", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 17.911111111111108 - }, - { - "text": "不 会 揍 我 吧 SP", - "ph_seq": "b u h ui z ou w o b a SP", - "note_seq": "G#4 G#4 B4 B4 A#4 A#4 F4 F4 F4 F4 rest", - "note_dur_seq": "0.113889 0.077778 0.026389 0.091667 0.075000 0.148611 0.059722 0.147222 0.061111 0.291667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.113889 0.077778 0.026389 0.091667 0.075000 0.148611 0.059722 0.147222 0.061111 0.291667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "323.6 325.3 328.9 332.5 334.2 337.9 341.6 343.4 347.2 350.0 350.2 369.4 380.2 381.9 382.4 379.5 384.7 430.7 451.8 465.0 511.3 526.3 525.4 514.0 501.9 495.3 494.2 496.7 498.5 502.2 505.7 504.1 506.5 513.4 518.1 542.3 551.5 555.6 563.4 568.3 569.3 564.4 556.6 551.8 540.5 514.6 494.5 486.0 463.5 414.1 405.8 380.6 379.5 371.7 359.3 352.3 350.4 341.4 336.7 333.5 327.9 323.8 322.5 316.6 310.1 307.9 304.0 307.4 316.0 334.2 349.2 351.9 356.8 360.5 362.4 366.2 370.0 371.9 375.4 374.5 374.1 373.2 367.7 365.3 361.5 358.0 356.8 337.9 326.2 320.6 309.5 296.7 290.3 279.1 271.8 271.8 271.8 271.8 271.8 271.8 271.8 271.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 20.82361111111111 - }, - { - "text": "好 可 怕 你 女 朋 友 SP", - "ph_seq": "h ao k e p a n i n v p eng y ou SP", - "note_seq": "A#4 A#4 G#4 G#4 A#4 A#4 G4 G4 G4 G4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.154167 0.152778 0.118056 0.102778 0.105556 0.087500 0.037500 0.106944 0.080556 0.120833 0.087500 0.186111 0.043056 0.229167 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.154167 0.152778 0.118056 0.102778 0.105556 0.087500 0.037500 0.106944 0.080556 0.120833 0.087500 0.186111 0.043056 0.229167 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "648.7 658.5 671.2 696.8 703.3 688.0 685.3 691.6 634.2 574.9 547.4 
496.5 450.0 428.5 441.3 446.7 447.2 444.6 444.1 447.2 455.5 464.0 469.4 482.9 483.4 480.7 479.8 481.2 476.5 465.6 455.0 449.8 439.2 428.6 423.0 398.7 398.5 400.4 410.3 419.9 424.3 428.5 428.0 426.2 418.4 407.5 409.1 413.2 421.3 428.2 432.9 457.7 502.9 537.0 555.8 559.8 551.5 509.5 463.2 452.1 437.7 425.7 422.4 408.0 384.3 381.8 381.9 382.6 383.3 384.4 385.7 386.4 387.5 388.8 389.3 390.6 391.8 392.4 399.5 402.1 403.9 395.6 389.3 388.6 382.2 375.2 371.3 363.4 358.4 356.4 336.9 324.0 321.5 323.8 326.8 328.5 330.9 325.1 317.5 331.8 343.8 346.8 345.2 329.6 321.7 303.3 298.6 298.5 301.4 307.6 310.2 311.3 315.1 320.2 322.3 329.2 336.1 334.2 338.7 349.5 351.9 353.5 354.1 353.7 348.0 341.8 339.7 336.0 331.5 328.3 318.4 307.6 302.3 292.0 282.0 277.2 270.5 258.8 250.5 243.0 243.0 243.0 243.0 243.0 243.0 243.0 243.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 22.491666666666664 - }, - { - "text": "不 像 我 SP", - "ph_seq": "b u x iang w o SP", - "note_seq": "C4 C4 A#3 A#3 G#3 G#3 rest", - "note_dur_seq": "0.113889 0.144444 0.147222 0.222222 0.090278 0.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0", - "ph_dur": "0.113889 0.144444 0.147222 0.222222 0.090278 0.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "241.2 236.9 236.6 236.2 236.1 235.9 235.2 234.8 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.6 234.8 236.5 239.5 241.6 245.0 249.4 251.7 255.8 258.5 260.0 262.7 263.9 264.4 265.3 265.4 265.7 267.8 261.0 261.1 267.6 256.8 254.0 252.4 251.3 250.0 244.7 242.6 242.4 242.7 240.9 239.6 239.9 240.3 239.8 238.9 237.0 235.0 234.0 232.0 230.0 228.9 227.0 224.3 223.8 214.7 215.0 219.5 215.7 212.8 211.6 207.7 203.7 202.8 201.7 201.0 200.2 198.5 195.2 194.9 195.1 195.3 195.4 195.7 195.9 196.0 196.2 196.5 196.6 196.8 197.0 197.1 197.4 197.6 197.7 199.4 206.1 210.2 216.1 220.0 220.9 221.7 221.4 221.7 222.6 221.4 222.0 222.0 222.0 222.0 222.0 222.0 222.0 222.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 24.177777777777777 - }, - { - "text": "我 只 会 心 疼 gi ye gi ye SP", - "ph_seq": "w o zh ir h ui x in t eng g i y E g i y E SP", - "note_seq": "D#4 D#4 C#4 C#4 D4 D4 G4 G4 G4 G4 E4 E4 E4 E4 E4 E4 E4 E4 rest", - "note_dur_seq": "0.111111 0.086111 0.080556 0.100000 0.066667 0.083333 0.083333 0.125000 0.104167 0.212500 0.100000 0.048611 0.013889 0.184722 0.086111 0.048611 0.013889 0.604167 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.111111 0.086111 0.080556 0.100000 0.066667 0.083333 0.083333 0.125000 0.104167 0.212500 0.100000 0.048611 0.013889 0.184722 0.086111 0.048611 0.013889 0.604167 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "262.4 264.4 268.1 271.9 273.8 277.8 281.9 283.8 288.0 292.1 294.2 298.5 302.1 305.3 309.7 314.0 315.3 315.8 314.4 313.7 312.4 309.3 302.8 299.7 307.6 306.8 302.8 294.7 290.6 283.8 278.0 275.3 266.8 255.7 250.4 247.6 243.3 258.5 291.3 283.8 287.8 299.1 309.9 310.6 308.4 299.1 292.8 302.4 302.8 325.4 341.5 403.6 395.0 385.3 383.0 384.6 384.6 378.6 355.1 354.3 358.4 366.2 376.5 384.4 401.2 411.2 419.2 425.5 422.3 415.8 409.4 406.3 400.0 393.8 390.6 384.8 378.9 375.8 370.0 364.3 361.5 355.9 350.4 347.6 348.4 372.1 377.6 390.6 417.0 423.1 427.5 427.5 424.8 413.9 405.1 407.9 402.6 384.0 371.7 380.7 375.6 370.4 360.5 353.5 350.0 344.8 341.4 341.3 341.1 340.7 341.4 341.1 338.5 335.2 333.5 334.6 338.3 340.1 342.8 343.8 339.9 332.1 323.2 
320.8 308.8 319.5 320.6 323.2 325.5 326.8 329.2 331.7 333.1 335.4 338.1 339.3 339.5 337.7 336.7 336.7 336.7 336.7 339.7 342.2 341.6 336.7 330.0 326.6 319.9 315.7 314.6 312.4 310.2 309.3 306.6 301.8 302.5 307.4 304.7 302.6 298.6 297.4 297.4 296.0 281.1 280.9 281.7 273.7 269.8 266.7 266.2 266.5 267.0 267.4 267.3 265.3 261.3 257.6 256.2 253.7 247.4 246.7 245.0 243.5 242.8 242.8 261.6 261.6 261.6 261.6 261.6 261.6 261.6 261.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 25.513888888888886 - } -] \ No newline at end of file diff --git "a/samples/\346\201\213\344\272\272\345\277\203.ds" "b/samples/\346\201\213\344\272\272\345\277\203.ds" index b8a735219..f4872e5b4 100644 --- "a/samples/\346\201\213\344\272\272\345\277\203.ds" +++ "b/samples/\346\201\213\344\272\272\345\277\203.ds" @@ -1,14 +1,14 @@ [ { + "offset": 27.937, "text": "SP AP 化 作 风 AP 化 作 雨 AP 化 作 春 走 向 你 AP 梦 如 声 AP 梦 如 影 AP 梦 是 遥 望 的 掌 印 AP 化 作 烟 AP 化 作 泥 AP 化 作 云 飘 向 你 AP 思 如 海 AP 恋 如 城 AP 思 念 最 遥 不 可 及 AP 你 问 西 湖 水 AP 偷 走 她 的 几 分 美 AP 时 光 一 去 不 再 AP 信 誓 旦 旦 留 给 谁 AP 你 问 长 江 水 AP 淘 尽 心 酸 的 滋 味 AP 剩 半 颗 AP 恋 人 心 唤 不 回 SP", - "ph_seq": "SP AP h ua z uo f eng AP h ua z uo y v AP h ua z uo ch un z ou x iang n i AP m eng r u sh eng AP m eng r u y ing AP m eng sh ir y ao w ang d e zh ang y in AP h ua z uo y En AP h ua z uo n i AP h ua z uo y vn p iao x iang n i AP s i0 r u h ai AP l ian r u ch eng AP s i0 n ian z ui y ao b u k e j i AP n i w en x i i h u u sh ui AP t ou z ou t a d e j i f en m ei AP sh ir g uang y i q v b u z ai AP x in sh ir d an d an l iu g ei sh ui AP n i w en ch ang ang j iang iang sh ui AP t ao j in x in s uan d e z i0 w ei AP sh eng b an k e AP l ian r en x in h uan b u h ui SP", - "note_seq": "rest rest D4 D4 E4 E4 F4 F4 rest A3 A3 F4 F4 E4 E4 rest D4 D4 C4 C4 D4 D4 C4 C4 D4 D4 A3 A3 rest D4 D4 E4 E4 F4 F4 rest A3 A3 F4 F4 E4 E4 rest D4 D4 C4 C4 D4 D4 F4 F4 E4 E4 C4 C4 D4 D4 rest D4 D4 E4 E4 F4 F4 rest A3 A3 F4 F4 E4 E4 rest D4 D4 C4 C4 D4 D4 C4 C4 D4 D4 A3 A3 rest D4 D4 E4 E4 F4 F4 rest G4 G4 A4 A4 E4 E4 rest D4 D4 C4 C4 D4 D4 F4 F4 E4 E4 C4 C4 D4 D4 rest F4 F4 G4 G4 A4 A4 C5 A4 A4 F4 G4 G4 rest E4 E4 D4 D4 C4 C4 A3 A3 G4 G4 A4 A4 F4 F4 rest D4 D4 E4 E4 F4 F4 G4 G4 A4 A4 E4 E4 rest D4 D4 E4 E4 F4 F4 F4 F4 F4 F4 G4 G4 A4 A4 rest F4 F4 G4 G4 A4 A4 C5 A4 A4 F4 G4 G4 rest E4 E4 D4 D4 C4 C4 A3 A3 G4 G4 A4 A4 F4 F4 rest D4 D4 E4 E4 F4 F4 rest G4 G4 A4 A4 E4 E4 D4 D4 C4 C4 D4 D4 rest", - "note_dur_seq": "0.2 0.4 0.3649999 0.3649999 0.366 0.366 0.3660001 0.3660001 0.3659999 0.3659999 0.3659999 0.3660002 0.3660002 0.3659999 0.3659999 0.365 0.3659999 0.3659999 0.3659999 0.3659999 0.7319999 0.7319999 0.3660002 0.3660002 0.3660002 0.3660002 0.3649998 0.3649998 0.3660002 0.3659997 0.3659997 0.3660002 0.3660002 0.3660002 0.3660002 0.3659997 0.3660002 0.3660002 0.3649998 0.3649998 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3659992 0.3659992 0.3650007 0.3650007 0.3659992 0.3659992 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3659992 0.3659992 0.3650007 0.3650007 0.3660002 0.3659992 0.3659992 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3660002 0.3649998 0.3649998 0.7320004 0.7320004 0.3659992 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3659992 0.3649998 0.3649998 0.3660011 0.3660011 0.3659992 0.3659992 0.3660011 0.3659992 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3649998 0.3659992 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3660011 0.3660011 
0.3659992 0.3659992 0.3660011 0.3660011 0.3649998 0.3649998 0.3659992 0.3659992 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3660011 0.3649998 0.3649998 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3649998 0.3649998 0.3660011 0.3660011 0.3659992 0.3659992 0.3660011 0.3660011 0.3659992 0.3659992 0.3659992 0.3660011 0.3660011 0.730999 0.730999 0.3660011 0.3660011 0.3659992 0.3659992 0.3660011 0.3660011 0.3659973 0.3660011 0.3660011 0.3650017 0.3650017 0.3659973 0.3659973 0.3660011 0.3660011 0.3660011 0.3660011 0.3660011 0.3660011 0.3659973 0.3659973 0.3660011 0.3650017 0.3650017 0.3659973 0.3659973 0.3660011 0.3660011 0.3660011 0.3659973 0.3659973 0.3660011 0.3660011 0.3660011 0.3649979 0.3660011 0.3660011 0.3660011 0.3660011 0.3659973 0.3659973 0.3660011 0.3660011 0.3660011 0.3660011 0.3649979 0.3649979 0.3660011 0.3660011 0.3660011 0.3660011 0.3660011 0.3659973 0.3659973 0.3660011 0.3660011 0.3660011 0.3649979 0.3649979 0.3660011 0.3660011 0.7319984 0.7319984 0.3660011 0.3660011 0.7319984 0.7319984 1.829002 1.829002 0.5", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.004878 0.4 0.195122 0.244574 0.120426 0.245574 0.120426 0.366 0.216609 0.149391 0.24557 0.12043 0.32027 0.045731 0.366 0.24457 0.12043 0.245574 0.120426 0.245574 0.120426 0.567365 0.164635 0.245574 0.120426 0.245574 0.120426 0.365 0.291305 0.074696 0.320269 0.045731 0.245574 0.120426 0.366 0.291308 0.074692 0.32027 0.045731 0.319269 0.045731 0.366 0.32027 0.045731 0.245574 0.120426 0.320266 0.045734 0.320269 0.045731 0.31927 0.045731 0.283785 0.082214 0.32027 0.045731 0.366 0.260814 0.105186 0.245574 0.120426 0.320269 0.045731 0.365001 0.245574 0.120426 0.245573 0.120426 0.303035 0.062965 0.366 0.260818 0.105183 0.24557 0.12043 0.319269 0.045731 0.596327 0.135674 0.245573 0.120426 0.315296 0.050705 0.365999 0.170877 0.195122 0.319269 0.045731 0.245575 0.120426 0.365999 0.320271 0.045731 0.320265 0.045734 0.245575 0.120426 0.365999 0.169878 0.195122 0.291304 0.074696 0.276062 0.089939 0.320265 0.045734 0.306549 0.059452 0.245573 0.120426 0.245575 0.120426 0.365 0.320269 0.045731 0.320269 0.045731 0.245575 0.120426 0.365999 0.245575 0.120426 0.365 0.245569 0.12043 0.366001 0.245573 0.120426 0.27606 0.089939 0.260818 0.105183 0.306547 0.059452 0.24457 0.12043 0.245575 0.120426 0.27606 0.089939 0.366001 0.170877 0.195122 0.320269 0.045731 0.320267 0.045734 0.551121 0.179878 0.291306 0.074696 0.245573 0.120426 0.366001 0.186119 0.179878 0.245575 0.120426 0.319271 0.045731 0.320259 0.045738 0.295681 0.07032 0.306553 0.059448 0.245571 0.12043 0.365997 0.306553 0.059448 0.319263 0.045738 0.245567 0.12043 0.366001 0.260822 0.105179 0.365997 0.245571 0.12043 0.366001 0.244568 0.12043 0.276066 0.089935 0.245579 0.120422 0.245567 0.12043 0.294895 0.071106 0.245571 0.12043 0.319267 0.045731 0.366001 0.201374 0.164627 0.320263 0.045738 0.245567 0.12043 0.366001 0.320271 0.045731 0.319267 0.045731 0.245579 0.120422 0.611568 0.12043 0.320271 0.045731 0.536877 0.195122 1.829002 0.5", - "f0_timestep": "0.005", + "ph_seq": "SP AP h ua z uo f eng AP h ua z uo y v AP 
h ua z uo ch un z ou x iang n i AP m eng r u sh eng AP m eng r u y ing AP m eng sh ir y ao w ang d e zh ang y in AP h ua z uo y En AP h ua z uo n i AP h ua z uo y vn p iao x iang n i AP s i0 r u h ai AP l ian r u ch eng AP s i0 n ian z ui y ao b u k e j i AP n i w en x i h u sh ui AP t ou z ou t a d e j i f en m ei AP sh ir g uang y i q v b u z ai AP x in sh ir d an d an l iu g ei sh ui AP n i w en ch ang j iang sh ui AP t ao j in x in s uan d e z i0 w ei AP sh eng b an k e AP l ian r en x in h uan b u h ui SP", + "ph_dur": "0.0049 0.4 0.1951 0.2446 0.1204 0.2456 0.1204 0.366 0.2166 0.1494 0.2456 0.1204 0.3203 0.0457 0.366 0.2446 0.1204 0.2456 0.1204 0.2456 0.1204 0.5674 0.1646 0.2456 0.1204 0.2456 0.1204 0.365 0.2913 0.0747 0.3203 0.0457 0.2456 0.1204 0.366 0.2913 0.0747 0.3203 0.0457 0.3193 0.0457 0.366 0.3203 0.0457 0.2456 0.1204 0.3203 0.0457 0.3203 0.0457 0.3193 0.0457 0.2838 0.0822 0.3203 0.0457 0.366 0.2608 0.1052 0.2456 0.1204 0.3203 0.0457 0.365 0.2456 0.1204 0.2456 0.1204 0.303 0.063 0.366 0.2608 0.1052 0.2456 0.1204 0.3193 0.0457 0.5963 0.1357 0.2456 0.1204 0.3153 0.0507 0.366 0.1709 0.1951 0.3193 0.0457 0.2456 0.1204 0.366 0.3203 0.0457 0.3203 0.0457 0.2456 0.1204 0.366 0.1699 0.1951 0.2913 0.0747 0.2761 0.0899 0.3203 0.0457 0.3065 0.0595 0.2456 0.1204 0.2456 0.1204 0.365 0.3203 0.0457 0.3203 0.0457 0.2456 0.1204 0.6116 0.1204 0.6106 0.1204 0.366 0.2456 0.1204 0.2761 0.0899 0.2608 0.1052 0.3065 0.0595 0.2446 0.1204 0.2456 0.1204 0.2761 0.0899 0.366 0.1709 0.1951 0.3203 0.0457 0.3203 0.0457 0.5511 0.1799 0.2913 0.0747 0.2456 0.1204 0.366 0.1861 0.1799 0.2456 0.1204 0.3193 0.0457 0.3203 0.0457 0.2957 0.0703 0.3066 0.0594 0.2456 0.1204 0.366 0.3066 0.0594 0.3193 0.0457 0.2456 0.1204 0.6268 0.1052 0.6116 0.1204 0.366 0.2446 0.1204 0.2761 0.0899 0.2456 0.1204 0.2456 0.1204 0.2949 0.0711 0.2456 0.1204 0.3193 0.0457 0.366 0.2014 0.1646 0.3203 0.0457 0.2456 0.1204 0.366 0.3203 0.0457 0.3193 0.0457 0.2456 0.1204 0.6116 0.1204 0.3203 0.0457 0.5369 0.1951 1.829 0.5", + "ph_num": "1 2 2 2 1 2 2 2 1 2 2 2 2 2 2 1 2 2 2 1 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 1 2 2 2 1 2 2 2 2 2 2 1 2 2 2 1 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest rest D4 E4 F4 rest A3 F4 E4 rest D4 C4 D4 C4 D4 A3 rest D4 E4 F4 rest A3 F4 E4 rest D4 C4 D4 F4 E4 C4 D4 rest D4 E4 F4 rest A3 F4 E4 rest D4 C4 D4 C4 D4 A3 rest D4 E4 F4 rest G4 A4 E4 rest D4 C4 D4 F4 E4 C4 D4 rest F4 G4 A4 C5 A4 F4 G4 rest E4 D4 C4 A3 G4 A4 F4 rest D4 E4 F4 G4 A4 E4 rest D4 E4 F4 F4 F4 G4 A4 rest F4 G4 A4 C5 A4 F4 G4 rest E4 D4 C4 A3 G4 A4 F4 rest D4 E4 F4 rest G4 A4 E4 D4 C4 D4 rest", + "note_dur": "0.2 0.4 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.732 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.732 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.731 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.366 0.366 0.366 0.366 0.366 0.365 0.366 0.732 0.366 0.732 1.829 0.5", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.7 263.8 263.9 263.9 264.0 264.1 264.3 264.5 264.7 264.8 264.8 264.9 265.0 265.0 265.1 265.4 266.4 267.7 269.3 271.6 274.3 277.1 280.0 282.8 285.4 287.6 289.4 290.6 291.2 291.2 291.1 291.1 291.1 291.0 290.9 290.8 290.7 290.6 290.5 290.5 290.5 290.5 290.5 290.5 290.6 291.0 291.6 292.3 293.0 293.8 294.8 295.7 296.6 297.4 298.3 298.8 299.3 299.7 300.0 299.9 299.7 298.5 296.6 293.5 289.7 285.6 281.5 277.7 274.3 271.9 270.2 269.8 270.2 271.4 273.3 276.2 279.9 284.2 289.2 294.7 300.4 306.1 311.8 317.4 322.5 326.8 330.0 332.7 334.4 335.2 335.0 334.7 334.5 334.0 333.5 332.7 332.0 331.2 330.5 329.7 329.1 328.6 328.0 327.7 327.7 327.7 327.7 327.7 327.9 328.2 328.3 328.6 328.9 329.2 329.5 330.0 330.3 330.6 331.0 331.3 331.6 331.9 332.1 332.3 332.4 332.5 332.5 332.3 331.2 329.3 327.4 324.4 321.0 317.7 314.3 311.4 308.8 307.0 305.8 305.4 305.5 306.1 306.7 307.8 309.0 310.5 312.2 314.0 315.7 317.4 319.0 320.2 321.4 322.1 322.6 322.8 323.5 325.9 329.4 333.8 339.0 344.0 348.8 352.4 354.7 355.5 355.2 354.8 353.9 352.7 351.3 349.6 348.0 346.2 344.6 342.7 341.1 339.4 338.0 336.9 336.1 335.7 335.6 335.7 336.6 337.7 339.3 341.1 343.4 345.7 348.0 350.4 352.5 354.3 355.5 356.4 356.8 356.4 356.0 355.3 354.0 352.7 351.2 349.4 347.6 345.9 344.3 342.8 341.5 340.5 339.9 339.5 339.5 339.8 340.7 341.8 343.4 345.0 346.5 348.2 350.0 351.5 352.7 353.4 353.6 353.5 353.3 353.2 352.7 352.3 351.7 351.2 350.6 350.1 349.8 349.4 349.2 349.2 347.3 344.9 342.6 340.2 337.9 335.6 333.3 331.0 328.7 326.4 324.2 322.0 319.8 317.6 315.4 313.2 311.1 308.9 306.8 304.7 302.6 300.6 298.5 296.4 294.4 292.4 290.4 288.4 286.4 284.4 282.5 280.6 278.6 276.7 274.8 272.9 271.1 269.2 267.4 265.5 263.7 261.9 260.1 258.3 256.5 254.8 253.0 251.3 249.6 247.8 246.1 244.5 242.8 241.1 239.5 237.8 236.2 234.6 233.0 231.4 229.8 228.2 226.6 225.1 223.5 222.3 222.2 222.1 222.0 221.8 221.7 221.5 221.3 221.1 220.9 220.8 220.6 220.6 220.6 220.6 220.6 220.6 220.8 220.9 221.0 221.1 221.3 221.5 221.6 221.8 221.9 222.1 222.2 222.3 222.4 222.4 222.4 222.6 222.6 222.6 222.9 223.1 223.4 223.6 223.8 224.0 224.2 224.4 224.5 224.2 223.6 222.8 221.5 220.0 218.2 216.3 214.2 212.3 210.4 208.6 207.1 205.8 204.9 204.3 204.4 205.0 206.3 208.6 211.6 214.9 218.7 223.5 229.1 235.3 241.9 249.0 256.4 264.2 271.9 279.8 287.5 294.6 300.7 307.1 312.9 317.8 321.7 324.5 326.4 327.0 327.0 327.0 327.1 327.3 327.6 327.7 328.0 328.4 328.7 328.9 329.1 329.4 329.6 329.6 329.7 329.8 330.0 330.6 331.5 332.8 334.4 336.1 337.9 339.7 341.6 343.7 345.8 347.6 349.1 350.5 351.6 352.3 352.7 352.9 352.9 352.9 353.1 353.1 353.2 353.4 353.5 353.7 353.7 353.7 353.5 353.5 353.8 353.5 352.8 351.9 350.5 349.1 347.2 345.4 343.4 341.3 339.3 337.1 335.2 333.4 331.9 330.6 329.4 328.3 327.7 327.4 327.4 327.4 327.8 328.4 329.1 329.8 
330.6 331.3 332.0 332.6 332.9 333.1 332.9 332.6 331.9 331.3 330.4 329.2 328.3 327.2 326.3 325.3 324.3 323.5 322.9 322.5 322.3 322.3 322.5 323.0 323.8 324.8 326.0 327.3 328.6 330.0 331.3 332.6 333.5 334.4 335.0 335.3 335.5 335.2 334.7 334.0 333.1 332.0 330.8 329.6 328.5 327.7 326.9 326.3 326.0 326.0 326.0 326.2 326.3 326.7 327.0 327.3 327.7 328.1 328.6 328.9 329.4 329.7 330.0 330.3 330.6 330.8 330.8 330.8 330.2 329.6 329.1 328.5 327.9 327.3 326.7 326.1 325.5 324.9 324.3 323.7 323.2 322.6 322.0 321.4 320.8 320.3 319.7 319.1 318.5 317.9 317.4 316.8 316.2 315.7 315.1 314.5 314.0 313.4 312.8 312.3 311.7 311.1 310.6 310.0 309.4 308.9 308.3 307.8 307.2 306.7 306.1 305.6 305.0 304.5 303.9 303.4 302.8 302.3 301.7 301.2 300.6 300.1 299.5 299.0 298.5 297.9 297.4 296.8 296.3 295.8 295.2 294.7 294.2 293.6 293.3 293.3 293.5 295.4 295.4 295.4 295.4 295.4 295.2 295.1 295.0 294.9 294.8 294.7 294.5 294.3 294.2 294.1 294.0 293.8 293.7 293.7 293.5 293.4 293.3 293.3 293.3 293.3 293.3 293.3 293.3 293.5 293.7 293.8 294.3 293.7 292.8 291.3 289.2 287.0 284.3 281.5 278.6 275.9 273.2 270.9 268.7 267.0 265.9 265.0 264.7 264.5 264.5 264.4 264.3 264.2 264.1 263.9 263.7 263.6 263.4 263.4 263.4 262.5 262.6 262.5 262.5 262.5 262.5 262.4 262.3 262.2 262.1 261.9 261.8 261.7 261.6 261.5 261.3 261.2 261.2 261.0 261.0 261.0 261.0 260.9 260.9 260.8 260.7 260.5 260.4 260.1 260.0 259.8 259.7 259.7 259.7 259.7 259.8 259.9 260.3 260.6 261.0 261.5 262.0 262.5 263.0 263.3 263.7 264.1 264.2 264.2 264.3 263.9 263.1 261.9 260.4 258.8 256.7 254.5 252.3 250.0 247.7 245.5 244.0 242.1 240.5 239.3 238.3 237.8 238.6 238.7 239.0 239.6 240.5 241.4 242.4 243.8 245.3 246.9 248.6 250.3 252.1 253.8 255.6 257.1 258.6 259.9 261.0 261.9 262.7 263.3 264.0 263.9 263.9 263.8 263.5 263.4 263.2 262.9 262.7 262.4 262.2 261.9 261.7 261.4 261.2 261.1 260.9 260.7 260.6 260.6 260.6 260.8 261.3 262.3 263.6 265.4 267.6 269.7 272.1 274.9 277.8 280.8 283.7 286.4 288.9 291.2 293.2 294.6 295.5 296.1 296.3 296.2 296.1 295.9 295.7 295.4 295.1 294.7 294.3 293.8 293.4 293.0 292.6 292.4 292.1 291.8 291.5 291.3 291.3 291.3 291.3 291.4 291.6 292.0 292.4 292.8 293.2 293.8 294.3 294.9 295.5 295.9 296.2 296.6 297.0 297.3 297.4 297.6 297.5 297.3 297.1 296.6 296.2 295.8 295.4 295.0 294.6 294.2 293.7 293.6 293.5 293.5 293.5 293.5 293.6 293.8 294.0 294.2 294.3 294.6 294.8 295.0 295.3 295.4 295.5 295.5 295.7 295.5 295.1 294.3 293.2 291.6 289.8 287.7 285.3 282.8 280.3 277.6 275.0 272.6 270.3 268.1 266.5 265.0 263.6 262.7 261.9 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.8 261.9 261.9 261.9 261.9 261.9 261.9 261.9 262.0 261.9 261.9 261.8 261.8 261.7 261.6 261.6 261.6 261.5 261.4 261.3 261.1 261.0 260.7 260.5 260.2 260.1 259.9 259.8 259.8 259.8 260.3 261.2 262.4 263.5 264.6 265.3 265.7 265.6 264.8 263.4 261.6 259.3 256.8 254.2 251.6 249.2 247.1 245.5 244.4 244.0 244.0 244.6 245.9 247.7 250.0 252.6 255.3 258.9 262.8 266.9 271.3 275.7 280.2 284.5 288.8 292.6 296.0 299.0 301.1 302.6 303.8 304.4 304.2 304.0 303.8 303.4 303.0 302.4 301.8 301.1 300.3 299.5 298.6 297.8 296.9 296.2 295.4 294.9 294.1 293.6 293.0 292.5 292.3 292.0 292.0 292.0 292.0 292.3 292.9 293.4 293.8 294.3 294.6 294.7 294.5 294.1 293.2 291.9 290.1 287.9 285.1 281.9 278.7 275.3 271.2 267.0 262.4 257.7 252.9 248.2 243.4 238.7 234.0 229.5 225.5 221.7 217.7 214.0 210.5 207.2 204.3 201.6 199.3 197.3 195.6 194.3 193.4 192.7 192.5 192.5 192.9 193.6 194.6 195.9 197.4 199.2 201.3 203.5 206.0 208.4 210.8 213.2 215.5 217.7 219.5 220.9 222.1 223.1 223.7 
223.9 223.7 223.1 222.2 221.1 219.7 218.1 216.3 214.5 212.6 210.7 208.9 207.3 205.9 204.8 203.9 203.3 202.9 203.0 203.3 204.0 205.1 206.4 208.0 209.7 211.7 213.7 215.7 217.3 218.8 220.2 221.4 222.1 222.6 222.5 222.0 221.3 220.3 218.8 217.2 215.7 214.1 212.4 210.9 209.4 208.3 207.4 206.9 206.7 206.9 207.6 208.6 209.9 211.2 212.9 214.7 216.5 218.1 219.5 220.3 220.8 221.3 221.9 222.4 222.9 223.4 223.9 224.4 224.9 225.5 226.0 226.5 227.0 227.6 228.1 228.6 229.1 229.7 230.2 230.7 231.3 231.8 232.3 232.9 233.4 233.9 234.5 235.0 235.6 236.1 236.6 237.2 237.7 238.3 238.8 239.4 239.9 240.5 241.0 241.6 242.2 242.7 243.3 243.8 244.4 245.0 245.5 246.1 246.7 247.2 247.8 248.4 249.0 249.5 250.1 250.7 251.3 252.4 254.2 256.6 259.2 262.2 265.5 269.0 272.6 276.2 279.8 283.0 285.5 288.2 290.5 292.3 293.4 294.1 295.0 295.0 295.0 294.7 294.6 294.3 293.9 293.6 293.3 292.9 292.4 292.0 291.7 291.2 290.8 290.5 290.2 290.0 289.7 289.6 289.5 289.5 289.5 289.5 290.0 290.5 291.0 291.7 292.5 293.3 294.2 295.0 295.5 296.1 296.4 296.6 296.6 296.6 296.4 296.2 296.0 295.8 295.4 295.1 294.7 294.3 293.9 293.5 293.0 292.6 292.1 291.7 291.4 291.0 290.7 290.6 290.3 290.2 290.1 290.1 290.2 291.1 292.5 294.5 297.1 300.1 303.3 307.0 310.9 313.8 317.8 321.5 324.9 328.0 330.6 332.7 334.0 334.7 334.6 334.5 334.3 333.8 333.5 332.9 332.2 331.4 330.8 329.9 329.1 328.3 327.5 326.8 326.2 325.7 325.5 325.1 324.9 324.9 325.0 325.4 326.0 327.0 328.0 329.2 330.4 331.6 332.5 333.5 334.2 334.7 335.1 334.8 332.9 329.6 325.1 319.7 313.8 307.9 302.3 298.3 294.2 291.2 289.6 289.5 289.8 290.6 291.9 293.6 295.5 298.1 300.8 304.0 306.7 310.2 314.1 318.1 322.1 326.3 330.5 334.6 338.5 342.4 346.0 349.2 351.7 354.2 356.3 358.0 359.3 359.7 360.0 359.7 359.0 358.1 357.0 355.5 353.8 352.1 350.2 348.4 346.6 345.2 343.9 343.0 342.1 341.6 341.6 341.8 342.5 343.2 344.6 346.1 347.8 349.6 351.5 353.3 354.9 356.2 357.2 357.9 358.1 358.0 357.8 357.1 356.4 355.3 354.2 353.1 351.9 350.5 349.0 347.7 346.5 345.4 344.4 343.7 343.0 342.8 342.8 342.8 342.9 343.2 343.6 344.1 344.6 345.3 345.9 346.6 347.5 348.2 349.0 349.7 350.3 350.9 351.4 351.9 352.2 352.5 352.7 352.9 350.0 347.1 344.2 341.4 338.6 335.8 333.0 330.3 327.6 324.9 322.2 319.6 316.9 314.3 311.7 309.2 306.6 304.1 301.6 299.1 296.7 294.2 291.8 289.4 287.0 284.7 282.3 280.0 277.7 275.4 273.1 270.9 268.7 266.4 264.3 262.1 259.9 257.8 255.7 253.6 251.5 249.4 247.4 245.3 243.3 241.3 239.3 237.3 235.4 233.4 231.5 229.6 227.7 225.9 224.0 222.2 220.3 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.2 219.4 219.4 219.4 219.4 219.4 219.4 219.4 219.4 219.4 219.5 219.5 219.5 219.5 219.6 219.7 219.9 220.0 220.0 220.3 220.4 220.6 220.6 220.8 221.0 221.1 221.2 221.3 221.4 221.4 221.4 221.5 221.4 221.4 221.4 221.4 221.4 221.4 221.3 221.3 221.3 221.3 221.1 221.1 221.1 220.6 220.5 220.5 220.5 220.5 220.4 220.4 220.3 220.3 220.3 220.1 220.1 220.1 220.1 220.2 220.8 222.2 224.4 227.2 230.7 234.4 238.7 244.0 249.7 255.9 262.5 269.4 276.5 283.7 290.8 297.6 304.1 309.8 314.4 319.3 323.2 326.2 328.2 329.1 329.1 329.1 329.1 329.1 329.1 329.1 328.9 328.9 328.9 328.7 328.7 328.7 328.5 328.5 328.5 328.5 328.5 328.5 327.4 327.6 328.6 330.2 332.2 334.6 337.3 340.3 343.0 345.6 347.8 349.7 350.8 351.4 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.3 351.2 351.0 351.0 351.0 351.0 351.0 351.0 351.0 351.0 351.0 350.4 349.4 348.4 347.1 345.5 344.0 342.3 340.9 339.0 337.1 335.4 333.7 332.2 330.6 329.3 328.3 327.4 326.8 326.4 326.4 326.4 326.6 327.1 327.9 328.7 329.4 330.5 331.5 332.5 333.4 334.0 334.7 335.2 335.4 335.5 335.2 334.7 334.1 333.3 
332.1 330.8 329.4 328.0 326.8 325.6 324.5 323.2 322.4 321.8 321.4 321.2 321.2 321.7 322.5 323.7 324.9 326.3 327.7 329.5 331.1 332.7 334.0 335.0 335.8 336.2 336.2 336.0 335.6 334.8 333.8 332.9 331.6 330.3 329.1 327.8 326.8 325.8 324.9 324.2 323.7 323.6 323.6 323.7 324.2 324.7 325.5 326.2 326.9 327.7 328.5 329.1 329.6 330.0 330.1 329.7 329.0 328.4 327.7 327.1 326.5 325.8 325.2 324.6 323.9 323.3 322.6 322.0 321.4 320.8 320.1 319.5 318.9 318.3 317.6 317.0 316.4 315.8 315.1 314.5 313.9 313.3 312.7 312.1 311.5 310.9 310.2 309.6 309.0 308.4 307.8 307.2 306.6 306.0 305.4 304.8 304.2 303.6 303.0 302.4 301.8 301.3 300.7 300.1 299.5 298.9 298.3 297.7 297.1 296.6 296.0 295.4 294.8 294.2 293.7 293.2 293.2 293.2 293.2 293.1 293.0 293.0 292.8 292.8 292.8 292.6 292.6 292.6 292.6 292.6 292.6 292.6 292.8 292.9 293.0 293.2 293.4 293.6 293.8 293.9 294.2 294.3 294.5 294.7 294.8 294.9 295.0 295.0 295.1 295.0 294.6 293.8 292.5 291.0 289.1 287.2 285.0 282.4 279.8 277.1 274.3 271.7 269.1 266.8 264.6 262.6 260.9 259.7 258.6 257.9 257.4 257.4 257.5 257.7 257.9 258.3 258.7 259.2 259.8 260.3 261.0 261.5 262.1 262.6 263.0 263.4 263.6 263.9 264.0 263.9 263.9 263.9 263.7 263.6 263.4 263.3 263.0 262.8 262.6 262.3 262.1 261.8 261.6 261.2 260.9 260.8 260.6 260.3 260.1 260.0 259.9 259.8 259.8 260.4 260.4 260.4 260.4 260.6 260.9 261.2 261.6 262.1 262.4 262.8 263.1 263.4 263.6 263.7 263.6 263.4 263.3 263.0 262.7 262.3 261.8 261.5 261.1 260.7 260.4 260.2 260.1 260.1 260.3 260.8 261.5 262.2 263.1 263.7 264.1 264.4 264.2 264.1 263.9 263.5 263.2 262.8 262.4 261.9 261.4 261.1 260.6 260.2 259.9 259.8 259.7 259.7 260.3 261.9 264.2 267.5 271.4 275.7 280.1 284.5 288.5 291.7 294.1 295.4 295.6 295.5 295.4 295.3 295.0 294.7 294.5 294.2 293.9 293.6 293.5 293.3 293.3 293.3 293.3 293.5 293.8 294.1 294.5 294.8 295.3 295.7 296.0 296.4 296.7 296.8 296.9 297.0 296.9 296.8 296.7 296.4 296.2 295.9 295.6 295.3 295.0 294.6 294.3 294.0 293.7 293.5 293.3 293.3 293.3 293.3 293.3 293.5 293.7 294.0 294.3 294.4 294.6 294.9 295.2 295.3 295.4 295.5 295.5 295.9 296.6 297.6 298.9 300.4 302.1 304.1 306.4 308.6 311.0 313.6 315.7 318.0 320.2 322.5 324.5 326.4 327.9 329.1 330.0 330.6 330.9 331.1 331.9 333.5 335.5 337.9 340.7 343.4 346.0 348.0 349.6 351.0 351.7 351.7 351.5 351.2 350.6 350.0 349.5 348.8 348.3 348.0 347.8 348.8 348.8 348.8 348.8 349.0 349.2 349.8 350.2 350.6 351.1 351.5 351.7 351.9 352.0 351.5 350.8 349.5 347.8 345.6 343.2 340.5 337.5 334.6 332.1 329.8 327.3 325.3 323.7 322.5 321.7 321.5 321.8 322.7 324.2 326.0 328.0 330.2 332.1 333.9 335.2 335.7 335.5 335.9 335.6 335.1 334.5 333.7 332.6 331.5 330.6 329.6 328.7 328.0 327.5 327.4 327.4 327.4 327.6 327.9 328.2 328.6 329.1 329.6 330.1 330.6 331.1 331.5 331.9 332.2 332.3 332.5 332.2 332.0 331.6 331.2 330.5 329.8 329.2 328.5 327.7 327.3 326.9 326.8 326.8 326.8 327.2 327.6 328.1 328.7 329.2 329.6 329.9 330.1 329.6 328.0 325.2 321.4 316.1 310.9 305.1 298.5 291.8 285.3 279.1 273.6 268.9 265.0 262.1 260.4 259.8 259.1 259.2 259.6 259.9 260.3 260.8 261.3 261.6 262.0 262.5 262.9 263.3 263.6 263.9 263.9 264.0 263.9 263.6 263.4 263.1 262.7 262.3 261.9 261.6 261.3 261.0 261.0 261.0 261.1 261.3 261.6 261.9 262.4 262.6 263.0 263.2 263.5 263.6 263.6 263.6 263.6 263.4 263.2 263.0 262.9 262.7 262.4 262.3 262.2 262.1 262.1 262.0 261.9 261.9 261.8 261.6 261.5 261.3 261.0 260.8 260.5 260.3 260.1 259.9 259.7 259.6 259.5 259.4 259.4 259.4 259.7 260.6 262.1 263.9 266.3 269.1 272.1 275.5 278.9 282.4 285.9 289.2 292.3 294.5 296.7 298.6 300.0 300.7 300.8 300.5 300.0 299.3 298.4 297.2 296.1 295.0 293.8 292.4 291.1 290.0 288.9 
288.0 287.4 286.9 286.8 286.8 287.3 288.2 289.3 290.9 292.5 294.3 296.0 297.5 298.6 299.7 300.2 300.5 300.4 300.0 299.5 298.9 298.1 297.2 296.4 295.4 294.5 293.7 292.9 292.3 291.7 291.5 291.5 291.5 291.6 291.9 292.3 292.6 293.2 293.8 294.5 295.1 295.8 296.4 296.8 297.2 297.5 297.8 297.8 297.9 297.9 297.9 297.9 297.8 297.8 297.8 297.8 297.8 297.7 297.7 297.7 297.7 297.7 297.6 297.6 297.6 297.6 297.6 297.5 297.5 297.5 297.5 297.5 297.4 297.4 297.4 297.4 297.4 297.3 297.3 297.3 297.3 297.3 297.2 297.2 297.2 297.2 297.2 297.1 297.1 297.1 297.1 297.1 297.0 297.0 297.0 297.0 297.0 296.9 296.9 296.9 296.9 296.9 296.8 296.8 296.8 296.8 296.8 296.7 296.7 296.7 296.7 296.7 296.6 296.6 296.6 296.6 296.4 296.4 296.4 296.3 296.2 296.0 295.8 295.5 295.4 295.1 294.9 294.6 294.3 294.2 293.9 293.7 293.5 293.3 293.2 293.0 293.0 293.0 293.0 293.0 293.0 293.2 293.3 293.6 293.8 294.1 294.5 294.9 295.1 295.6 295.9 296.2 296.4 296.6 296.8 296.9 296.9 297.1 297.1 297.4 297.7 298.1 298.4 298.6 298.8 298.3 296.9 294.9 291.9 289.3 285.4 281.5 277.3 273.4 269.6 266.2 263.3 261.1 259.5 258.9 258.9 260.0 262.2 265.8 270.5 276.1 282.5 289.2 295.5 303.1 310.6 317.8 324.2 329.8 334.0 336.9 338.3 338.3 338.0 337.4 336.6 335.4 334.2 332.9 331.6 330.4 329.4 328.6 327.9 327.6 327.5 327.5 327.7 328.0 328.5 328.9 329.6 330.2 330.6 331.2 331.8 332.2 332.7 333.0 333.3 333.3 333.3 333.1 332.8 332.5 331.9 331.2 330.8 330.1 329.5 329.1 328.7 328.5 328.5 328.5 328.5 328.6 328.7 328.9 329.1 329.3 329.4 329.8 330.0 330.2 330.4 330.6 330.7 330.8 330.8 330.9 330.8 330.5 330.2 329.8 329.4 328.9 328.6 328.5 328.5 328.9 330.1 332.0 334.4 337.1 340.3 343.4 346.6 349.6 352.3 354.0 355.3 356.1 356.2 356.0 355.7 355.1 354.8 353.9 353.1 352.1 351.3 350.0 349.0 348.0 347.0 346.0 345.2 344.4 344.0 343.4 343.0 342.8 342.8 343.0 343.5 344.2 345.3 346.7 348.2 349.6 351.1 352.5 353.7 354.5 355.1 355.5 355.3 355.0 354.4 353.5 352.5 351.3 350.0 348.8 347.6 346.6 345.9 345.3 345.2 345.2 345.3 345.5 346.0 346.3 346.9 347.6 348.1 348.6 349.3 349.8 350.2 350.6 350.8 350.8 349.7 347.6 345.4 343.3 341.2 339.1 337.0 334.9 332.8 330.7 328.7 326.7 324.6 322.6 320.6 318.6 316.7 314.7 312.8 310.8 308.9 307.0 305.1 303.2 301.3 299.5 297.6 295.8 293.9 292.1 290.3 288.5 286.7 285.0 283.2 281.4 279.7 278.0 276.3 274.5 272.8 271.2 269.5 267.8 266.2 264.5 262.9 261.2 259.6 258.0 256.4 254.8 253.3 251.7 250.1 248.6 247.1 245.5 244.0 242.5 241.0 239.5 238.0 236.5 235.1 233.6 232.2 231.5 231.2 230.8 230.1 229.3 228.3 227.3 226.1 224.9 223.7 222.5 221.4 220.3 219.5 218.9 218.3 218.1 218.1 218.1 218.2 218.4 218.7 219.0 219.4 219.7 220.0 220.3 220.6 220.7 220.8 220.9 220.9 220.9 220.9 220.9 221.0 221.1 221.1 221.2 221.3 221.4 221.4 221.4 221.5 221.5 221.5 221.5 221.5 221.1 220.3 219.4 218.0 216.4 214.7 212.9 211.1 209.6 207.8 206.2 204.7 203.5 202.7 202.1 201.7 202.0 202.4 204.2 206.8 210.2 214.6 219.8 225.7 232.5 240.0 247.3 254.7 263.4 272.2 281.1 289.6 297.8 305.4 312.2 317.7 322.0 324.9 326.5 327.0 327.0 327.2 327.5 327.8 328.2 328.6 329.1 329.4 329.8 330.2 330.5 330.6 330.6 330.9 331.3 332.1 333.3 334.9 336.6 338.6 340.8 343.1 345.2 347.4 349.1 350.6 352.1 353.3 354.0 354.3 354.3 354.3 354.4 354.5 354.5 354.7 354.9 354.9 354.9 355.0 355.1 354.4 352.9 350.7 347.7 344.2 340.8 337.6 334.9 332.9 331.9 331.9 332.0 332.2 332.5 332.8 333.2 333.7 334.1 334.6 335.1 335.5 335.8 336.1 336.4 336.4 336.6 336.4 336.1 335.8 335.1 334.5 333.7 332.9 332.1 331.2 330.2 329.2 328.1 327.2 326.2 325.3 324.5 323.8 323.2 322.6 322.3 322.0 321.9 321.9 322.2 322.7 323.4 324.4 325.7 326.9 328.4 
329.8 331.3 332.3 333.6 334.6 335.4 335.9 336.2 336.2 335.8 335.0 334.0 332.8 331.3 329.9 328.4 326.8 325.2 323.9 322.7 321.9 321.2 321.0 321.0 321.1 321.4 321.7 322.3 323.0 323.8 324.5 325.5 326.2 327.1 327.9 328.7 329.4 330.0 330.5 330.8 331.2 331.3 330.9 330.2 329.6 329.0 328.4 327.8 327.2 326.6 326.0 325.4 324.8 324.2 323.6 323.0 322.4 321.8 321.3 320.7 320.1 319.5 318.9 318.3 317.7 317.1 316.6 316.0 315.4 314.8 314.2 313.7 313.1 312.5 311.9 311.4 310.8 310.2 309.6 309.1 308.5 307.9 307.4 306.8 306.2 305.7 305.1 304.6 304.0 303.4 302.9 302.3 301.8 301.2 300.7 300.1 299.5 299.0 298.4 297.9 297.3 296.8 296.3 295.7 295.2 294.6 294.3 294.3 294.3 294.3 294.2 294.2 294.0 293.9 293.8 293.7 293.5 293.3 293.2 293.0 292.8 292.7 292.6 292.5 292.3 292.3 292.1 292.0 292.0 292.0 292.0 292.0 291.7 291.5 290.8 290.0 289.0 287.6 286.1 284.5 283.0 281.2 279.2 277.2 275.3 273.3 271.4 269.4 267.8 266.2 264.8 263.5 262.7 261.9 261.2 260.9 260.7 260.7 260.7 261.0 261.2 261.5 261.9 262.3 262.6 262.9 263.4 263.7 264.1 264.4 264.5 264.5 264.6 264.4 263.7 262.9 261.9 260.8 259.8 258.8 258.1 257.7 257.7 257.7 258.1 258.6 259.2 260.0 260.7 261.5 261.9 262.4 262.8 262.4 262.5 262.4 262.3 262.2 262.0 261.9 261.6 261.2 261.0 260.8 260.4 260.1 259.9 259.6 259.4 259.2 259.1 259.1 259.1 259.1 259.3 259.5 259.9 260.2 260.7 261.4 261.9 262.3 262.9 263.4 263.8 264.1 264.4 264.5 264.6 264.5 264.4 264.3 264.1 263.8 263.6 263.3 263.0 262.7 262.4 262.2 262.1 261.9 261.9 261.9 261.6 261.1 260.4 259.5 258.4 257.3 256.2 255.1 254.5 253.8 253.4 253.3 253.4 253.7 254.3 255.2 256.0 257.0 257.8 258.6 259.1 259.4 259.7 259.7 259.7 259.7 259.7 259.7 259.8 259.8 259.9 260.0 260.0 260.1 260.1 260.1 260.1 261.6 262.4 263.4 265.0 267.1 269.7 272.6 275.7 278.9 282.2 285.4 288.3 291.0 293.1 294.7 295.7 296.3 296.2 296.1 295.8 295.2 294.7 294.1 293.5 292.9 292.4 292.0 291.7 291.6 291.6 291.6 291.7 291.9 292.1 292.2 292.5 292.6 292.8 293.0 293.1 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.2 293.1 293.0 293.0 293.0 293.0 293.0 293.0 293.0 293.0 293.0 293.0 292.8 292.7 292.6 292.6 292.6 292.6 292.5 292.6 292.9 293.2 293.7 294.4 294.1 294.0 294.0 294.0 294.0 294.0 293.8 293.8 293.8 293.7 293.7 293.7 293.5 293.5 293.5 293.5 293.5 293.5 293.1 292.3 291.0 289.2 287.0 284.5 281.8 278.9 276.1 273.5 271.2 268.8 266.8 265.1 264.0 263.2 263.0 263.0 263.0 263.0 263.1 263.1 263.2 263.3 263.4 263.5 263.6 263.7 263.9 264.0 264.1 264.2 264.2 264.2 264.2 264.2 264.2 264.2 264.0 263.7 263.5 263.2 262.8 262.4 262.0 261.6 261.2 260.9 260.6 260.3 260.2 260.1 260.1 260.1 260.3 260.6 260.9 261.4 261.9 262.4 262.8 263.3 263.9 264.4 264.7 265.0 265.1 265.3 265.0 264.4 263.4 262.1 260.5 258.6 256.6 254.7 252.7 250.8 249.4 247.8 246.6 245.5 245.0 244.8 245.0 245.5 246.6 248.0 250.0 252.2 254.9 257.9 261.1 264.5 268.2 271.8 275.5 278.7 282.2 285.8 289.0 291.9 294.5 296.5 298.2 299.3 299.8 299.9 299.8 299.5 299.0 298.5 297.8 296.9 296.0 295.2 294.5 293.7 292.9 292.2 291.5 291.0 290.7 290.5 290.5 290.1 289.2 287.6 287.2 286.8 286.3 285.3 284.2 282.6 280.9 279.3 277.0 274.4 271.7 268.9 265.9 262.7 259.3 255.8 252.3 248.9 245.5 242.3 238.9 235.4 232.0 228.5 225.1 222.0 218.9 216.0 213.1 210.4 207.9 205.9 203.9 201.9 200.0 198.4 197.1 195.8 194.8 194.0 193.4 193.1 193.0 193.2 193.9 195.4 197.4 199.9 202.9 206.2 209.6 212.9 216.0 218.6 220.6 221.9 222.6 222.5 222.0 221.2 220.0 218.4 216.6 214.6 212.5 210.2 208.0 205.9 204.2 202.6 201.0 199.8 198.9 198.4 198.2 198.5 199.5 201.2 203.3 205.8 208.7 211.3 214.2 217.2 219.8 222.2 223.7 224.5 224.8 224.6 
224.3 223.7 222.9 222.0 221.0 219.9 218.6 217.3 215.9 214.6 213.5 212.2 210.9 209.8 208.7 207.9 207.2 206.7 206.3 206.2 206.3 206.5 207.1 207.9 209.0 210.2 211.5 212.9 214.2 215.3 216.4 217.3 218.1 218.5 217.5 217.3 217.3 217.4 217.5 217.5 217.6 217.6 217.7 217.7 217.8 217.9 217.9 218.0 218.0 218.1 218.2 218.2 218.3 218.3 218.4 218.5 218.5 218.6 218.6 218.7 218.7 218.8 218.9 218.9 219.0 219.0 219.1 219.2 219.2 219.3 219.3 219.4 219.5 219.5 219.6 219.6 219.7 219.7 219.8 219.9 219.9 220.0 220.0 220.1 220.2 220.2 220.3 220.3 220.4 220.5 220.5 220.6 220.6 220.7 221.2 221.9 222.9 224.1 225.5 227.4 229.3 231.6 234.1 236.7 239.0 242.0 245.2 248.4 251.8 255.2 258.7 262.2 265.7 269.1 272.6 275.7 278.4 281.2 284.0 286.4 288.8 290.6 292.2 293.5 294.5 295.1 295.4 295.2 295.2 295.1 295.0 294.9 294.7 294.6 294.4 294.3 294.1 293.8 293.7 293.7 293.6 293.5 293.5 293.5 293.5 293.9 294.3 294.8 295.3 295.9 296.4 296.8 297.0 297.2 297.1 296.7 296.2 295.6 294.9 294.0 293.0 292.1 291.4 290.7 290.2 289.8 289.8 289.0 289.9 291.4 293.7 296.6 300.0 303.8 308.0 312.3 316.6 320.7 324.3 327.3 330.1 332.5 333.8 334.4 334.2 334.2 333.9 333.6 333.3 332.8 332.2 331.7 331.1 330.6 330.0 329.4 328.9 328.5 328.0 327.7 327.5 327.4 327.4 327.4 327.4 327.4 327.4 327.5 327.6 327.7 327.9 328.1 328.1 328.3 328.5 328.5 328.7 328.7 328.7 328.7 328.7 327.6 325.8 323.4 320.3 316.9 313.1 309.4 306.0 303.3 300.4 297.9 296.3 295.3 295.2 295.5 296.7 298.3 300.5 303.3 306.3 309.7 313.0 316.5 319.7 322.7 325.2 327.2 328.2 329.1 329.4 330.3 332.5 335.5 339.3 343.4 347.4 351.3 354.5 356.8 357.8 358.1 357.9 357.4 356.8 355.9 354.9 353.9 352.7 351.5 350.2 349.0 348.2 347.2 346.4 345.7 345.2 345.0 345.0 345.6 346.8 348.8 350.9 353.3 355.5 357.0 358.4 359.3 359.5 359.4 359.0 358.4 357.8 356.8 355.7 354.5 353.4 352.1 350.8 349.6 348.4 347.4 346.4 345.7 345.2 344.8 344.6 344.6 344.8 345.2 346.0 346.8 347.6 348.4 349.1 349.6 349.7 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.8 349.7 349.7 349.7 349.7 349.7 349.7 349.7 349.7 349.7 349.7 349.7 349.7 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.5 349.5 349.5 349.5 349.5 349.5 349.5 349.5 349.5 349.5 349.5 349.4 349.4 349.4 349.5 350.2 351.4 352.8 354.5 356.8 359.5 362.6 365.8 369.1 372.7 417.5 417.1 416.7 415.7 414.4 412.9 411.2 409.4 407.5 405.3 403.2 401.2 399.4 397.5 395.7 394.2 392.8 391.6 390.8 390.4 390.2 390.9 390.6 390.4 390.2 390.2 390.2 390.2 390.2 390.4 390.5 390.6 391.0 391.2 391.5 391.9 392.3 392.7 392.8 393.2 393.6 393.7 394.0 394.2 394.3 394.3 394.4 394.3 394.3 394.3 394.1 394.0 393.8 393.6 393.4 393.1 392.9 392.6 392.4 392.1 391.9 391.5 391.5 391.1 390.9 390.8 390.6 390.4 390.2 390.2 390.2 390.2 390.6 390.4 390.4 390.4 390.4 390.4 390.3 390.2 390.2 390.0 389.9 389.7 389.6 389.5 389.5 389.3 389.3 389.1 389.1 389.1 389.1 389.1 389.1 389.3 390.7 393.4 397.0 401.8 407.2 413.2 419.4 425.4 430.9 435.2 438.7 441.4 442.7 442.8 442.6 442.5 442.0 441.4 440.9 440.3 439.8 439.7 439.5 439.5 439.5 440.1 440.7 441.5 442.1 442.5 442.8 441.8 439.4 435.7 431.1 426.0 419.3 412.4 405.4 398.6 392.4 387.0 382.7 379.5 377.6 372.4 368.2 363.7 357.0 349.4 341.3 333.2 325.3 318.0 311.5 306.3 302.4 300.1 299.3 299.6 300.5 302.5 305.1 308.3 312.0 315.9 319.9 323.8 327.2 330.2 332.5 333.8 334.4 334.2 333.7 332.7 331.4 329.8 328.1 326.3 324.5 322.8 321.3 320.1 319.3 318.8 318.8 319.0 319.6 320.4 321.4 322.6 324.0 325.5 327.1 328.5 329.8 330.7 331.3 331.7 331.8 331.7 331.2 330.7 330.0 329.1 328.3 327.4 326.4 325.7 
324.9 324.3 323.9 323.6 323.6 324.0 324.8 326.2 327.9 329.8 331.5 333.0 334.0 334.4 334.2 334.2 333.8 333.5 333.1 332.6 332.2 331.5 331.1 330.6 330.2 329.9 329.6 329.6 329.4 328.9 328.4 328.0 327.5 327.0 326.5 326.0 325.5 325.0 324.6 324.1 323.6 323.1 322.6 322.1 321.7 321.2 320.7 320.2 319.7 319.3 318.8 318.3 317.8 317.4 316.9 316.4 315.9 315.5 315.0 314.5 314.1 313.6 313.1 312.7 312.2 311.7 311.3 310.8 310.3 309.9 309.4 309.0 308.5 308.0 307.6 307.1 306.7 306.2 305.7 305.3 304.8 304.4 303.9 303.5 303.0 302.6 302.1 301.7 301.2 300.8 300.3 299.9 299.4 299.0 298.8 298.7 298.5 298.2 297.9 297.4 297.0 296.5 296.0 295.5 294.9 294.3 293.9 293.5 292.9 292.5 292.1 291.9 291.6 291.5 291.5 291.5 291.5 291.8 292.3 292.8 293.3 293.9 294.5 294.9 295.1 295.4 295.5 295.4 295.1 294.0 293.5 292.5 290.8 288.7 286.2 283.5 280.7 277.9 274.8 271.6 268.6 265.7 263.1 260.9 259.1 257.7 256.7 256.2 256.2 256.3 256.5 256.9 257.4 257.9 258.4 258.9 259.4 259.9 260.2 260.3 260.5 260.5 260.3 260.0 259.7 259.2 258.8 258.3 257.9 257.7 257.6 257.6 257.6 257.9 258.2 258.6 259.2 259.8 260.3 260.8 261.2 261.5 261.7 261.7 261.6 261.4 261.1 261.0 260.8 260.6 260.4 260.2 260.1 260.0 259.8 259.8 259.8 259.8 260.0 260.0 260.4 260.7 261.1 261.6 262.1 262.5 263.0 263.5 263.8 264.2 264.4 264.5 264.6 264.5 264.1 263.4 262.5 261.3 259.9 258.3 256.7 255.1 253.4 251.7 250.4 249.1 247.8 246.8 245.9 245.5 245.2 245.4 246.3 247.9 250.4 253.5 257.0 260.5 265.0 269.8 274.5 279.1 283.5 287.2 290.5 293.0 294.5 295.1 295.0 294.9 294.6 294.2 293.7 293.3 292.8 292.3 292.1 292.0 292.0 292.0 292.1 292.4 292.6 292.9 293.3 293.7 294.1 294.5 294.9 295.3 295.4 295.5 295.6 295.7 295.7 295.7 295.7 295.7 295.7 295.7 295.7 295.7 295.7 295.8 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.9 295.8 295.7 295.4 295.1 294.8 294.5 294.3 294.2 294.2 294.2 294.2 294.6 295.5 297.1 298.9 301.2 303.8 306.6 309.3 312.4 315.5 318.7 321.5 324.2 326.4 328.2 329.6 330.3 330.5 330.4 330.2 330.0 329.5 329.0 328.5 328.0 327.4 327.0 326.5 326.0 325.8 325.7 325.7 325.9 326.8 328.3 330.3 332.8 335.6 338.5 341.4 344.2 346.7 348.8 350.2 351.0 351.5 351.3 351.3 351.0 350.9 350.6 350.4 350.1 349.8 349.6 349.3 349.2 349.0 349.0 350.4 349.4 348.0 346.2 344.6 342.3 340.0 337.5 334.9 332.6 330.4 328.3 326.5 325.1 324.0 323.3 323.2 323.3 323.6 324.2 324.9 326.0 327.1 328.4 329.6 330.9 332.1 333.3 334.1 334.6 335.1 335.5 335.4 335.3 334.9 334.6 334.1 333.5 332.9 332.2 331.6 331.0 330.6 330.2 329.9 329.8 329.8 329.8 329.8 330.0 330.2 330.4 330.6 331.0 331.1 331.4 331.5 331.9 332.0 332.1 332.1 332.3 331.9 330.8 329.2 326.9 324.1 320.8 317.1 313.0 308.8 304.4 300.1 296.3 291.9 287.6 283.7 280.0 276.7 273.7 271.2 269.1 267.7 266.7 266.4 265.8 264.9 264.0 263.1 262.2 261.3 260.4 259.5 258.6 257.9 257.9 257.9 257.9 257.9 257.9 258.0 258.0 258.0 258.2 258.2 258.2 258.3 258.4 258.5 258.6 258.6 258.6 258.8 258.8 258.8 258.9 258.9 258.9 258.9 258.9 259.1 259.1 259.3 259.6 259.9 260.4 261.1 261.6 262.4 262.9 263.5 264.1 264.4 264.7 264.8 264.9 264.4 263.0 261.1 258.6 255.8 252.7 249.7 246.7 244.1 241.7 240.2 239.1 238.8 238.5 238.7 239.1 239.6 239.8 240.2 240.9 241.9 243.1 244.4 246.0 247.8 249.8 252.1 254.4 256.8 259.4 261.6 264.3 267.1 270.0 272.8 275.6 278.4 281.0 283.5 285.9 288.2 290.1 291.9 293.4 294.6 295.7 296.6 297.0 297.2 297.1 296.9 296.5 295.9 295.4 294.5 293.7 292.8 292.0 291.1 290.3 289.5 288.9 288.4 287.9 287.6 287.6 287.6 287.9 288.3 288.9 289.6 290.5 291.4 292.5 293.5 294.3 295.5 296.5 297.5 298.4 299.1 299.7 300.1 300.3 300.5 300.2 299.5 298.4 297.2 295.8 294.2 292.6 
291.4 290.4 289.5 288.9 288.8 288.8 289.0 289.3 289.7 290.2 290.8 291.4 292.1 292.8 293.5 294.2 294.9 295.5 296.0 296.6 297.0 297.2 297.4 289.8 289.8 289.7 289.5 289.3 289.1 288.9 288.7 288.5 288.3 288.1 287.9 287.7 287.5 287.3 287.1 286.9 286.7 286.5 286.3 286.1 285.9 285.7 285.5 285.3 285.1 284.9 284.7 284.5 284.3 284.2 284.0 283.8 283.6 283.4 283.2 283.0 282.8 282.6 282.4 282.2 282.0 281.8 281.6 281.4 281.2 281.0 280.9 280.7 280.5 280.3 280.1 279.9 279.7 279.5 279.4 280.5 282.2 284.8 287.7 291.1 295.7 300.7 306.1 311.8 317.5 323.4 329.1 334.3 339.1 343.1 346.3 348.4 350.0 350.6 350.4 350.2 349.8 349.5 348.9 348.2 347.8 347.1 346.6 346.3 346.0 345.8 345.8 345.8 346.3 346.9 347.6 348.6 349.6 350.4 351.5 352.2 352.7 353.4 353.5 353.6 353.5 353.3 353.1 352.8 352.3 351.9 351.6 351.3 350.9 350.8 350.6 350.6 350.6 350.8 351.0 351.3 351.9 352.2 352.7 353.1 353.3 353.5 353.7 353.4 352.9 351.9 350.7 349.6 348.0 346.3 344.6 342.8 341.3 339.9 338.7 338.0 337.3 337.3 338.2 340.8 344.8 350.4 357.0 364.3 371.9 379.0 384.8 390.7 395.2 397.8 398.5 398.4 398.2 397.9 397.5 397.1 396.5 395.9 395.2 394.5 393.9 393.2 392.4 391.7 391.0 390.4 390.0 389.7 389.3 389.1 389.1 389.1 389.4 390.0 390.7 391.8 392.9 394.0 395.2 396.1 396.9 397.2 397.4 396.6 394.3 391.2 387.3 382.5 377.4 372.1 368.1 363.2 358.8 355.3 352.5 350.9 350.2 349.5 347.4 343.8 339.3 334.2 328.9 324.2 320.2 317.5 316.1 316.0 317.0 318.8 321.7 325.6 330.2 335.8 342.2 349.2 356.8 365.1 373.6 382.2 390.6 397.9 406.2 414.1 421.2 427.4 432.9 437.0 439.9 441.5 441.6 441.5 441.5 441.3 441.3 440.8 440.7 440.3 439.8 439.7 439.2 438.8 438.4 438.0 437.6 437.2 437.0 436.7 436.5 436.3 436.2 436.2 436.2 436.2 436.3 436.7 437.1 437.6 438.2 439.1 439.7 440.5 441.4 442.2 442.9 443.5 444.3 444.8 445.5 445.9 446.1 446.1 446.3 445.6 444.2 442.5 440.8 439.2 438.5 438.5 440.1 442.7 446.4 450.9 456.0 462.9 470.2 478.2 486.5 494.9 503.4 511.2 518.5 525.1 530.2 534.1 536.5 538.1 538.0 537.5 536.7 535.5 533.9 532.0 529.9 527.7 525.6 523.3 521.4 519.6 517.8 516.4 515.2 514.5 514.3 514.3 514.8 515.9 517.2 519.2 521.1 522.5 524.6 526.3 527.5 528.2 528.7 525.6 523.9 520.5 516.0 510.1 503.7 497.2 490.8 484.6 479.0 474.9 471.9 470.4 470.1 470.8 473.0 476.6 481.1 486.2 492.0 498.2 504.0 509.4 514.3 518.0 520.5 521.8 521.9 520.7 518.8 516.0 512.9 508.8 504.0 498.8 493.3 487.4 481.6 475.8 470.3 465.2 460.5 456.4 453.3 450.6 448.4 446.9 446.5 446.4 446.0 445.6 445.2 444.7 444.1 443.7 443.0 442.5 442.1 441.8 441.5 441.3 441.3 441.3 441.3 441.3 441.4 441.5 441.8 442.1 442.3 442.6 442.8 443.2 443.4 443.8 444.0 444.2 444.3 444.3 444.4 444.3 444.3 444.1 443.7 443.2 442.5 442.0 441.4 440.8 440.1 439.7 439.2 439.0 439.0 438.8 437.6 435.7 432.4 428.5 423.5 418.0 411.9 405.4 398.4 391.5 384.8 378.9 373.0 367.1 361.8 357.0 352.9 349.6 347.0 345.5 344.6 344.6 344.6 344.9 345.2 345.7 346.2 346.7 347.3 347.8 348.4 348.9 349.4 349.8 350.0 350.0 350.1 350.0 349.9 349.8 349.6 349.3 349.2 349.0 349.0 349.0 349.0 349.2 349.5 349.8 350.1 350.4 350.8 351.1 351.3 351.5 351.6 351.0 349.2 346.1 341.9 337.1 332.0 326.9 322.1 317.9 314.6 312.6 311.8 311.7 312.5 314.0 316.2 319.1 322.7 326.8 331.3 336.5 342.0 347.8 352.7 358.6 364.8 370.9 376.5 381.9 386.8 391.2 394.7 397.7 399.7 400.8 401.1 400.9 400.4 399.6 398.8 397.7 396.6 395.5 394.2 392.9 391.6 390.4 389.5 388.8 388.1 387.7 387.7 387.9 388.6 390.0 391.5 393.3 395.2 396.7 398.2 399.1 399.4 399.3 399.2 398.8 398.4 397.6 396.9 396.1 395.0 394.0 392.9 392.0 391.3 390.3 389.6 388.8 388.3 387.9 387.7 387.7 387.9 388.5 389.5 390.9 392.3 393.6 394.7 395.6 
395.9 395.9 395.7 395.6 395.0 394.4 393.9 393.1 392.6 392.0 391.5 390.9 390.4 390.3 390.2 389.2 387.5 385.8 384.1 382.5 380.8 379.1 377.5 375.8 374.2 372.6 371.0 369.4 367.8 366.2 364.6 363.0 361.4 359.8 358.3 356.7 355.2 353.6 352.1 350.6 349.0 347.5 346.0 344.5 343.0 341.5 340.0 338.6 337.1 335.6 334.2 332.7 331.3 329.8 328.4 327.0 325.5 324.1 322.7 321.3 319.9 318.5 317.1 315.8 314.4 313.0 311.7 310.3 309.0 307.6 306.3 305.0 303.6 302.3 301.0 299.7 298.4 297.1 295.8 294.5 293.2 292.9 292.8 297.0 296.9 296.8 296.6 296.4 296.0 295.7 295.4 294.9 294.6 294.3 294.1 293.8 293.5 293.5 293.5 293.7 294.5 295.9 297.7 299.8 302.5 305.7 309.2 312.7 316.4 320.1 323.4 326.5 329.2 331.3 333.1 333.8 334.2 334.1 333.9 333.7 333.6 333.4 333.1 332.9 332.6 332.4 332.3 332.3 332.2 331.3 329.1 325.4 321.1 316.2 310.9 305.4 300.0 295.1 290.9 287.4 284.8 283.2 282.6 282.5 282.6 282.8 283.3 283.7 284.2 284.9 285.6 286.5 287.3 288.0 288.9 289.9 290.8 291.7 292.6 293.5 294.2 294.9 295.7 296.3 296.7 297.1 297.4 297.6 297.8 297.6 297.2 296.9 296.4 295.7 295.1 294.5 293.8 293.4 293.1 293.0 293.0 293.0 293.0 293.3 293.5 293.7 293.9 294.1 294.4 294.7 294.8 294.9 295.0 295.1 295.0 294.6 294.0 293.0 291.7 290.1 288.3 286.3 284.2 282.3 280.0 277.7 275.2 272.7 270.4 268.2 266.1 264.1 262.4 260.9 259.5 258.7 257.9 257.4 257.1 257.2 257.4 257.8 258.5 259.5 260.5 261.6 262.7 263.8 264.8 265.8 266.7 267.5 268.1 268.5 268.7 268.5 268.3 268.0 267.5 267.0 266.5 265.9 265.1 264.4 263.7 263.1 262.6 262.1 261.7 261.3 261.0 261.0 260.9 260.8 260.5 260.1 259.6 258.9 258.2 257.3 256.3 255.4 254.1 252.9 251.5 250.2 249.1 247.5 246.0 244.4 242.7 241.1 239.5 237.9 236.3 234.7 233.1 231.6 230.3 228.9 227.4 226.0 224.6 223.5 222.3 221.2 220.2 219.3 218.5 217.7 217.2 216.7 216.2 215.8 215.6 215.5 215.5 215.5 215.6 215.8 216.2 216.5 216.9 217.3 217.7 218.2 218.6 219.1 219.5 219.8 220.1 220.4 220.5 220.6 220.6 220.5 220.5 220.4 220.3 220.1 220.1 220.1 220.0 219.9 219.8 219.7 219.7 219.7 219.7 219.7 219.9 220.0 220.3 220.4 220.5 220.7 221.0 221.3 221.5 221.7 221.8 222.0 222.1 222.2 222.2 222.2 222.0 221.2 220.1 218.7 217.0 215.0 213.0 211.0 209.0 207.3 205.7 203.8 205.4 209.1 214.9 223.8 235.2 249.6 265.9 284.1 303.4 323.3 343.0 360.6 375.5 385.9 392.9 396.1 395.9 395.9 395.7 395.6 395.4 395.2 395.0 394.6 394.4 394.1 393.8 393.4 393.1 392.8 392.5 392.1 391.8 391.6 391.2 391.0 390.9 390.8 390.7 390.6 390.6 390.2 390.2 390.2 390.2 390.2 390.5 390.8 391.0 391.4 391.8 392.1 392.5 393.0 393.5 393.9 394.3 394.4 394.6 394.8 395.0 395.0 394.7 393.9 392.7 390.6 388.2 385.5 382.6 379.7 376.9 374.5 372.4 370.9 370.0 372.5 372.8 373.4 374.3 375.4 376.9 378.5 380.1 381.7 383.7 385.8 387.7 389.5 391.2 392.9 394.0 395.0 395.7 396.2 396.5 396.4 396.2 396.0 395.7 395.3 394.7 394.3 394.0 393.6 393.3 393.1 393.1 393.5 394.5 396.4 399.1 402.9 407.1 411.4 415.9 421.1 426.2 430.9 435.2 438.9 441.7 443.6 444.4 444.6 444.6 444.6 444.6 444.8 444.9 444.9 443.9 444.0 443.3 441.8 439.5 436.3 432.4 428.1 423.9 418.5 412.6 406.3 399.8 393.4 386.8 380.3 374.1 368.1 362.3 357.0 352.0 348.2 344.6 341.1 338.1 336.0 334.4 333.6 333.3 333.4 334.0 334.9 336.2 337.9 339.7 341.8 344.0 346.2 348.6 350.8 352.5 354.4 356.1 357.4 358.4 359.0 359.3 358.9 358.1 356.9 355.3 353.5 351.3 348.9 346.4 344.0 341.7 339.6 338.0 336.6 335.4 334.7 334.4 334.7 335.6 336.9 339.0 341.4 344.3 347.2 350.3 353.4 356.1 358.6 360.4 361.5 362.2 362.2 361.7 360.9 359.7 358.1 356.3 354.5 352.6 350.4 348.1 345.7 343.6 341.6 339.8 338.3 337.2 336.3 335.8 335.6 335.7 336.4 337.4 338.7 340.2 342.0 343.7 345.5 
347.4 349.1 350.6 351.8 352.7 353.3 353.6 353.5 352.5 351.5 350.5 349.5 348.5 347.5 346.5 345.5 344.5 343.5 342.5 341.6 340.6 339.6 338.6 337.7 336.7 335.8 334.8 333.8 332.9 331.9 331.0 330.0 329.1 328.2 327.2 326.3 325.3 324.4 323.5 322.6 321.6 320.7 319.8 318.9 318.0 317.1 316.2 315.3 314.4 313.5 312.6 311.7 310.8 309.9 309.0 308.1 307.3 306.4 305.5 304.6 303.8 302.9 302.0 301.2 300.3 299.4 298.6 297.7 296.9 296.0 295.2 295.1 295.0 295.0 295.0 295.0 295.0 294.9 294.9 294.9 294.8 294.7 294.7 294.6 294.5 294.5 294.3 294.3 294.2 294.2 294.1 294.0 294.0 294.0 293.9 293.8 293.8 293.8 293.8 293.8 293.8 293.8 293.8 293.9 294.0 294.1 294.3 294.4 294.6 294.9 295.1 295.3 295.5 295.8 296.1 296.4 296.7 296.8 297.0 297.3 297.4 297.6 297.7 297.8 297.8 297.9 297.5 296.4 294.6 292.1 289.3 286.0 282.6 279.3 276.3 273.8 271.9 270.4 269.7 269.7 270.4 271.8 274.4 277.6 281.5 285.9 290.8 296.0 300.7 305.9 311.2 316.2 320.8 324.5 327.4 329.4 330.6 330.9 330.8 330.8 330.8 330.6 330.5 330.3 330.2 330.0 330.0 329.9 329.8 329.6 329.4 329.3 329.2 329.2 329.2 329.2 329.2 329.2 329.4 329.6 329.8 330.0 330.3 330.6 331.0 331.3 331.6 331.7 331.9 332.0 332.1 332.3 332.1 332.1 332.1 332.1 332.1 331.9 331.9 331.8 331.7 331.5 331.5 331.4 331.2 331.1 330.9 330.8 330.8 330.6 330.5 330.4 330.4 330.2 330.2 330.2 330.2 330.2 330.2 330.2 330.7 331.4 332.3 333.3 334.2 335.7 337.3 338.9 340.5 342.2 344.0 345.7 347.2 348.7 350.1 351.2 352.1 352.8 353.3 353.6 353.6 353.4 353.1 352.6 351.9 351.1 350.3 349.4 348.4 347.6 346.7 346.0 345.4 345.0 344.7 344.6 344.6 344.8 345.2 345.8 346.5 347.3 348.1 349.0 349.8 350.4 351.1 351.5 351.7 352.1 351.9 351.9 351.8 351.6 351.5 351.3 351.0 350.7 350.5 350.2 349.8 349.5 349.2 348.9 348.7 348.4 348.1 347.9 347.7 347.6 347.4 347.2 347.2 347.2 347.2 347.2 347.3 347.5 347.8 348.0 348.3 348.6 349.0 349.3 349.6 349.9 350.1 350.3 350.4 350.4 350.6 350.5 350.4 350.3 350.2 350.0 349.8 349.7 349.5 349.3 349.2 349.2 349.0 348.7 348.2 347.4 346.3 345.0 343.6 341.9 340.1 338.1 336.3 334.4 332.5 330.7 329.2 327.6 326.2 325.0 324.0 323.2 322.8 322.5 322.6 322.9 323.6 324.5 325.7 327.4 329.3 331.5 334.0 336.8 339.8 343.0 346.4 349.9 353.5 357.3 360.6 364.1 367.8 371.5 375.1 378.4 381.6 384.6 387.3 389.7 391.8 393.6 394.8 395.7 396.3 396.5 396.3 396.1 395.8 395.4 395.1 394.7 394.0 393.4 392.8 392.2 391.7 391.1 390.5 390.1 389.7 389.4 389.3 389.3 389.3 389.4 389.7 390.0 390.4 391.0 391.5 392.1 392.8 393.5 394.1 394.7 395.3 395.9 396.4 396.8 397.0 397.2 397.4 397.2 396.8 396.1 395.2 393.8 392.4 390.6 388.8 386.9 384.8 382.9 380.9 379.0 377.2 375.4 373.9 372.6 371.6 370.9 370.4 370.2 371.1 373.2 376.6 381.6 387.9 395.5 403.6 412.1 420.5 428.5 436.0 441.9 446.3 449.0 450.3 450.1 449.9 449.6 449.3 448.9 448.3 447.7 447.0 446.3 445.5 444.6 444.0 443.1 442.3 441.4 440.6 440.0 439.4 438.8 438.3 437.9 437.7 437.5 437.5 437.2 436.6 435.6 433.9 431.9 429.3 426.5 423.5 419.9 416.1 411.8 407.2 402.5 397.7 392.9 387.8 383.0 378.1 373.5 369.4 364.9 360.6 356.4 352.4 348.8 345.6 342.6 339.9 337.6 335.7 334.2 333.1 332.4 332.0 331.6 330.5 328.3 325.3 322.2 319.3 316.9 315.4 315.2 315.5 316.4 317.8 319.6 321.7 323.9 326.4 328.3 330.3 332.2 333.7 334.6 335.1 335.1 334.8 334.4 333.7 333.1 332.3 331.5 330.5 329.4 328.3 327.4 326.3 325.3 324.5 323.8 323.2 322.8 322.5 322.5 322.6 323.1 324.0 325.2 326.8 328.5 330.4 332.3 334.2 335.9 337.5 338.7 339.5 340.0 340.2 339.7 338.7 337.2 335.3 333.2 331.1 328.9 327.1 325.8 324.8 324.3 324.3 324.4 324.6 324.9 325.4 325.8 326.3 326.8 327.5 328.1 328.7 329.2 329.6 330.1 330.6 330.9 331.1 331.2 
330.8 329.5 328.2 326.8 325.5 324.2 322.9 321.6 320.3 319.0 317.7 316.4 315.2 313.9 312.6 311.4 310.1 308.9 307.6 306.4 305.2 303.9 302.7 301.5 300.3 299.1 297.9 296.7 295.5 294.3 293.1 291.9 290.7 289.6 288.4 287.2 286.1 284.9 283.8 282.6 281.5 280.4 279.2 278.1 277.0 275.9 274.8 273.7 272.5 271.5 270.4 269.3 268.2 267.1 266.0 265.0 263.9 262.8 261.8 260.7 259.7 258.6 257.6 256.5 256.8 257.7 259.2 261.2 263.5 265.9 268.9 272.3 275.9 279.5 282.8 286.1 289.1 291.6 293.6 295.0 295.9 296.0 295.9 295.7 295.5 295.1 294.7 294.3 293.8 293.3 292.8 292.4 292.0 291.8 291.5 291.5 291.5 291.6 292.1 292.9 293.9 295.1 296.0 297.0 297.8 298.3 298.6 298.5 298.1 297.6 296.9 296.1 295.3 294.3 293.1 291.8 290.5 289.3 288.0 286.8 285.6 284.6 283.8 283.0 282.5 282.1 281.9 281.9 282.0 282.8 283.9 285.3 287.1 289.3 291.6 294.3 297.2 300.1 302.9 306.1 309.4 312.7 315.9 319.0 321.9 324.5 326.9 328.9 330.4 331.5 332.3 332.6 332.6 332.3 331.9 331.4 331.0 330.2 329.5 328.8 328.0 327.3 326.9 326.3 325.8 325.7 325.7 325.7 325.8 326.0 326.3 326.8 327.3 327.9 328.5 329.1 329.7 330.2 330.7 331.2 331.5 331.7 331.7 331.8 331.4 331.0 330.0 329.1 328.0 327.0 326.2 325.5 325.3 325.0 324.2 322.8 320.5 317.7 314.4 310.9 307.6 303.6 299.5 295.7 292.1 288.9 286.3 284.2 282.8 282.1 282.0 282.4 283.5 285.1 287.3 290.0 293.2 296.6 300.4 304.4 308.4 312.3 316.0 319.5 322.4 324.6 326.4 327.7 328.3 328.4 328.3 328.3 328.3 328.1 328.1 328.0 327.9 327.9 327.7 327.7 327.7 327.7 328.0 329.1 331.1 334.5 338.3 342.2 345.7 348.2 349.2 349.4 349.4 349.5 349.7 350.0 350.2 350.3 350.5 350.7 350.8 350.8 351.0 350.6 349.6 348.1 346.2 344.0 341.6 339.2 336.5 333.6 330.6 328.0 325.5 323.3 321.5 320.2 319.3 318.8 319.0 319.6 320.8 323.0 325.7 328.8 332.2 336.0 339.9 343.5 346.9 349.8 352.2 353.9 354.7 355.1 354.9 354.8 354.4 354.1 353.6 353.0 352.5 351.8 351.1 350.4 349.8 349.2 348.6 348.0 347.6 347.2 346.8 346.6 346.6 346.6 346.6 346.8 347.0 347.1 347.4 347.8 348.3 348.6 349.1 349.6 350.0 350.3 350.8 351.2 351.4 351.6 351.7 351.9 352.1 351.9 351.6 351.2 350.7 350.2 349.4 348.7 347.9 347.1 346.3 345.8 345.2 344.8 344.6 344.6 344.6 344.8 345.0 345.4 345.9 346.6 347.2 347.8 348.5 349.2 349.8 350.5 351.2 351.8 352.2 352.5 352.7 352.8 352.8 352.5 352.2 351.9 351.2 350.5 349.8 349.2 348.5 347.8 347.1 346.7 346.4 346.2 346.2 346.2 346.2 346.4 346.7 346.9 347.2 347.6 348.0 348.5 349.0 349.5 349.9 350.4 351.0 351.7 352.2 352.7 353.2 353.7 354.1 354.5 355.0 355.1 355.4 355.6 355.7 355.8 355.7 355.1 353.8 351.6 348.9 345.8 342.3 338.7 335.1 331.7 328.8 326.3 324.2 322.8 322.5 322.7 323.3 324.5 326.2 328.6 331.5 334.8 338.6 342.7 347.2 351.8 356.6 361.1 365.4 370.2 374.7 378.9 382.7 386.1 389.1 391.3 392.8 393.6 394.0 393.8 393.8 393.7 393.6 393.5 393.3 392.9 392.7 392.6 392.4 392.2 392.0 391.8 391.6 391.5 391.5 391.5 391.5 391.7 392.1 392.6 393.3 394.0 394.9 395.8 396.6 397.1 397.5 397.8 398.2 396.2 395.4 394.1 392.7 390.6 388.1 385.5 382.8 380.1 377.6 375.4 373.6 372.2 371.4 371.1 371.2 371.7 372.6 374.0 375.6 377.6 379.5 381.6 384.0 386.6 389.0 391.3 393.4 395.2 396.8 397.9 398.7 398.5 398.3 398.0 397.5 396.9 396.3 395.6 394.9 394.1 393.4 392.8 392.2 391.7 391.3 391.1 390.7 391.5 393.0 395.0 397.5 400.3 403.7 407.7 411.9 416.3 420.9 425.6 430.1 434.4 438.5 442.3 445.3 447.5 449.3 450.7 451.3 451.2 450.7 449.9 449.0 447.5 445.6 443.7 441.6 439.3 437.2 435.1 433.2 431.3 429.7 428.5 427.7 427.0 426.7 427.1 428.0 429.5 431.6 433.8 436.4 439.1 441.7 443.9 445.6 446.5 446.9 446.7 446.0 445.2 444.3 443.5 442.5 441.7 440.8 440.4 440.3 438.0 435.2 432.3 429.6 426.8 
424.0 421.3 418.6 415.9 413.2 410.5 407.9 405.2 402.6 400.0 397.4 394.9 392.3 389.8 387.3 384.8 382.3 379.8 377.4 374.9 372.5 370.1 367.7 365.4 363.0 360.7 358.3 356.0 353.7 351.4 349.2 346.9 344.7 342.4 340.2 338.0 335.9 333.7 331.5 329.4 327.3 325.2 323.1 321.0 318.9 316.8 314.8 312.8 310.7 308.7 306.7 304.8 304.5 304.8 305.5 306.5 307.7 309.2 310.8 312.7 314.8 317.1 319.6 322.3 325.0 327.7 330.4 333.3 335.8 338.3 340.5 342.5 344.6 346.3 347.7 348.9 349.8 350.3 350.6 350.5 350.3 350.1 349.9 349.6 349.3 348.8 348.3 347.8 347.4 347.0 346.5 346.1 345.8 345.5 345.2 345.0 345.0 345.0 345.1 345.2 345.5 346.0 346.4 346.9 347.4 348.1 348.8 349.3 349.9 350.5 351.0 351.4 351.7 351.9 352.0 352.0 351.8 351.6 351.0 350.5 350.0 349.4 348.7 347.9 347.2 346.4 345.8 345.2 344.5 344.0 343.7 343.4 343.2 343.2 343.9 345.6 348.4 352.2 356.8 362.0 367.8 373.4 378.9 383.9 388.2 391.5 393.3 394.6 394.6 394.4 394.2 394.0 393.6 393.3 392.8 392.3 391.9 391.3 390.9 390.4 390.0 389.7 389.5 389.3 389.3 389.3 389.4 390.0 390.9 391.8 392.9 394.0 395.2 396.3 397.5 398.4 399.0 399.4 399.4 398.8 397.3 395.0 392.0 388.5 384.6 380.2 375.6 371.1 366.8 362.9 359.7 356.2 353.5 351.5 350.0 349.2 349.0 348.6 348.1 347.2 346.4 345.5 344.7 343.9 343.4 343.3 343.5 344.3 345.7 347.7 350.2 353.5 357.3 361.4 365.7 370.0 375.4 381.0 386.7 392.6 398.6 404.4 410.1 415.5 420.6 425.3 429.3 432.3 435.1 437.3 438.9 439.8 440.0 440.2 440.6 441.0 441.4 441.9 442.2 444.5 444.3 444.3 444.2 443.8 443.5 443.2 442.8 442.4 442.0 441.5 441.2 440.9 440.6 440.3 440.3 440.3 440.3 440.3 440.5 440.7 441.0 441.5 442.1 442.5 442.9 443.3 443.6 443.2 442.8 442.1 441.1 440.0 438.7 437.6 436.8 436.3 436.0 436.1 436.7 438.0 440.3 443.3 446.9 451.2 456.0 461.3 467.2 473.2 479.5 485.7 491.3 497.7 503.9 509.8 515.5 520.5 524.8 528.3 531.1 532.9 533.9 534.0 533.5 532.7 531.4 529.7 528.0 526.1 524.2 522.1 520.1 518.1 516.4 515.3 514.3 514.0 514.2 514.9 516.6 519.1 522.2 525.7 529.0 532.0 534.6 536.1 536.9 536.6 535.8 534.5 532.6 530.2 527.2 524.2 520.7 516.6 512.2 507.3 502.2 497.0 491.7 486.2 480.7 475.4 470.1 465.4 460.9 456.0 451.5 447.3 443.3 439.7 436.4 433.7 431.4 429.5 428.0 426.9 426.2 426.3 426.7 428.2 430.3 432.8 435.4 438.0 440.3 442.3 443.4 444.0 443.9 443.8 443.6 443.2 443.0 442.5 441.9 441.4 441.0 440.5 439.9 439.5 438.9 438.4 438.1 437.9 437.5 437.5 437.5 437.0 437.2 437.4 437.8 438.4 438.9 439.3 439.7 440.2 440.6 440.9 441.0 441.1 441.1 441.0 441.0 441.0 440.8 440.8 440.6 440.4 440.3 440.3 440.0 440.0 440.0 440.0 439.9 439.5 438.2 436.5 434.4 432.0 428.7 424.8 420.6 416.1 411.3 406.2 400.9 395.7 390.5 385.1 380.2 376.0 371.2 366.7 362.4 358.4 354.9 351.7 349.0 346.8 345.1 343.8 343.0 342.8 342.8 343.1 343.6 344.2 345.0 346.0 347.0 347.9 348.8 349.8 350.6 351.1 351.6 351.9 352.1 352.1 352.1 352.1 352.1 352.1 352.1 352.1 352.1 352.1 352.1 352.1 351.2 349.2 346.1 342.4 338.6 335.3 331.9 329.2 327.7 327.2 327.4 328.3 329.6 331.4 333.7 336.2 339.3 342.1 345.5 349.4 353.5 357.6 362.1 366.4 370.6 374.9 378.9 382.6 386.1 388.7 391.2 393.5 395.2 396.3 396.9 396.9 396.6 395.9 395.2 394.2 393.0 391.7 390.3 389.1 387.4 385.9 384.4 383.0 381.7 380.6 379.9 379.2 378.8 378.6 378.8 379.2 380.0 381.3 383.1 385.0 387.0 389.3 391.5 393.6 395.5 397.1 398.4 399.3 399.8 399.8 399.4 398.8 397.9 396.6 395.1 393.4 391.6 389.7 387.9 386.5 385.1 383.7 382.4 381.6 381.2 381.1 381.1 381.3 381.7 382.2 383.0 383.7 384.3 385.3 386.3 387.3 388.4 389.3 390.3 391.1 391.8 392.6 393.1 393.5 393.7 394.0 392.7 391.0 389.3 387.6 385.9 384.3 382.6 380.9 379.3 377.6 376.0 374.4 372.8 371.1 
369.5 367.9 366.4 364.8 363.2 361.6 360.1 358.5 356.9 355.4 353.9 352.3 350.8 349.3 347.8 346.3 344.8 343.3 341.8 340.3 338.8 337.4 335.9 334.5 333.0 331.6 330.1 328.7 327.3 325.9 324.5 323.1 321.7 320.3 318.9 317.5 316.1 314.8 313.4 312.0 310.7 309.3 308.0 306.7 305.4 304.0 302.7 301.4 300.1 298.8 297.5 296.2 294.9 294.7 294.7 294.7 294.7 294.9 295.0 295.1 295.3 295.4 295.4 295.5 295.7 295.8 295.9 295.9 295.9 296.0 296.4 297.2 298.7 300.5 303.1 306.4 309.9 313.6 317.5 321.2 324.7 328.0 330.8 332.9 334.3 334.9 335.1 334.9 334.7 334.4 333.9 333.5 333.1 332.6 332.1 331.8 331.6 331.5 332.7 332.7 331.9 329.6 326.4 322.5 318.0 313.2 308.3 303.4 298.7 294.6 291.4 288.3 285.9 284.2 283.3 283.3 283.4 283.7 284.0 284.5 285.2 285.9 286.6 287.3 288.1 289.1 290.0 291.0 292.0 293.0 293.8 294.7 295.5 296.2 296.8 297.3 297.6 297.9 298.2 298.3 298.1 298.0 297.6 297.2 296.7 296.3 295.7 295.2 294.7 294.2 293.7 293.3 293.2 293.0 293.0 293.0 293.2 293.6 294.1 294.6 295.1 295.5 295.9 296.1 296.4 296.1 295.5 294.6 293.3 291.6 289.7 287.5 285.2 282.8 280.2 277.8 275.4 273.3 271.4 269.8 268.2 266.9 266.0 265.6 265.4 265.4 265.4 265.4 265.5 265.6 265.6 265.7 265.7 265.8 265.9 266.0 266.0 266.2 266.2 266.2 266.2 266.2 266.3 266.2 266.1 266.0 265.8 265.5 265.1 264.7 264.4 264.0 263.6 263.2 262.8 262.5 262.1 261.8 261.6 261.5 261.3 261.3 261.2 260.9 260.4 259.8 258.9 257.8 256.6 255.4 253.9 252.2 250.3 248.4 246.4 244.3 242.3 240.1 238.0 235.9 233.9 232.3 230.3 228.5 226.7 225.1 223.6 222.3 221.1 220.0 219.2 218.5 218.0 217.7 217.6 217.4 217.0 216.4 215.3 214.1 212.7 211.2 209.7 208.2 206.9 205.7 205.0 204.4 203.9 203.9 204.1 204.6 205.2 206.1 207.2 208.5 209.8 211.3 212.9 214.3 215.6 216.9 218.0 218.8 219.3 219.7 219.9 219.7 219.6 219.5 219.4 219.1 218.8 218.6 218.3 218.1 217.8 217.6 217.5 217.4 217.3 217.3 217.3 217.4 217.5 217.6 217.8 217.9 218.1 218.3 218.4 218.6 218.9 219.2 219.4 219.6 219.9 220.4 221.3 223.2 226.4 230.7 236.0 242.3 249.5 257.7 266.9 276.8 286.4 296.2 307.6 319.0 330.5 341.7 352.5 362.5 371.5 378.9 384.8 389.1 391.5 392.7 392.5 392.4 392.3 392.1 392.0 391.8 391.4 391.0 390.8 390.5 390.1 389.7 389.3 388.8 388.5 388.3 387.9 387.6 387.3 387.2 387.0 386.8 386.8 386.8 386.8 387.0 387.3 387.7 388.5 389.2 390.1 391.0 392.0 392.9 393.7 394.4 395.0 395.5 395.9 395.9 396.0 396.2 396.4 396.7 396.9 397.1 397.4 397.7 397.7 397.9 397.9 398.2 397.0 394.2 390.1 385.5 381.3 376.5 372.9 370.5 369.6 369.8 372.6 372.7 373.5 374.8 376.5 378.4 380.4 382.9 385.4 387.9 390.4 392.4 394.3 395.8 396.7 397.2 397.1 397.0 396.9 396.8 396.6 396.4 396.2 396.0 395.8 395.4 395.2 395.0 394.8 394.6 394.5 394.3 394.3 394.3 394.4 395.1 396.8 399.5 402.9 407.0 411.5 416.3 421.3 425.7 430.4 435.1 439.2 442.5 445.1 446.7 447.4 447.2 447.0 446.7 446.1 445.6 445.2 444.6 444.0 443.4 443.0 442.5 442.5 442.7 442.0 440.5 438.8 436.6 434.2 431.4 428.3 424.8 421.1 417.1 413.4 409.7 405.4 400.9 396.5 392.1 387.7 383.3 379.1 374.9 370.9 367.1 363.7 360.7 357.6 354.6 352.0 349.6 347.7 345.8 344.5 343.3 342.5 342.2 342.1 342.2 342.9 343.9 345.2 346.8 348.6 350.5 352.2 353.9 355.5 356.7 357.6 357.9 358.1 357.9 357.4 356.7 355.9 354.9 353.8 352.7 351.5 350.0 348.6 347.4 346.2 345.0 344.0 343.3 342.6 342.2 342.0 342.1 342.3 342.8 343.8 345.1 346.5 348.1 349.7 351.3 352.8 354.2 355.3 356.1 356.6 356.9 356.7 356.3 355.6 354.7 353.5 352.0 350.4 348.8 347.2 345.8 344.5 343.3 342.2 341.4 340.8 340.7 340.8 341.2 342.1 343.2 344.7 346.1 347.2 348.2 348.9 348.7 347.2 345.6 344.1 342.6 341.1 339.6 338.1 336.6 335.1 333.6 332.1 330.7 329.2 327.8 326.3 324.9 
323.4 322.0 320.6 319.2 317.8 316.4 315.0 313.6 312.2 310.8 309.4 308.1 306.7 305.4 304.0 302.7 301.3 300.0 298.7 297.4 296.0 294.7 293.4 292.1 290.8 289.6 288.3 287.0 285.7 284.5 283.2 282.0 280.7 279.5 278.3 277.0 275.8 274.6 273.4 272.2 271.0 269.8 268.6 267.4 266.2 265.9 266.4 267.4 268.8 270.8 273.1 275.6 278.3 281.1 284.0 286.3 288.8 291.1 293.1 294.7 295.7 296.2 296.3 296.2 296.0 295.5 295.1 294.7 294.3 293.7 293.2 292.6 292.1 291.7 291.4 291.3 291.1 291.1 291.2 291.6 292.1 292.7 293.4 294.1 294.9 295.4 295.9 296.1 296.4 296.2 296.2 296.1 296.0 296.0 295.9 295.7 295.7 295.7 295.6 295.2 294.3 292.9 291.0 288.8 286.4 283.8 281.1 278.4 275.8 273.4 271.2 269.3 268.1 267.1 266.6 266.6 267.7 271.4 276.6 283.3 291.1 299.7 308.4 316.6 322.8 328.0 331.9 333.5 333.3 333.3 333.2 332.9 332.6 332.3 332.1 331.7 331.4 331.1 330.8 330.5 330.1 329.8 329.6 329.4 329.2 329.2 329.2 329.2 329.2 329.3 329.5 329.6 329.8 329.9 330.2 330.4 330.6 330.9 331.1 331.3 331.6 331.8 331.9 332.1 332.1 332.1 332.3 331.8 331.0 329.7 328.0 325.9 323.6 321.1 318.8 316.5 314.1 311.8 309.9 308.4 307.4 306.7 306.5 306.7 307.4 308.5 309.7 311.2 313.3 315.6 318.2 321.1 324.1 327.2 330.4 333.6 336.8 340.0 342.9 345.4 347.9 350.3 352.3 353.8 355.1 355.9 356.3 356.1 355.3 354.2 352.7 350.6 348.9 346.8 344.5 342.2 340.3 338.5 337.1 336.2 335.7 335.7 336.3 337.7 339.8 342.4 345.2 348.2 351.2 353.9 355.8 357.5 358.6 359.0 358.6 357.9 356.8 355.3 353.6 351.7 349.6 347.5 345.6 343.8 341.9 340.3 339.0 337.9 337.2 336.9 337.2 338.1 339.6 341.5 343.8 346.0 348.2 350.4 352.1 353.5 354.0 353.8 353.5 352.9 352.1 351.0 350.0 349.1 348.3 347.5 346.8 346.3 346.2 346.1 346.1 346.0 345.9 345.8 345.7 345.7 345.6 345.5 345.4 345.3 345.3 345.2 345.1 345.0 344.9 344.9 344.8 344.7 344.6 344.5 344.5 344.4 344.3 344.2 344.1 344.1 344.0 343.9 343.8 343.7 343.7 343.6 343.5 343.4 343.3 343.3 343.2 343.1 343.0 342.9 342.9 342.8 342.7 342.6 342.5 342.5 342.4 342.3 342.2 342.2 342.1 342.0 341.9 341.8 341.8 341.7 342.1 343.6 346.4 350.4 355.9 361.8 367.7 373.7 380.4 386.3 391.5 395.5 398.1 399.4 399.4 399.2 398.9 398.5 398.2 397.6 396.9 396.2 395.6 394.8 394.0 393.6 393.0 392.4 391.9 391.5 391.2 391.1 391.1 391.1 391.1 391.2 391.4 391.6 391.9 392.1 392.4 392.8 393.0 393.4 393.7 394.0 394.2 394.3 394.5 394.5 394.7 394.6 394.5 394.5 394.5 394.5 394.3 394.3 394.2 394.0 394.6 394.5 394.5 394.3 394.1 393.9 393.7 393.3 392.9 392.5 392.3 392.0 391.6 391.2 390.9 390.7 390.5 390.3 390.2 390.2 390.2 390.0 390.0 389.9 389.7 389.5 389.3 389.3 389.2 389.0 388.8 388.7 388.6 388.6 388.7 389.0 390.0 391.7 393.8 396.3 399.0 401.9 405.4 409.2 413.2 417.2 421.4 425.5 429.4 433.2 436.7 439.8 442.4 444.2 445.8 446.8 447.3 447.3 447.2 447.1 446.9 446.7 446.7 446.5 446.4 446.4 445.1 442.5 438.6 433.8 428.2 422.0 415.3 408.8 402.5 396.8 391.8 387.9 385.3 383.6 382.8 381.9 379.7 376.2 371.9 366.7 360.7 354.2 347.8 341.4 335.8 331.2 326.7 323.1 320.4 318.9 318.4 318.6 320.5 320.7 321.0 321.8 322.6 323.5 324.8 326.0 327.3 328.6 329.7 330.8 331.7 332.3 332.8 333.0 332.9 332.9 332.8 332.5 332.3 332.0 331.7 331.4 331.1 330.6 330.3 329.8 329.4 329.1 328.6 328.3 328.1 328.0 327.8 327.7 327.7 327.7 327.9 328.3 328.8 329.6 330.2 330.8 331.5 332.2 332.7 333.0 333.2 333.2 333.0 332.8 332.7 332.4 332.1 331.7 331.2 330.8 330.3 329.8 329.3 328.9 328.4 328.1 327.6 327.4 327.2 327.0 327.0 327.0 327.2 327.9 329.2 330.7 332.3 334.4 336.8 339.3 341.7 344.2 346.6 348.6 350.5 351.8 352.8 353.4 353.3 352.7 351.7 350.0 348.0 345.7 343.3 340.9 338.2 335.6 333.3 331.2 329.4 328.1 327.5 327.4 327.4 327.7 
328.1 328.6 329.2 329.8 330.6 331.3 331.8 332.3 332.7 332.9 333.7 332.7 331.3 329.5 327.3 324.7 321.9 318.5 314.9 311.5 308.2 304.2 300.1 296.0 291.9 287.9 284.1 280.4 277.0 273.8 270.9 268.3 266.4 264.3 262.6 261.2 260.3 259.6 259.5 259.5 259.5 259.5 259.5 259.5 259.5 259.6 259.7 259.7 259.7 259.7 259.7 259.8 259.9 260.0 260.0 260.2 260.7 262.2 264.5 267.4 270.8 274.6 278.6 282.1 285.7 289.2 292.1 294.3 295.5 296.0 295.9 295.7 295.6 295.4 295.1 294.8 294.7 294.5 294.3 294.2 294.2 294.3 294.7 295.3 296.0 296.6 297.1 297.4 297.1 296.1 294.6 292.5 289.8 286.5 282.8 278.9 274.6 270.3 265.7 261.3 257.4 253.6 249.6 245.8 242.5 239.5 237.0 235.0 233.4 232.3 231.8 231.8 232.0 232.7 233.7 235.0 236.5 238.4 240.5 242.7 244.8 247.2 249.7 252.2 254.6 257.0 259.1 261.0 262.7 264.1 265.1 265.6 266.0 265.9 265.7 265.3 264.8 264.2 263.6 262.8 262.1 261.5 260.9 260.4 260.0 259.7 259.7 259.7 259.9 260.2 260.6 261.0 261.5 262.1 262.6 263.1 263.6 263.9 264.3 264.5 264.6 264.6 264.5 264.5 264.4 264.2 264.0 263.7 263.5 263.2 263.0 262.7 262.5 262.2 262.0 261.7 261.5 261.4 261.2 261.1 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.0 261.1 261.2 261.2 261.2 261.2 261.2 261.2 261.2 261.2 261.2 261.3 261.7 262.3 263.4 265.1 267.1 269.8 272.9 276.2 279.9 284.0 288.1 291.9 296.2 300.8 305.3 309.7 313.9 317.9 321.5 324.7 327.4 329.6 331.2 332.1 332.6 332.3 331.4 330.1 328.2 325.7 322.5 319.0 315.0 310.7 306.0 301.2 296.9 292.1 286.8 281.6 276.4 271.2 266.3 261.6 257.0 252.8 248.8 245.1 242.2 239.5 236.8 234.6 232.8 231.4 230.5 229.9 229.8 230.0 231.0 232.4 234.2 236.2 238.4 241.2 244.1 247.1 250.2 253.3 256.1 258.4 260.5 262.2 263.3 263.9 264.0 263.9 263.9 263.9 263.9 263.9 263.9 263.7 263.7 263.7 263.6 263.6 263.6 263.4 263.4 263.3 263.3 263.3 263.3 263.1 263.1 263.1 263.1 263.1 263.1 263.1 263.2 263.5 263.7 264.2 265.1 266.2 267.4 269.0 270.9 272.9 275.2 277.4 279.8 282.2 284.5 286.8 289.1 291.0 292.6 294.3 295.8 296.8 297.6 298.1 298.3 298.1 298.0 297.7 297.4 297.1 296.7 296.2 295.7 295.2 294.7 294.2 293.7 293.2 292.9 292.6 292.4 292.3 292.3 292.3 292.4 292.6 292.9 293.2 293.6 294.0 294.5 295.1 295.6 296.0 296.6 297.1 297.6 298.0 298.5 298.7 299.0 299.2 299.4 299.5 299.3 298.9 298.3 297.7 297.1 296.2 295.1 293.9 292.7 291.5 290.2 289.0 287.8 286.7 285.7 284.9 284.3 283.8 283.3 283.1 283.0 283.2 283.9 284.9 286.3 288.0 290.0 292.2 294.3 296.5 298.6 300.5 302.1 303.4 304.1 304.5 304.5 304.2 303.7 302.8 301.6 300.4 299.0 297.4 295.7 294.0 292.2 290.5 288.8 287.3 286.0 284.9 284.0 283.5 283.1 283.0 283.1 283.5 284.1 284.8 285.7 286.8 288.2 289.6 291.0 292.5 293.8 295.4 296.8 298.2 299.4 300.5 301.4 302.0 302.4 302.7 302.6 302.2 301.5 300.5 299.4 297.9 296.2 294.5 292.9 291.4 289.8 288.2 286.8 285.8 284.8 284.3 284.0 284.1 284.3 284.8 285.6 286.6 287.7 289.1 290.6 292.1 293.8 295.4 297.0 298.5 299.9 301.1 301.9 302.6 303.0 303.3 303.2 302.8 302.3 301.7 301.1 300.2 299.1 297.9 296.7 295.5 294.2 292.9 291.6 290.5 289.4 288.5 287.6 286.9 286.4 286.0 285.8 285.8 285.9 286.2 286.8 287.6 288.5 289.3 290.5 291.6 292.7 293.7 294.5 295.3 296.0 296.4 296.6 296.7 296.5 296.4 296.1 295.8 295.5 295.0 294.4 293.8 293.3 292.8 292.3 291.7 291.1 290.6 290.0 289.5 289.2 288.9 288.6 288.5 288.5 288.5 288.5 288.6 288.9 289.3 289.7 290.1 290.6 291.1 291.7 292.4 293.0 293.5 293.9 294.4 294.7 295.0 295.3 295.5 295.5 295.6 295.5 295.5 295.4 295.2 295.0 294.7 294.5 294.3 294.0 293.6 293.3 293.1 292.9 292.6 292.4 292.2 292.1 292.0 292.0 292.0 292.0 292.0 292.0 292.0 292.1 292.3 292.4 292.6 292.7 292.8 293.0 293.3 293.4 293.7 
293.9 294.0 294.1 294.3 294.4 294.6 294.7 294.9 294.9 294.9 292.7 292.5 292.2 291.6 290.9 290.2 289.3 288.2 287.1 285.7 284.4 282.8 281.2 279.6 277.8 276.1 274.4 272.7 271.1 269.4 267.8 266.1 264.5 263.0 261.5 260.1 259.0 257.9 256.8 255.9 255.2 254.6 254.1 253.7 253.5 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4", - "input_type": "phoneme", - "offset": 27.937 + "f0_timestep": "0.005" } ] \ No newline at end of file diff --git "a/samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" "b/samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" index 578647dbd..509f21c41 100644 --- "a/samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" +++ "b/samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" @@ -1,362 +1,362 @@ [ { + "offset": 7.0, "text": "AP 试 着 SP 掬 一 把 星 辰 SP 在 手 心 SP", "ph_seq": "AP sh ir zh e SP j v y i b a x in ch en SP z ai sh ou x in SP", - "note_seq": "rest D#3 D#3 C4 C4 rest D#4 D#4 C4 C4 A#3 A#3 C4 C4 C4 C4 rest D#3 D#3 G3 G3 G#3 G#3 rest", - "note_dur_seq": "0.6 0.4 0.4 0.6 0.6 0.1999999 0.4000001 0.4000001 0.3999999 0.3999999 0.4000001 0.4000001 0.2 0.2 0.3999999 0.3999999 0.2 0.3999999 0.3999999 0.6000004 0.6000004 1 1 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.3947 0.209 0.2554 0.1509 0.5921 0.1045 0.1045 0.3019 0.0929 0.3019 0.0929 0.2438 0.1625 0.1045 0.0929 0.4063 0.0697 0.1277 0.2206 0.1741 0.3599 0.2438 0.9985 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 2 1 2 2 2 2 2 1 2 2 2 1 1", + "note_seq": "rest D#3 C4 rest D#4 C4 A#3 C4 C4 rest D#3 G3 G#3 rest", + "note_dur": "0.6 0.4 0.6 0.2 0.4 0.4 0.4 0.2 0.4 0.2 0.4 0.6 1.0 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 160.3 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.7 156.2 156.7 157.3 157.9 158.5 159.1 159.7 160.3 160.9 161.4 161.0 160.3 158.6 156.5 154.8 153.2 151.7 150.1 149.5 149.5 149.6 149.7 150.0 150.5 151.0 151.5 151.9 152.4 152.9 153.4 154.0 154.3 154.3 154.3 155.0 155.5 156.0 156.0 155.8 156.1 156.3 156.2 155.9 155.7 155.4 155.2 155.0 154.8 154.7 154.7 154.6 154.6 154.9 155.2 155.5 155.7 155.8 155.9 156.0 156.0 155.2 154.4 153.0 151.3 149.6 148.4 148.2 148.7 150.9 153.4 155.7 157.1 158.7 161.7 165.1 168.4 171.7 175.5 179.1 182.9 186.8 189.6 193.0 197.6 200.4 204.3 
208.7 213.4 218.5 223.3 226.8 229.1 230.5 232.1 232.9 234.4 236.0 237.6 238.5 239.0 239.5 239.8 240.0 240.1 240.1 240.3 240.1 239.7 239.2 238.8 238.4 238.1 237.7 237.3 236.8 236.4 236.1 236.3 236.5 236.7 237.6 238.6 240.3 242.4 245.2 247.9 250.3 253.2 256.1 259.0 261.1 262.1 262.7 263.3 263.9 264.5 265.0 265.6 265.6 265.5 265.4 265.3 265.3 265.0 264.4 263.7 263.1 263.1 263.3 263.3 263.4 263.5 263.5 263.3 263.1 262.8 262.6 262.4 262.2 262.2 262.1 262.1 262.1 261.9 261.9 261.9 261.9 261.8 261.6 261.6 261.6 261.6 261.6 261.6 262.3 262.9 263.6 264.4 265.3 266.1 267.0 267.7 268.0 268.2 268.3 268.0 267.0 265.7 264.2 262.5 260.7 259.5 258.4 257.1 256.6 256.3 256.0 256.5 257.6 259.2 260.8 262.5 264.3 265.6 266.1 266.5 266.8 267.0 267.0 266.2 265.0 263.5 261.9 260.7 260.2 259.9 259.5 259.0 258.9 258.8 259.4 262.2 265.5 267.3 269.3 271.6 274.2 277.9 282.8 286.9 290.8 293.8 296.6 299.1 301.0 301.6 300.0 296.6 292.8 289.2 286.1 284.0 281.8 279.4 277.1 276.9 278.1 280.2 281.7 282.8 283.3 285.3 287.1 289.0 290.8 293.3 295.7 299.0 302.8 305.4 306.7 308.8 310.1 310.1 311.3 311.3 311.3 311.3 312.0 312.7 313.7 313.9 314.0 314.2 314.2 314.3 314.4 314.5 314.6 314.7 314.8 314.9 315.1 315.1 314.9 314.9 314.8 314.7 314.6 314.5 314.3 314.1 313.7 313.5 313.2 313.0 312.8 312.6 312.2 312.2 312.2 312.2 312.2 312.2 312.2 312.1 312.0 311.9 311.5 310.9 311.1 311.1 310.5 310.1 309.6 309.0 308.5 307.9 307.0 305.7 303.7 301.5 299.3 296.6 293.3 289.9 286.5 282.2 277.8 274.5 271.4 269.1 267.2 265.7 264.7 264.2 263.8 263.7 263.3 262.9 262.6 262.4 262.3 262.2 262.0 261.9 261.9 261.9 261.9 262.1 262.1 262.1 262.1 262.1 262.1 262.1 262.1 262.1 262.1 262.1 262.0 261.8 261.6 261.3 261.5 261.5 261.6 261.7 261.8 261.9 262.0 262.2 262.2 262.4 262.4 262.5 262.6 262.7 262.8 262.6 262.5 262.3 262.0 261.8 261.5 260.7 259.9 258.5 254.5 249.5 243.7 238.6 233.8 232.1 231.8 234.2 236.9 239.8 241.2 241.8 241.3 240.5 239.3 236.9 233.7 231.1 229.2 227.1 225.0 223.8 223.3 224.1 225.0 225.9 227.1 227.6 228.2 229.0 230.0 231.0 231.4 231.6 231.9 232.1 232.0 231.8 231.7 231.6 231.5 231.3 231.3 231.2 231.1 231.3 231.7 232.0 232.2 232.4 232.4 232.6 232.7 232.8 232.8 232.9 232.9 232.8 232.8 232.7 232.7 232.3 231.9 231.7 232.0 232.5 233.0 233.5 234.0 234.4 234.6 234.7 234.9 235.0 234.8 234.3 229.7 225.0 219.2 212.8 209.7 212.0 214.7 217.9 221.7 225.9 228.8 231.4 233.8 235.9 237.6 239.5 241.6 244.6 248.1 250.0 252.5 254.6 256.5 258.4 260.4 261.5 262.4 262.9 263.0 263.0 262.9 263.1 262.8 262.6 262.1 261.4 260.9 260.9 260.9 261.0 260.9 261.0 260.9 261.3 261.8 261.8 261.8 261.8 261.8 261.3 261.3 261.6 261.3 261.1 261.0 261.1 260.5 259.9 259.3 259.7 259.9 259.7 259.7 260.0 260.4 260.8 261.1 261.3 261.4 261.6 262.0 262.8 263.3 263.7 264.2 264.5 264.4 264.3 264.1 263.7 263.4 263.0 262.6 262.0 261.6 261.2 260.8 260.7 260.6 260.4 260.4 260.2 260.1 260.0 259.8 259.8 259.7 259.9 260.1 260.3 260.3 260.5 260.6 260.8 261.0 261.2 261.5 261.7 261.9 262.2 262.4 262.3 262.2 262.1 262.1 262.0 260.9 260.0 259.6 259.1 258.1 257.1 256.1 255.2 255.2 255.4 255.9 257.0 258.4 260.0 261.5 263.0 264.3 265.5 266.8 268.0 268.7 268.2 267.8 267.1 265.9 264.4 262.9 261.2 259.5 258.7 258.1 257.7 257.5 260.3 264.1 263.6 263.4 262.7 261.8 259.9 257.0 253.3 248.2 241.4 233.0 223.5 211.9 197.2 179.9 163.5 149.1 138.9 131.6 125.6 121.1 118.8 117.2 115.8 114.7 114.6 114.6 114.7 114.6 114.8 115.3 116.9 119.1 121.7 124.5 127.3 130.2 132.8 135.4 137.3 139.1 140.8 142.3 143.9 145.3 146.7 148.0 148.7 149.5 150.3 151.0 151.7 152.4 153.2 154.0 154.7 155.1 155.3 155.6 155.9 156.2 156.4 156.6 156.4 
155.9 155.5 155.2 155.3 155.4 155.5 155.6 155.7 155.8 155.9 156.0 156.2 156.7 157.2 157.7 158.3 157.7 156.9 155.5 153.5 151.6 151.5 151.1 150.8 150.5 150.4 150.4 150.3 150.4 150.7 151.1 151.8 152.9 153.9 154.7 155.3 155.9 156.4 156.6 156.6 156.6 156.6 156.1 155.5 155.0 154.8 155.4 157.8 161.6 166.8 171.8 176.2 179.5 181.7 183.7 185.4 186.2 186.5 186.5 186.3 185.9 185.3 184.5 183.9 183.6 183.4 183.1 183.0 183.0 182.9 182.9 182.8 182.8 183.2 184.1 185.0 186.4 188.0 189.4 191.1 193.0 194.9 196.8 198.1 199.1 199.8 200.3 200.8 200.9 201.0 201.0 200.9 200.2 199.5 198.6 197.7 196.7 195.9 195.4 195.2 195.0 194.7 194.5 194.4 194.3 194.3 194.3 194.4 194.4 194.4 194.4 194.3 194.8 195.4 195.9 196.6 197.3 198.3 198.6 198.4 198.2 198.0 197.8 197.2 196.1 194.6 193.4 193.0 192.7 192.4 192.5 193.3 194.1 194.8 195.5 196.2 197.0 197.7 198.3 198.6 198.2 197.4 196.3 195.4 194.5 193.5 193.0 192.5 192.2 192.3 192.4 192.9 193.6 194.4 195.3 196.7 197.8 198.5 199.5 200.6 201.7 203.0 204.1 204.7 205.4 206.1 207.1 208.2 209.2 210.5 211.9 213.8 215.3 215.7 215.6 214.9 214.0 213.4 212.8 212.0 211.2 210.2 209.3 208.5 207.6 206.7 205.9 205.4 205.0 204.4 204.0 203.8 203.7 203.5 203.4 203.3 203.1 203.0 202.9 203.2 203.6 204.2 204.6 205.0 205.4 205.9 206.3 206.7 207.2 207.6 207.4 207.1 206.9 207.1 207.3 207.3 207.2 207.2 207.1 207.1 206.9 207.0 207.1 207.2 207.4 207.5 207.7 207.7 207.6 207.4 206.8 206.6 206.6 206.7 206.7 206.7 206.7 206.8 206.8 206.8 206.9 206.9 206.9 207.0 207.1 207.2 207.3 207.3 207.5 207.5 207.7 207.7 207.8 207.8 208.0 208.0 208.1 208.2 208.3 208.3 208.4 208.5 208.1 207.6 207.2 207.1 206.8 206.5 206.2 205.9 205.6 205.1 204.6 204.4 204.4 204.3 204.2 204.4 205.1 205.9 206.7 207.8 209.0 209.7 210.3 211.0 211.6 212.0 212.4 212.8 213.2 213.6 213.6 213.6 213.4 212.9 211.2 209.9 208.6 207.6 206.8 206.0 205.2 204.6 204.3 204.6 204.9 205.4 206.0 206.8 207.5 208.3 208.9 209.1 209.4 209.6 209.8 209.6 209.4 209.2 209.1 208.9 208.6 208.2 207.9 207.4 207.1 206.7 206.3 206.0 205.6 205.4 205.4 205.3 205.3 205.3 205.3 205.6 205.9 206.2 206.9 207.5 208.0 208.3 208.6 208.9 209.2 209.4 209.8 209.9 209.9 209.9 209.9 209.9 209.9 209.9 209.9 209.5 209.2 208.8 208.4 208.0 207.7 207.4 207.1 206.8 206.5 206.2 206.0 205.8 205.5 205.1 204.7 204.1 203.4 202.7 201.8 200.7 200.2 200.2", - "input_type": "phoneme", - "offset": 7.0 + "f0_timestep": "0.005" }, { + "offset": 13.4, "text": "AP 却 遮 SP 住 迷 恋 遥 远 的 眼 睛 SP", "ph_seq": "AP q ve zh e SP zh u m i l ian y ao y van d e y En j in SP", - "note_seq": "rest C4 C4 F4 F4 rest D#4 D#4 D#4 D#4 C4 C4 A#3 A#3 A#3 A#3 G#3 G#3 A#3 A#3 C4 C4 rest", - "note_dur_seq": "0.6 0.4 0.4 1 1 0.2 0.2 0.2 0.1999998 0.1999998 0.4000001 0.4000001 0.4000001 0.4000001 0.5999999 0.5999999 0.1999998 0.1999998 0.4000001 0.4000001 1.4 1.4 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.4528 0.1509 0.2902 0.1161 0.9985 0.1045 0.0929 0.1161 0.0813 0.1509 0.0464 0.2902 0.1161 0.3019 0.0929 0.4992 0.1045 0.1045 0.0929 0.2786 0.1277 1.3932 0.058", - "f0_timestep": "0.005", + "ph_num": "2 2 1 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 F4 rest D#4 D#4 C4 A#3 A#3 G#3 A#3 C4 rest", + "note_dur": "0.6 0.4 1.0 0.2 0.2 0.2 0.4 0.4 0.6 0.2 0.4 1.4 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 
253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 253.4 261.6 260.8 260.0 259.2 258.5 257.6 256.8 256.0 255.3 254.5 253.6 252.8 252.0 251.3 250.5 249.7 248.9 248.1 247.4 246.7 245.8 246.2 251.6 256.7 260.1 261.3 261.8 261.9 262.0 262.1 261.8 261.6 261.2 260.9 260.6 260.4 260.0 259.8 259.5 259.3 259.4 259.4 259.4 259.5 259.6 259.7 259.8 259.9 260.0 260.1 260.1 259.9 259.7 259.4 259.3 259.1 259.3 259.4 259.7 260.2 260.6 261.1 261.6 262.0 262.3 262.5 262.7 262.9 263.1 263.4 263.6 263.6 263.4 263.2 263.1 263.1 263.1 263.1 263.1 263.1 262.8 262.6 262.4 261.8 261.1 260.4 259.7 259.0 258.5 258.1 257.9 257.7 258.2 259.2 260.5 261.9 264.7 268.4 271.0 274.6 283.7 294.4 304.2 309.0 311.4 312.9 313.2 313.6 314.7 315.4 315.5 315.4 315.1 314.7 314.4 314.2 314.1 314.1 313.7 313.4 312.9 312.7 312.3 312.0 311.6 311.3 310.9 310.6 310.2 309.9 309.5 309.2 308.9 308.9 309.0 309.2 309.3 309.4 309.6 309.7 309.9 311.5 313.3 314.9 316.6 318.4 320.2 322.0 324.2 327.2 330.0 332.9 336.0 338.5 340.4 342.0 343.5 345.3 347.1 347.9 348.4 349.1 349.3 349.6 349.8 349.8 350.1 350.3 350.5 350.9 351.0 351.1 351.3 351.3 351.3 351.5 351.5 351.5 351.4 351.3 351.3 351.5 351.6 351.7 351.8 351.9 352.1 352.1 352.4 352.5 352.5 352.7 352.7 352.8 352.9 352.9 353.3 353.5 353.2 352.1 351.0 349.8 348.8 347.8 346.8 346.2 345.5 345.0 345.0 345.0 345.0 345.4 347.5 349.4 351.4 353.5 356.1 357.0 357.0 357.0 357.0 357.0 356.7 356.0 354.8 353.3 351.8 350.4 349.0 347.5 346.0 344.7 344.2 344.2 344.5 346.2 348.2 350.1 351.9 353.6 355.1 356.4 357.5 358.1 358.5 358.2 358.0 357.5 356.6 355.4 353.9 352.5 351.6 351.0 350.4 349.6 348.4 347.3 346.8 347.1 348.0 349.0 350.1 351.5 353.1 354.9 356.4 357.1 357.5 357.8 357.0 355.9 354.9 354.0 353.2 352.5 350.9 349.3 348.4 347.9 347.6 347.2 347.1 347.8 349.0 351.3 351.3 351.3 351.3 351.4 351.7 351.7 352.0 352.2 352.0 351.0 350.0 349.1 348.2 346.8 345.0 342.0 332.2 326.6 325.2 323.6 322.0 320.5 319.0 317.1 315.5 313.6 311.7 310.0 308.3 306.5 305.2 305.1 305.0 304.2 303.3 302.6 302.0 302.0 301.3 300.0 298.8 298.3 297.9 297.8 296.6 295.3 294.3 293.6 293.2 293.2 293.2 294.1 295.1 296.0 296.6 296.9 298.2 301.5 305.6 310.2 311.8 312.9 313.5 313.9 314.4 314.9 314.6 314.2 313.7 313.5 313.0 312.6 312.3 311.9 311.5 310.4 309.3 308.4 307.6 307.6 307.8 307.8 307.6 307.2 306.8 306.4 306.1 305.7 305.5 305.1 305.3 305.5 305.7 306.2 308.3 310.8 313.0 314.0 313.8 313.3 312.6 312.1 311.9 311.7 311.7 311.4 311.4 311.6 311.9 312.2 312.6 312.8 312.9 312.8 312.7 312.1 312.6 311.9 310.1 308.1 302.4 296.6 291.6 286.0 280.4 274.0 267.4 262.8 259.2 255.4 250.9 247.6 245.8 243.4 240.7 238.3 236.2 234.9 233.7 232.8 232.4 232.3 232.4 232.7 233.4 234.5 237.1 240.0 243.1 246.7 250.0 252.4 254.1 256.0 257.9 259.5 260.7 261.7 262.4 262.8 263.1 263.4 263.5 263.6 263.6 263.6 263.6 263.7 263.7 263.7 263.7 263.7 263.7 263.4 262.7 262.1 261.4 260.8 260.2 259.5 258.9 258.4 257.9 257.5 257.1 256.6 256.2 255.5 254.4 253.2 251.9 250.9 249.7 248.6 247.4 246.1 245.0 243.6 241.5 239.2 236.7 235.0 234.2 233.7 233.2 232.5 231.7 230.9 229.7 229.6 229.7 229.7 229.3 231.0 232.9 232.9 232.9 232.8 232.9 233.2 232.9 232.7 232.7 232.1 231.5 231.1 230.7 230.3 230.0 229.6 229.4 229.0 228.8 228.6 228.3 228.6 228.9 229.2 229.5 229.9 230.2 230.5 230.7 231.0 231.1 231.2 231.3 231.8 232.3 232.9 233.1 233.3 233.4 
233.6 233.8 234.0 234.2 234.2 234.2 234.2 234.2 234.3 234.3 234.3 234.3 234.3 234.0 233.7 233.5 233.6 233.7 233.5 233.2 233.0 232.7 232.7 232.7 232.5 231.8 230.7 229.7 228.8 227.9 226.9 225.9 225.0 224.7 224.5 224.3 224.8 225.4 226.8 228.2 228.6 229.1 229.5 229.9 230.4 230.8 231.2 231.7 232.2 232.5 232.8 232.9 233.0 233.1 233.3 233.4 233.5 233.7 233.7 233.6 233.4 233.2 232.9 232.7 232.6 232.3 232.1 232.1 232.0 231.8 231.6 231.5 231.4 231.5 231.7 231.8 232.0 232.2 232.4 232.7 233.1 233.6 234.0 234.4 234.7 234.9 235.2 235.4 235.7 235.9 236.2 236.1 235.9 235.8 235.8 235.5 235.1 234.6 234.2 233.8 233.3 232.9 232.5 232.2 232.1 231.9 231.7 231.5 231.5 231.6 231.7 232.5 233.0 233.1 233.3 233.6 233.9 234.2 234.5 234.8 234.8 235.0 235.1 235.2 235.3 235.3 235.1 235.1 234.9 234.8 234.6 234.3 234.0 233.8 233.5 233.3 232.9 232.6 232.4 232.2 232.1 232.0 231.8 231.7 231.5 231.2 230.5 229.3 228.3 227.1 225.7 223.2 219.9 214.4 208.5 201.8 194.8 190.6 189.5 191.5 196.6 202.7 207.0 208.8 208.7 207.9 206.8 206.5 206.6 206.9 207.0 207.1 207.3 207.5 207.7 208.0 208.3 208.3 208.3 208.4 208.5 208.6 208.6 208.2 207.8 207.5 207.1 206.6 205.9 205.3 204.6 203.8 203.3 203.1 203.6 204.6 205.6 208.2 210.6 213.2 216.5 220.0 223.5 226.8 228.9 230.8 232.7 234.0 235.1 235.4 235.6 235.5 235.3 234.7 234.1 233.5 232.8 232.1 231.8 231.5 231.1 230.8 231.0 231.3 231.4 231.4 231.2 230.9 231.0 231.5 232.0 232.1 232.1 232.2 232.3 232.3 232.3 232.8 232.7 232.5 232.3 232.5 232.7 232.7 232.7 232.7 232.7 232.8 232.8 232.8 232.8 232.8 232.8 232.9 232.9 232.9 232.9 233.0 233.1 233.1 233.1 233.1 233.0 232.9 232.9 233.0 233.1 233.1 233.1 233.2 233.0 232.8 232.8 232.9 233.0 233.9 235.4 237.3 238.5 240.1 242.1 244.2 245.9 247.3 248.2 248.8 249.5 249.5 249.4 249.0 248.6 248.4 247.8 247.3 246.8 246.2 245.7 245.1 244.6 244.0 243.4 243.5 243.9 244.4 245.5 246.8 248.2 250.6 252.8 255.0 257.4 258.9 259.6 259.7 259.9 260.2 260.6 261.3 261.3 261.2 261.1 261.0 261.0 260.9 260.8 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.9 261.4 261.8 262.2 262.5 262.9 263.1 262.9 262.7 262.7 262.5 262.5 262.4 262.4 262.2 262.2 262.2 262.1 262.1 261.9 262.0 262.2 261.8 261.5 261.3 261.1 261.0 260.9 260.8 260.3 260.2 260.3 260.4 260.5 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.6 260.5 260.4 260.3 260.3 260.3 260.1 260.0 259.9 259.7 259.7 259.5 259.4 259.7 260.2 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.7 260.4 260.3 260.5 260.6 260.8 260.9 261.0 261.1 261.2 261.3 261.4 261.5 261.6 261.7 261.8 261.9 261.9 262.1 262.1 262.3 262.4 262.1 261.7 261.5 261.2 261.1 261.0 260.8 260.7 260.6 260.6 260.4 260.3 260.2 260.0 260.0 259.7 259.4 259.2 258.8 258.1 257.7 257.5 257.4 257.3 257.2 257.4 257.8 258.1 258.5 258.7 259.1 259.4 259.8 260.1 260.5 261.6 262.8 263.8 264.7 265.5 266.4 266.0 265.7 265.5 265.2 265.0 264.6 264.3 263.5 262.6 261.6 260.5 259.6 258.8 257.9 257.4 257.0 257.0 257.1 257.4 258.0 258.8 259.6 260.4 261.4 262.4 263.3 264.2 265.1 266.2 267.1 267.7 268.1 268.0 267.8 267.4 267.1 266.0 264.6 262.9 261.2 259.5 257.8 256.4 255.5 254.8 254.2 253.6 253.1 252.5 252.1 252.9 253.5 254.1 254.9 256.4 258.1 259.6 261.1 262.7 263.9 265.0 266.1 266.9 267.6 268.4 269.0 268.9 268.7 268.6 268.4 267.4 266.5 265.7 264.7 262.8 260.9 258.7 255.4 251.3 247.4 247.9 248.6 249.1 249.7 250.2 250.8 251.4 251.7 251.7", - "input_type": "phoneme", - "offset": 13.4 + "f0_timestep": "0.005" }, { + "offset": 19.8, "text": "AP 窗 外 传 来 记 忆 SP 的 声 音 
SP", "ph_seq": "AP ch uang w ai ch uan l ai j i y i SP d e sh eng y in SP", - "note_seq": "rest D#3 D#3 C4 C4 D#4 D#4 C4 C4 A#3 A#3 C4 C4 rest D#3 D#3 G3 G3 G#3 G#3 rest", - "note_dur_seq": "0.6 0.4 0.4 0.8 0.8 0.4000001 0.4000001 0.3999999 0.3999999 0.6000001 0.6000001 0.3999999 0.3999999 0.2 0.3999999 0.3999999 0.6000004 0.6000004 1.4 1.4 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.4412 0.1625 0.3251 0.0813 0.6037 0.1974 0.2902 0.1045 0.2554 0.1393 0.476 0.1277 0.4063 0.1277 0.0697 0.1509 0.2438 0.4876 0.1161 1.4048 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 1 2 2 2 1 1", + "note_seq": "rest D#3 C4 D#4 C4 A#3 C4 rest D#3 G3 G#3 rest", + "note_dur": "0.6 0.4 0.8 0.4 0.4 0.6 0.4 0.2 0.4 0.6 1.4 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 166.0 155.6 156.0 156.5 156.9 157.3 157.7 158.2 158.6 159.0 159.5 159.9 160.4 160.9 161.3 161.7 162.2 162.6 163.1 163.5 164.0 164.4 164.3 161.3 158.3 155.5 153.2 152.2 151.3 150.4 149.5 149.0 148.5 148.1 148.0 147.9 148.1 148.3 148.6 148.8 149.3 149.7 150.3 150.7 150.8 150.9 151.0 150.9 150.8 150.7 150.7 151.0 151.5 151.9 152.4 153.1 153.9 154.8 155.5 156.2 156.8 157.3 157.7 158.1 158.1 158.0 157.9 157.8 157.6 157.5 157.3 157.2 156.9 156.3 155.8 155.5 155.4 155.2 155.0 154.8 154.9 155.0 155.0 153.6 152.4 151.4 150.2 145.8 139.7 135.4 133.2 132.1 132.3 134.8 138.5 143.5 149.8 156.1 161.6 165.9 171.5 178.0 184.6 192.5 201.8 210.0 217.2 223.3 228.1 232.0 235.8 239.2 241.2 242.1 242.7 243.2 243.0 242.2 241.8 240.9 239.8 238.8 237.8 236.7 235.8 234.9 233.9 234.0 234.2 234.4 234.5 234.7 234.9 235.4 236.6 238.1 239.6 241.2 242.6 244.0 246.5 249.2 251.2 253.2 255.0 256.9 258.9 260.8 261.8 262.8 263.9 265.0 264.8 264.5 264.3 264.1 263.9 263.3 262.7 262.2 261.6 261.3 261.0 260.8 260.6 260.5 260.1 260.0 260.2 260.6 260.8 261.2 261.5 261.8 261.9 261.8 261.6 261.8 261.8 261.9 262.0 262.1 262.2 262.3 262.5 262.6 262.8 263.2 263.4 263.7 264.0 264.2 264.1 264.0 263.8 263.6 263.6 263.4 263.3 263.2 263.0 263.0 262.8 262.6 262.5 262.6 262.8 263.0 263.2 263.4 263.6 263.7 263.8 264.0 264.1 264.3 264.5 264.7 264.7 264.3 263.7 263.1 262.6 262.0 260.7 257.1 252.3 244.2 237.2 231.3 227.9 225.8 229.1 232.5 235.1 236.6 237.8 239.0 240.1 240.7 241.3 241.9 242.6 244.0 245.4 247.0 248.7 250.2 251.1 252.0 253.1 254.2 255.5 256.8 257.5 258.2 259.4 260.8 261.0 262.0 266.0 274.3 283.3 289.1 291.9 293.7 295.0 294.5 293.6 292.6 291.9 291.3 290.5 289.5 288.6 287.9 287.1 286.6 286.4 286.3 286.2 286.0 287.2 288.5 289.7 291.4 293.5 295.6 297.6 299.3 300.9 302.3 303.6 305.3 306.8 308.4 310.2 311.9 313.0 313.7 314.2 314.4 314.8 314.7 314.2 313.6 312.9 312.7 312.6 312.5 312.3 312.0 312.3 312.7 313.0 313.2 313.5 313.8 313.6 313.5 313.3 313.1 312.9 312.7 312.2 310.7 308.6 306.8 304.8 303.2 302.0 301.1 299.7 297.7 294.7 291.0 286.6 283.3 280.0 277.3 274.5 274.3 274.6 273.0 270.5 268.2 266.2 265.3 264.8 264.5 264.5 263.9 263.3 262.8 262.4 262.0 261.8 
261.4 261.1 260.8 260.5 260.1 259.9 259.6 259.3 259.0 258.8 258.5 258.9 259.2 259.6 259.7 259.7 259.7 259.8 260.3 260.9 261.5 262.1 262.6 263.3 263.9 264.3 264.6 264.8 265.2 265.6 265.8 265.2 264.4 262.6 260.6 255.7 249.5 243.9 238.5 235.2 233.4 236.0 242.0 250.2 258.6 259.0 257.7 252.3 245.4 238.4 231.8 227.9 224.5 220.7 217.0 212.8 208.5 205.3 203.3 201.7 200.3 201.4 202.6 204.2 208.6 213.8 219.0 223.2 226.1 228.4 230.9 231.9 232.8 233.5 234.3 235.3 236.0 236.2 236.4 236.5 236.1 235.8 235.5 235.3 235.0 234.6 233.8 232.9 232.3 231.7 231.4 231.2 231.1 231.1 231.1 231.1 231.2 231.2 231.2 231.2 231.3 231.6 231.9 232.2 232.7 233.0 233.3 233.4 233.6 233.6 233.7 233.6 233.6 233.4 233.2 232.6 232.1 231.9 231.8 231.7 231.6 231.4 231.4 231.6 231.7 231.2 230.9 231.2 231.6 232.1 232.0 232.0 231.8 231.8 232.3 232.9 233.5 233.9 234.2 234.4 234.6 234.8 234.9 235.1 235.0 234.8 234.7 234.6 234.5 234.3 234.2 234.1 234.0 233.8 233.6 233.6 233.4 233.3 233.2 233.1 232.9 232.8 232.7 232.6 232.4 232.3 232.5 232.6 232.9 232.8 232.8 232.7 232.7 232.7 232.7 232.7 232.6 232.4 232.3 232.1 232.6 232.8 232.7 233.0 233.5 233.8 234.1 234.2 233.9 233.5 232.5 232.2 232.2 232.1 233.6 235.9 238.4 241.0 243.7 246.8 249.7 252.6 255.1 257.9 260.6 262.8 264.4 265.9 266.5 266.5 266.5 265.8 265.0 264.2 263.3 262.4 261.7 261.3 260.9 260.4 260.1 260.0 260.1 260.1 260.3 260.3 260.7 261.1 261.6 261.9 262.3 262.7 263.1 263.5 263.9 264.3 264.6 264.8 264.6 264.2 263.8 263.3 262.8 262.1 261.2 260.4 259.7 259.2 258.7 258.0 257.5 257.2 256.8 256.6 256.5 256.7 256.8 256.8 257.1 257.3 257.7 258.0 258.3 258.7 259.2 260.1 260.6 260.5 260.4 260.4 260.1 259.4 259.1 258.4 257.6 256.7 255.3 253.5 250.6 246.2 240.2 233.2 225.1 214.9 204.4 194.4 185.6 179.3 174.0 169.6 166.8 164.9 163.7 162.8 162.3 162.1 162.0 162.1 162.0 162.2 162.5 162.8 163.1 163.4 163.8 163.5 161.1 158.8 156.9 155.1 154.0 153.2 152.6 152.2 152.4 152.6 152.8 153.0 153.1 153.3 153.5 153.4 153.2 153.0 152.8 152.6 152.5 152.3 152.1 152.2 152.3 152.4 152.4 152.6 153.1 153.6 154.1 154.6 155.1 155.6 156.1 156.5 156.5 156.3 156.1 155.8 154.9 153.9 152.6 151.3 150.3 150.5 151.2 152.0 152.8 153.6 154.4 155.1 155.6 156.1 156.6 157.1 157.4 157.6 157.7 157.4 156.6 155.9 155.2 154.4 153.6 153.1 152.7 152.4 152.2 152.2 152.2 152.3 152.6 153.4 154.6 156.1 157.9 160.6 163.7 166.9 170.0 172.4 174.6 176.5 177.0 177.0 176.7 176.3 175.6 175.4 175.4 175.4 175.3 175.7 176.7 177.7 178.5 179.5 180.9 182.2 183.6 184.8 186.0 187.5 188.9 190.2 191.4 192.9 194.2 194.9 195.6 196.3 197.0 197.7 197.8 197.7 197.5 197.4 197.2 197.1 197.0 197.2 197.2 197.2 197.2 197.2 197.2 197.0 196.9 196.6 195.9 195.1 194.5 194.1 193.8 193.5 193.2 192.9 193.0 193.0 193.1 193.1 193.2 193.3 193.3 193.5 193.6 193.9 194.2 194.5 194.2 194.0 194.1 194.2 194.4 194.8 195.3 195.3 195.2 195.2 195.0 195.0 195.1 195.2 195.2 195.4 195.4 195.2 195.1 194.9 195.1 195.3 195.5 195.7 195.9 195.8 195.7 195.6 195.2 194.8 194.5 194.2 193.8 193.6 193.4 193.5 193.7 193.6 193.4 192.9 192.4 192.1 191.8 191.5 191.5 192.3 193.1 194.2 195.3 196.5 197.9 199.1 200.0 200.7 201.2 202.2 203.0 203.6 204.0 204.3 204.4 204.5 204.6 204.6 204.8 204.8 204.8 204.9 204.9 205.1 205.4 205.6 205.7 206.0 206.4 206.8 207.3 207.7 208.0 208.3 208.6 208.9 209.2 209.5 209.3 209.3 209.2 209.0 208.7 208.3 208.0 207.6 207.2 206.8 206.5 206.2 206.3 206.5 206.5 206.4 206.3 206.3 206.2 206.1 206.0 205.9 205.9 206.1 206.3 206.2 206.0 206.0 205.9 205.7 206.0 206.2 206.5 206.7 206.9 206.5 206.1 206.2 206.4 206.7 207.3 207.7 207.9 207.9 207.9 207.8 207.8 207.8 208.0 208.1 208.0 207.9 
207.9 208.0 208.4 208.5 208.6 208.6 208.7 208.6 208.3 207.9 207.6 207.2 206.9 206.5 206.2 205.8 205.5 205.1 205.1 205.0 205.0 204.9 204.8 205.0 205.3 205.6 206.0 206.5 206.8 207.2 207.7 208.3 208.7 209.0 209.0 209.0 209.1 209.1 209.1 208.9 208.7 208.5 208.3 208.1 207.8 207.4 206.9 206.5 206.2 205.9 205.6 205.2 204.9 204.6 204.5 204.4 204.3 204.2 204.1 204.0 204.2 204.6 205.0 205.3 205.7 206.0 206.4 206.7 207.0 207.3 207.5 207.9 208.6 208.6 208.6 208.3 208.0 207.5 207.1 206.8 206.4 206.0 205.9 205.8 205.7 205.7 205.7 205.6 205.8 207.1 208.6 209.7 210.6 211.2 211.9 212.3 212.1 211.8 211.3 210.9 210.5 209.6 208.4 207.6 206.8 205.8 204.8 204.2 203.8 203.4 203.1 203.3 203.6 204.1 204.6 205.3 206.2 206.6 207.0 207.4 208.0 208.6 209.3 210.0 210.1 210.1 210.1 209.8 209.5 209.2 208.9 208.6 208.3 208.0 208.0 207.8 207.9 208.1 208.6 209.0 209.4 209.8 209.8 209.8 209.5 209.2 208.9 208.6 208.3 207.8 207.3 207.2 207.2 207.1 206.5 205.9 205.3 204.7 204.0 203.5 203.4 203.5 203.5 203.9 204.4 205.0 205.7 206.5 207.1 207.8 208.4 209.3 210.3 211.5 212.6 213.4 214.0 214.7 214.9 215.1 215.3 215.5 215.0 214.3 213.1 211.7 209.6 206.9 204.0 200.4 197.1 194.5 193.3 194.0 194.8 195.4 196.1 196.8 197.5 198.2 198.5 198.5", - "input_type": "phoneme", - "offset": 19.8 + "f0_timestep": "0.005" }, { + "offset": 26.2, "text": "AP 在 半 夜 迷 失 SP 在 房 间 消 失 SP 去 幻 想 着 SP 夜 晚 之 前 的 SP", - "ph_seq": "AP z ai b an y E m i sh ir SP z ai f ang j ian x iao sh ir SP q v h uan x iang iang zh e SP y E w an zh ir q ian ian d e e SP", - "note_seq": "rest F3 F3 G3 G3 G#3 G#3 G#3 G#3 G#3 G#3 rest G3 G3 G#3 G#3 A#3 A#3 A#3 A#3 A#3 A#3 rest G#3 G#3 A#3 A#3 A#3 A#3 C4 E4 E4 rest F4 F4 G4 G4 G#4 G#4 G#4 G#4 A#4 F#4 F#4 G#4 rest", - "note_dur_seq": "0.6 0.2 0.2 0.2 0.2 0.4 0.4 0.4 0.4 0.2 0.2 0.2 0.2 0.2 0.1999998 0.1999998 0.4000001 0.4000001 0.4000001 0.4000001 0.1999998 0.1999998 0.2 0.2 0.2 0.1999998 0.1999998 0.2000003 0.2000003 0.5999999 0.5999999 0.5999999 0.2000003 0.3999996 0.3999996 0.4000001 0.4000001 0.4000001 0.4000001 0.1409998 0.1409998 0.2590003 0.1719999 0.1719999 0.7280002 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0", - "ph_dur": "0.5224 0.0813 0.1393 0.058 0.1277 0.0813 0.2438 0.1509 0.209 0.1974 0.1974 0.0929 0.1045 0.1161 0.0813 0.1393 0.058 0.2786 0.1277 0.2206 0.1741 0.209 0.0697 0.1277 0.1161 0.0813 0.1045 0.0929 0.1974 0.4876 0.1161 0.6037 0.1277 0.0697 0.2786 0.1277 0.3019 0.0929 0.2438 0.1509 0.1509 0.209 0.0464 0.1741 0.7198 0.058", - "f0_timestep": "0.005", + "ph_seq": "AP z ai b an y E m i sh ir SP z ai f ang j ian x iao sh ir SP q v h uan x iang zh e SP y E w an zh ir q ian d e SP", + "ph_dur": "0.5224 0.0813 0.1393 0.058 0.1277 0.0813 0.2438 0.1509 0.209 0.1974 0.1974 0.0929 0.1045 0.1161 0.0813 0.1393 0.058 0.2786 0.1277 0.2206 0.1741 0.209 0.0697 0.1277 0.1161 0.0813 0.1045 0.0929 0.685 0.1161 0.6037 0.1277 0.0697 0.2786 0.1277 0.3019 0.0929 0.2438 0.1509 0.3599 0.0464 0.8939 0.058", + "ph_num": "2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest F3 G3 G#3 G#3 G#3 rest G3 G#3 A#3 A#3 A#3 rest G#3 A#3 A#3 C4 E4 rest F4 G4 G#4 G#4 A#4 F#4 G#4 rest", + "note_dur": "0.6 0.2 0.2 0.4 0.4 0.2 0.2 0.2 0.2 0.4 0.4 0.2 0.2 0.2 0.2 0.2 0.6 0.6 0.2 0.4 0.4 0.4 0.141 0.259 0.172 0.728 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 1 0", "f0_seq": "181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 
181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 181.7 174.6 174.6 174.5 174.5 174.4 174.4 174.4 174.3 174.3 174.3 174.2 174.2 174.2 174.1 174.1 174.0 174.0 174.0 173.9 173.9 173.8 173.9 174.4 174.4 174.1 173.2 172.2 171.3 170.7 170.3 170.4 170.7 171.1 171.3 171.7 172.7 173.7 174.1 174.6 175.3 176.2 177.0 177.8 177.8 176.1 171.8 167.7 166.2 167.3 170.0 173.1 175.1 177.2 178.7 180.4 182.2 184.6 186.9 189.3 192.0 194.5 197.0 198.3 199.0 199.2 199.2 198.9 198.4 198.0 197.6 197.0 196.4 196.2 196.0 195.7 195.9 196.1 196.4 196.5 196.6 196.8 196.4 196.1 196.1 196.3 196.6 196.8 197.0 197.1 197.1 197.1 196.7 196.2 195.9 195.6 195.3 195.2 195.1 195.2 195.7 196.9 197.4 197.9 198.3 198.6 198.7 198.6 198.6 198.4 198.2 197.9 197.5 197.1 196.8 196.5 196.2 196.2 196.1 195.9 195.8 195.7 195.5 195.4 195.1 195.2 196.2 197.2 198.2 199.1 200.1 201.3 202.5 203.8 205.1 206.0 206.5 206.9 207.3 207.8 208.3 208.7 209.0 209.1 209.2 209.3 209.2 209.0 208.8 208.5 208.3 208.0 207.7 207.3 206.8 206.2 205.5 205.0 204.9 204.9 204.9 204.9 205.0 205.0 205.0 205.0 205.1 205.5 205.4 205.3 205.6 206.0 206.3 206.6 206.9 207.2 207.5 208.1 208.6 209.1 209.7 210.4 211.3 211.8 211.8 211.6 210.9 210.2 209.6 209.0 208.6 208.4 208.0 207.4 206.9 206.5 206.3 206.0 205.8 205.6 205.3 205.0 205.5 205.9 206.3 206.7 206.7 206.5 206.8 207.1 207.4 207.7 207.8 207.8 208.0 208.1 208.1 208.3 208.3 207.9 207.7 207.7 207.9 208.1 208.2 208.3 208.5 208.3 208.2 208.1 207.9 207.8 207.7 207.7 207.7 207.8 207.5 207.2 203.8 195.7 192.7 193.5 194.5 195.3 195.9 196.5 197.2 197.8 198.6 199.3 200.3 201.2 202.1 202.7 203.2 203.7 204.3 204.9 205.2 205.1 205.1 206.3 209.8 213.4 216.0 217.0 217.5 216.0 214.7 213.4 211.9 210.3 208.7 207.2 206.3 205.8 205.3 204.8 204.4 204.4 204.3 204.6 205.0 205.4 205.8 206.1 206.6 207.0 207.4 207.8 208.3 208.5 208.6 208.7 208.7 208.7 208.9 208.6 208.2 207.8 207.4 206.9 206.7 204.7 198.9 196.3 196.1 196.0 195.8 195.7 195.6 195.3 195.6 195.8 195.9 196.0 195.9 195.7 195.4 194.9 194.1 193.4 192.7 192.0 191.5 191.1 190.9 190.9 191.0 191.1 191.3 191.5 191.8 192.1 192.4 192.7 193.0 193.4 193.7 194.0 194.4 194.8 195.2 196.4 197.3 197.4 197.2 196.3 195.3 194.6 194.6 194.8 194.9 195.3 195.7 196.1 196.6 197.0 197.2 197.0 196.6 196.7 196.7 196.7 196.7 196.4 196.2 195.7 194.8 194.5 194.3 194.2 193.9 193.6 193.5 193.5 193.5 194.2 195.4 196.7 198.8 201.2 203.6 205.9 207.6 209.1 210.4 210.6 210.8 210.6 209.2 207.4 207.0 206.8 206.6 206.4 206.2 206.1 206.0 206.1 206.4 206.6 206.9 207.0 206.9 206.8 207.5 207.9 207.8 207.7 207.7 207.7 207.8 208.0 208.3 208.9 209.6 211.1 212.6 214.2 216.5 221.1 226.9 229.5 231.7 233.6 235.7 237.0 237.5 237.0 236.2 235.4 234.8 234.5 234.3 234.0 233.6 233.3 233.1 232.8 232.5 232.1 231.8 231.5 231.2 230.9 230.7 230.7 230.7 230.7 230.8 231.1 232.1 232.7 233.1 233.4 233.9 234.4 234.7 235.2 235.6 236.1 236.3 236.1 235.5 235.0 235.0 234.7 234.2 233.6 232.8 232.1 231.5 230.9 230.1 229.1 228.3 227.8 227.4 227.3 227.7 228.2 228.7 229.7 230.4 231.3 232.1 233.1 233.9 234.7 235.3 235.8 236.1 236.3 236.3 236.4 236.5 236.0 235.5 235.2 235.0 234.8 234.7 234.7 235.4 236.9 237.3 237.3 237.3 237.4 237.4 237.4 236.8 235.7 235.7 235.5 235.1 234.6 234.3 233.9 
233.5 233.1 232.6 232.2 232.0 232.0 232.0 231.9 231.9 232.3 233.1 233.6 234.0 234.3 234.0 234.3 233.9 234.2 234.5 234.7 234.8 234.8 234.6 234.3 234.0 233.7 233.4 233.0 232.7 232.2 231.8 232.1 231.4 230.7 231.0 230.6 230.5 231.4 231.7 231.8 232.0 232.5 232.9 232.8 232.2 232.4 233.6 233.1 232.9 233.6 234.6 234.6 234.4 234.4 234.9 235.2 235.0 234.8 234.5 234.2 234.1 234.6 234.8 235.0 235.3 235.5 235.7 236.0 236.1 236.0 235.9 235.8 235.7 235.4 235.1 234.8 234.4 234.1 233.8 233.5 233.1 232.8 232.6 232.7 232.8 232.9 233.1 233.4 233.7 233.9 234.2 234.6 234.7 234.8 234.9 235.1 235.0 234.6 234.4 234.2 233.8 233.4 223.7 218.0 217.9 217.8 217.6 217.4 217.3 217.1 216.8 216.5 216.8 216.8 216.8 216.6 216.1 215.3 214.2 212.9 211.1 209.1 207.1 205.3 204.1 203.1 202.3 201.8 201.6 201.5 201.6 201.9 202.1 202.4 202.8 203.1 203.5 203.9 204.3 204.7 205.2 205.6 206.1 206.2 205.8 206.2 206.8 207.6 208.2 208.7 208.9 208.7 208.7 208.7 208.6 208.6 208.4 208.2 208.0 207.8 207.5 206.5 205.3 204.1 202.6 199.9 196.5 191.2 186.7 183.9 183.6 186.9 191.2 195.7 199.7 203.7 208.0 213.0 217.7 222.7 228.5 232.5 235.6 235.2 234.2 232.9 232.8 232.4 232.6 232.7 232.6 232.4 233.3 234.2 234.6 234.3 233.6 232.5 229.7 225.9 219.1 211.4 206.5 206.3 208.2 210.6 211.8 212.9 214.1 215.3 216.8 218.2 219.6 220.8 221.8 222.9 224.1 225.2 226.3 227.2 227.4 227.2 228.4 229.7 231.1 232.3 232.3 231.9 232.2 232.6 233.0 232.9 232.9 232.8 232.7 232.7 232.8 232.8 232.9 233.0 233.1 233.2 233.1 233.0 232.9 232.9 232.9 232.9 232.9 232.9 232.9 232.8 232.7 232.7 232.6 232.5 232.7 233.3 233.9 235.2 237.6 240.3 243.2 246.4 249.2 251.7 254.3 256.9 259.2 261.2 263.1 264.2 264.9 265.4 265.2 264.8 264.2 263.5 263.0 262.4 261.6 260.9 260.1 259.4 258.9 258.8 258.6 258.7 258.8 259.0 259.2 259.4 259.6 259.7 259.9 260.1 260.3 260.6 260.8 261.2 261.4 261.8 262.1 262.0 261.9 261.9 261.9 261.8 261.8 262.2 262.6 263.0 263.0 262.9 262.8 262.8 262.4 261.9 261.5 261.1 260.5 260.1 259.8 259.8 259.9 260.1 260.1 260.3 260.4 260.5 260.7 260.7 260.9 260.8 260.6 260.6 260.9 261.1 261.2 261.5 261.7 261.9 262.3 262.5 262.7 263.0 263.1 263.4 263.7 263.8 264.1 264.4 263.6 263.0 262.3 261.7 261.0 257.4 254.1 250.5 244.4 236.3 228.3 221.3 216.5 212.3 207.7 206.6 205.8 205.3 205.4 206.0 207.1 209.5 212.0 215.3 221.9 229.7 237.8 246.7 259.1 273.1 281.3 288.6 292.5 295.5 297.8 299.6 301.6 303.1 304.6 305.8 306.2 306.3 306.3 306.3 306.3 306.0 306.1 305.9 305.6 305.3 305.0 304.8 304.5 304.2 304.0 303.8 303.6 303.8 304.4 304.9 305.3 306.2 307.8 309.6 311.7 314.4 316.8 319.1 321.9 324.7 327.5 330.4 332.8 335.4 338.3 339.6 340.5 340.3 339.8 339.1 338.4 337.6 336.3 334.8 333.1 330.7 328.7 326.7 324.6 323.2 322.0 320.7 320.0 320.4 321.2 321.9 323.0 324.7 327.1 329.6 332.0 334.2 335.8 337.4 339.3 340.7 340.8 340.5 339.5 338.3 337.1 335.5 333.2 330.6 327.7 324.9 322.5 320.9 319.4 317.8 316.7 317.6 318.6 319.9 322.7 325.7 328.5 331.3 334.6 337.7 338.7 339.9 340.6 340.9 341.1 340.7 339.0 337.0 334.7 331.9 328.8 325.8 323.3 320.8 319.1 318.2 318.0 317.2 314.0 309.7 309.9 310.1 310.3 310.5 310.8 310.8 311.0 311.2 311.4 311.7 312.3 312.8 313.3 313.9 314.4 314.9 315.5 316.0 316.5 317.1 317.8 318.2 318.7 319.4 319.9 320.4 321.0 321.5 322.0 322.7 323.2 323.8 324.3 324.9 325.5 326.1 326.6 327.2 327.8 328.3 329.0 329.5 329.9 329.9 329.6 329.4 329.2 329.0 328.7 328.9 329.0 328.4 327.8 327.4 327.4 327.0 327.0 327.1 327.4 328.3 329.6 331.4 333.0 335.1 337.9 341.1 344.1 346.8 349.0 350.8 352.2 352.9 353.2 353.3 353.5 353.5 353.5 353.3 353.1 352.9 351.4 350.9 350.6 350.1 349.6 349.3 349.0 349.1 349.4 349.6 
350.2 350.9 351.6 352.3 352.9 353.2 353.6 353.8 353.7 353.5 352.2 350.7 349.1 347.2 345.2 340.2 335.6 331.3 326.9 323.8 325.4 327.2 328.8 332.0 341.3 352.4 360.7 368.6 376.8 384.6 392.2 396.9 399.3 400.9 402.3 402.9 403.1 402.2 400.9 399.3 397.0 394.6 392.5 390.4 389.3 388.2 387.6 387.6 387.9 388.4 388.8 390.0 391.1 392.4 393.6 394.8 395.3 395.5 395.8 396.1 396.2 395.5 394.6 393.8 392.4 391.0 389.5 388.4 387.4 386.6 385.8 385.5 386.2 387.3 388.4 389.5 390.5 391.7 392.9 394.0 393.8 393.5 393.4 393.2 392.9 390.9 389.1 387.4 385.1 380.8 370.0 357.1 345.7 334.8 326.6 329.0 337.0 346.0 356.1 363.8 370.7 375.1 378.8 382.8 387.5 391.7 395.8 400.1 404.9 408.4 411.7 413.5 414.1 415.1 415.8 416.3 416.6 416.7 416.6 416.5 416.4 416.3 416.1 415.7 415.3 415.2 415.0 414.7 414.4 414.1 413.6 413.3 413.0 412.7 412.4 412.7 413.0 413.3 413.6 413.9 414.2 414.5 414.7 415.0 415.3 415.6 415.9 416.1 416.4 416.7 417.0 417.3 416.7 413.7 409.6 405.5 400.7 391.9 377.0 370.2 365.9 361.5 357.8 353.7 349.6 345.9 344.5 343.5 342.3 341.3 340.0 338.9 337.9 336.8 335.8 334.3 333.0 332.4 332.2 331.9 331.6 331.2 331.0 331.0 332.3 333.8 335.3 337.7 344.8 353.9 365.7 380.8 394.5 409.4 417.5 420.6 422.1 421.2 420.4 419.2 418.0 416.7 416.0 416.3 416.5 416.6 416.7 416.6 415.4 413.4 410.9 408.6 407.1 406.9 409.1 420.8 432.0 444.5 458.5 469.9 475.1 478.4 479.9 480.6 479.5 477.4 475.2 472.9 470.2 467.2 465.2 464.0 463.0 462.9 463.2 463.6 464.3 465.2 464.6 463.5 463.1 463.1 463.5 464.1 464.8 465.4 465.8 466.4 467.1 467.8 467.0 466.2 465.6 464.3 462.1 459.5 457.0 453.5 444.9 432.4 419.0 404.8 392.0 378.2 362.0 353.7 348.8 343.5 336.9 338.1 341.1 344.5 348.3 352.9 359.0 364.6 367.2 368.2 368.8 368.7 368.8 369.2 369.5 369.2 368.9 368.8 368.3 368.2 368.2 368.1 368.2 367.5 367.4 367.4 366.4 364.9 364.0 363.4 363.1 363.2 363.6 365.9 370.0 375.9 382.8 390.4 399.4 408.8 413.9 416.7 419.6 421.8 423.2 424.1 424.8 425.3 425.7 425.5 423.7 421.1 418.8 417.0 415.5 414.7 413.9 413.6 414.5 414.5 413.7 414.1 414.7 415.2 415.6 416.1 416.7 417.2 417.1 417.0 416.8 416.7 416.5 416.4 416.3 416.1 416.0 416.0 415.8 415.7 415.5 415.3 415.2 415.1 414.9 414.8 415.1 415.4 415.9 416.1 416.4 416.7 416.7 416.5 416.4 416.3 416.0 416.0 415.7 415.3 414.7 413.9 413.4 413.5 413.6 413.8 413.9 414.1 414.2 414.3 414.5 414.6 414.8 414.9 414.9 414.5 413.9 414.1 414.2 414.5 414.6 414.8 415.0 414.8 414.7 414.4 414.3 414.2 414.8 415.1 415.4 415.5 415.2 414.7 414.3 413.7 412.9 412.0 411.2 410.4 409.5 408.6 407.6 407.5 407.3 407.2 407.2 407.8 408.5 409.1 409.7 410.5 411.1 411.8 412.5 413.4 414.6 416.1 417.4 418.6 419.5 420.1 420.7 421.5 421.8 422.0 422.1 422.1 422.2 422.3 422.1 421.6 421.2 420.5 419.7 418.9 418.2 417.3 416.5 415.7 414.6 413.4 412.0 410.8 410.0 409.5 409.1 409.2 409.5 409.8 409.8", - "input_type": "phoneme", - "offset": 26.2 + "f0_timestep": "0.005" }, { + "offset": 34.56, "text": "SP 一 种 逃 离 SP 印 象 中 少 年 的 身 影 SP 有 着 清 澈 的 眼 睛 SP 嘴 里 还 说 着 SP 因 为 我 们 还 年 轻 SP 所 以 总 有 再 一 次 的 权 力 SP", - "ph_seq": "SP y i zh ong ong t ao l i SP y in x iang zh ong ong sh ao n ian d e sh en y in in SP y ou zh e q in ch e d e y En j in SP z ui l i h ai sh uo zh e SP y in w ei w o m en h ai n ian q ing SP s uo y i z ong y ou z ai y i i c i0 d e e q van l i SP", - "note_seq": "rest F4 F4 F4 F4 G4 D#4 D#4 D#4 D#4 rest D#4 D#4 F4 F4 F4 F4 G4 G4 G4 G4 G4 G4 G4 G4 G4 G4 G4 G#4 rest C4 C4 D#4 D#4 F4 F4 F4 F4 F4 F4 F4 F4 F4 F4 rest F4 F4 F4 F4 F4 F4 F4 F4 F4 F4 rest F4 F4 F4 F4 F4 F4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 rest G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 A#4 G#4 G#4 F#4 F#4 G4 F4 
F4 F4 F4 rest",
- "note_dur_seq": "0.24 0.2 0.2 0.185 0.185 0.415 0.2 0.2 0.4 0.4 0.2 0.1999999 0.1999999 0.2 0.2 0.132 0.132 0.2680001 0.1999998 0.1999998 0.2 0.2 0.2 0.2 0.5999999 0.5999999 0.1980002 0.1980002 0.402 0.1999998 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.1999998 0.1999998 0.1999998 0.1999998 0.2000003 0.2000003 0.1999998 0.1999998 0.2000003 0.1999998 0.1999998 0.1999998 0.1999998 0.2000003 0.2000003 0.1999998 0.1999998 0.2000003 0.2000003 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.2000008 0.2000008 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.2000008 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.4000006 0.4000006 0.3999996 0.3999996 0.1000004 0.1000004 0.2999992 0.4000006 0.4000006 0.1999998 0.1999998 1 0.3999996 0.3999996 0.4000006 0.4000006 0.05",
- "is_slur_seq": "0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0",
- "ph_dur": "0.1858 0.058 0.1393 0.058 0.1858 0.3135 0.1045 0.1393 0.058 0.4063 0.1161 0.0813 0.1277 0.0697 0.1509 0.0464 0.1393 0.1741 0.0929 0.1393 0.058 0.1741 0.0232 0.058 0.1509 0.418 0.1741 0.1974 0.4063 0.1045 0.0929 0.3019 0.1045 0.267 0.1277 0.2902 0.1161 0.1509 0.0464 0.1277 0.0697 0.1277 0.0697 0.1974 0.1161 0.0929 0.1393 0.058 0.1161 0.0813 0.1161 0.0813 0.1509 0.058 0.1974 0.1045 0.0929 0.1277 0.0697 0.1277 0.0697 0.1277 0.0813 0.1277 0.0697 0.1509 0.0464 0.1161 0.0813 0.209 0.0697 0.1277 0.1625 0.0348 0.1277 0.0697 0.1393 0.058 0.2902 0.1161 0.3251 0.0697 0.1045 0.1625 0.1393 0.3135 0.0813 0.209 0.8359 0.1625 0.267 0.1277 0.4063 0.0464",
- "f0_timestep": "0.005",
+ "ph_seq": "SP y i zh ong t ao l i SP y in x iang zh ong sh ao n ian d e sh en y in SP y ou zh e q in ch e d e y En j in SP z ui l i h ai sh uo zh e SP y in w ei w o m en h ai n ian q ing SP s uo y i z ong y ou z ai y i c i0 d e q van l i SP",
+ "ph_dur": "0.1858 0.058 0.1393 0.058 0.4993 0.1045 0.1393 0.058 0.4063 0.1161 0.0813 0.1277 0.0697 0.1509 0.0464 0.3134 0.0929 0.1393 0.058 0.1741 0.0232 0.058 0.1509 0.418 0.1741 0.6037 0.1045 0.0929 0.3019 0.1045 0.267 0.1277 0.2902 0.1161 0.1509 0.0464 0.1277 0.0697 0.1277 0.0697 0.1974 0.1161 0.0929 0.1393 0.058 0.1161 0.0813 0.1161 0.0813 0.1509 0.058 0.1974 0.1045 0.0929 0.1277 0.0697 0.1277 0.0697 0.1277 0.0813 0.1277 0.0697 0.1509 0.0464 0.1161 0.0813 0.209 0.0697 0.1277 0.1625 0.0348 0.1277 0.0697 0.1393 0.058 0.2902 0.1161 0.3251 0.0697 0.267 0.1393 0.3135 0.0813 1.0449 0.1625 0.267 0.1277 0.4063 0.0464",
+ "ph_num": "2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 1 1",
+ "note_seq": "rest F4 F4 G4 D#4 D#4 rest D#4 F4 F4 G4 G4 G4 G4 G4 G4 G#4 rest C4 D#4 F4 F4 F4 F4 F4 rest F4 F4 F4 F4 F4 rest F4 F4 F4 G#4 G#4 G#4 G#4 rest G#4 G#4 G#4 G#4 G#4 G#4 A#4 G#4 F#4 G4 F4 F4 rest",
+ "note_dur": "0.24 0.2 0.185 0.415 0.2 0.4 0.2 0.2 0.2 0.132 0.268 0.2 0.2 0.2 0.6 0.198 0.402 0.2 0.4 0.4 0.4 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.4 0.4 0.1 0.3 0.4 0.2 1.0 0.4 0.4 0.05",
+ "note_slur": "0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0",
"f0_seq": "412.0 412.0 412.0 412.0 412.2 412.3 412.6 413.6 386.4 385.1 383.9 382.7 381.5 380.2 379.0 377.8 376.6 375.4 374.2 373.0 371.8 370.6 369.4 368.2 367.1 365.9 364.7 363.5 349.0 348.9 349.5 350.2 350.7 351.7 352.3 352.2 351.9 351.2 350.2
349.2 348.2 347.2 345.8 344.0 342.0 339.8 337.7 336.4 335.4 335.9 336.6 337.0 337.7 339.5 340.8 342.1 343.3 344.7 346.0 347.5 348.7 350.0 351.2 352.7 352.4 351.4 349.1 345.0 335.6 321.2 313.8 315.2 321.4 329.2 333.0 335.1 337.0 339.2 341.4 340.7 339.8 339.1 338.5 338.1 337.7 337.4 337.2 337.0 337.3 340.3 343.5 346.0 347.9 348.2 348.3 348.4 348.5 349.1 349.8 349.8 349.6 349.6 349.5 349.4 349.2 349.2 349.1 349.0 348.8 348.4 348.3 348.2 347.9 347.8 347.9 347.9 347.5 347.3 346.8 346.3 346.5 347.0 348.0 349.2 351.3 354.2 357.6 361.8 365.7 370.9 375.4 379.1 382.9 387.0 390.9 394.3 397.1 399.0 400.0 400.3 400.7 400.6 400.1 399.5 397.5 395.4 393.5 391.5 389.3 387.1 385.4 384.4 383.7 383.9 384.3 385.2 386.0 387.0 388.2 389.1 390.2 391.2 392.1 393.1 393.4 393.7 393.9 394.2 394.5 394.7 394.6 394.4 394.3 394.0 393.5 393.2 392.7 392.3 391.8 391.3 390.9 390.5 390.6 390.9 390.9 391.2 391.3 391.0 390.6 389.9 385.9 380.4 374.0 367.9 362.1 360.1 361.9 365.4 371.1 377.0 381.2 381.4 379.9 375.4 368.8 360.7 353.4 344.8 334.2 321.0 309.7 303.5 301.4 302.8 305.3 306.4 306.4 306.3 306.7 307.4 308.0 308.3 308.7 309.0 309.4 309.8 310.7 311.7 312.2 312.7 313.0 313.1 313.1 313.1 313.0 312.3 311.5 310.6 309.7 307.3 305.0 302.9 300.9 299.8 299.0 298.7 299.1 300.3 303.5 307.3 309.1 310.1 311.1 311.7 312.3 312.7 313.1 313.7 314.2 314.7 315.1 315.5 315.9 316.4 316.7 316.7 316.8 316.9 316.9 316.4 315.8 315.2 314.4 313.3 312.2 311.2 310.4 309.6 309.0 308.4 308.2 308.1 308.0 307.9 308.4 308.8 309.1 309.4 310.1 310.5 311.1 311.6 312.1 312.4 312.4 312.4 312.3 311.9 311.5 310.4 309.5 309.0 309.0 309.0 308.8 308.6 308.5 308.3 308.1 307.8 307.5 307.3 307.1 307.0 307.0 306.9 306.8 306.8 306.8 306.9 307.1 307.2 307.3 307.4 308.2 310.2 310.3 310.7 310.2 309.9 309.6 309.3 308.9 308.6 308.3 308.0 307.7 307.3 307.0 306.7 306.4 306.1 305.7 305.4 305.1 304.8 304.5 304.2 292.1 291.6 291.0 290.6 290.1 289.6 289.0 288.5 288.0 287.5 287.0 286.5 286.0 285.5 285.1 284.5 284.0 283.5 283.1 282.6 282.0 282.6 287.7 292.4 295.8 298.8 302.0 304.8 307.3 308.8 309.9 310.9 311.8 312.2 312.7 313.1 313.6 313.8 313.4 312.6 311.8 306.3 299.7 291.5 282.5 273.1 278.0 286.2 294.0 299.7 304.0 308.5 313.1 316.7 320.7 325.1 330.5 335.7 340.6 343.9 346.4 349.6 354.6 358.0 358.1 357.0 355.4 355.0 354.4 353.6 352.9 351.6 350.3 349.2 348.1 348.2 348.3 348.7 348.8 349.0 349.0 349.2 348.9 348.5 347.0 345.2 337.9 330.4 320.9 309.0 295.7 290.2 293.1 297.0 301.1 305.6 308.7 311.7 315.7 324.4 335.4 345.4 349.8 352.1 352.4 351.9 351.3 350.7 350.3 350.4 350.6 350.1 349.4 348.7 348.1 347.6 347.9 348.2 348.6 349.4 350.6 352.5 354.7 357.1 359.9 363.4 367.4 371.2 374.5 378.2 381.5 384.3 387.0 389.1 391.1 392.9 394.4 395.2 396.0 396.7 396.8 396.8 396.9 397.0 396.8 396.8 396.8 396.7 396.6 396.4 395.6 394.8 392.9 388.8 383.0 371.9 361.2 357.7 358.5 361.0 364.1 367.2 370.2 372.4 373.2 373.2 373.6 374.3 375.1 375.9 376.2 376.3 376.6 376.9 377.4 378.0 378.5 379.0 379.6 380.9 382.6 384.9 389.5 392.6 395.3 398.4 399.5 400.2 400.1 399.7 399.3 397.9 396.6 395.8 395.1 394.5 393.7 393.2 392.7 392.3 392.0 391.7 391.5 391.7 391.9 392.2 392.5 392.8 392.8 392.5 392.2 391.4 389.6 387.8 386.0 386.2 386.5 386.9 387.7 390.4 393.8 396.8 396.8 396.6 397.1 397.9 396.8 395.8 395.3 395.0 394.7 394.4 393.9 393.7 393.4 393.1 392.8 392.3 391.4 390.4 390.0 389.9 389.6 389.6 390.1 390.9 391.4 392.0 392.4 392.3 391.8 389.9 387.0 382.3 376.3 367.7 359.7 357.9 362.7 375.2 390.4 395.9 396.8 396.8 396.6 396.6 396.3 395.2 393.5 389.7 383.9 377.9 372.6 369.3 367.0 367.9 369.2 369.7 370.2 371.1 371.9 372.9 373.9 
374.7 375.3 374.7 375.5 376.4 377.2 378.0 378.9 379.4 379.7 380.5 381.8 383.5 385.1 386.6 387.6 388.6 389.5 387.6 387.3 390.0 393.7 396.6 398.9 399.8 399.7 399.5 399.3 399.0 398.7 398.5 398.0 396.3 394.5 393.5 392.7 391.7 390.6 389.9 389.9 390.0 390.1 390.2 390.5 391.1 391.9 392.7 393.4 394.2 395.0 395.3 395.7 396.1 395.8 395.5 395.1 394.8 394.5 394.2 393.9 394.3 394.5 393.8 393.0 392.4 392.3 392.4 392.4 392.7 392.8 392.9 393.1 393.1 394.2 393.8 393.1 392.4 391.5 390.9 390.5 390.9 391.4 392.2 392.2 392.0 392.0 392.0 391.8 391.8 391.7 391.5 391.5 392.0 392.5 392.9 393.4 393.8 394.3 394.8 395.2 395.5 395.5 395.4 395.3 395.2 395.2 395.0 395.0 394.9 394.8 395.2 395.7 395.2 394.6 394.0 393.4 392.4 391.1 388.8 386.7 384.3 380.3 375.4 370.6 365.9 359.9 352.3 344.8 337.5 330.4 323.9 317.0 309.3 304.7 304.5 309.0 321.3 337.9 354.2 366.9 378.4 386.9 392.0 391.4 392.2 393.1 393.7 394.0 394.3 394.3 394.1 394.0 394.0 394.0 393.8 393.7 393.2 392.7 393.2 393.6 393.9 394.0 393.8 393.5 393.4 393.2 393.1 393.1 392.6 392.1 391.9 391.8 392.0 392.3 392.9 393.8 395.0 396.8 399.1 402.6 405.4 408.2 412.0 415.7 418.9 421.8 425.1 427.0 428.8 430.7 431.7 431.8 431.4 430.7 429.5 428.1 426.4 424.8 423.4 422.4 421.8 421.0 420.1 419.5 418.9 419.2 419.8 420.4 421.0 421.7 422.8 423.8 424.0 424.0 424.0 423.9 423.1 422.1 421.1 419.8 418.3 416.6 414.8 414.2 413.3 412.7 411.9 411.2 410.4 409.8 409.4 409.5 410.1 410.6 411.2 412.2 413.4 414.6 415.8 417.1 418.3 419.4 420.6 421.8 422.9 423.4 423.9 424.3 424.7 424.3 423.7 423.2 422.6 420.0 416.0 412.0 406.7 399.1 398.5 397.8 397.2 396.7 396.1 395.3 393.9 392.8 391.4 389.7 388.0 385.6 382.5 378.3 371.9 363.1 353.0 341.6 327.5 312.7 298.6 286.3 277.1 269.3 263.0 258.5 255.6 253.7 252.0 251.0 250.3 249.9 249.6 249.1 249.1 249.1 249.1 249.2 249.2 249.4 249.2 248.2 247.1 246.1 245.0 245.0 245.1 245.2 245.7 246.4 246.9 247.7 248.4 249.3 250.4 251.5 252.6 253.6 254.8 255.9 256.7 257.6 258.2 258.6 258.9 259.3 259.9 260.6 261.3 262.1 261.9 261.7 261.5 261.4 261.2 261.0 260.8 260.7 260.6 260.6 260.6 260.6 260.6 260.6 260.7 260.7 260.7 260.7 260.7 260.7 261.4 262.1 262.7 263.3 264.1 264.2 262.6 259.5 252.2 242.1 233.5 230.8 231.5 233.4 237.7 242.6 247.3 251.5 256.0 260.3 264.7 269.2 272.6 276.3 280.9 285.5 288.8 292.1 296.4 299.5 303.0 308.2 312.1 314.1 315.1 316.6 317.7 318.4 318.7 318.8 317.8 316.9 316.0 315.0 313.7 312.1 310.8 309.3 307.7 306.7 305.8 305.3 305.2 305.3 305.4 305.6 306.2 307.0 307.8 308.3 308.7 309.2 309.7 310.1 310.6 311.1 311.7 312.3 313.0 313.7 314.1 313.5 312.4 310.0 306.8 303.0 295.5 287.5 277.2 266.8 264.3 266.8 271.8 277.8 284.3 285.4 286.5 287.1 286.9 286.3 287.1 288.0 288.8 289.6 290.0 290.3 290.4 290.8 291.1 291.5 292.8 294.4 295.8 297.5 298.3 299.4 300.5 301.2 302.2 303.3 305.3 310.3 315.1 319.5 324.3 324.6 324.8 324.4 323.6 322.5 320.9 319.5 318.3 316.7 315.3 315.3 315.7 316.6 317.8 319.1 322.4 325.8 328.6 331.2 333.8 336.5 339.0 341.3 343.6 345.6 347.9 349.7 350.7 351.5 352.5 353.0 352.7 352.3 351.8 351.3 350.5 350.0 349.6 349.1 349.2 349.3 349.5 349.6 349.9 350.6 351.4 351.6 350.5 349.0 347.4 344.5 337.0 328.9 319.0 309.2 311.4 314.4 319.1 324.8 331.0 327.4 324.0 321.3 319.8 319.0 319.8 320.6 321.4 322.3 323.6 325.0 326.3 328.0 331.9 336.7 341.7 346.8 351.3 355.6 359.0 357.8 356.5 355.2 354.1 353.5 353.0 352.4 351.8 351.4 350.8 350.4 350.4 350.1 350.0 349.8 349.6 349.5 349.4 349.1 348.2 347.2 346.2 344.5 340.0 334.6 317.7 301.6 289.8 282.6 276.7 282.7 293.0 304.5 319.7 336.4 347.4 351.8 353.8 353.7 352.7 351.9 352.3 352.8 352.7 352.3 352.0 351.5 351.1 350.7 
350.4 350.0 349.7 349.3 349.2 349.0 349.0 348.9 348.8 348.8 348.6 348.6 348.6 348.5 348.4 348.6 348.8 348.9 349.1 349.4 349.4 349.7 349.9 350.0 350.2 350.2 350.2 350.2 350.2 350.2 350.2 350.2 350.3 350.4 350.6 350.6 350.7 350.8 351.0 351.0 351.3 351.1 350.4 349.6 348.8 348.8 348.7 347.9 347.1 346.4 345.4 343.8 338.8 332.2 320.2 306.7 298.5 291.6 289.0 289.7 291.6 300.4 309.0 318.5 330.0 341.8 348.8 354.3 356.4 357.2 356.8 355.9 355.2 354.9 354.8 354.5 354.2 353.3 352.4 351.5 350.4 349.4 348.4 347.2 345.9 344.4 343.1 342.2 342.6 343.4 344.2 345.4 347.9 350.4 353.3 355.3 356.5 356.2 355.2 352.9 349.8 346.1 339.8 331.4 321.7 315.7 315.5 315.4 315.3 315.2 315.1 314.9 314.7 314.7 314.6 314.4 314.4 314.4 314.4 314.5 314.6 314.6 314.6 314.6 314.6 314.6 314.6 314.7 314.7 314.7 314.7 314.7 314.7 314.7 314.7 314.9 314.9 314.9 314.9 314.9 314.9 314.9 315.0 315.1 315.1 315.1 316.4 323.4 330.6 337.9 344.0 346.3 348.4 350.1 350.3 350.0 349.6 348.6 347.6 346.6 345.4 345.4 345.3 345.2 345.4 346.8 348.2 349.1 349.8 350.8 352.7 354.7 356.0 355.7 354.7 353.7 350.9 348.1 345.7 343.3 341.6 340.8 341.3 342.3 345.1 348.4 350.4 352.4 353.4 353.2 352.3 351.8 351.5 351.0 350.7 350.2 349.8 349.7 349.6 349.6 349.6 349.4 349.4 349.3 349.2 349.2 349.0 348.7 347.8 346.5 345.2 346.5 349.0 351.7 354.1 353.9 352.9 350.8 348.9 347.0 346.0 345.9 347.5 350.2 353.8 357.4 358.4 357.9 357.2 356.5 355.7 354.5 353.4 352.9 352.4 351.9 351.4 350.8 350.4 349.8 349.2 348.3 345.9 342.2 334.5 323.8 313.7 309.5 312.6 321.8 333.8 338.7 342.2 343.7 344.5 345.2 346.0 348.1 349.6 350.2 350.4 352.2 353.9 355.6 357.4 359.3 361.1 361.2 360.0 358.4 356.6 355.5 354.6 353.7 352.7 351.7 350.7 350.0 349.8 349.8 349.8 349.8 349.8 349.8 349.7 349.0 348.3 346.9 343.4 335.1 317.8 300.2 287.0 283.4 291.7 304.5 312.7 319.9 323.8 327.0 330.8 334.4 338.0 342.1 347.2 352.5 357.1 359.8 361.2 361.1 360.3 359.5 358.8 358.1 357.2 355.5 353.6 351.0 348.8 346.4 343.8 341.5 340.3 339.3 338.5 338.7 339.4 340.5 342.1 344.3 346.6 348.9 351.0 353.0 355.3 357.6 358.3 358.8 359.4 359.7 357.8 354.8 348.4 340.2 330.1 320.4 319.5 318.6 317.8 316.9 316.2 315.3 314.5 313.7 312.7 311.8 311.2 310.7 310.1 309.6 309.0 308.4 307.9 307.3 306.8 306.1 305.5 305.0 304.4 303.9 303.3 302.7 302.2 301.7 301.1 300.5 299.9 299.4 298.8 298.4 297.8 297.3 296.7 296.2 295.7 295.0 295.9 302.6 309.2 314.7 320.4 326.2 331.3 335.8 340.5 345.4 347.7 350.0 351.8 352.6 353.1 353.3 352.6 351.7 350.9 350.4 349.9 349.4 348.9 348.3 347.8 347.3 346.9 346.4 345.9 346.4 346.9 347.4 348.0 349.0 350.4 351.7 353.0 354.0 354.2 353.7 353.4 352.9 351.9 350.7 350.0 349.6 349.3 349.0 349.3 349.8 349.1 348.8 349.2 349.9 350.4 351.1 351.5 351.4 351.1 350.8 350.8 350.5 350.3 350.4 350.4 350.7 350.7 350.0 349.2 348.2 347.4 346.4 345.7 345.1 345.6 346.3 347.6 348.8 350.2 351.7 351.9 350.6 349.4 348.4 348.6 348.9 349.2 349.5 349.8 349.6 349.2 348.9 348.8 349.0 349.4 349.9 350.2 350.4 350.1 349.8 347.4 347.0 346.7 345.0 342.2 338.9 335.4 331.6 327.7 324.0 320.5 317.4 315.5 315.7 316.9 321.0 328.1 337.8 354.0 373.6 392.2 403.4 412.8 419.9 424.3 424.7 424.3 423.8 422.8 420.4 417.3 414.7 413.3 412.2 410.8 412.3 413.6 414.8 415.8 415.8 415.7 415.2 414.0 411.6 404.6 396.0 384.7 373.8 362.4 356.6 353.4 355.4 358.1 361.2 366.0 371.0 376.3 383.6 393.0 404.6 415.9 420.9 422.3 422.9 423.5 423.2 423.0 422.7 422.2 421.1 419.9 418.7 417.6 416.7 416.3 415.9 415.5 415.5 415.5 415.8 415.8 415.9 416.3 416.9 417.5 417.8 417.7 415.9 413.2 410.5 407.6 405.0 403.0 402.1 402.8 403.7 404.5 405.2 406.6 412.0 417.1 418.4 419.0 419.7 420.4 419.4 
418.7 419.0 419.8 419.4 418.8 418.2 417.7 417.3 416.7 416.1 415.6 414.9 413.9 412.7 411.5 409.5 405.9 401.1 392.4 383.2 375.6 371.3 371.0 373.9 377.1 379.7 380.8 381.9 384.8 388.2 392.0 396.4 402.3 409.4 416.6 422.9 426.1 428.5 431.2 430.5 429.8 429.2 428.2 426.2 424.0 421.1 418.6 416.1 413.2 410.5 408.2 406.6 405.2 403.9 402.5 401.9 402.8 404.3 406.1 408.2 412.0 415.9 420.6 425.3 429.4 431.3 432.8 433.8 431.9 428.1 419.3 410.7 402.4 395.0 394.4 393.9 393.5 393.0 392.4 391.9 391.4 391.0 390.5 390.0 390.2 390.5 390.8 391.0 391.3 391.6 391.9 392.1 392.4 392.7 393.0 393.2 393.5 393.8 394.0 394.3 394.6 394.8 395.1 395.4 392.9 388.3 384.1 375.7 363.2 350.8 339.8 332.3 329.5 332.5 350.2 379.2 408.5 420.9 425.5 426.1 424.9 423.4 421.7 420.1 418.6 417.0 416.1 415.2 414.3 413.4 412.8 412.8 413.1 413.2 413.4 413.5 413.8 413.9 414.1 414.2 414.3 414.5 414.8 414.8 415.2 415.9 416.4 416.5 416.3 416.4 416.9 417.5 418.1 418.9 419.5 419.9 419.7 419.5 418.9 418.6 418.2 417.8 417.5 417.2 416.7 416.5 416.4 416.1 416.0 415.9 416.5 416.9 417.3 417.0 416.4 416.0 415.1 412.6 409.6 397.5 382.7 369.9 362.4 363.6 366.7 373.3 379.4 384.6 390.2 393.2 396.0 398.4 400.2 401.6 414.0 424.8 428.7 427.7 425.3 422.8 421.0 420.1 419.7 419.2 418.0 417.0 416.1 415.2 415.3 415.4 415.5 415.5 415.7 415.8 415.9 415.2 414.5 414.3 414.8 416.1 416.3 416.0 414.0 409.6 403.7 397.4 390.2 381.8 372.8 368.2 366.8 369.0 374.1 381.5 392.2 405.2 415.6 419.5 420.1 420.7 420.3 420.9 421.5 422.1 422.7 422.1 419.9 417.7 415.3 411.8 410.6 410.2 410.2 410.5 411.1 411.5 412.0 412.7 413.6 414.5 415.1 415.9 416.8 417.5 418.4 418.8 418.2 417.4 416.7 415.8 415.0 414.3 413.9 413.6 413.3 413.0 412.9 412.9 413.6 414.8 415.9 416.9 418.0 419.2 419.7 419.6 418.5 415.8 406.3 393.6 375.7 358.3 340.2 327.4 325.7 324.0 323.2 324.6 327.0 332.9 338.0 342.3 347.1 351.9 356.7 361.4 365.0 368.4 371.7 374.7 377.6 380.4 384.2 392.2 401.4 410.8 417.9 421.8 424.5 423.6 422.7 421.7 420.8 419.9 418.9 417.5 415.7 413.8 412.9 412.0 411.3 410.7 410.1 410.5 411.4 412.2 412.9 413.7 414.8 416.0 417.1 418.0 418.0 417.7 417.3 417.7 418.1 418.0 417.7 417.4 417.0 416.5 415.7 414.8 414.2 413.9 413.7 413.4 413.2 413.0 413.2 413.4 413.6 413.9 414.5 415.1 415.7 416.4 417.0 416.7 416.4 416.1 415.8 415.5 414.7 414.1 413.7 414.0 414.6 413.9 413.2 412.7 411.5 407.2 398.2 389.2 378.9 366.5 353.3 344.1 336.5 331.0 328.5 331.5 346.2 363.2 378.9 395.0 404.6 412.6 417.4 417.7 416.8 415.3 414.4 413.6 413.1 412.9 412.9 413.9 415.0 416.5 420.7 427.2 434.1 440.6 445.5 449.9 454.5 458.2 462.1 465.1 467.4 469.1 470.1 470.8 471.2 471.5 471.9 470.8 469.9 469.3 469.5 470.2 470.7 470.5 470.3 470.0 469.7 469.1 468.6 468.2 467.8 467.2 466.8 466.3 466.0 465.5 464.3 462.9 461.4 460.3 458.6 455.0 451.1 447.3 444.0 440.3 435.7 428.9 422.9 415.3 401.7 380.8 367.1 355.1 347.2 342.8 338.9 337.1 338.0 339.2 347.1 358.8 371.2 382.5 389.6 396.8 404.9 412.2 412.1 411.4 409.0 406.3 404.2 402.9 401.8 400.9 399.8 398.2 396.8 396.6 396.6 396.6 397.1 399.9 402.8 405.9 408.6 411.5 413.7 414.9 415.8 416.7 417.9 418.4 418.1 417.8 417.2 416.7 416.3 415.8 414.8 413.6 412.9 412.9 413.1 413.3 412.9 412.6 413.1 413.5 414.2 415.3 416.4 417.1 416.9 416.6 416.0 415.7 415.4 415.2 414.9 414.1 413.2 412.2 411.0 409.5 407.9 405.2 401.0 394.8 387.9 381.1 374.1 366.6 355.2 338.5 330.4 321.5 313.4 308.4 305.6 309.9 321.6 345.8 356.6 361.1 364.5 368.1 370.0 371.3 372.9 373.0 373.2 373.2 373.2 373.2 372.6 371.9 371.1 370.6 369.9 369.4 368.8 368.2 367.6 367.1 366.6 366.1 365.7 365.5 365.7 366.6 367.3 367.8 368.3 369.3 370.9 372.8 375.4 378.4 
381.2 383.9 386.5 388.5 390.1 391.3 392.2 393.5 394.2 394.5 394.7 394.7 394.7 394.5 394.5 394.5 395.0 394.7 394.5 394.3 394.3 394.0 394.0 393.6 393.2 392.8 392.2 391.9 391.5 391.3 391.1 392.7 392.1 392.8 393.4 393.9 394.5 395.1 395.4 395.5 395.6 395.6 395.9 395.9 396.0 396.1 396.1 396.1 396.1 396.0 395.9 395.9 395.9 396.1 396.2 396.3 396.8 397.1 397.5 397.6 397.9 397.9 398.2 398.0 397.8 397.4 397.0 396.3 395.5 394.7 393.6 392.2 390.6 389.2 387.8 386.4 385.3 384.2 383.2 382.5 381.8 381.7 381.8 382.7 383.8 385.0 386.6 388.6 390.5 392.9 395.6 398.2 400.5 402.6 404.1 405.2 405.4 404.4 404.4 404.3 403.8 403.0 401.7 399.5 397.5 395.4 392.9 390.1 387.6 385.4 382.9 380.8 379.2 378.1 377.5 377.3 378.2 379.6 380.9 382.6 384.6 386.8 389.4 392.2 394.6 396.8 398.6 400.2 401.5 402.7 403.5 404.4 404.5 405.1 405.2 404.6 403.2 401.3 399.6 397.7 395.1 392.0 389.1 386.2 383.8 381.4 379.1 376.7 375.2 374.0 372.8 372.1 372.7 373.4 374.6 376.1 378.2 381.0 383.7 386.4 389.7 393.1 396.1 398.5 400.4 402.0 403.2 404.2 403.7 402.4 400.6 398.4 395.7 392.7 390.1 386.2 382.8 379.6 376.9 373.9 370.3 366.8 363.6 361.5 360.2 358.7 357.2 356.0 354.3 352.8 352.1 351.9 351.3 350.4 349.8 349.5 348.8 347.2 345.3 342.9 339.3 334.2 330.0 331.6 336.8 343.1 347.2 350.7 351.5 351.5 351.5 351.7 350.6 349.5 348.5 347.8 347.0 346.1 344.8 343.8 342.7 341.4 341.5 341.6 341.8 342.0 342.6 343.6 344.3 345.1 346.0 346.8 347.8 348.6 349.1 349.1 348.6 348.2 348.0 348.2 348.7 348.4 348.1 348.6 349.1 349.5 350.0 350.5 351.0 351.5 352.0 352.5 353.0 353.4 353.5 353.5 353.3 353.3 353.2 352.8 352.2 351.5 350.9 350.3 349.8 349.2 348.0 346.6 345.3 344.1 342.7 341.4 340.5 339.4 338.4 337.5 336.6 337.1 337.8 338.3 339.2 342.8 347.0 350.7 353.0 354.3 355.3 356.4 357.1 357.5 357.8 358.0 357.0 356.2 355.4 354.4 352.5 350.4 348.2 345.9 343.2 340.5 338.0 336.7 335.7 335.2 335.0 334.8 335.4 336.6 339.2 342.6 346.0 348.6 350.7 353.0 355.1 357.2 359.0 359.6 359.4 358.8 358.1 356.2 354.2 352.0 349.4 346.9 344.4 342.2 339.8 337.9 336.0 334.3 333.4 332.8 333.5 334.6 335.8 337.8 340.4 342.8 345.5 347.9 349.7 351.3 353.3 355.1 356.1 356.8 357.0 357.0 356.8 356.6 355.9 354.9 353.9 352.8 351.3 349.8 348.0 345.2 345.2 345.1 345.0 345.0 344.8 344.8 344.8 344.8 344.8", - "input_type": "phoneme", - "offset": 34.56 + "f0_timestep": "0.005" }, { + "offset": 48.48, "text": "SP 我 也 想 说 再 见 啊 SP 风 月 梦 话 SP 把 想 与 念 留 下 SP", - "ph_seq": "SP w o y E x iang sh uo z ai ai j ian a SP f eng y ve m eng h ua SP b a x iang y v n ian l iu x ia SP", - "note_seq": "rest F4 F4 G4 G4 A4 A4 A#4 A#4 G4 G4 A4 C5 C5 C5 rest C4 C4 G4 G4 A4 A4 A4 A4 rest C4 C4 E4 E4 F4 F4 F4 F4 C4 C4 A3 A3 rest", - "note_dur_seq": "0.32 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.133 0.133 0.267 0.2 0.2 0.3999999 0.2 0.4000001 0.4000001 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.2 0.4000001 0.4000001 0.3999996 0.3999996 0.2000003 0.2000003 0.4000001 0.4000001 0.5999999 0.5999999 0.4000001 0.4000001 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.3019 0.0232 0.1393 0.058 0.1277 0.0813 0.1277 0.0697 0.1161 0.0813 0.1277 0.1858 0.0813 0.209 0.3947 0.0697 0.1277 0.3135 0.0929 0.3019 0.0929 0.1045 0.0929 0.4063 0.1741 0.0232 0.2438 0.1625 0.3019 0.0929 0.1393 0.058 0.3367 0.0697 0.3947 0.1974 0.4063 0.0464", - "f0_timestep": "0.005", + "ph_seq": "SP w o y E x iang sh uo z ai j ian a SP f eng y ve m eng h ua SP b a x iang y v n ian l iu x ia SP", + "ph_dur": "0.3019 0.0232 0.1393 0.058 0.1277 0.0813 0.1277 0.0697 0.1161 0.0813 0.3135 0.0813 0.209 0.3947 
0.0697 0.1277 0.3135 0.0929 0.3019 0.0929 0.1045 0.0929 0.4063 0.1741 0.0232 0.2438 0.1625 0.3019 0.0929 0.1393 0.058 0.3367 0.0697 0.3947 0.1974 0.4063 0.0464", + "ph_num": "2 2 2 2 2 2 1 1 2 2 2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 G4 A4 A#4 G4 A4 C5 C5 rest C4 G4 A4 A4 rest C4 E4 F4 F4 C4 A3 rest", + "note_dur": "0.32 0.2 0.2 0.2 0.2 0.133 0.267 0.2 0.4 0.2 0.4 0.4 0.2 0.4 0.2 0.4 0.4 0.2 0.4 0.6 0.4 0.05", + "note_slur": "0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "348.4 348.4 348.4 348.7 348.8 348.8 348.7 348.7 348.6 348.5 348.5 348.4 348.4 348.3 348.3 348.2 348.2 348.1 348.1 348.0 347.9 347.9 347.8 347.8 347.7 347.7 347.6 347.6 347.5 347.4 347.4 347.3 347.3 347.2 347.2 347.1 347.1 347.0 346.9 346.9 346.8 346.8 346.7 346.7 260.7 256.4 252.3 249.8 247.7 245.4 243.1 241.1 239.2 236.9 234.8 234.2 235.2 236.0 237.0 237.8 238.9 240.2 243.1 256.0 272.9 284.8 297.3 308.8 321.2 330.6 337.6 343.0 345.4 347.8 348.6 348.7 348.8 348.9 349.2 349.4 349.9 350.3 350.6 351.1 351.7 351.5 351.4 351.1 350.7 350.0 347.8 345.7 343.4 339.6 335.4 334.2 333.8 335.3 338.9 343.6 349.5 356.9 364.5 372.2 379.5 386.4 390.3 392.8 395.0 397.0 397.2 397.1 396.7 396.0 395.6 395.1 394.5 393.3 390.3 386.4 377.8 365.2 349.0 331.3 322.8 336.7 349.7 357.2 361.9 366.2 370.5 373.7 376.8 380.5 385.3 390.4 395.7 400.3 404.3 408.2 413.2 417.1 418.9 420.3 421.8 427.6 437.2 444.5 447.2 448.5 448.5 448.5 447.8 446.5 444.6 442.9 442.0 441.8 442.0 442.5 442.7 443.2 443.3 442.9 439.5 434.9 424.8 409.4 390.1 375.8 383.2 396.7 410.2 420.7 429.2 437.2 439.7 440.1 440.2 440.5 441.0 441.7 442.2 442.7 443.3 453.0 462.8 469.8 472.1 472.1 471.9 471.6 471.0 470.4 469.4 468.7 468.2 467.6 466.6 465.9 466.6 467.1 467.5 467.8 466.4 464.5 460.8 453.1 441.3 427.5 414.9 396.3 376.1 368.6 363.0 361.3 363.0 364.1 365.0 367.0 371.4 374.9 377.6 380.0 382.2 384.4 386.8 388.2 389.2 391.1 393.8 394.5 393.5 392.8 392.4 392.4 392.6 392.7 392.3 391.8 391.5 391.3 391.4 392.2 393.6 395.2 397.6 400.1 403.4 407.7 413.1 418.4 423.3 427.8 431.4 434.5 437.1 438.7 439.6 440.3 440.0 439.8 439.4 439.2 439.0 439.3 439.5 439.8 440.2 440.8 441.4 442.0 442.5 443.2 443.8 443.4 442.9 442.3 440.9 439.2 437.6 434.5 428.2 419.1 406.3 395.3 393.7 394.5 398.6 404.2 410.5 418.1 425.3 431.6 437.5 443.8 450.5 456.7 462.5 469.1 477.4 484.2 491.5 500.8 510.4 520.8 529.8 534.4 535.9 535.5 534.2 532.8 532.6 532.2 531.8 530.6 529.5 528.7 528.2 528.1 527.8 527.7 527.3 526.7 525.7 524.5 523.4 522.0 520.1 517.8 517.1 516.6 516.6 516.9 517.2 518.7 519.9 520.9 521.4 521.7 522.1 522.5 523.1 524.9 526.9 531.1 534.6 535.9 535.6 535.2 534.7 534.1 533.8 533.4 533.0 532.3 531.9 531.2 529.7 527.8 525.9 524.2 522.5 520.6 519.0 517.5 516.0 515.3 515.6 516.3 517.1 517.8 518.7 520.7 523.3 525.8 528.3 530.6 532.9 533.9 535.1 536.0 536.6 536.0 534.9 532.6 529.8 526.8 523.6 520.8 518.2 515.8 513.8 512.6 511.9 512.3 513.1 514.7 516.8 519.0 521.7 524.6 527.7 531.1 534.2 537.4 539.2 540.8 542.1 541.7 540.8 539.2 537.2 533.6 529.6 525.8 521.6 517.8 513.7 509.2 508.3 509.6 511.4 513.5 515.5 515.5 515.5 515.6 515.7 515.7 515.3 513.7 512.9 511.4 509.2 506.1 501.5 495.7 487.6 475.4 459.0 440.1 419.2 393.4 367.2 342.8 321.7 306.6 294.0 283.8 276.7 272.1 269.1 266.6 264.8 263.9 263.3 263.0 262.3 262.2 262.4 262.6 262.8 263.0 263.1 263.3 263.4 263.2 262.9 262.7 260.5 258.5 256.8 255.4 254.5 253.5 252.8 252.8 253.2 254.5 255.8 257.2 258.5 260.0 261.5 262.2 262.8 263.4 263.9 263.7 263.5 263.2 262.8 261.6 260.3 260.0 259.7 259.6 259.3 259.1 259.7 260.2 260.4 260.5 260.7 260.9 261.0 261.2 261.6 261.9 
262.3 262.6 262.6 262.5 262.5 262.4 262.5 262.7 263.0 263.3 262.2 261.3 260.2 258.7 257.1 255.5 253.6 251.5 249.4 246.4 243.4 240.8 239.1 237.6 236.2 234.9 234.2 234.0 234.5 235.8 238.5 242.8 250.8 263.4 278.8 298.3 317.7 331.5 341.8 351.5 358.3 361.6 362.6 362.8 362.4 361.1 359.9 358.3 356.7 355.7 355.1 355.1 355.9 357.5 359.0 361.1 364.4 367.4 370.9 375.6 380.8 385.0 387.5 389.9 392.2 394.2 395.0 395.3 395.7 396.1 395.0 393.9 393.0 392.3 391.8 391.2 390.6 390.3 390.4 390.4 390.6 390.7 391.0 391.3 392.0 392.6 393.5 394.1 394.3 394.5 394.5 394.6 394.5 393.8 392.7 390.2 387.9 385.8 383.7 381.5 382.5 383.4 384.4 385.8 387.9 390.8 393.7 395.8 398.4 401.4 404.6 407.9 412.4 420.4 430.0 438.9 443.4 445.1 445.6 445.9 446.2 446.3 445.8 445.0 444.1 442.4 440.6 439.1 437.5 438.0 438.7 439.3 439.4 439.0 438.7 434.6 430.7 425.5 415.1 402.3 387.5 377.0 373.3 371.8 370.4 373.1 375.9 378.4 381.1 385.3 390.0 394.1 398.7 406.2 414.8 423.4 431.3 436.7 441.3 444.1 446.3 448.1 448.9 449.4 449.8 450.0 448.5 446.9 445.4 443.1 440.8 438.8 436.7 434.4 432.2 431.3 430.6 429.8 429.1 429.7 430.3 431.0 431.9 433.8 436.0 438.4 440.8 443.2 445.8 448.7 451.0 451.8 452.4 452.6 451.6 450.0 448.1 445.2 441.8 438.2 434.6 432.0 430.0 428.5 427.5 426.5 426.0 427.0 428.4 429.7 431.3 432.9 434.9 437.5 440.0 442.8 445.4 447.3 448.7 449.8 450.8 451.4 450.4 448.8 447.2 445.2 441.3 437.5 433.8 429.7 424.8 420.3 416.6 414.8 413.9 414.4 414.7 415.1 415.7 416.0 416.1 415.5 415.4 414.8 414.1 412.2 409.6 406.3 401.2 393.8 383.8 372.1 358.8 342.4 325.3 309.0 294.8 284.4 275.7 268.5 263.6 260.4 258.2 256.5 255.5 254.9 254.5 254.5 254.0 254.0 254.3 254.5 254.7 254.9 255.2 255.7 257.5 259.0 259.6 259.8 260.2 260.4 260.6 260.9 261.2 261.0 260.8 260.6 260.5 260.3 260.1 259.9 259.7 259.7 259.5 259.3 259.1 259.0 258.8 259.1 259.4 259.8 260.1 260.5 260.9 261.2 261.6 262.0 262.3 262.7 262.5 262.7 263.0 260.2 256.2 251.9 245.8 240.2 234.4 228.3 222.6 219.1 217.7 216.0 214.5 212.8 211.5 210.9 210.5 210.2 209.9 209.5 209.2 208.8 208.5 208.2 207.8 207.7 207.1 206.1 205.1 204.2 203.4 202.6 201.9 201.4 201.2 201.1 201.4 202.2 203.6 206.6 212.0 218.5 228.0 243.7 260.2 274.2 285.1 291.3 296.2 299.9 303.2 306.3 307.0 306.8 304.9 302.9 301.2 300.2 299.6 300.0 300.5 302.1 304.0 305.8 307.8 309.7 312.3 315.3 318.3 321.2 323.7 326.0 328.1 330.2 331.8 332.5 333.0 333.5 333.8 333.5 333.1 332.6 332.1 331.6 330.8 329.8 329.0 328.5 328.3 328.1 328.3 328.6 329.1 329.3 329.7 330.0 330.5 330.8 331.2 331.4 331.4 331.3 331.2 331.1 331.0 331.0 330.8 331.0 330.8 330.7 330.7 330.6 330.8 330.8 331.1 331.4 332.0 332.5 333.5 334.7 336.6 339.4 344.4 349.4 351.8 353.2 354.5 354.7 354.5 354.1 353.8 353.5 353.1 352.8 352.3 351.8 351.2 350.6 350.9 351.0 351.0 351.2 351.3 351.3 351.5 351.3 351.1 350.8 347.8 345.1 342.6 340.9 340.5 340.2 340.0 339.8 339.5 339.3 339.3 340.5 341.7 343.7 348.8 353.9 355.7 356.8 357.8 359.0 358.5 358.1 357.5 356.9 356.4 355.1 354.0 353.0 351.9 350.8 349.6 348.5 347.6 347.1 346.6 346.2 346.0 346.1 346.4 346.6 346.9 347.2 347.5 347.8 348.0 348.7 349.3 350.0 350.5 350.8 351.1 351.3 351.5 351.3 350.8 350.5 350.1 349.8 349.5 349.8 350.1 350.6 350.9 351.4 351.7 352.1 352.2 351.7 351.1 350.4 349.7 348.5 346.1 342.9 339.7 336.3 332.6 328.1 322.8 317.1 309.8 303.1 296.9 290.5 284.2 277.8 271.2 265.6 260.6 255.2 249.7 246.5 244.0 242.2 240.7 239.3 238.0 237.1 236.3 235.8 235.9 236.5 237.4 238.5 239.5 239.7 239.9 240.3 240.9 241.4 241.1 240.7 240.7 240.5 240.3 240.2 240.0 239.9 239.8 239.6 239.9 240.2 240.4 240.7 240.9 241.2 241.7 242.8 244.2 246.4 248.9 251.1 
253.3 255.8 257.7 259.6 261.5 263.0 264.1 265.3 266.2 266.4 266.4 266.2 265.3 264.2 263.1 262.1 261.2 260.1 259.7 259.2 258.8 258.4 258.6 258.8 259.1 259.3 259.6 259.8 260.4 261.1 261.6 262.3 263.0 263.6 264.1 264.3 264.4 264.5 264.6 264.7 264.7 264.8 265.0 261.2 257.4 253.2 246.2 237.8 230.5 226.3 223.5 222.7 222.8 222.8 222.9 223.0 223.2 223.2 223.3 223.1 222.9 222.7 222.6 222.3 222.0 221.8 221.6 221.7 221.9 222.0 222.1 222.2 222.2 222.1 222.2 222.2 222.0 221.4 220.5 220.1 219.4 218.2 217.0 217.6 218.2 219.1 220.5 222.6 224.5 224.8 224.9 225.5 226.2 224.9 223.7 222.6 220.9 219.0 217.0 215.1 213.1 210.7 208.4 206.2 205.1 204.7 205.1 205.9 206.6 207.7 209.4 211.6 213.9 216.7 219.4 221.7 223.5 225.1 226.8 228.2 228.7 229.0 228.0 226.9 225.3 223.1 220.5 217.9 214.9 212.4 209.9 207.0 204.3 202.7 201.2 200.0 199.2 198.5 199.4 200.2 201.2 203.1 205.5 207.9 210.2 211.9 213.6 215.6 217.6 219.3 221.1 223.0 224.4 225.5 226.3 226.2 225.8 225.3 223.8 222.4 221.0 219.3 216.8 216.8 216.8 216.8 216.8 217.0 217.0 217.0 217.0 217.0",
- "input_type": "phoneme",
- "offset": 48.48
+ "f0_timestep": "0.005"
}, {
+ "offset": 54.88,
"text": "SP 可 看 到 窗 台 SP 微 微 摇 曳 的 花 SP 却 难 以 自 拔 SP",
- "ph_seq": "SP k e k an d ao ch uang t ai SP w ei w ei y ao y E d e h ua SP q ve n an y i z i0 b a a SP",
- "note_seq": "rest F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 rest C4 C4 C4 C4 A4 A4 A4 A4 C5 C5 C5 C5 rest G4 G4 G4 G4 G4 G4 A4 A4 G4 G4 A4 rest",
- "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.4 0.4 0.2 0.2 0.3999999 0.3999999 0.2250001 0.175 0.175 0.2 0.2 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.4000001 0.4000001 0.2 0.1999998 0.1999998 0.2000003 0.2000003 0.3999996 0.3999996 0.2000003 0.2000003 0.1999998 0.1999998 1.2 0.05",
- "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0",
- "ph_dur": "0.2554 0.0697 0.3251 0.0813 0.3135 0.0813 0.2786 0.1161 0.1277 0.0813 0.3947 0.1277 0.0929 0.1161 0.0697 0.1161 0.0813 0.1393 0.058 0.1393 0.058 0.0929 0.1045 0.4063 0.0813 0.1161 0.1277 0.0697 0.1161 0.0929 0.267 0.1277 0.1277 0.0697 0.1974 1.2074 0.0464",
- "f0_timestep": "0.005",
+ "ph_seq": "SP k e k an d ao ch uang t ai SP w ei w ei y ao y E d e h ua SP q ve n an y i z i0 b a SP",
+ "ph_dur": "0.2554 0.0697 0.3251 0.0813 0.3135 0.0813 0.2786 0.1161 0.1277 0.0813 0.3947 0.1277 0.0929 0.1161 0.0697 0.1161 0.0813 0.1393 0.058 0.1393 0.058 0.0929 0.1045 0.4063 0.0813 0.1161 0.1277 0.0697 0.1161 0.0929 0.267 0.1277 0.1277 0.0697 1.4048 0.0464",
+ "ph_num": "2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 1 1",
+ "note_seq": "rest F4 G4 A4 C5 C5 rest C4 C4 A4 A4 C5 C5 rest G4 G4 G4 A4 G4 A4 rest",
+ "note_dur": "0.32 0.4 0.4 0.4 0.2 0.4 0.225 0.175 0.2 0.2 0.2 0.2 0.4 0.2 0.2 0.2 0.4 0.2 0.2 1.2 0.05",
+ "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0",
"f0_seq": "219.5 219.5 219.5 219.7 249.7 250.9 252.1 253.4 254.6 255.9 257.1 258.4 259.7 260.9 262.2 263.5 264.8 266.1 267.4 268.7 270.1 271.4 272.7 274.1 275.4 276.8 278.1 279.5 280.9 282.3 283.7 285.1 286.5 287.9 289.3 290.7 292.1 293.6 295.0 296.5 297.9 299.4 300.9 302.4 282.4 281.2 280.1 279.1 278.1 277.0 275.9 274.9 273.9 273.5 272.1 266.8 261.6 257.0 252.4 247.4 244.1 246.5 248.9 257.4 272.7 289.5 297.2 300.9 304.9 309.3 313.7 316.3 316.3 316.7 317.1 317.6 317.4 316.4 315.3 314.0 312.9 311.8 310.9 312.8 316.7 321.0 326.8 332.2 337.9 344.2 348.2 351.0 353.4 353.8 353.3 352.8 352.2 351.6 351.1 350.4 350.0 350.1 350.2 350.2 350.4 350.4 350.1 349.9 350.9 351.3 351.0 351.0 350.7 350.6 350.0 349.5 349.0 348.3 346.7 344.4 341.4 332.1 317.3 299.7 287.3 277.3 274.5 278.1
288.3 304.5 321.6 332.8 338.7 343.2 344.8 346.4 348.1 349.7 351.8 354.3 357.6 361.7 366.2 372.3 379.1 386.0 391.5 393.3 393.8 394.0 393.2 392.9 393.1 393.7 393.8 393.8 393.7 393.6 393.4 393.1 392.9 392.7 392.3 392.1 392.0 392.1 392.2 392.2 392.2 392.2 392.4 392.4 392.4 392.4 392.7 392.7 392.6 392.3 392.1 391.8 391.4 391.0 390.8 390.5 390.9 391.2 391.7 392.0 392.4 392.7 392.7 393.0 393.1 393.3 393.4 393.6 393.8 393.9 394.0 394.3 394.9 395.5 395.5 395.2 395.2 394.8 394.3 393.8 393.4 392.9 392.4 391.4 390.4 389.3 388.2 380.4 364.9 348.5 338.4 331.5 325.4 320.9 319.4 329.6 350.6 371.5 380.8 385.2 388.3 390.9 392.3 393.7 394.5 395.2 395.6 394.3 393.4 392.5 391.5 390.6 389.3 388.5 388.8 391.8 395.2 400.5 406.5 411.9 417.7 424.0 430.5 436.3 440.0 442.4 444.9 447.0 447.9 447.5 447.0 446.7 446.1 445.7 445.1 443.9 442.3 440.4 438.1 436.9 435.9 434.7 434.1 434.8 435.8 437.0 438.0 439.2 440.4 441.4 442.4 441.8 439.7 433.5 426.0 414.6 402.6 401.8 408.2 414.3 418.6 422.6 429.1 434.9 438.7 442.7 446.7 450.8 453.3 455.6 458.6 462.4 467.2 471.2 475.5 482.0 491.9 506.3 519.6 527.4 531.0 532.7 533.3 533.2 532.6 531.2 529.3 527.1 525.2 523.7 523.0 522.6 522.3 522.0 522.3 523.1 523.9 524.6 525.1 524.7 524.0 523.3 521.1 515.0 506.6 491.0 473.2 467.8 462.4 458.8 458.9 466.7 475.1 479.9 484.6 492.6 501.9 511.7 520.7 526.0 528.6 530.9 532.6 533.7 534.6 535.3 536.4 535.7 535.2 534.3 533.0 531.2 528.2 525.6 522.9 520.2 517.2 514.8 513.2 512.2 511.2 511.6 512.3 513.0 513.7 514.6 516.6 518.9 521.1 523.3 526.2 529.3 532.5 535.2 536.9 538.6 540.1 539.0 538.0 536.7 534.2 531.5 528.6 525.2 522.2 518.6 515.2 514.1 513.4 512.7 512.7 512.8 512.9 513.4 514.7 516.3 518.1 519.9 521.6 523.1 524.5 526.0 527.0 527.7 528.4 528.7 526.9 524.3 522.0 519.2 514.0 507.8 501.3 494.1 488.4 486.7 487.1 488.4 489.6 490.9 492.2 493.3 494.3 494.0 494.3 494.0 493.3 490.7 486.8 481.7 474.2 462.7 447.1 429.2 409.2 384.5 359.3 335.7 315.4 300.7 288.7 278.9 272.2 268.0 265.1 263.0 261.6 260.9 260.6 260.5 260.1 260.3 260.8 261.1 261.6 262.0 262.4 262.6 261.1 259.6 258.2 256.5 256.1 255.8 255.9 256.3 256.7 256.7 256.8 256.8 256.9 257.7 258.7 259.7 260.4 261.2 261.8 262.4 263.1 263.6 264.2 264.8 264.4 264.1 263.9 263.5 263.1 262.8 262.4 262.1 261.9 261.5 261.5 261.4 261.3 261.3 261.3 261.2 260.9 260.2 259.4 258.9 258.5 258.2 258.0 258.1 258.3 258.7 258.9 259.5 260.6 261.9 262.9 263.3 263.6 263.9 264.2 264.2 263.9 263.7 263.4 262.5 261.8 261.1 260.6 259.9 259.5 260.2 261.8 264.1 267.4 271.9 276.3 282.1 289.0 299.7 313.7 340.2 360.1 372.6 384.4 397.2 409.2 418.5 424.8 430.1 434.4 438.0 440.8 442.2 442.4 442.0 441.5 441.3 440.9 440.6 440.0 439.9 440.6 441.0 440.3 439.2 435.4 432.4 430.7 429.5 428.0 427.0 427.6 428.3 429.0 430.0 431.0 433.6 436.2 439.2 442.3 444.6 444.2 443.4 442.5 441.8 440.8 440.2 439.9 439.6 439.2 438.9 438.6 438.5 438.5 438.7 438.8 439.0 439.0 439.2 439.2 440.4 441.5 442.5 443.8 445.1 446.5 447.2 446.1 443.8 438.0 431.8 427.0 425.5 425.6 430.9 438.3 445.8 455.3 471.6 491.0 509.2 518.3 522.0 524.3 525.7 528.2 529.8 530.9 531.4 531.5 529.2 526.7 522.9 514.0 495.6 478.2 470.2 469.9 473.5 482.6 493.2 503.1 508.4 508.4 506.6 505.1 503.2 500.9 498.2 495.3 492.5 488.2 484.7 480.9 477.6 474.2 471.4 469.5 470.8 473.5 483.9 494.0 504.3 511.3 515.5 519.5 521.3 522.8 523.8 523.9 523.5 522.9 522.5 521.9 521.1 520.4 519.8 519.5 519.1 518.7 518.4 518.0 518.0 518.4 519.0 519.4 519.8 520.1 520.5 520.8 521.1 521.0 520.8 521.5 522.6 523.6 525.1 526.6 527.7 528.1 528.6 527.8 527.0 525.9 525.1 522.6 519.8 517.1 514.7 512.8 510.9 509.5 510.1 511.1 512.2 
513.3 514.6 516.5 519.1 521.7 524.5 528.0 530.8 532.7 534.2 535.5 535.4 534.7 533.1 531.2 528.9 526.3 522.4 517.9 513.1 507.2 501.8 497.3 494.3 492.2 491.7 491.2 490.9 490.3 489.9 489.3 488.2 487.4 486.5 484.8 483.3 480.8 477.9 474.1 468.6 461.2 452.8 443.2 431.1 418.4 405.9 395.0 386.7 379.5 373.6 369.2 366.2 364.1 362.1 360.9 359.9 359.2 358.6 357.8 357.2 356.9 356.6 356.3 356.0 355.7 356.5 362.8 369.3 375.8 380.8 385.3 387.0 387.7 388.3 389.1 389.9 390.6 391.6 392.3 392.7 393.0 393.2 393.5 393.8 394.3 394.6 395.0 395.5 395.5 392.9 388.2 384.6 382.6 381.3 381.5 382.0 382.5 382.9 383.4 383.9 384.3 384.8 385.6 388.2 393.4 398.1 399.4 399.3 398.7 397.9 397.4 396.8 396.3 395.7 395.2 394.6 394.2 394.0 394.0 394.0 394.0 393.8 393.8 393.8 393.8 393.3 392.9 392.5 392.8 393.1 391.8 390.8 390.3 389.9 389.5 388.1 386.5 385.3 383.8 382.4 381.8 381.6 382.2 383.4 385.3 387.5 389.4 390.4 391.3 392.2 392.2 392.1 392.6 393.2 393.8 393.8 393.8 393.9 394.0 394.0 394.0 393.7 393.5 393.2 392.7 392.4 392.1 391.9 391.6 391.3 390.8 390.5 390.3 390.0 390.2 390.7 391.0 391.2 391.5 392.0 392.2 392.2 392.2 392.2 392.2 392.3 393.0 393.9 394.6 394.7 394.5 394.4 394.3 394.3 394.0 393.8 392.7 391.7 390.1 386.8 382.5 375.2 366.7 357.1 351.9 355.0 360.9 366.0 369.7 373.0 376.8 380.6 384.3 387.4 390.2 393.2 397.4 401.0 404.6 408.6 413.5 417.8 422.6 427.9 431.2 433.8 437.1 441.2 444.9 446.7 446.7 446.8 446.4 445.8 444.6 443.5 443.2 442.9 442.4 442.3 441.7 441.3 440.9 440.4 439.7 438.7 435.7 431.1 420.7 402.1 384.1 377.0 382.0 394.4 403.9 415.7 425.7 431.3 435.4 439.7 440.3 438.8 436.5 432.4 422.8 409.0 397.2 391.4 389.5 388.4 388.3 388.3 389.0 389.5 389.7 390.1 390.5 390.8 391.4 391.8 391.4 391.1 391.0 390.7 390.9 390.9 391.0 391.1 391.4 391.5 392.0 392.2 392.6 393.0 393.4 392.3 391.4 391.0 390.9 391.3 392.8 394.6 397.4 401.1 405.1 409.4 414.1 418.2 422.4 427.0 430.9 434.3 437.1 439.7 441.3 442.2 443.2 443.9 443.9 444.1 442.5 441.2 439.6 438.0 437.0 436.0 434.9 434.5 434.3 434.4 434.5 434.8 435.2 436.2 437.7 437.3 437.1 437.4 437.8 438.2 438.8 439.1 439.5 439.9 439.5 439.2 439.4 439.5 439.7 439.7 439.8 440.0 440.1 440.3 440.5 440.4 440.0 439.7 439.9 440.3 440.8 441.2 441.7 442.2 442.5 442.2 442.0 441.9 441.6 441.3 441.2 440.9 440.6 440.5 440.3 439.9 439.9 440.2 440.4 439.7 439.1 438.7 438.9 439.0 439.2 439.6 439.9 440.2 440.4 440.0 439.2 439.2 439.2 439.4 439.5 439.4 439.0 438.4 438.0 437.5 437.5 437.5 437.6 437.7 437.7 437.7 437.8 438.0 438.0 438.0 439.0 439.6 439.2 438.4 437.5 438.4 439.1 439.1 438.8 438.5 438.4 438.1 437.8 437.0 436.0 436.3 436.5 436.6 436.9 437.2 437.5 438.0 439.0 440.3 441.5 442.8 444.1 445.4 446.7 447.9 448.3 448.6 448.7 448.9 449.2 449.4 448.6 447.7 446.3 444.6 442.7 441.2 439.4 436.8 433.9 431.4 430.4 429.9 429.1 428.5 429.4 430.2 431.3 433.3 435.4 437.7 439.0 440.6 443.4 446.7 448.3 450.0 451.2 451.4 451.3 451.0 450.5 449.8 448.7 447.7 445.4 443.4 441.4 439.2 437.0 434.8 432.9 432.0 431.3 430.7 430.1 430.7 431.7 433.3 434.9 437.5 440.3 442.3 444.0 445.9 447.4 448.3 449.0 449.6 450.5 451.3 450.8 450.1 448.8 447.4 445.7 443.6 441.5 438.6 435.4 432.6 429.7 427.8 425.9 424.0 422.1 421.6 421.7 422.2 422.6 424.7 427.0 429.0 431.0 433.2 435.4 437.7 439.6 441.8 444.1 446.5 447.7 448.1 448.2 448.2 448.5 448.8 449.8 451.1 450.5 449.9 449.1 448.4 447.8 447.2 446.3 445.7 445.4 445.4", - "input_type": "phoneme", - "offset": 54.88 + "f0_timestep": "0.005" }, { + "offset": 61.28, "text": "SP 曾 经 路 上 的 风 吹 雨 打 有 一 个 灯 塔 我 就 不 必 害 怕 SP", - "ph_seq": "SP c eng j in l u sh ang d e e f eng ch ui y v d a y ou y i g e 
d eng t a a w o j iu b u b i h ai p a a SP",
- "note_seq": "rest D4 D4 E4 E4 F4 F4 G4 G4 G4 G4 A4 C5 C5 C5 C5 C5 C5 C5 C5 D5 D5 C5 C5 C5 C5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 A#4 A#4 A#4 A4 A4 G4 G4 G4 G4 A4 rest",
- "note_dur_seq": "0.32 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.1 0.1 0.3 0.2 0.2 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.1999998 0.1999998 0.2000003 0.2000003 0.3999996 0.3999996 0.4000001 0.4000001 0.4000001 0.4000001 0.1329999 0.1329999 0.2670002 0.05",
- "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0",
- "ph_dur": "0.2438 0.0813 0.1393 0.058 0.1509 0.058 0.1393 0.058 0.1625 0.0348 0.1045 0.1974 0.0929 0.1161 0.0929 0.3135 0.0813 0.1509 0.0464 0.3135 0.0929 0.1277 0.0697 0.1509 0.0464 0.1393 0.058 0.1161 0.0929 0.1974 0.1393 0.058 0.1277 0.0697 0.1625 0.0464 0.3367 0.058 0.2786 0.1277 0.2902 0.1045 0.1277 0.267 0.058",
- "f0_timestep": "0.005",
+ "ph_seq": "SP c eng j in l u sh ang d e f eng ch ui y v d a y ou y i g e d eng t a w o j iu b u b i h ai p a SP",
+ "ph_dur": "0.2438 0.0813 0.1393 0.058 0.1509 0.058 0.1393 0.058 0.1625 0.0348 0.3019 0.0929 0.1161 0.0929 0.3135 0.0813 0.1509 0.0464 0.3135 0.0929 0.1277 0.0697 0.1509 0.0464 0.1393 0.058 0.1161 0.0929 0.3367 0.058 0.1277 0.0697 0.1625 0.0464 0.3367 0.058 0.2786 0.1277 0.2902 0.1045 0.3947 0.058",
+ "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1",
+ "note_seq": "rest D4 E4 F4 G4 G4 A4 C5 C5 C5 C5 D5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 A4 G4 G4 A4 rest",
+ "note_dur": "0.32 0.2 0.2 0.2 0.2 0.1 0.3 0.2 0.4 0.2 0.4 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.4 0.4 0.4 0.133 0.267 0.05",
+ "note_slur": "0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0",
"f0_seq": "440.5 440.5 440.5 440.3 390.4 388.4 386.5 384.5 382.6 380.7 378.8 376.8 374.9 373.1 371.2 369.3 367.4 365.6 363.7 361.9 360.1 358.3 356.5 354.7 352.9 351.1 349.3 347.6 345.8 344.1 342.3 340.6 338.9 337.2 335.5 333.8 332.1 330.4 328.8 327.1 325.5 323.8 322.2 320.6 293.7 293.5 293.2 293.2 293.0 292.8 292.6 292.4 292.2 292.0 292.0 291.8 291.6 291.4 291.2 291.0 290.8 290.7 290.5 290.3 290.1 290.6 294.4 297.1 297.0 295.7 294.3 293.0 291.8 290.4 289.1 288.3 288.0 288.4 289.1 290.5 291.7 293.0 294.2 295.5 296.2 296.6 296.0 295.3 294.1 289.8 284.3 275.1 264.7 253.2 243.8 241.6 240.6 243.1 250.0 259.2 269.2 278.8 288.0 298.7 310.1 320.9 327.7 331.8 332.6 332.3 331.1 330.2 329.7 329.5 329.4 329.4 329.2 329.2 329.4 329.6 329.9 330.1 330.1 329.7 329.1 328.6 328.5 328.5 328.5 328.7 328.7 328.7 328.8 328.7 328.3 327.4 325.1 323.1 320.9 317.3 314.2 315.1 320.4 331.1 343.4 348.1 348.8 348.9 349.3 349.6 349.9 350.1 350.3 350.3 350.2 350.2 350.0 350.0 350.0 350.0 350.2 350.2 350.2 349.8 350.0 350.2 350.2 350.2 349.8 346.4 341.1 330.7 317.9 303.6 293.5 292.9 297.9 305.2 313.7 323.2 336.2 350.8 374.1 389.7 394.7 397.9 400.2 401.0 401.2 401.2 400.3 399.5 398.2 396.4 394.5 392.2 390.0 388.4 387.4 386.8 385.8 385.4 385.4 385.8 386.8 387.9 389.1 390.1 391.1 392.2 393.1 392.6 392.0 391.4 390.6 389.4 383.7 371.7 347.6 318.0 316.4 315.8 317.8 323.4 341.1 362.3 379.1 386.7 391.9 395.4 397.1 397.7 398.5 397.6 395.6 393.9 392.0 391.0 390.9 391.1 392.0 393.6 396.2 399.7 406.3 414.8 423.2 430.2 436.6 441.8 444.7 446.3 447.2 447.4 447.2 446.9 446.1 445.2 444.3 443.3 442.2 441.4 440.4 439.8 440.5 440.1 439.4 439.4 439.5 439.5 439.7 439.9 440.6 441.8 442.8 440.9 439.0 437.3 433.7 419.6 404.2 395.3 395.1 401.8 410.5 412.6 414.7 417.3 422.3 428.5 433.1 437.4 441.3 445.8 450.5 455.6 460.8 464.9
469.7 475.7 482.7 488.5 495.2 503.5 512.2 521.1 528.7 532.2 533.9 534.2 533.7 532.4 530.9 529.8 529.0 527.9 526.6 525.4 524.6 523.9 523.1 522.3 520.7 516.3 510.7 503.5 489.2 475.4 459.3 442.0 425.2 410.4 396.7 383.3 379.3 377.4 377.8 383.1 390.6 398.8 412.9 426.2 440.4 461.1 484.6 499.7 509.5 517.5 526.0 535.5 538.8 538.2 537.0 535.7 534.2 532.8 531.5 530.3 529.0 526.6 524.0 522.0 520.5 519.3 517.8 516.9 516.8 517.1 518.1 519.6 520.8 522.2 523.4 524.1 524.8 525.2 525.8 526.3 526.8 526.6 526.1 525.6 525.2 524.8 524.5 524.1 523.6 523.1 522.7 522.3 522.6 522.8 523.1 523.5 523.9 523.9 524.3 524.5 524.7 525.1 524.0 523.2 522.2 521.3 520.2 519.9 519.8 519.6 520.1 520.5 521.0 521.4 521.3 520.9 520.5 520.2 520.1 520.2 520.5 520.8 521.2 521.4 521.9 523.3 524.8 526.3 527.8 528.6 528.3 527.5 526.7 526.0 525.5 524.9 525.1 525.1 525.2 524.7 524.2 523.6 522.9 522.5 522.5 522.9 522.9 523.1 522.3 521.5 520.5 519.6 517.2 505.5 486.9 463.9 442.0 431.9 425.0 426.8 435.3 446.7 458.1 469.9 484.7 503.4 522.6 535.1 536.6 532.5 527.8 527.2 527.3 527.5 527.5 527.7 527.8 528.0 527.2 526.3 525.5 524.8 523.3 521.4 520.7 520.3 519.9 519.6 519.5 520.0 520.7 521.4 521.9 522.6 523.3 524.0 524.5 525.2 526.0 526.6 527.1 526.6 525.8 525.2 524.8 524.0 523.3 522.8 522.0 521.4 520.9 521.1 521.8 522.2 522.5 522.9 523.3 523.7 524.3 524.6 525.0 525.4 525.8 526.2 525.7 524.9 524.8 524.1 524.5 525.1 524.8 524.2 524.0 523.6 523.7 523.9 523.9 524.6 525.2 525.4 524.7 523.3 522.8 522.0 521.3 521.1 521.1 521.7 523.1 523.9 525.5 528.4 531.0 536.7 544.2 554.0 563.4 572.7 580.4 585.4 589.5 592.8 590.7 589.3 588.0 586.6 585.3 584.3 584.6 585.4 585.9 586.7 586.9 586.2 585.0 583.0 580.9 578.0 575.1 572.1 568.5 563.4 556.6 550.3 544.4 539.0 534.2 532.8 531.2 529.1 527.5 526.0 525.6 525.5 525.7 525.4 525.1 525.1 525.1 525.1 525.3 525.4 525.7 525.8 526.3 526.8 527.2 527.3 526.4 525.6 524.7 523.6 522.0 515.5 503.7 486.2 468.6 453.7 448.2 446.7 450.3 456.8 463.6 471.9 480.2 489.7 499.6 506.9 514.1 520.5 526.6 528.4 528.8 529.0 528.7 528.0 527.5 526.7 526.0 525.5 524.9 524.2 524.2 524.5 524.6 524.8 525.1 525.1 525.2 525.4 525.4 525.1 523.6 512.9 492.9 467.6 443.6 432.3 430.2 429.8 430.1 433.4 451.2 473.3 494.9 514.1 531.2 535.9 534.1 531.7 529.5 528.7 528.0 527.7 527.2 526.7 526.3 524.5 522.8 521.5 521.1 521.4 521.4 521.6 522.0 523.0 524.5 526.0 527.5 528.7 529.9 531.5 529.5 525.6 516.9 504.0 484.8 464.9 448.5 440.6 434.5 428.0 429.4 442.7 460.3 482.3 501.6 515.1 518.1 520.4 521.7 523.6 525.1 526.3 526.9 527.1 527.5 527.9 528.3 528.4 528.2 528.1 528.0 527.8 527.3 526.0 524.8 523.3 522.0 520.9 519.4 518.1 517.8 517.2 516.8 516.9 517.2 518.0 518.7 518.9 519.0 518.7 517.9 516.6 514.4 510.6 505.1 497.5 488.8 481.7 474.7 468.1 462.2 457.6 453.2 450.1 449.8 450.7 452.8 454.9 457.4 460.5 464.4 467.5 469.9 472.2 474.6 474.5 474.3 474.1 473.2 472.1 470.9 469.1 467.4 464.6 460.0 456.7 454.5 453.9 453.3 452.1 451.3 452.1 452.6 453.3 454.5 455.2 456.4 457.7 459.3 460.8 462.8 464.3 465.0 465.1 465.1 465.4 465.5 465.6 465.8 465.9 466.9 467.8 468.7 469.7 470.8 470.3 463.1 452.2 431.8 407.2 386.2 378.6 379.1 386.3 395.6 399.1 402.6 405.9 410.2 415.5 420.6 424.8 428.7 433.1 437.5 442.9 452.8 460.6 464.6 467.2 469.7 471.5 471.7 471.4 471.0 471.0 470.6 470.2 469.3 468.3 467.3 466.6 466.4 466.4 466.4 466.4 465.9 465.3 464.7 464.0 453.7 444.6 438.6 442.0 454.5 466.4 467.5 465.3 458.4 449.0 440.0 431.9 424.3 415.8 408.4 411.7 420.5 428.2 432.9 436.7 438.0 438.9 439.1 439.0 438.7 437.4 436.1 435.4 435.0 434.7 434.4 434.1 433.8 433.5 433.2 434.1 437.2 440.0 442.9 446.4 
450.9 455.3 459.5 464.1 468.9 471.1 472.9 474.6 476.2 476.5 475.8 474.8 472.9 471.5 470.2 468.9 467.8 466.6 465.6 464.3 464.6 464.7 465.0 465.3 466.4 467.8 469.4 470.6 472.1 473.5 472.1 471.4 471.0 468.9 466.2 463.5 460.7 457.6 453.2 448.7 445.1 441.9 439.9 438.3 436.7 435.3 434.7 434.8 434.8 434.7 434.4 434.4 434.3 434.0 434.4 435.3 436.1 437.1 438.2 439.2 439.9 440.9 441.9 443.3 443.8 444.5 445.1 445.7 445.5 444.9 444.2 443.6 443.2 442.1 440.3 439.9 439.6 439.3 439.0 438.7 439.0 439.1 439.2 439.4 439.7 439.8 439.9 439.4 439.0 439.2 439.5 439.6 439.9 440.0 440.3 440.6 440.8 440.9 441.2 441.5 441.6 441.9 442.0 442.0 442.0 443.1 443.1 443.1 442.9 442.8 442.9 443.3 443.4 443.1 442.8 442.5 442.4 442.1 441.8 443.8 447.1 450.0 451.7 453.4 455.3 456.6 458.2 459.6 460.6 462.9 463.5 463.7 463.5 462.0 456.0 449.5 440.8 430.1 419.8 409.4 401.6 397.6 394.2 391.8 390.6 390.0 389.2 388.4 388.0 387.3 387.3 387.4 387.8 388.3 387.9 387.2 387.5 388.2 388.3 388.6 388.9 389.2 389.3 389.5 389.7 390.1 390.8 391.4 391.9 392.7 393.5 394.0 394.6 395.3 396.1 396.6 396.3 395.8 395.3 394.7 394.4 393.8 393.0 392.2 391.5 390.2 386.6 381.7 372.8 362.0 352.7 349.3 345.2 340.6 336.0 331.4 326.9 323.0 319.0 314.7 310.5 306.6 302.9 298.8 294.9 290.8 286.9 284.4 283.5 284.5 287.6 290.6 293.4 300.5 313.8 328.9 344.7 359.1 372.5 381.1 389.1 396.6 399.0 399.4 400.0 400.5 399.8 399.1 398.0 396.6 395.0 392.8 390.6 388.4 386.6 386.7 387.2 388.0 389.9 394.0 399.0 404.5 410.8 418.4 425.3 431.6 437.2 441.3 444.1 446.1 447.6 448.2 448.6 448.9 449.0 449.0 448.5 447.0 444.7 442.3 440.2 438.6 437.2 436.5 436.0 435.6 435.3 435.0 434.8 434.4 435.6 436.6 437.7 438.8 440.0 441.3 443.4 446.0 448.9 450.8 452.4 455.4 455.9 455.4 454.5 450.2 445.5 440.1 434.2 427.7 422.2 417.3 413.5 412.4 412.4 413.7 415.3 416.7 418.3 420.1 421.9 423.3 424.0 424.0",
- "input_type": "phoneme",
- "offset": 61.28
+ "f0_timestep": "0.005"
}, {
+ "offset": 67.28,
"text": "SP 小 的 温 暖 也 能 被 放 大 SP",
- "ph_seq": "SP x iao d e w en n uan y E n eng b ei f ang d a a SP",
- "note_seq": "rest C4 C4 D4 D4 A4 A4 C5 C5 C5 C5 A4 A4 C5 C5 C4 C4 C4 C4 D4 rest",
- "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.8 0.8 0.4 0.4 0.4000001 0.4000001 0.8 0.8 0.4000001 0.4000001 0.4000001 0.4000001 0.1999998 0.1999998 1.8 0.05",
- "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0",
- "ph_dur": "0.1393 0.1858 0.3483 0.058 0.3135 0.0813 0.6966 0.1045 0.3019 0.0929 0.3019 0.1045 0.7198 0.0813 0.1974 0.1974 0.2902 0.1161 0.1974 1.7995 0.0464",
- "f0_timestep": "0.005",
+ "ph_seq": "SP x iao d e w en n uan y E n eng b ei f ang d a SP",
+ "ph_dur": "0.1393 0.1858 0.3483 0.058 0.3135 0.0813 0.6966 0.1045 0.3019 0.0929 0.3019 0.1045 0.7198 0.0813 0.1974 0.1974 0.2902 0.1161 1.9969 0.0464",
+ "ph_num": "2 2 2 2 2 2 2 2 2 1 1",
+ "note_seq": "rest C4 D4 A4 C5 C5 A4 C5 C4 C4 D4 rest",
+ "note_dur": "0.32 0.4 0.4 0.8 0.4 0.4 0.8 0.4 0.4 0.2 1.8 0.05",
+ "note_slur": "0 0 0 0 0 0 0 0 0 0 1 0",
"f0_seq": "438.0 438.0 438.0 438.7 379.7 377.3 374.8 372.3 369.9 367.5 365.1 362.7 360.3 358.0 355.6 353.3 351.0 348.7 346.4 344.2 341.9 339.7 337.5 335.3 333.1 330.9 328.7 326.6 324.5 322.3 320.2 318.1 316.1 314.0 311.9 309.9 307.9 305.9 303.9 301.9 299.9 297.9 296.0 294.1
[...] 294.0 293.8 293.7 293.7",
-        "input_type": "phoneme",
-        "offset": 67.28
+        "f0_timestep": "0.005"
     },
     {
+        "offset": 87.0,
         "text": "AP 空 气 中 的 广 播 声 SP 在 回 荡 着 SP",
         "ph_seq": "AP k ong q i zh ong d e g uang b o sh eng SP z ai h ui d ang zh e SP",
-        "note_seq": "rest D#3 D#3 C4 C4 D#4 D#4 C4 C4 A#3 A#3 C4 C4 C4 C4 rest D#3 D#3 G3 G3 G#3 G#3 G#3 G#3 rest",
-        "note_dur_seq": "0.6 0.4 0.4 0.8 0.8 0.4000001 0.4000001 0.4119999 0.4119999 0.388 0.388 0.2 0.2 0.3999999 0.3999999 0.2 0.3999999 0.3999999 0.4000001 0.4000001 0.25 0.25 1.35 1.35 0.05",
-        "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "ph_dur": "0.4992 0.1045 0.2438 0.1625 0.7082 0.0929 0.3483 0.0464 0.2902 0.1161 0.3019 0.0929 0.058 0.1393 0.4063 0.0929 0.1045 0.2438 0.1509 0.3135 0.0929 0.1509 0.0929 1.3584 0.0464",
-        "f0_timestep": "0.005",
+        "ph_num": "2 2 2 2 2 2 2 1 2 2 2 2 1 1",
+        "note_seq": "rest D#3 C4 D#4 C4 A#3 C4 C4 rest D#3 G3 G#3 G#3 rest",
+        "note_dur": "0.6 0.4 0.8 0.4 0.412 0.388 0.2 0.4 0.2 0.4 0.4 0.25 1.35 0.05",
+        "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "f0_seq": "162.5 162.5 [...] 205.5 205.5",
-        "input_type": "phoneme",
-        "offset": 87.0
+        "f0_timestep": "0.005"
     },
     {
+        "offset": 93.4,
         "text": "AP 黄 昏 的 站 台 已 被 阳 光 淹 没 SP",
         "ph_seq": "AP h uang h un d e zh an t ai y i b ei y ang g uang y En m o SP",
-        "note_seq": "rest C4 C4 F4 F4 D#4 D#4 D#4 D#4 C4 C4 A#3 A#3 A#3 A#3 G#3 G#3 A#3 A#3 C4 C4 C4 C4 rest",
-        "note_dur_seq": "0.6 0.4 0.4 1.2 1.2 0.2 0.2 0.1999998 0.1999998 0.45 0.45 0.3500001 0.3500001 0.3999999 0.3999999 0.3999999 0.3999999 0.4000001 0.4000001 0.2000003 0.2000003 1.4 1.4 0.05",
-        "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "ph_dur": "0.4992 0.1045 0.2786 0.1277 1.0913 0.1045 0.1277 0.0697 0.1277 0.0697 0.3135 0.1393 0.267 0.0813 0.267 0.1393 0.2786 0.1161 0.2902 0.1161 0.0929 0.1045 1.4048 0.0464",
-        "f0_timestep": "0.005",
+        "ph_num": "2 2 2 2 2 2 2 2 2 2 2 1 1",
+        "note_seq": "rest C4 F4 D#4 D#4 C4 A#3 A#3 G#3 A#3 C4 C4 rest",
+        "note_dur": "0.6 0.4 1.2 0.2 0.2 0.45 0.35 0.4 0.4 0.4 0.2 1.4 0.05",
+        "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0",
         "f0_seq": "255.1 255.1 [...] 249.8 249.8",
-        "input_type": "phoneme",
-        "offset": 93.4
+        "f0_timestep": "0.005"
     },
     {
+        "offset": 99.8,
         "text": "AP 我 拖 着 过 往 在 人 SP 群 中 穿 梭 SP",
         "ph_seq": "AP w o t uo zh e g uo w ang z ai r en SP q vn zh ong ch uan s uo SP",
-        "note_seq": "rest D#3 D#3 C4 C4 D#4 D#4 C4 C4 A#3 A#3 C4 C4 C4 C4 rest D#3 D#3 G3 G3 G#3 G#3 G#3 G#3 rest",
-        "note_dur_seq": "0.6 0.4 0.4 0.8 0.8 0.4000001 0.4000001 0.3999999 0.3999999 0.4000001 0.4000001 0.2 0.2 0.3999999 0.3999999 0.2 0.3999999 0.3999999 0.4000001 0.4000001 0.2249999 0.2249999 1.375 1.375 0.05",
-        "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "ph_dur": "0.4876 0.1161 0.2554 0.1509 0.685 0.1161 0.3135 0.0813 0.2786 0.1161 0.3135 0.0929 0.1277 0.0697 0.4063 0.0697 0.1277 0.3019 0.0929 0.2554 0.1509 0.1161 0.1045 1.3816 0.0464",
-        "f0_timestep": "0.005",
+        "ph_num": "2 2 2 2 2 2 2 1 2 2 2 2 1 1",
+        "note_seq": "rest D#3 C4 D#4 C4 A#3 C4 C4 rest D#3 G3 G#3 G#3 rest",
+        "note_dur": "0.6 0.4 0.8 0.4 0.4 0.4 0.2 0.4 0.2 0.4 0.4 0.225 1.375 0.05",
+        "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "f0_seq": "170.7 170.7 [...] 197.6 197.6",
-        "input_type": "phoneme",
-        "offset": 99.8
+        "f0_timestep": "0.005"
     },
     {
+        "offset": 106.2,
         "text": "AP 找 一 个 SP 位 置 SP 等 一 段 SP 未 知 SP 去 期 待 着 SP 固 执 如 我 的 那 些 声 音 SP 思 绪 随 一 阵 风 迷 离 SP 在 春 天 的 原 野 里 SP 在 指 间 缝 隙 SP 看 见 惊 世 的 美 丽 SP 在 晨 曦 里 笑 靥 如 花 的 你 SP",
-        "ph_seq": "AP zh ao y i g e SP w ei zh ir SP d eng y i d uan SP w ei zh ir SP q v q i d ai zh e SP g u zh ir r u w o d e n a x ie sh eng y in SP s i0 x v s ui y i zh en f eng p iao l i SP z ai ch un t ian d e y van y E l i SP z ai zh ir j ian f eng x i SP k an j ian j ing sh ir d e m ei l i SP z ai ch en x i l i x iao y En r u h ua ua d e n i SP",
-        "note_seq": "rest F3 F3 G3 G3 G#3 G#3 rest G#3 G#3 G#3 G#3 rest G3 G3 G#3 G#3 A#3 A#3 rest A#3 A#3 A#3 A#3 rest G#3 G#3 A#3 A#3 C4 C4 E4 E4 rest F4 F4 G4 G4 G#4 G#4 A#4 A#4 G#4 G#4 F4 F4 G4 G4 D#4 D#4 D#4 D#4 rest D#4 D#4 F4 F4 G4 G4 G4 G4 G4 G4 G4 G4 G4 G4 G#4 G#4 rest C4 C4 D#4 D#4 F4 F4 F4 F4 F4 F4 F4 F4 F4 F4 rest F4 F4 F4 F4 F4 F4 F4 F4 F4 F4 rest F4 F4 F4 F4 F4 F4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 rest G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 G#4 A#4 A#4 G#4 G#4 G4 G4 G4 F4 F4 F4 F4 rest",
-        "note_dur_seq": "0.6 0.2 0.2 0.2 0.2 0.2 0.2 0.1999999 0.4 0.4 0.2 0.2 0.2 0.175 0.175 0.2249999 0.2249999 0.2 0.2 0.2 0.4000001 0.4000001 0.1999998 0.1999998 0.2 0.2 0.2 0.1999998 0.1999998 0.8000002 0.8000002 0.5999999 0.5999999 0.2000003 0.3999996 0.3999996 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 1.2 1.2 0.1999998 0.1999998 0.5999994 0.5999994 0.2000008 0.2000008 0.3999996 0.3999996 0.1999998 0.1999998 0.1999998 0.2000008 0.2000008 0.3999996 0.3999996 0.1999998 0.1999998 0.1999998 0.1999998 0.2000008 0.2000008 0.5999994 0.5999994 0.6000004 0.6000004 0.1999998 0.3999996 0.3999996 0.4000006 0.4000006 0.3999996 0.3999996 0.1999998 0.1999998 0.2000008 0.2000008 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.1999998 0.2000008 0.2000008 0.1999998 0.1999998 0.1999998 0.1999998 0.2000008 0.2000008 0.1999989 0.2000008 0.2000008 0.1999989 0.1999989 0.2000008 0.2000008 0.2000008 0.2000008 0.1999989 0.1999989 0.2000008 0.2000008 0.1999989 0.1999989 0.2000008 0.2000008 0.2000008 0.1999989 0.1999989 0.2000008 0.2000008 0.3999996 0.3999996 0.3999996 0.3999996 0.3999996 0.3999996 0.4000015 0.4000015 0.3999996 0.3999996 0.7999992 0.3999996 0.3999996 0.4000015 0.4000015 0.05",
-        "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0",
-        "ph_dur": "0.5108 0.0929 0.1393 0.058 0.1625 0.0464 0.1974 0.1161 0.0813 0.2902 0.1161 0.1974 0.1045 0.0929 0.1277 0.0464 0.1741 0.0464 0.209 0.1045 0.0929 0.3019 0.0929 0.209 0.0697 0.1277 0.1161 0.0813 0.1509 0.0464 0.6502 0.1509 0.6037 0.1509 0.0464 0.2902 0.1161 0.2902 0.1045 0.3019 0.0929 0.3367 0.0697 1.0681 0.1277 0.0697 0.1277 0.418 0.1858 0.1277 0.0697 0.4063 0.0697 0.1277 0.0929 0.1045 0.1045 0.1045 0.3019 0.0929 0.1277 0.0697 0.1277 0.0697 0.1393 0.0697 0.4528 0.1393 0.6037 0.0929 0.1045 0.2786 0.1277 0.3135 0.0813 0.3599 0.0464 0.1393 0.058 0.1509 0.0464 0.1277 0.0697 0.1974 0.1161 0.0929 0.1277 0.0697 0.1393 0.058 0.1393 0.058 0.1161 0.0929 0.1974 0.1045 0.0929 0.1393 0.058 0.1393 0.0697 0.1045 0.0929 0.1625 0.0348 0.1161 0.0813 0.1393 0.058 0.209 0.1161 0.0813 0.1277 0.0697 0.1161 0.0813 0.1509 0.058 0.2438 0.1509 0.2902 0.1045 0.3135 0.0929 0.2554 0.1393 0.4063 0.7314 0.0697 0.2322 0.1625 0.4063 0.0464",
-        "f0_timestep": "0.005",
+        "ph_seq": "AP zh ao y i g e SP w ei zh ir SP d eng y i d uan SP w ei zh ir SP q v q i d ai zh e SP g u zh ir r u w o d e n a x ie sh eng y in SP s i0 x v s ui y i zh en f eng p iao l i SP z ai ch un t ian d e y van y E l i SP z ai zh ir j ian f eng x i SP k an j ian j ing sh ir d e m ei l i SP z ai ch en x i l i x iao y En r u h ua d e n i SP",
+        "ph_dur": "0.5108 0.0929 0.1393 0.058 0.1625 0.0464 0.1974 0.1161 0.0813 0.2902 0.1161 0.1974 0.1045 0.0929 0.1277 0.0464 0.1741 0.0464 0.209 0.1045 0.0929 0.3019 0.0929 0.209 0.0697 0.1277 0.1161 0.0813 0.1509 0.0464 0.6502 0.1509 0.6037 0.1509 0.0464 0.2902 0.1161 0.2902 0.1045 0.3019 0.0929 0.3367 0.0697 1.0681 0.1277 0.0697 0.1277 0.418 0.1858 0.1277 0.0697 0.4063 0.0697 0.1277 0.0929 0.1045 0.1045 0.1045 0.3019 0.0929 0.1277 0.0697 0.1277 0.0697 0.1393 0.0697 0.4528 0.1393 0.6037 0.0929 0.1045 0.2786 0.1277 0.3135 0.0813 0.3599 0.0464 0.1393 0.058 0.1509 0.0464 0.1277 0.0697 0.1974 0.1161 0.0929 0.1277 0.0697 0.1393 0.058 0.1393 0.058 0.1161 0.0929 0.1974 0.1045 0.0929 0.1393 0.058 0.1393 0.0697 0.1045 0.0929 0.1625 0.0348 0.1161 0.0813 0.1393 0.058 0.209 0.1161 0.0813 0.1277 0.0697 0.1161 0.0813 0.1509 0.058 0.2438 0.1509 0.2902 0.1045 0.3135 0.0929 0.2554 0.1393 1.1377 0.0697 0.2322 0.1625 0.4063 0.0464",
+        "ph_num": "2 2 2 1 2 2 1 2 2 2 1 2 2 1 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 1 1",
+        "note_seq": "rest F3 G3 G#3 rest G#3 G#3 rest G3 G#3 A#3 rest A#3 A#3 rest G#3 A#3 C4 E4 rest F4 G4 G#4 A#4 G#4 F4 G4 D#4 D#4 rest D#4 F4 G4 G4 G4 G4 G4 G#4 rest C4 D#4 F4 F4 F4 F4 F4 rest F4 F4 F4 F4 F4 rest F4 F4 F4 G#4 G#4 G#4 G#4 rest G#4 G#4 G#4 G#4 G#4 A#4 G#4 G4 G4 F4 F4 rest",
+        "note_dur": "0.6 0.2 0.2 0.2 0.2 0.4 0.2 0.2 0.175 0.225 0.2 0.2 0.4 0.2 0.2 0.2 0.2 0.8 0.6 0.2 0.4 0.4 0.4 0.4 1.2 0.2 0.6 0.2 0.4 0.2 0.2 0.2 0.4 0.2 0.2 0.2 0.6 0.6 0.2 0.4 0.4 0.4 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.4 0.4 0.4 0.4 0.4 0.8 0.4 0.4 0.05",
+        "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0",
         "f0_seq": "182.1 182.1 [...] 343.2 343.2",
-        "input_type": "phoneme",
-        "offset": 106.2
+        "f0_timestep": "0.005"
     },
     {
+        "offset": 128.48,
         "text": "SP 我 多 想 说 再 见 啊 SP 捧 起 雪 花 SP 把 爱 与 恨 留 下 SP",
         "ph_seq": "SP w o d uo x iang sh uo z ai j ian a SP p eng q i x ve h ua SP b a ai y v h en l iu x ia SP",
-        "note_seq": "rest F4 F4 G4 G4 A4 A4 A#4 A#4 A4 A4 C5 C5 C5 rest C4 C4 G4 G4 A4 A4 A4 A4 rest C4 C4 E4 F4 F4 F4 F4 C4 C4 A3 A3 rest",
-        "note_dur_seq": "0.32 0.188 0.188 0.212 0.212 0.2 0.2 0.2 0.2 0.4 0.4 0.2 0.2 0.3999999 0.2 0.4000001 0.4000001 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.2 0.2999997 0.2999997 0.5 0.2000003 0.2000003 0.4000001 0.4000001 0.5999999 0.5999999 0.4000001 0.4000001 0.05",
-        "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "ph_dur": "0.2786 0.0464 0.1625 0.0232 0.1393 0.0813 0.1277 0.0697 0.1277 0.0697 0.3019 0.0929 0.209 0.3947 0.1045 0.0929 0.267 0.1393 0.267 0.1277 0.1045 0.0929 0.4063 0.1045 0.0929 0.3019 0.3831 0.1161 0.1161 0.0813 0.3483 0.058 0.3947 0.1974 0.4063 0.0464",
-        "f0_timestep": "0.005",
+        "ph_num": "2 2 2 2 2 2 1 1 2 2 2 2 1 2 1 2 2 2 2 1 1",
+        "note_seq": "rest F4 G4 A4 A#4 A4 C5 C5 rest C4 G4 A4 A4 rest C4 E4 F4 F4 C4 A3 rest",
+        "note_dur": "0.32 0.188 0.212 0.2 0.2 0.4 0.2 0.4 0.2 0.4 0.4 0.2 0.4 0.2 0.3 0.5 0.2 0.4 0.6 0.4 0.05",
+        "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "f0_seq": "348.4 348.4 [...] 217.1 217.1",
-        "input_type": "phoneme",
-        "offset": 128.48
+        "f0_timestep": "0.005"
     },
     {
+        "offset": 134.88,
         "text": "SP 只 看 着 眼 下 SP 匆 匆 一 簇 繁 华 SP 在 手 中 融 化 SP",
-        "ph_seq": "SP zh ir k an zh e y En x ia SP c ong c ong y i c u f an h ua SP z ai sh ou zh ong r ong h ua ua SP",
-        "note_seq": "rest F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 rest C4 C4 C4 C4 A4 A4 A4 A4 C5 C5 C5 C5 rest G4 G4 G4 G4 G4 G4 A4 A4 A4 A4 A4 rest",
-
"note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.4 0.4 0.2 0.2 0.3999999 0.3999999 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.4000001 0.4000001 0.2 0.1999998 0.1999998 0.2000003 0.2000003 0.3999996 0.3999996 0.2000003 0.2000003 0.5999999 0.5999999 0.8000002 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0", - "ph_dur": "0.2438 0.0813 0.3135 0.0929 0.2902 0.1045 0.3135 0.0813 0.0929 0.1161 0.3947 0.1045 0.0929 0.1277 0.0813 0.1393 0.058 0.1277 0.0697 0.1393 0.058 0.1161 0.0813 0.4063 0.0929 0.1045 0.1161 0.0813 0.1509 0.058 0.3019 0.0929 0.1045 0.0929 0.6037 0.8011 0.0464", - "f0_timestep": "0.005", + "ph_seq": "SP zh ir k an zh e y En x ia SP c ong c ong y i c u f an h ua SP z ai sh ou zh ong r ong h ua SP", + "ph_dur": "0.2438 0.0813 0.3135 0.0929 0.2902 0.1045 0.3135 0.0813 0.0929 0.1161 0.3947 0.1045 0.0929 0.1277 0.0813 0.1393 0.058 0.1277 0.0697 0.1393 0.058 0.1161 0.0813 0.4063 0.0929 0.1045 0.1161 0.0813 0.1509 0.058 0.3019 0.0929 0.1045 0.0929 1.4048 0.0464", + "ph_num": "2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest F4 G4 A4 C5 C5 rest C4 C4 A4 A4 C5 C5 rest G4 G4 G4 A4 A4 A4 rest", + "note_dur": "0.32 0.4 0.4 0.4 0.2 0.4 0.2 0.2 0.2 0.2 0.2 0.2 0.4 0.2 0.2 0.2 0.4 0.2 0.6 0.8 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0", "f0_seq": "219.5 219.5 219.5 219.7 251.7 252.9 254.2 255.4 256.7 258.0 259.2 260.5 261.8 263.1 264.4 265.7 267.0 268.3 269.7 271.0 272.3 273.7 275.0 276.4 277.7 279.1 280.5 281.9 283.3 284.7 286.1 287.5 288.9 290.3 291.8 293.2 294.7 296.1 297.6 299.0 300.5 302.0 303.5 305.0 349.2 348.8 348.4 348.0 347.5 347.2 346.7 346.3 345.9 345.5 345.0 344.6 344.2 343.8 343.3 343.0 342.5 342.1 341.7 341.4 340.9 341.8 347.8 351.2 353.3 352.9 351.9 351.2 350.8 350.5 349.2 348.1 347.7 347.5 347.0 346.0 345.3 344.6 344.1 344.2 344.4 344.9 345.2 345.5 345.8 346.2 346.5 346.9 347.2 347.6 347.8 348.3 348.6 348.9 349.2 349.6 349.9 350.3 350.5 350.6 350.6 350.6 350.5 350.2 349.9 349.4 349.0 349.1 349.3 349.4 349.6 349.7 349.6 349.2 348.6 347.8 344.3 337.3 327.9 317.8 314.6 321.7 329.2 335.0 339.1 342.6 343.1 343.5 344.0 344.8 347.2 350.3 353.8 356.6 359.3 362.0 365.6 368.8 371.1 374.0 380.6 387.8 393.6 395.5 396.0 396.3 395.5 395.2 395.2 395.2 395.2 394.8 394.3 393.9 393.5 393.1 392.9 392.7 392.3 392.1 392.2 392.5 392.8 392.9 393.1 393.4 393.6 393.8 393.8 393.6 393.1 392.8 392.3 392.0 391.6 391.1 390.0 389.2 388.2 387.2 386.2 387.0 387.8 388.6 389.3 390.2 391.0 391.8 392.6 393.2 393.6 394.2 394.7 394.8 394.2 393.1 392.0 391.0 389.7 387.4 384.6 377.0 362.7 347.6 332.1 315.8 309.0 304.2 304.2 306.6 313.1 320.9 329.0 336.1 343.4 351.5 360.4 369.4 379.1 391.3 403.9 416.4 426.1 434.0 441.3 444.6 445.9 446.0 445.5 444.7 443.6 442.0 440.6 439.4 437.8 437.0 436.2 436.3 436.5 436.6 436.7 437.0 437.5 438.0 438.6 439.2 439.6 440.4 441.7 443.1 442.8 442.4 441.9 441.5 441.1 440.5 440.1 439.6 439.2 438.8 439.0 439.3 439.6 439.9 440.2 440.5 440.7 440.3 439.7 439.3 439.5 439.6 439.9 440.0 440.2 440.5 440.6 440.8 440.9 441.2 441.3 441.3 440.4 439.4 438.6 438.5 438.2 438.3 438.7 440.5 442.8 445.9 450.5 455.3 460.7 467.2 475.1 482.5 489.2 496.9 505.1 513.1 519.2 523.6 527.3 530.2 532.1 533.1 532.9 531.7 530.2 528.3 527.8 527.1 526.2 522.9 519.6 515.3 508.1 498.2 484.3 471.8 470.2 474.4 480.4 487.1 490.2 491.3 491.9 493.5 499.3 506.5 512.6 516.2 518.6 516.9 514.6 512.4 510.9 511.1 511.9 518.2 524.6 529.9 534.3 538.6 540.5 541.8 541.7 540.9 539.8 538.7 535.6 532.6 529.4 525.7 522.3 519.2 516.2 512.9 509.5 508.4 
507.0 506.4 506.7 507.5 509.7 512.1 515.2 519.1 523.3 527.4 531.5 534.9 537.5 539.8 541.9 541.5 540.8 539.5 536.1 531.9 527.8 523.6 518.9 514.0 509.1 505.2 503.5 502.4 501.4 502.9 504.6 506.2 508.6 511.6 514.6 517.3 519.9 522.7 524.8 526.7 528.1 528.6 528.7 528.1 527.4 527.0 526.3 525.8 525.1 526.2 527.2 527.7 526.9 525.7 524.5 523.4 523.1 522.3 518.1 517.7 517.1 516.8 516.4 516.0 514.9 512.9 511.4 509.4 506.9 503.4 498.5 492.4 484.2 471.9 455.3 436.3 415.4 389.6 363.4 339.1 318.1 302.9 290.3 280.1 272.9 268.2 265.0 262.4 260.7 259.6 258.8 258.4 257.5 257.3 257.3 257.3 257.4 257.4 257.4 257.8 259.8 261.2 262.5 264.1 265.1 265.0 264.5 263.2 261.6 260.3 259.6 259.1 258.7 259.1 259.7 260.5 261.4 262.5 263.4 262.7 261.0 257.8 253.1 244.4 234.9 225.3 217.4 213.8 216.2 220.0 225.3 230.1 235.0 240.3 245.6 250.7 255.7 261.1 265.0 267.5 266.1 264.4 262.6 261.5 260.5 259.5 258.7 258.0 258.3 259.0 259.7 260.2 260.5 260.7 260.9 261.2 261.4 261.6 261.8 262.1 262.3 262.7 263.6 264.7 265.0 265.5 266.4 268.6 271.8 275.6 280.2 285.5 292.3 301.2 311.3 322.8 335.8 353.4 373.2 388.6 400.1 408.6 416.0 424.8 431.5 435.9 438.7 441.1 443.3 444.8 445.6 445.9 445.1 443.8 442.7 440.6 434.8 426.7 416.5 407.6 407.8 413.4 420.2 422.6 422.9 421.0 418.9 417.5 416.5 418.3 419.9 421.5 423.0 424.8 424.4 424.4 425.9 428.4 433.9 439.3 440.4 440.8 441.2 441.8 442.1 442.4 442.4 442.1 441.8 441.7 441.4 441.1 441.0 440.8 440.4 440.4 440.8 441.4 442.0 442.7 441.9 441.1 440.0 435.4 419.5 400.7 385.8 383.0 399.5 420.1 437.3 447.2 453.0 458.7 465.2 472.1 479.8 489.6 500.2 511.2 521.0 526.2 528.6 530.2 530.8 530.4 529.9 529.2 528.1 526.9 525.2 523.7 522.6 521.1 520.0 519.1 518.7 518.7 518.4 518.4 516.9 512.8 506.4 492.7 476.2 455.8 437.3 423.5 420.4 421.0 430.3 440.0 450.2 460.5 471.0 481.6 493.5 507.4 516.0 518.2 520.2 522.2 524.3 526.3 528.6 530.7 532.2 532.9 533.0 533.0 531.3 529.5 527.6 525.7 523.5 521.6 519.6 517.4 515.2 512.9 510.9 510.2 509.9 510.7 511.4 512.1 513.1 515.5 518.1 521.1 523.9 526.7 529.9 533.3 536.3 536.7 536.7 537.0 537.0 536.3 532.6 528.8 525.0 520.5 516.6 513.5 510.9 509.2 508.1 507.3 507.9 508.7 509.6 511.3 512.9 514.8 517.1 519.9 522.6 525.7 528.6 530.7 532.4 533.6 534.6 534.4 533.8 531.9 529.3 527.0 524.2 520.5 516.3 511.6 505.4 500.0 495.5 493.5 493.6 494.0 494.3 494.6 495.0 495.3 495.5 495.0 495.0 494.8 494.2 493.2 491.3 489.1 485.9 481.2 474.3 466.3 457.1 445.4 432.9 421.0 410.1 402.0 395.2 389.5 385.8 383.2 381.5 380.0 379.3 378.8 378.6 378.6 378.3 378.4 378.7 378.9 379.2 379.5 379.7 381.0 386.9 392.0 394.6 396.6 396.3 396.0 395.7 395.3 395.0 394.6 393.9 393.3 392.6 391.8 390.5 386.4 381.3 372.6 366.2 373.7 380.4 384.0 387.0 390.4 391.1 390.9 390.5 390.1 389.7 389.3 389.6 389.9 390.5 391.3 392.2 392.9 393.7 394.6 396.3 398.3 399.4 399.1 398.3 397.5 396.4 395.7 395.3 395.0 394.7 394.7 394.4 394.0 393.3 392.4 391.6 390.8 390.1 388.7 384.2 371.4 358.6 346.7 335.7 331.2 334.3 337.1 338.7 340.6 345.2 350.7 356.1 360.7 365.2 369.8 372.5 374.9 377.1 379.8 382.6 386.7 390.8 394.1 396.2 396.6 396.8 396.9 397.0 397.2 397.2 396.4 395.5 394.8 394.0 393.1 392.6 392.3 391.8 391.4 390.9 391.1 391.2 391.5 391.7 391.8 392.1 392.2 392.3 392.6 392.7 393.0 393.2 393.2 392.6 391.5 390.7 390.1 390.0 389.8 389.7 390.3 390.9 391.4 391.7 391.8 391.8 391.7 391.5 391.5 391.5 391.6 391.9 392.1 392.4 392.2 392.0 391.9 391.7 391.5 391.3 391.6 391.9 392.0 392.2 391.5 390.6 389.8 389.2 388.5 387.7 388.8 390.1 391.7 395.0 399.1 405.8 412.3 419.3 427.7 436.5 442.5 446.0 448.4 450.4 452.1 451.1 450.0 448.6 447.2 445.6 443.7 441.9 440.4 
438.6 438.2 437.7 437.6 437.3 436.8 435.7 434.2 431.4 426.0 418.9 407.2 391.4 378.4 369.1 360.5 355.1 354.4 353.6 352.8 352.6 355.9 359.9 364.6 371.7 380.9 390.6 399.7 405.7 409.6 411.5 412.7 413.6 414.7 415.7 416.7 418.0 418.7 418.4 418.0 417.6 417.2 416.7 416.3 415.9 415.4 415.1 414.5 414.1 413.7 413.3 412.9 413.2 413.4 413.7 414.8 416.0 417.5 418.7 420.0 422.3 424.8 427.2 429.6 431.9 434.4 437.0 439.6 441.9 443.3 444.4 445.1 445.8 446.3 446.8 447.3 447.9 447.0 446.0 445.2 444.3 443.3 442.4 441.6 441.0 440.4 439.7 439.2 439.4 439.5 439.7 439.7 440.0 439.9 439.7 439.7 439.5 439.5 439.5 439.3 439.2 439.5 439.8 440.1 440.4 440.7 441.0 441.2 440.8 440.2 439.8 439.7 439.8 440.0 440.0 440.0 440.0 440.3 440.3 440.3 440.3 440.5 440.5 440.5 440.5 440.7 440.8 440.8 440.8 440.6 440.5 440.5 440.4 440.3 440.1 439.6 439.0 439.9 440.3 440.1 439.8 439.5 439.2 438.9 438.6 438.5 438.7 438.7 438.9 438.8 438.2 437.5 437.4 437.2 437.1 437.0 437.0 436.8 437.1 437.4 437.8 438.2 438.5 438.8 439.2 439.9 440.5 441.3 441.5 441.7 441.8 442.0 442.7 443.3 443.8 444.5 445.1 444.5 444.1 443.5 442.9 441.8 440.5 439.3 438.2 437.2 436.5 436.1 435.8 435.4 434.4 433.2 432.3 431.8 431.6 431.3 431.4 431.5 431.8 432.4 433.3 433.9 435.8 437.5 438.6 439.6 440.5 441.5 442.4 443.4 444.6 446.1 447.4 448.4 448.6 448.1 447.2 446.1 445.0 444.0 442.4 440.8 438.0 435.5 433.2 430.7 429.5 428.2 427.5 427.8 428.5 429.7 430.9 432.2 433.4 434.9 437.2 440.0 442.7 445.1 447.6 450.3 451.3 452.4 452.9 451.7 450.0 447.9 445.4 443.1 440.8 438.0 435.4 433.1 430.9 428.5 427.2 426.3 426.1 427.5 429.5 431.7 433.6 436.0 439.0 442.6 445.4 447.6 449.6 450.8 451.7 451.1 450.2 449.6 448.5 445.8 442.8 439.4 435.3 431.7 427.8 423.8 422.0 420.5 418.9 417.8 419.9 422.6 425.7 430.0 434.9 440.3 445.4 450.3 454.2 458.4 462.7 466.4 468.7 469.7 468.9 467.2 465.6 463.4 459.1 451.8 443.8 443.4 442.8 442.4 441.9 441.5 441.0 440.7 440.5 440.5", - "input_type": "phoneme", - "offset": 134.88 + "f0_timestep": "0.005" }, { + "offset": 141.28, "text": "SP 兵 荒 马 乱 的 青 春 年 华 扬 起 的 风 沙 倒 让 人 放 不 下 SP", "ph_seq": "SP b ing h uang m a l uan d e q in ch un n ian h ua y ang q i d e f eng sh a d ao r ang r en f ang b u x ia SP", - "note_seq": "rest D4 D4 E4 E4 F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 C5 C5 C5 C5 D5 D5 C5 C5 C5 C5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 A#4 A#4 A4 A4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.32 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.4 0.4 0.2 0.2 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.2 0.2 0.4000001 0.4000001 0.1999998 0.1999998 0.2000003 0.2000003 0.3999996 0.3999996 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.2786 0.0464 0.1161 0.0813 0.1277 0.0813 0.1509 0.0464 0.1625 0.0348 0.3019 0.0929 0.1277 0.0813 0.3251 0.0697 0.1161 0.0813 0.2902 0.1161 0.1045 0.0929 0.1625 0.0348 0.1045 0.0929 0.1045 0.1045 0.3367 0.058 0.1161 0.0813 0.1509 0.058 0.2902 0.1045 0.3483 0.0464 0.2554 0.1509 0.3947 0.058", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 E4 F4 G4 A4 C5 C5 C5 C5 D5 C5 C5 C5 C5 A#4 A#4 A#4 A4 G4 A4 rest", + "note_dur": "0.32 0.2 0.2 0.2 0.2 0.4 0.2 0.4 0.2 0.4 0.2 0.2 0.2 0.2 0.4 0.2 0.2 0.4 0.4 0.4 0.4 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "440.0 440.0 440.0 440.0 390.0 388.0 386.0 384.1 382.2 380.2 378.3 376.4 374.5 372.6 370.7 368.9 367.0 365.2 363.3 361.5 359.7 357.9 356.1 354.3 352.5 
350.7 348.9 347.2 345.4 343.7 341.9 340.2 338.5 336.8 335.1 333.4 331.7 330.1 328.4 326.7 325.1 323.5 321.8 320.2 293.7 293.2 292.6 292.3 291.9 291.3 290.9 290.5 290.0 289.5 289.1 288.7 288.1 287.8 287.4 286.8 286.4 285.9 285.5 285.1 284.6 284.6 287.1 289.6 292.0 293.0 293.7 294.4 294.8 294.5 294.0 293.4 292.9 292.3 291.9 291.3 291.1 291.7 292.6 294.2 295.9 297.3 297.6 296.6 292.9 287.8 275.8 265.2 255.9 247.7 240.5 238.4 237.4 238.3 240.8 246.7 254.5 264.8 275.7 287.9 300.4 312.8 322.6 327.2 329.0 330.0 330.5 330.8 331.2 331.8 332.1 332.1 332.1 332.1 331.8 331.3 330.7 330.2 329.6 329.3 329.2 329.1 329.0 328.8 328.5 327.2 325.3 323.8 322.6 322.0 322.8 324.2 325.2 325.6 325.8 326.2 326.8 328.0 330.0 333.3 338.9 344.4 347.6 349.0 350.0 350.6 351.0 350.9 350.4 349.8 349.4 348.9 348.4 348.0 347.4 346.8 346.8 346.7 346.5 346.4 346.6 346.7 346.9 347.1 348.0 349.0 350.2 351.9 353.7 355.7 357.8 357.4 357.3 356.6 354.6 351.9 350.7 353.4 357.4 364.3 372.4 380.0 385.5 389.5 392.2 394.5 396.1 397.3 397.9 398.2 397.9 396.7 395.4 394.3 393.0 391.5 390.1 388.9 388.1 387.0 386.2 385.5 386.0 386.8 387.5 388.4 389.4 390.1 391.1 391.9 391.8 391.5 390.6 387.0 379.5 366.6 355.6 355.9 366.7 388.7 415.1 431.3 439.1 443.0 445.8 448.5 450.4 451.7 451.5 450.7 449.8 447.8 444.5 441.2 437.7 434.2 430.5 428.5 426.5 424.0 421.8 422.8 424.2 425.8 427.6 429.7 431.9 434.3 436.8 439.7 442.5 445.4 447.4 447.9 447.9 447.9 447.5 445.2 442.7 439.8 437.0 435.4 433.9 432.7 431.3 429.7 430.9 431.8 432.9 434.0 433.7 432.4 425.9 415.4 403.3 390.6 383.4 383.4 390.3 405.2 423.1 430.9 433.7 435.2 436.6 436.5 435.3 434.8 435.4 436.5 436.0 435.8 437.8 440.5 443.6 447.7 452.7 458.2 465.2 477.5 491.9 506.0 517.2 525.2 530.1 533.3 532.9 531.9 530.2 528.0 525.1 524.0 522.9 522.2 522.7 523.6 524.7 525.7 526.6 527.3 528.1 525.5 520.7 511.5 499.7 481.5 463.0 447.1 434.2 420.6 407.5 403.4 399.2 397.5 400.8 406.5 419.0 431.0 444.0 463.2 485.1 499.3 513.7 525.1 532.4 538.3 537.8 537.0 535.6 533.7 531.8 529.6 527.7 526.0 524.0 521.7 519.5 517.4 515.5 513.4 513.1 513.1 513.4 513.4 513.6 515.5 517.7 519.8 521.7 524.0 525.7 527.5 529.0 530.6 531.9 531.8 531.4 530.8 529.5 527.6 525.7 525.3 524.9 524.2 522.7 520.5 519.0 519.0 519.2 519.6 519.9 521.8 523.6 525.1 526.8 528.7 530.3 531.7 531.9 531.8 531.8 531.2 530.1 529.1 527.8 526.6 525.4 523.5 520.5 516.6 512.5 508.9 507.6 506.8 505.9 504.8 504.3 504.3 505.6 511.1 518.4 524.5 525.7 525.9 526.2 526.6 527.0 527.3 527.5 527.7 527.5 526.8 526.4 525.9 525.4 525.1 524.6 523.3 520.4 516.0 511.9 507.5 503.2 499.0 492.8 485.4 467.4 449.8 429.8 407.8 393.8 384.1 385.4 391.3 398.8 407.0 424.4 441.9 461.6 484.6 498.8 511.1 521.1 525.1 527.3 529.9 530.3 530.6 530.7 531.1 530.2 529.5 528.7 528.0 527.3 526.6 525.8 524.8 524.3 524.2 524.2 524.2 524.0 523.7 523.1 522.6 522.0 521.6 521.1 520.6 520.8 521.2 521.6 521.9 522.3 522.6 523.0 523.4 523.7 523.9 524.2 524.5 524.9 525.2 525.6 526.0 525.6 525.2 524.9 524.5 524.2 523.8 523.4 523.1 522.9 522.6 522.6 522.6 522.5 522.3 522.3 521.5 520.5 519.9 518.7 514.6 509.7 505.9 504.4 503.8 505.7 508.4 512.8 519.3 527.7 537.3 547.4 554.8 562.1 571.0 580.6 584.8 588.9 592.0 594.4 596.9 597.8 598.3 597.6 594.8 591.1 587.0 585.5 584.1 582.6 581.3 583.1 585.2 586.8 588.9 590.7 592.0 593.1 592.9 589.4 584.3 564.9 549.1 540.3 540.1 543.3 545.9 546.6 543.1 537.9 531.8 521.5 511.9 502.2 491.0 479.8 472.7 483.2 495.6 508.5 516.6 523.9 526.4 528.3 529.3 529.6 528.8 527.8 527.1 526.4 524.8 523.2 521.6 520.2 519.0 519.3 520.0 520.5 521.2 522.3 523.9 525.3 526.5 527.8 529.1 530.6 524.1 507.5 
491.8 475.8 459.2 448.9 450.8 462.3 480.0 501.1 520.6 527.0 529.6 528.6 526.3 526.0 525.9 526.0 527.0 528.4 528.8 529.2 529.7 530.2 530.6 529.8 529.1 526.6 515.5 491.6 470.8 473.9 486.6 501.0 505.4 507.0 509.1 512.6 516.8 516.6 516.1 516.8 518.0 519.1 520.2 521.4 522.9 525.8 530.0 533.9 537.3 536.9 535.9 534.8 533.6 532.2 530.2 528.4 526.1 523.9 523.9 524.2 524.2 524.2 524.5 523.7 519.9 514.6 502.8 479.3 455.1 439.2 435.4 438.9 444.3 447.2 449.5 450.7 451.7 452.4 455.3 458.4 460.9 462.6 464.0 465.7 467.1 468.3 469.1 469.4 471.3 478.7 486.1 493.2 496.5 498.0 497.1 495.9 494.2 491.9 489.1 486.5 484.1 481.7 479.0 478.0 477.3 476.4 475.5 474.6 475.9 477.9 481.6 485.9 490.8 495.7 500.1 504.4 508.9 513.7 518.1 522.0 524.0 525.6 527.2 529.0 530.1 530.1 529.5 528.1 526.6 525.1 523.7 522.8 522.0 521.3 520.8 520.8 521.1 521.4 521.8 522.3 523.2 524.2 525.1 526.0 525.9 525.4 524.8 524.5 523.8 523.4 520.4 509.8 496.2 472.5 454.0 442.4 434.1 437.0 454.7 476.5 495.4 509.8 516.6 511.4 503.0 494.6 486.4 478.7 470.9 464.8 462.2 461.2 461.9 463.9 465.7 466.2 466.0 466.2 466.8 467.5 468.1 468.7 468.6 468.5 468.1 468.1 468.1 468.1 467.8 467.8 467.6 467.3 467.0 466.4 465.9 465.5 465.0 463.7 462.1 461.3 461.8 462.7 462.7 462.6 462.3 462.1 462.3 465.1 468.5 471.3 472.1 471.3 469.9 468.9 469.0 469.3 469.2 468.9 468.5 468.2 467.9 468.2 468.6 468.7 468.1 467.6 467.1 466.4 465.8 465.1 464.5 463.9 463.2 462.5 461.9 461.5 461.3 461.6 461.6 461.7 461.9 461.7 461.3 461.0 460.5 459.3 457.7 456.0 454.1 451.1 448.4 445.5 442.3 439.4 436.9 435.4 434.0 432.7 431.4 430.2 429.1 428.3 429.2 430.4 431.7 434.2 437.3 440.5 444.3 448.2 451.6 455.3 459.2 463.2 466.5 468.5 470.2 471.9 471.2 470.5 469.9 469.3 468.6 467.9 467.3 466.7 466.0 465.4 464.9 465.4 465.9 466.6 467.2 469.5 471.1 471.4 471.3 471.3 471.0 469.9 466.9 462.0 451.1 437.7 420.1 404.6 392.8 383.5 386.9 391.0 395.8 401.2 406.8 412.9 418.0 421.5 425.0 428.5 432.1 436.5 440.4 444.5 450.0 455.3 459.9 464.1 468.8 473.5 480.1 485.5 486.3 483.9 480.7 477.8 475.2 473.1 470.7 467.2 463.9 460.7 458.2 456.0 453.7 450.9 449.1 448.3 448.0 447.7 447.4 447.0 446.8 446.7 447.4 448.4 449.3 450.2 451.1 452.1 451.1 449.9 448.9 447.9 446.7 445.3 442.5 439.7 436.6 433.4 426.7 420.1 414.3 408.5 402.1 398.3 394.7 392.0 390.9 390.6 390.4 390.1 390.2 391.8 394.0 396.3 398.4 400.5 402.8 405.1 407.0 406.6 405.9 404.9 403.9 402.0 397.1 391.9 384.8 370.9 355.5 341.8 332.8 323.9 314.4 316.0 327.7 341.7 361.8 384.4 393.1 399.1 398.5 396.5 395.9 396.1 396.7 397.4 396.8 395.9 395.5 395.0 394.4 394.3 393.8 393.3 391.9 390.8 390.2 390.0 389.7 389.4 389.3 389.6 390.2 390.5 391.0 391.5 392.1 392.7 393.1 393.1 393.1 393.1 393.1 393.4 393.4 393.4 393.4 393.4 393.0 392.6 392.3 391.5 388.4 384.5 381.1 376.7 367.5 356.8 347.2 341.4 337.9 337.1 337.5 342.4 346.8 351.1 355.7 360.3 365.0 369.2 372.8 376.5 380.4 384.5 387.9 390.6 393.3 396.1 399.3 402.6 405.8 409.5 413.9 417.9 421.2 425.0 429.7 434.7 439.7 444.5 448.3 451.9 455.5 456.1 456.3 456.0 455.6 454.7 453.4 450.7 448.1 445.2 442.3 438.9 436.2 433.4 430.5 428.7 426.9 426.0 426.4 427.5 428.5 431.9 434.9 437.8 441.3 444.9 448.4 452.0 454.5 455.8 456.6 457.3 457.6 456.3 453.9 449.8 445.0 440.4 436.4 432.3 428.5 424.8 422.1 420.6 419.8 421.1 422.6 424.3 426.0 428.6 431.7 434.8 437.5 440.3 443.3 444.9 446.1 447.0 447.0 446.5 446.1 444.1 442.2 440.5 438.4 436.5 437.0 437.5 438.0 438.4 439.0 439.6 440.1 441.1 442.0 440.3 440.0 439.9 439.7 439.6 439.5 439.2 439.0 439.0 439.0", - "input_type": "phoneme", - "offset": 141.28 + "f0_timestep": "0.005" }, { + "offset": 
147.28, "text": "SP 心 的 呼 喊 你 听 见 了 吗 SP", - "ph_seq": "SP x in d e h u h an n i t ing j ian l e m a a a SP", - "note_seq": "rest C4 C4 D4 D4 A4 A4 C5 C5 C5 C5 A4 A4 C5 C5 C5 C5 D5 D5 D5 D5 rest", - "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.8 0.8 0.4 0.4 0.4000001 0.4000001 0.8 0.8 0.4000001 0.4000001 0.4000001 0.4000001 0.7999997 0.7999997 1.2 1.6 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0", - "ph_dur": "0.1625 0.1625 0.3367 0.0697 0.2438 0.1509 0.6269 0.1741 0.3019 0.0929 0.2786 0.1277 0.685 0.1161 0.3135 0.0813 0.2438 0.1625 0.7895 1.2074 1.6022 0.0464", - "f0_timestep": "0.005", + "ph_seq": "SP x in d e h u h an n i t ing j ian l e m a SP", + "ph_dur": "0.1625 0.1625 0.3367 0.0697 0.2438 0.1509 0.6269 0.1741 0.3019 0.0929 0.2786 0.1277 0.685 0.1161 0.3135 0.0813 0.2438 0.1625 3.5991 0.0464", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D4 A4 C5 C5 A4 C5 C5 D5 D5 D5 rest", + "note_dur": "0.32 0.4 0.4 0.8 0.4 0.4 0.8 0.4 0.4 0.8 1.2 1.6 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 1 1 0", "f0_seq": "439.7 439.7 439.7 439.7 376.5 373.8 371.2 368.6 366.0 363.5 360.9 358.4 355.9 353.4 350.9 348.5 346.1 343.6 341.2 338.8 336.5 334.1 331.8 329.5 327.2 324.9 322.6 320.3 318.1 315.9 313.7 311.5 309.3 307.1 305.0 302.9 300.7 298.6 296.5 294.5 292.4 290.4 288.3 286.3 261.6 262.2 262.9 263.5 264.1 264.8 265.4 266.1 266.7 267.3 268.1 268.7 269.4 270.0 270.6 271.3 271.9 272.6 273.2 273.9 274.6 274.8 272.5 269.4 266.0 262.2 258.9 257.0 255.4 253.7 251.8 251.3 251.0 251.1 251.2 251.4 251.6 252.0 252.3 252.8 253.3 253.7 254.3 254.6 255.1 255.7 256.1 256.4 256.5 256.6 257.9 259.2 260.3 260.8 261.2 261.6 262.2 262.6 262.8 262.7 262.5 262.9 263.1 263.3 263.2 262.8 262.6 262.3 262.1 261.8 261.9 262.1 262.3 262.5 262.4 262.2 262.0 261.9 261.9 261.6 261.0 260.3 259.7 259.0 258.3 257.3 256.5 255.5 253.7 250.4 246.8 242.5 238.4 234.9 232.0 231.2 235.8 243.7 254.4 268.1 281.2 289.7 293.3 294.2 294.3 294.2 295.3 296.3 296.7 297.3 297.6 296.7 295.9 295.1 294.2 293.2 292.1 291.3 290.4 289.3 288.3 287.7 287.3 287.4 287.6 287.8 288.5 289.2 289.7 290.5 291.1 291.8 292.6 293.1 293.7 293.8 294.0 294.1 294.2 294.2 294.3 294.3 294.4 294.4 293.8 293.2 290.2 287.1 283.3 278.9 276.1 277.9 281.8 285.6 289.3 292.0 294.1 296.3 298.4 301.1 304.2 307.1 310.2 312.9 316.0 318.6 320.9 323.3 325.6 328.0 330.6 333.6 337.2 340.8 344.0 348.0 353.8 358.9 363.7 370.1 377.6 385.9 398.5 408.6 415.9 418.9 419.3 419.9 419.4 417.8 415.5 414.2 412.3 410.9 409.4 407.7 406.7 406.1 404.9 403.7 404.2 404.8 405.7 407.4 409.4 411.5 414.5 417.3 420.1 423.2 425.5 428.0 430.4 432.5 433.9 435.4 436.1 436.7 437.2 437.8 438.0 438.2 438.1 437.8 437.5 437.0 436.8 437.0 437.1 437.2 437.5 437.7 437.8 438.1 438.2 438.5 437.8 437.5 437.8 438.6 439.2 439.2 439.4 439.5 439.5 439.5 439.6 439.7 439.7 439.4 439.0 438.8 439.0 439.1 439.4 439.5 439.8 439.9 439.5 439.0 439.2 439.6 439.9 440.2 440.4 440.8 441.0 441.0 441.0 441.0 441.0 440.4 440.1 440.0 440.2 440.3 440.3 440.5 440.5 440.7 440.8 440.8 440.9 441.0 441.0 441.3 441.2 440.9 440.5 440.1 439.7 439.3 439.7 440.3 440.9 441.5 441.0 441.1 441.4 441.7 442.0 442.1 442.4 442.7 442.8 442.0 441.3 440.5 438.9 434.5 429.0 415.5 399.8 387.4 379.5 377.6 381.1 384.3 387.6 392.2 397.0 401.9 406.9 411.3 416.1 421.6 424.6 426.2 427.5 429.3 433.4 438.2 442.9 446.6 450.3 453.7 457.8 461.9 465.8 471.0 479.3 495.1 510.0 519.1 524.5 528.4 529.7 530.3 531.3 532.2 532.4 531.9 530.9 529.5 527.9 526.0 524.6 523.9 523.3 522.5 521.7 520.9 521.3 521.6 522.0 522.0 522.4 522.8 523.1 523.5 523.9 524.2 
524.6 524.9 525.3 525.4 525.8 526.1 526.4 526.8 527.2 525.7 524.2 523.2 522.7 522.3 522.0 521.6 521.3 520.9 520.5 520.3 520.7 521.0 521.1 521.4 521.8 522.2 522.5 522.9 523.3 523.6 523.6 522.5 521.1 519.6 518.2 516.7 514.3 511.1 508.1 508.4 508.8 509.1 509.5 509.8 511.2 512.4 514.3 519.3 527.5 535.7 539.8 540.1 539.4 538.3 536.8 535.6 534.4 533.0 531.5 529.6 528.0 526.5 524.8 523.3 522.8 522.2 521.9 521.3 520.8 520.4 519.8 519.6 519.9 520.2 520.6 521.2 522.0 523.0 524.2 524.2 524.2 524.2 523.9 523.9 523.9 524.0 524.5 525.2 526.3 527.0 527.8 528.1 527.9 527.2 526.7 526.2 524.5 516.6 499.0 483.3 481.9 487.1 494.7 504.8 514.4 519.4 521.1 521.7 520.8 519.7 519.3 520.7 522.3 524.2 524.5 524.5 524.3 523.3 518.7 507.3 496.5 487.7 478.7 468.3 456.2 444.5 432.6 418.0 403.7 391.4 389.0 394.3 402.0 407.2 411.8 413.4 414.9 416.9 417.5 418.1 418.8 419.5 419.1 418.7 418.0 416.8 415.8 414.8 414.6 414.6 414.7 414.8 414.8 414.8 415.2 417.4 419.8 422.3 424.5 426.7 428.8 430.9 433.4 436.5 439.6 442.0 443.7 445.1 445.9 446.2 446.4 446.2 445.8 445.4 444.1 442.9 441.8 440.5 439.2 437.3 435.6 434.2 433.1 432.2 431.7 431.8 432.1 432.6 434.4 436.6 438.4 439.3 440.1 441.0 441.7 442.7 444.0 445.6 447.2 448.4 447.8 447.0 446.1 444.3 442.4 440.9 439.7 438.5 437.5 436.2 434.9 434.3 434.6 435.2 435.9 436.7 437.5 439.3 441.8 443.9 445.2 446.2 447.2 448.5 448.8 449.1 449.2 449.1 448.5 446.3 444.2 441.9 438.7 434.9 431.4 428.3 426.4 425.9 426.2 426.8 428.6 430.5 432.5 434.7 436.9 438.8 440.6 442.9 445.1 446.1 447.0 447.9 448.6 447.7 446.4 445.2 444.1 442.8 441.5 440.3 439.0 437.9 436.7 435.4 433.9 431.2 428.5 424.3 413.6 391.2 373.4 368.2 371.1 376.7 385.3 393.5 401.5 410.8 418.4 426.4 434.4 441.3 448.7 457.4 463.7 470.1 476.1 483.3 491.0 501.7 512.2 519.8 523.6 525.7 526.5 527.4 527.7 527.3 526.6 525.4 523.9 522.7 521.7 520.5 519.8 519.0 517.8 516.6 515.5 514.6 514.9 515.3 515.9 516.3 517.8 519.2 520.4 521.7 523.3 524.8 526.3 527.3 527.5 527.5 527.2 527.2 527.0 526.2 525.1 523.9 522.6 521.5 520.7 521.1 521.9 522.5 522.9 523.7 524.5 525.1 525.5 525.9 526.2 526.6 526.8 526.6 526.4 526.1 526.0 525.6 525.2 524.7 524.0 523.3 522.5 521.9 521.4 520.7 519.9 519.9 519.6 519.5 519.6 520.2 521.1 522.1 522.9 523.7 524.8 525.8 526.6 527.1 526.7 526.0 525.2 524.8 525.1 525.4 524.8 523.6 522.6 522.2 521.8 521.4 521.0 520.5 520.5 520.8 520.8 520.8 520.8 521.0 521.1 521.1 521.2 521.4 521.6 522.0 522.6 522.0 521.9 522.4 523.3 524.5 524.5 527.8 528.2 528.5 528.9 529.2 529.6 530.0 530.9 523.9 530.7 530.2 529.6 529.0 528.4 527.2 524.8 520.8 515.7 512.2 507.5 501.1 495.9 488.8 482.3 477.1 472.4 465.6 458.9 454.2 449.2 446.4 444.9 443.3 441.3 440.0 440.0 440.0 438.5 438.5 439.0 440.0 441.6 442.8 444.9 447.7 449.8 451.8 453.9 456.8 460.8 482.9 493.9 509.8 522.0 528.4 530.2 529.6 528.8 528.4 527.8 527.2 526.6 526.0 525.4 525.4 524.8 524.8 524.8 524.8 525.4 527.2 528.4 529.2 530.5 531.5 533.2 535.1 536.7 539.6 543.3 547.0 551.2 555.0 559.2 561.1 564.7 568.6 571.3 574.9 578.2 581.3 586.7 589.7 591.5 593.1 595.2 596.9 599.0 600.4 601.1 601.1 601.1 601.1 601.1 600.8 600.4 599.7 598.3 596.6 595.9 595.7 595.2 594.5 593.8 592.4 591.8 590.8 589.7 589.7 589.0 588.0 588.0 587.3 589.4 589.8 590.0 590.0 589.8 589.7 589.7 589.6 589.4 589.4 588.7 587.9 587.5 587.3 587.3 587.3 587.4 587.7 587.7 587.7 587.7 587.2 586.5 585.9 585.4 586.0 586.2 586.0 585.8 585.4 585.0 584.7 585.0 585.2 585.6 586.0 586.3 586.4 586.8 587.2 587.3 587.8 588.2 588.6 588.7 589.0 589.5 589.6 589.0 588.2 587.3 587.4 587.7 587.7 587.9 588.0 588.1 588.3 588.3 588.6 588.7 589.0 588.9 588.1 
586.9 586.0 586.8 587.3 587.2 586.5 586.0 586.5 587.2 587.3 587.1 586.7 585.9 585.5 585.0 584.6 585.3 586.1 586.1 586.0 586.0 585.6 585.6 585.5 585.5 586.4 587.3 586.9 586.7 586.4 586.1 585.6 585.2 584.8 584.6 584.4 585.0 585.6 585.5 585.3 585.0 585.0 584.5 584.0 583.3 582.7 583.3 584.0 584.5 584.6 584.9 585.0 585.0 585.1 585.3 585.3 585.3 585.6 586.0 586.7 586.5 586.0 585.6 585.2 584.6 584.0 583.6 583.7 583.9 584.1 584.5 584.6 585.8 586.4 586.5 586.1 585.6 585.3 585.1 584.8 584.4 584.3 583.9 583.5 583.1 582.9 582.6 582.3 582.9 583.5 583.9 583.3 582.4 581.9 581.9 582.2 582.6 583.0 583.3 583.5 583.9 584.3 584.7 585.1 585.3 585.6 585.3 584.9 584.5 584.1 583.7 583.3 582.9 582.9 582.9 582.9 582.9 582.9 582.8 582.6 582.6 582.6 582.6 582.6 582.8 583.2 583.6 584.4 584.8 585.4 585.9 586.3 587.1 587.5 587.8 588.0 587.7 587.7 587.5 587.1 586.5 585.6 584.8 584.0 583.6 583.9 584.3 585.0 585.5 585.8 586.0 586.0 585.6 585.6 585.6 585.6 585.3 585.3 585.5 585.8 586.2 586.7 587.2 587.4 586.5 585.2 585.0 584.9 584.6 584.6 584.6 585.3 586.1 586.9 587.3 586.8 586.0 585.3 585.5 585.8 586.0 586.3 586.4 586.5 586.1 585.7 585.3 584.9 584.5 584.1 583.9 584.6 585.8 586.3 586.0 585.4 585.6 586.0 586.1 586.5 586.7 587.0 587.2 586.9 586.5 586.1 585.3 585.0 585.0 585.2 585.3 585.3 585.3 585.4 585.6 585.6 585.6 585.7 586.0 586.0 586.0 585.6 585.6 585.3 585.3 585.0 585.0 584.9 584.6 584.6 584.4 584.3 583.9 584.2 585.0 585.8 587.0 587.1 587.2 586.8 586.1 585.6 585.0 585.1 585.3 585.6 585.6 585.9 585.5 584.9 584.4 583.9 584.0 584.3 584.3 584.5 584.6 584.7 585.0 585.0 585.2 585.3 585.0 584.8 584.6 584.4 584.3 583.9 583.8 583.6 583.9 584.3 584.5 584.0 583.6 584.1 585.0 585.6 585.0 584.4 584.5 585.0 585.4 585.8 586.0 586.2 586.0 585.6 585.3 585.1 584.7 585.3 586.1 586.7 586.3 585.7 585.0 585.7 586.1 586.3 586.3 586.3 586.2 586.0 586.0 586.0 586.0 585.6 585.0 584.4 584.0 583.3 583.0 583.3 583.4 583.9 583.9 584.4 584.6 584.8 585.2 585.3 585.8 586.6 587.2 586.8 586.3 586.1 586.5 586.7 586.9 587.3 586.6 586.3 586.5 587.2 587.7 587.6 587.3 587.1 587.0 586.7 586.7 586.5 586.3 586.1 586.0 585.9 585.8 586.0 586.2 586.7 587.1 587.5 587.8 588.3 588.7 588.9 588.0 587.5 587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.5 587.7 587.7 587.7 587.7 587.7 587.7 587.7 587.7 587.5 586.7 586.3 586.8 587.7 588.6 588.3 588.2 588.0 587.7 587.3 587.2 586.8 586.7 586.3 585.1 584.4 583.4 582.4 581.9 581.5 581.3 581.1 580.7 580.2 579.8 579.8 580.8 582.0 583.3 583.7 583.9 584.6 586.0 588.0 588.4 588.8 589.2 589.6 590.0 590.5 590.9 591.3 591.4 590.4 589.0 587.6 586.3 584.9 583.3 582.0 580.6 579.3 577.9 576.6 575.6 575.7 575.9 576.2 576.6 577.1 578.1 579.2 580.1 581.3 582.9 584.4 585.6 587.3 589.0 590.5 592.8 595.0 596.9 597.6 598.1 598.4 597.4 596.1 594.8 593.6 591.1 588.8 586.5 584.6 582.6 581.1 579.4 577.6 575.9 573.9 572.9 573.5 574.9 576.2 577.8 580.5 583.1 586.1 589.4 592.4 595.2 597.3 599.5 601.8 603.8 605.3 606.3 606.1 603.5 600.9 597.9 594.9 592.0 589.4 586.7 583.5 580.6 577.2 575.2 573.7 572.8 573.0 573.7 575.6 578.0 580.0 582.1 584.5 586.7 587.9 589.2 590.3 591.5 592.8 593.6 594.3 594.7 595.1 594.5 593.6 592.8 592.0 591.2 590.4 589.5 588.8 588.0 587.2 586.7 585.8 585.0 584.2 583.4 582.6 581.8 581.0 580.4 580.2 580.6 580.6 580.6 580.8 580.9 580.9 581.8 582.9 583.6 584.7 585.6 586.5 585.6 585.3 586.1 587.0 588.2 588.7 588.2 587.2 587.3 587.8 588.2 587.8 587.4 586.7 586.2 585.6 584.6 583.5 582.3 581.0 579.9 579.0 577.8 576.6 576.1 577.1 579.7 587.2 596.2 596.2 596.0 595.9 595.9 595.5 595.5 595.5 595.5 595.5", - "input_type": "phoneme", - 
"offset": 147.28 + "f0_timestep": "0.005" }, { + "offset": 155.28, "text": "SP 当 命 中 的 雨 滴 在 SP 手 中 蒸 发 干 净 SP", "ph_seq": "SP d ang m in zh ong d e y v d i z ai SP sh ou zh ong zh eng f a g an j ing SP", - "note_seq": "rest D5 D5 D5 D5 C5 C5 C5 C5 A4 A4 A4 A4 G4 G4 rest G4 G4 F4 F4 F4 F4 E4 E4 F4 F4 G4 G4 rest", - "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.2 0.2 0.5999999 0.5999999 0.4 0.4 0.2 0.2 0.4000001 0.4000001 0.1999998 0.4000001 0.4000001 0.2 0.2 0.6000001 0.6000001 0.3999996 0.3999996 0.8000002 0.8000002 0.8000002 0.8000002 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.2902 0.0348 0.2786 0.1277 0.3019 0.0929 0.1625 0.0348 0.4528 0.1509 0.3367 0.058 0.1045 0.1045 0.3947 0.0697 0.1277 0.3135 0.0929 0.1277 0.0697 0.4412 0.1625 0.3483 0.0464 0.6618 0.1393 0.8011 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest D5 D5 C5 C5 A4 A4 G4 rest G4 F4 F4 E4 F4 G4 rest", + "note_dur": "0.32 0.4 0.4 0.2 0.6 0.4 0.2 0.4 0.2 0.4 0.2 0.6 0.4 0.8 0.8 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "588.7 588.7 588.7 588.2 582.9 582.7 582.5 582.3 582.1 581.9 581.7 581.5 581.3 581.0 580.8 580.6 580.4 580.2 580.0 579.8 579.6 579.4 579.2 579.0 578.7 578.5 578.3 578.1 577.9 577.7 577.5 577.3 577.1 576.9 576.7 576.4 576.2 576.0 575.8 575.6 575.4 575.2 575.0 574.8 587.3 583.5 579.7 576.4 573.3 569.6 565.9 562.6 559.4 555.7 552.1 548.6 545.3 542.2 539.0 535.5 532.0 528.8 525.8 522.6 519.3 519.9 538.9 557.4 571.6 579.2 585.1 587.1 588.3 589.8 591.4 591.3 590.6 589.0 587.1 585.0 584.0 583.0 582.3 581.2 580.2 579.3 579.7 579.9 580.2 580.6 581.2 582.5 583.6 583.7 583.3 584.5 585.8 586.9 588.1 589.7 591.0 591.9 592.3 592.7 592.8 593.0 591.9 590.6 587.8 584.3 584.2 583.9 584.3 585.4 586.0 586.5 587.3 588.1 589.1 590.0 591.3 592.4 593.3 594.3 595.5 594.2 592.6 590.9 588.0 584.3 581.7 579.1 576.9 574.2 570.0 566.2 563.9 562.1 560.5 559.5 559.1 558.7 558.7 560.1 564.1 568.6 572.4 573.1 571.2 568.0 564.9 562.6 560.5 557.8 555.3 552.9 550.5 548.4 547.0 545.5 544.1 543.3 542.6 542.1 543.0 543.7 544.5 545.2 545.9 546.7 550.3 553.6 556.8 560.2 563.7 567.5 571.8 575.8 579.5 581.3 582.5 583.9 586.0 588.8 591.4 594.1 595.4 596.5 597.7 599.0 597.8 596.8 595.7 594.7 593.5 591.8 590.2 588.9 587.6 588.0 588.3 587.9 587.5 587.1 586.3 585.6 583.1 578.3 566.5 551.8 534.6 521.0 514.1 512.7 513.4 511.2 508.8 507.4 507.8 508.9 506.7 505.4 506.2 507.5 508.9 514.8 520.7 525.4 527.8 529.3 529.4 529.5 529.2 528.6 528.4 526.8 525.1 523.7 522.3 520.8 521.5 522.0 522.6 523.4 524.2 524.8 525.1 525.2 525.4 525.7 524.5 523.6 522.3 519.4 515.7 511.4 500.6 485.8 467.3 446.9 446.6 455.8 470.5 489.1 502.5 514.4 522.2 527.6 531.4 534.6 537.7 540.3 541.4 541.5 541.1 540.7 540.3 539.6 537.6 535.2 532.8 530.4 528.4 525.9 523.3 520.5 518.7 517.4 516.0 514.9 513.9 515.0 516.3 518.1 520.8 523.6 526.4 529.1 532.3 536.1 539.3 541.0 542.2 543.3 542.6 541.9 540.3 537.5 534.3 530.9 527.4 524.3 521.3 518.4 516.0 513.4 511.3 510.5 510.9 511.6 512.3 513.3 514.9 517.1 519.0 521.3 523.9 526.7 529.9 533.3 534.2 534.1 533.8 533.2 532.7 532.3 531.9 531.0 528.7 526.0 523.0 520.8 519.5 518.4 517.2 516.4 516.9 517.4 518.0 518.7 519.5 520.4 521.7 523.3 523.9 524.2 524.8 525.2 525.6 526.0 526.4 526.7 527.0 527.4 527.2 526.8 526.3 525.6 524.5 522.9 520.2 517.5 514.7 511.4 508.1 503.2 498.0 493.0 487.2 480.4 474.0 468.1 463.3 458.0 452.4 448.7 445.2 442.7 440.1 437.7 436.5 435.8 435.6 436.1 437.2 438.5 440.1 441.7 443.3 444.1 444.7 445.4 446.1 446.2 445.9 
445.6 445.2 445.0 444.7 444.3 444.0 443.4 442.7 441.7 441.0 440.1 439.9 440.5 441.1 440.8 439.9 439.4 439.2 439.0 439.0 439.0 439.2 439.4 439.7 439.7 440.1 440.3 440.4 440.7 441.0 441.1 441.4 441.5 441.3 441.3 441.0 440.8 440.6 440.5 440.3 440.2 439.9 439.7 439.4 439.0 437.3 428.8 415.7 399.3 383.7 376.1 375.4 377.0 382.1 389.3 397.6 410.4 423.8 439.2 452.6 454.7 451.1 447.2 443.8 442.8 442.4 441.9 441.7 441.3 441.0 441.4 441.9 442.0 441.8 441.5 441.0 440.7 439.5 434.9 429.2 410.5 392.7 379.9 373.6 372.8 372.6 372.4 372.0 371.8 371.5 368.5 365.3 362.4 359.3 357.0 354.4 352.8 353.5 355.1 356.6 364.1 372.0 378.4 382.5 386.2 390.3 391.9 393.4 395.2 397.5 398.6 399.1 398.7 398.2 397.5 396.6 394.8 392.9 390.8 389.7 388.6 387.6 386.7 385.9 384.8 385.4 386.0 386.6 387.2 387.7 388.7 390.6 392.8 394.8 396.3 398.0 399.3 400.7 402.1 401.6 401.0 400.5 400.0 399.1 397.0 394.4 392.1 390.0 387.5 385.0 382.6 380.5 379.1 377.8 376.5 377.0 377.5 378.0 378.7 381.1 383.8 386.6 389.1 391.5 393.6 395.5 396.7 396.8 395.9 394.0 391.8 389.3 386.1 382.2 376.9 371.7 367.8 366.8 367.0 367.0 366.5 366.0 365.5 365.0 364.5 364.0 363.5 363.0 362.5 362.0 361.1 360.2 359.4 358.6 357.8 356.8 356.1 355.3 354.5 353.5 352.7 351.9 351.1 350.2 349.4 348.6 347.8 347.0 346.2 345.4 344.5 343.7 343.0 342.2 341.3 340.5 339.8 339.0 338.1 337.3 338.1 345.8 353.0 358.1 362.6 362.9 363.3 363.5 363.6 362.4 361.0 359.4 357.5 355.4 353.1 351.4 352.0 352.9 354.1 355.1 356.4 357.9 360.2 362.9 365.7 370.0 373.9 377.7 381.9 386.4 389.2 392.0 394.1 395.1 395.6 396.4 396.7 396.1 395.0 393.8 392.9 391.8 390.9 390.0 388.8 389.1 389.2 389.4 389.5 389.7 390.4 391.1 391.6 392.3 392.9 393.2 391.6 389.9 386.7 377.1 361.1 349.7 349.4 355.7 364.5 365.1 366.5 370.4 378.9 388.6 391.1 392.0 389.4 383.6 376.7 368.9 361.7 354.4 345.8 336.9 339.5 345.1 349.7 352.0 353.5 353.5 353.6 353.5 353.3 353.3 352.0 350.7 349.8 348.8 347.6 348.1 348.6 349.0 349.7 350.2 349.3 348.4 347.5 346.8 345.8 339.6 332.6 322.5 309.7 297.4 290.4 284.6 281.9 279.8 277.5 278.5 280.7 286.0 294.1 306.1 319.5 331.4 338.2 343.5 346.0 347.7 348.1 348.2 348.4 348.6 348.7 348.2 347.8 347.3 346.8 346.3 345.9 345.4 344.9 344.4 345.2 345.8 346.4 347.1 347.8 348.5 349.1 349.7 350.5 351.3 351.7 351.5 351.1 350.7 350.2 349.9 349.6 349.2 348.9 348.4 348.5 348.6 348.7 348.8 349.2 349.7 350.0 350.4 350.8 351.3 351.7 352.1 352.3 352.0 351.5 350.9 350.5 350.0 349.5 349.0 347.8 346.7 345.6 344.6 343.4 343.7 343.9 344.1 344.4 344.6 344.8 344.9 345.3 345.7 346.2 346.7 347.2 347.6 348.4 349.2 350.0 350.6 350.5 350.3 350.0 349.6 348.6 347.4 346.1 344.8 341.1 336.7 331.4 325.3 319.0 304.7 292.1 282.1 276.2 272.4 275.6 278.8 281.6 284.6 287.8 289.9 292.0 293.7 295.9 297.9 300.5 303.1 305.4 307.5 309.7 311.8 313.4 314.8 316.3 317.7 318.9 320.8 323.7 327.2 331.3 331.9 332.5 333.0 332.9 332.5 332.3 332.0 331.5 330.9 330.2 329.3 328.7 328.3 328.3 328.5 328.7 329.0 329.2 329.1 328.9 328.8 328.6 328.6 329.0 329.4 329.4 329.2 329.1 329.1 328.9 329.0 329.4 329.7 329.8 329.8 329.8 329.8 329.8 329.8 330.2 330.5 330.8 331.2 331.5 331.3 331.3 331.2 331.0 330.8 330.2 329.4 327.8 323.0 311.8 295.7 281.0 275.8 278.4 284.8 292.6 298.0 303.0 307.6 311.5 315.1 317.2 319.3 321.4 323.6 326.2 329.0 331.5 334.0 337.2 342.4 348.2 353.2 355.3 356.5 356.2 355.3 355.0 354.5 353.8 353.1 352.3 351.4 350.7 349.8 348.8 348.2 348.0 347.9 347.7 347.2 346.8 346.4 346.1 346.3 346.6 346.9 347.5 348.2 349.2 350.4 351.3 351.1 351.0 351.4 351.9 352.4 352.1 351.8 351.5 351.0 350.7 350.3 350.1 349.7 349.4 349.1 349.7 350.1 350.1 349.8 349.5 350.1 
350.4 350.3 349.8 349.6 349.3 348.9 348.5 348.2 347.8 347.4 347.1 346.7 346.4 346.1 346.6 347.0 347.5 348.0 350.0 351.8 352.8 353.7 354.9 355.9 356.9 357.6 357.8 357.2 356.2 355.1 354.0 352.7 351.3 349.2 347.2 345.4 343.3 341.3 340.8 340.3 339.8 339.5 339.7 341.2 342.6 344.0 345.5 347.2 348.7 350.2 351.6 352.5 353.5 354.5 354.9 354.6 353.8 352.9 352.1 351.4 350.8 350.6 350.6 350.6 350.3 349.8 349.6 350.0 350.6 351.4 351.8 351.3 351.3 352.1 352.1 352.1 352.1 352.1 352.7 352.7 352.7 352.1 348.2 345.2 342.4 338.7 335.2 330.8 327.4 323.2 320.4 324.5 327.4 332.3 337.3 340.1 342.4 343.8 343.8 343.8 344.6 345.2 345.2 345.6 346.4 346.9 349.8 353.1 359.6 370.9 380.8 389.4 395.4 397.4 399.1 400.1 400.8 401.2 400.9 400.6 399.5 398.0 396.6 394.0 391.9 389.6 387.3 385.9 384.6 383.5 382.6 382.5 382.6 382.7 383.3 384.7 386.7 388.8 391.0 393.0 394.9 396.8 398.2 399.3 400.4 401.0 400.8 400.5 399.9 399.3 398.8 398.3 397.0 395.3 394.0 392.8 391.4 389.7 388.4 387.7 387.2 386.7 386.2 385.6 385.3 385.5 386.0 386.6 387.2 387.9 388.9 390.1 392.0 393.8 395.5 397.1 399.0 400.2 401.0 401.5 401.8 401.9 401.2 400.3 399.5 398.6 397.1 395.4 393.7 392.0 390.4 388.4 386.4 384.4 382.6 381.6 380.8 380.8 381.3 382.4 383.6 385.5 387.5 389.7 391.9 394.3 397.0 399.8 401.2 402.6 403.7 404.9 404.6 404.1 403.3 402.2 400.6 399.1 396.8 394.4 392.0 389.4 386.6 383.8 381.5 380.1 378.5 377.1 376.1 376.9 377.9 378.7 379.7 381.9 384.0 386.2 389.3 392.7 396.1 399.5 402.2 403.9 405.1 406.2 405.9 405.4 404.4 402.1 399.4 396.1 393.2 390.4 387.0 383.9 381.0 378.6 377.1 376.0 375.4 376.6 379.3 382.5 386.2 390.2 395.8 399.8 398.3 385.9 386.4 387.5 387.9 388.4 388.5 388.8 389.6 389.7 389.7", - "input_type": "phoneme", - "offset": 155.28 + "f0_timestep": "0.005" }, { + "offset": 161.68, "text": "SP 我 总 在 思 考 存 在 和 消 失 的 意 义 SP", "ph_seq": "SP w o z ong z ai s i0 k ao c un z ai h e x iao sh ir d e y i y i SP", - "note_seq": "rest A4 A4 A4 A4 G4 G4 G4 G4 F4 F4 G4 G4 A4 A4 A4 A4 G4 G4 G4 G4 A4 A4 A4 A4 C5 C5 rest", - "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.225 0.225 0.5749999 0.5749999 0.4 0.4 0.2 0.2 0.5999999 0.5999999 0.4000001 0.4000001 0.2249999 0.2249999 0.5750003 0.5750003 0.3999996 0.3999996 0.8000002 0.8000002 0.8000002 0.8000002 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.2786 0.0464 0.2902 0.1161 0.3251 0.0697 0.1161 0.1045 0.4296 0.1509 0.2554 0.1393 0.1161 0.0929 0.4412 0.1509 0.267 0.1393 0.0929 0.1277 0.476 0.1045 0.2902 0.1045 0.6502 0.1509 0.8011 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A4 A4 G4 G4 F4 G4 A4 A4 G4 G4 A4 A4 C5 rest", + "note_dur": "0.32 0.4 0.4 0.225 0.575 0.4 0.2 0.6 0.4 0.225 0.575 0.4 0.8 0.8 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "391.8 391.8 391.8 391.8 395.9 396.0 396.2 396.4 396.6 396.8 396.9 397.1 397.3 397.5 397.6 397.8 398.0 398.2 398.4 398.5 398.7 398.9 399.1 399.2 399.4 399.6 399.8 400.0 400.1 400.3 400.5 400.7 400.9 401.0 401.2 401.4 401.6 401.8 401.9 402.1 402.3 402.5 402.7 402.8 440.0 438.4 436.6 435.2 433.8 431.9 430.4 429.0 427.5 425.8 424.3 422.7 421.0 419.6 418.1 416.5 415.0 413.5 412.1 410.6 409.1 407.4 406.1 405.0 403.8 401.2 398.3 396.8 395.5 394.4 395.4 396.8 398.3 399.8 402.2 404.9 407.8 410.7 413.9 417.7 421.6 424.7 428.0 430.8 433.8 436.5 439.0 441.1 441.7 442.0 442.0 442.4 442.3 441.7 441.1 440.3 439.4 438.8 438.1 437.3 437.5 437.5 437.7 437.9 438.0 438.2 439.4 440.4 441.5 442.8 444.1 444.8 444.2 443.4 441.1 430.4 416.8 399.2 382.0 364.1 349.8 346.3 349.5 
353.3 357.3 361.8 366.2 369.4 372.5 376.0 379.5 383.0 387.2 390.7 394.6 398.8 403.0 406.1 406.4 405.7 405.1 420.1 435.7 447.0 451.1 452.4 450.1 448.1 446.5 444.7 443.1 441.5 439.7 438.2 436.8 435.2 433.6 431.9 430.8 430.3 430.0 429.6 429.5 429.6 430.6 432.4 434.0 435.5 437.0 438.0 438.7 439.7 440.6 441.3 442.1 443.1 443.8 444.3 444.2 443.7 443.3 442.9 442.4 442.0 441.2 440.3 439.3 438.7 438.9 439.0 439.2 439.6 439.9 440.2 440.4 440.8 441.1 441.4 441.7 442.0 442.3 441.6 440.8 440.2 438.7 433.7 425.8 412.1 398.2 382.9 368.3 359.8 353.6 353.4 354.4 355.3 360.0 364.3 367.8 371.9 378.4 385.6 391.3 392.7 393.4 394.5 393.7 393.3 393.1 393.3 393.6 391.8 390.0 388.3 385.9 376.9 367.0 357.0 345.1 331.2 317.7 319.6 322.8 328.2 335.1 342.6 345.6 348.4 351.0 353.9 355.1 356.2 357.3 358.8 360.6 362.4 364.1 364.0 363.7 363.5 363.0 363.4 367.0 371.5 378.6 387.5 395.9 399.8 402.6 403.7 403.9 404.5 404.1 402.5 400.6 398.4 394.4 390.8 387.5 383.9 381.5 379.6 378.2 378.2 378.6 379.1 381.9 384.6 387.0 390.0 392.9 398.1 403.0 406.3 409.4 411.2 412.7 413.6 413.3 412.2 411.2 407.3 403.0 398.8 394.2 389.3 384.3 379.8 376.0 373.4 371.5 372.4 373.3 374.6 377.1 380.4 384.8 388.7 392.7 397.1 401.6 403.9 406.3 408.4 410.2 409.8 408.9 407.9 406.6 403.9 400.5 396.9 392.8 388.9 385.0 380.8 378.9 377.6 377.6 378.1 380.0 382.4 386.4 390.3 394.2 397.0 399.5 401.6 403.2 405.0 406.8 405.2 402.1 396.3 387.9 372.1 349.9 333.7 330.0 342.3 365.7 379.4 387.9 394.9 401.1 401.9 400.1 394.8 388.8 382.0 374.1 366.0 359.2 353.7 349.0 344.4 339.0 334.0 328.9 322.5 315.3 316.2 318.8 324.5 332.3 341.1 344.0 346.4 347.5 348.3 349.2 348.9 348.3 347.5 346.6 345.6 344.8 343.8 343.2 343.2 343.4 343.7 344.0 344.7 345.6 346.6 347.6 348.2 348.7 349.3 350.0 349.6 349.2 348.8 348.5 348.0 347.6 347.7 347.8 347.8 348.0 348.0 348.1 348.2 348.2 348.4 348.5 348.7 348.9 349.2 349.4 349.7 349.9 350.2 350.2 349.2 348.2 345.3 336.7 321.5 305.4 301.2 297.0 293.1 288.6 283.5 286.8 290.6 296.2 303.6 311.1 319.1 326.7 331.8 336.9 342.8 348.6 351.3 353.6 357.8 368.9 381.9 392.9 398.4 401.7 401.6 399.9 398.3 397.0 395.9 394.3 393.7 393.1 392.6 391.9 391.3 390.8 390.5 390.2 389.8 389.3 389.0 388.6 388.3 387.8 384.8 381.5 376.6 366.3 349.7 332.9 318.3 310.1 305.1 303.8 307.4 314.3 328.7 345.2 367.0 391.3 407.4 421.3 428.8 433.9 437.5 439.2 440.0 440.0 440.0 439.7 439.1 438.2 437.0 435.3 433.7 431.8 430.1 428.5 426.8 427.2 427.8 428.5 429.0 430.0 432.4 435.5 438.2 440.9 443.8 445.9 447.8 449.8 451.3 452.0 452.4 452.7 452.7 451.5 448.6 445.4 442.2 439.1 436.5 433.6 430.4 427.6 426.1 425.3 425.2 425.5 428.2 430.8 433.3 436.1 439.2 442.7 445.6 448.2 450.0 451.6 452.6 452.4 451.5 449.7 447.2 444.5 441.7 438.7 435.8 433.9 432.6 431.1 429.9 428.9 429.7 430.9 432.1 433.4 436.1 439.2 442.2 444.1 445.6 447.3 449.0 450.7 451.6 450.3 447.9 445.6 439.6 433.6 427.6 419.2 409.6 397.9 387.3 378.5 372.6 368.3 371.0 373.6 376.0 378.4 381.1 383.8 386.2 388.1 389.9 391.8 393.5 395.4 397.1 398.7 400.7 403.7 406.2 407.7 409.1 411.7 414.9 418.3 422.1 426.1 430.7 432.3 433.8 435.3 436.3 434.2 431.4 428.6 426.1 423.8 422.1 420.9 421.0 421.1 421.3 421.6 422.1 423.5 425.0 426.6 428.2 430.6 432.7 434.9 437.3 439.7 440.4 441.0 441.6 442.2 442.8 443.3 443.0 442.7 442.2 441.8 441.5 441.1 441.0 441.2 441.5 442.1 442.4 442.8 443.3 443.6 443.8 442.5 440.5 434.8 419.2 393.9 380.2 377.7 383.4 391.3 399.4 407.7 414.2 418.6 422.8 419.8 417.2 414.8 412.6 410.5 408.3 405.7 403.8 402.6 401.4 399.6 397.5 395.0 391.8 387.9 385.0 386.5 388.6 391.3 394.7 396.0 395.8 395.7 395.9 396.3 395.9 391.0 383.7 370.5 
354.1 339.8 332.1 326.7 323.7 322.1 323.7 326.1 330.5 335.5 339.5 343.0 345.4 347.6 350.1 352.5 355.0 357.6 360.0 363.0 366.6 370.0 373.0 375.3 377.7 380.2 389.1 397.2 401.3 401.2 400.0 398.6 397.3 396.3 395.5 395.0 394.3 393.6 393.1 392.6 392.0 391.4 390.7 390.3 390.2 390.2 390.2 390.2 390.2 390.2 390.4 390.8 391.2 391.4 391.7 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.2 392.7 393.0 393.3 393.5 393.4 393.3 393.1 393.0 392.9 392.7 392.6 392.4 392.3 392.2 392.0 392.1 392.3 392.4 392.4 392.7 392.4 392.2 392.1 391.8 391.5 391.5 391.2 391.0 390.9 390.6 390.4 390.3 390.7 391.0 391.5 391.5 391.5 391.5 391.5 391.5 391.8 392.1 392.4 392.6 392.9 393.0 393.2 393.5 393.8 394.0 394.0 393.8 393.8 393.6 393.6 393.5 393.4 392.7 388.1 374.7 350.9 330.8 320.8 318.7 323.0 328.8 334.0 339.7 347.7 356.6 362.6 368.9 374.7 381.3 388.8 394.2 399.6 405.4 412.0 417.2 423.0 432.6 440.4 445.4 448.5 450.5 452.3 453.3 454.1 453.2 451.8 450.3 449.0 447.5 445.9 444.1 442.7 441.4 440.6 440.3 439.9 439.4 439.1 438.8 439.2 439.6 440.1 440.4 440.9 441.3 441.8 441.7 441.4 441.3 441.0 441.0 440.7 440.3 439.6 439.0 438.6 438.0 437.6 437.5 437.5 437.7 437.7 437.7 437.9 438.0 438.0 438.0 438.1 438.2 438.2 438.2 438.3 438.5 438.5 438.2 437.9 437.6 437.2 436.2 434.9 433.4 432.4 431.8 431.5 431.2 431.0 431.7 433.1 434.6 436.5 438.7 441.0 442.9 445.2 446.1 446.9 447.8 448.5 449.1 449.0 448.6 447.9 447.0 446.1 445.1 444.3 442.5 441.0 439.4 437.7 436.0 435.2 434.7 434.1 434.2 434.5 434.8 434.9 435.1 435.4 437.0 438.3 439.0 439.6 440.3 440.9 441.4 441.4 441.3 441.3 441.0 441.0 440.9 440.6 440.0 439.0 438.1 437.3 436.4 435.4 435.5 435.7 435.8 436.0 436.0 436.2 436.3 436.6 437.1 437.7 438.4 438.9 439.5 440.1 441.0 441.5 441.3 441.1 440.8 440.8 440.4 440.2 440.1 440.8 441.5 440.9 440.4 439.9 439.4 438.7 438.3 438.7 439.1 439.2 439.2 439.2 439.1 439.0 439.0 439.0 439.0 438.9 438.5 437.8 437.2 437.8 438.1 438.1 437.8 437.5 437.2 437.0 436.8 436.9 437.0 437.7 438.6 439.1 439.2 439.2 439.0 439.0 439.1 439.4 439.7 444.1 444.1 443.6 443.3 443.0 442.8 442.5 442.0 441.3 441.3 441.2 441.0 440.7 440.5 440.2 439.9 439.7 439.4 439.1 438.5 438.5 438.5 438.5 440.5 440.5 440.5 439.1 438.4 437.9 437.3 436.2 434.7 433.7 432.7 432.2 432.2 432.2 432.2 432.2 432.2 434.2 434.9 436.0 436.0 437.7 442.5 448.2 452.1 458.9 465.9 474.0 482.2 490.2 498.3 506.0 511.4 515.7 518.7 521.5 523.6 526.6 529.0 529.8 529.5 528.7 528.3 527.4 526.4 525.1 523.3 521.5 518.9 516.9 515.9 515.5 515.2 515.2 515.3 515.5 515.5 516.9 518.4 519.4 519.9 520.2 520.6 521.0 521.4 522.6 524.2 525.4 526.9 527.9 527.9 527.5 529.6 529.4 529.0 527.9 527.5 526.7 525.5 524.3 522.7 520.8 519.0 517.5 515.9 514.3 512.8 511.7 510.9 510.5 510.4 510.5 511.0 512.0 513.4 514.9 516.8 518.6 520.7 522.6 524.9 526.9 528.6 530.2 531.8 532.9 533.5 533.8 533.7 533.0 531.4 529.6 527.7 524.8 522.0 519.0 516.0 513.6 511.4 509.5 508.1 507.4 507.2 507.5 508.5 510.0 512.5 515.2 518.3 521.1 524.1 527.5 530.5 532.8 534.7 536.2 537.0 537.2 536.7 535.7 534.2 532.3 530.1 527.9 525.6 522.9 520.4 518.3 516.5 514.9 513.4 512.8 512.8 512.9 513.4 514.3 515.5 516.9 518.3 519.8 521.1 522.6 523.8 524.7 525.4 525.7 525.8 525.0 523.6 521.4 519.3 517.7 516.6 516.0 516.3 518.4 521.3 524.6 528.7 532.7 536.4 538.9 540.1 540.6 540.0 539.2 538.8 538.0 537.0 536.2 535.1 533.9 532.8 531.8 531.5 531.5", - "input_type": "phoneme", - "offset": 161.68 + "f0_timestep": "0.005" }, { + "offset": 168.08, "text": "SP 错 愕 于 SP 一 个 习 惯 本 不 属 于 自 己 SP", "ph_seq": "SP c uo e y v SP y i g e x i g uan b en b u sh u y v z i0 j i SP", - 
"note_seq": "rest F4 F4 E4 F4 F4 rest D4 D4 E4 E4 F4 F4 G4 G4 A4 A4 G4 G4 G4 G4 F4 F4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.32 0.4 0.4 0.2 0.4 0.4 0.1999999 0.4 0.4 0.1999999 0.1999999 0.4000001 0.4000001 0.5999999 0.5999999 0.4000001 0.4000001 0.2 0.2 0.6000001 0.6000001 0.3999996 0.3999996 0.8000002 0.8000002 0.8000002 0.8000002 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.1741 0.1509 0.4063 0.1509 0.0464 0.3947 0.1277 0.0697 0.3135 0.0929 0.0813 0.1161 0.3367 0.0697 0.4992 0.0929 0.3367 0.0697 0.0813 0.1161 0.4644 0.1393 0.2786 0.1161 0.5689 0.2322 0.8011 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 1 2 1 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 E4 F4 rest D4 E4 F4 G4 A4 G4 G4 F4 G4 A4 rest", + "note_dur": "0.32 0.4 0.2 0.4 0.2 0.4 0.2 0.4 0.6 0.4 0.2 0.6 0.4 0.8 0.8 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "524.2 524.2 524.2 523.9 459.7 457.1 454.5 451.9 449.4 446.8 444.2 441.7 439.2 436.7 434.2 431.7 429.3 426.8 424.4 422.0 419.6 417.2 414.8 412.4 410.1 407.7 405.4 403.1 400.8 398.5 396.3 394.0 391.8 389.5 387.3 385.1 382.9 380.7 378.5 376.4 374.2 372.1 370.0 351.0 343.8 337.7 331.6 326.4 322.0 316.6 312.7 309.7 305.1 301.6 297.7 294.2 291.0 287.1 283.0 280.6 279.3 278.6 278.3 279.3 280.9 289.1 299.5 303.8 310.1 314.6 316.6 319.5 322.3 323.4 324.2 324.9 324.9 325.3 325.3 324.9 324.9 324.5 324.2 324.2 324.2 324.9 325.3 327.0 328.5 330.0 332.5 334.4 335.8 337.7 340.7 343.0 345.6 348.0 350.2 352.3 353.9 354.7 355.3 355.7 356.2 356.2 355.7 354.6 353.9 353.1 352.5 351.9 351.0 350.6 350.1 349.2 349.2 349.1 348.8 348.8 348.8 349.6 349.6 349.6 349.6 349.2 348.8 348.0 347.2 347.0 346.7 346.5 345.8 344.4 342.2 340.3 338.1 336.1 333.9 332.3 330.6 329.1 328.6 328.3 327.9 328.4 329.2 329.6 329.2 328.7 328.4 328.4 328.5 328.6 328.9 329.2 329.5 330.0 330.5 331.0 331.2 331.6 331.7 331.7 331.7 331.7 331.7 331.7 331.6 331.3 330.6 330.0 329.6 329.0 328.1 327.1 326.1 325.5 325.0 324.9 325.2 325.9 327.7 331.4 336.0 340.4 343.9 346.8 349.6 352.5 355.1 357.0 358.6 359.9 360.9 361.6 360.8 359.9 358.7 357.6 356.3 354.1 351.8 349.1 346.6 343.9 341.7 340.5 339.2 337.9 337.9 337.9 338.0 338.7 339.7 342.1 344.6 346.7 348.9 351.5 353.5 355.3 356.8 358.0 359.0 358.5 357.7 357.1 356.5 354.9 352.1 349.3 346.8 344.0 341.8 339.8 337.7 336.4 336.0 336.0 336.3 338.0 340.0 342.0 344.2 347.2 350.0 352.5 354.5 356.6 357.3 358.1 358.7 359.0 357.6 355.5 353.0 349.6 345.7 341.3 337.3 334.5 331.7 328.0 324.2 324.2 324.2 324.3 324.3 324.3 324.2 324.0 324.0 323.8 323.4 323.2 322.8 322.1 320.9 319.1 316.7 313.6 310.1 305.3 300.5 295.7 291.3 287.9 285.2 282.8 281.4 280.3 279.8 279.4 279.3 279.1 279.2 279.4 279.4 279.6 279.8 280.1 280.4 280.7 280.9 280.9 280.1 280.6 281.6 282.7 283.0 283.0 282.4 281.3 280.2 279.2 278.3 277.6 276.7 275.9 275.0 274.2 273.8 274.0 274.8 275.6 276.4 277.3 278.7 280.6 282.4 284.1 285.9 288.2 290.6 292.9 295.0 296.2 297.2 298.3 299.1 299.0 298.7 298.4 297.2 295.8 294.5 293.4 292.2 290.8 289.7 289.1 288.5 288.0 288.1 288.5 289.5 290.6 291.7 293.0 294.1 294.7 295.1 295.6 296.0 296.6 296.5 295.1 292.9 286.1 274.8 265.4 260.0 259.6 264.5 270.5 276.9 282.3 287.2 292.3 298.2 303.9 310.1 317.8 326.4 334.1 335.9 334.8 333.1 332.7 332.5 332.5 332.5 332.1 331.7 331.7 331.3 331.0 330.6 329.7 328.9 328.1 327.4 327.0 326.2 325.8 324.9 324.9 324.2 323.8 323.8 323.8 323.8 324.9 327.7 329.6 332.5 335.0 337.9 340.9 344.4 347.9 351.0 353.0 354.5 356.2 358.4 360.3 361.9 361.8 361.2 360.5 359.7 358.6 357.4 356.5 355.4 354.5 353.5 352.3 
351.5 350.9 350.4 349.8 349.2 349.0 348.8 348.7 348.5 348.2 348.5 348.7 348.9 349.2 349.0 348.8 348.5 348.3 348.4 348.6 348.9 349.1 349.2 349.1 348.6 348.3 348.6 349.1 349.4 349.8 350.3 350.6 351.0 351.4 351.9 352.2 351.9 351.4 350.8 348.4 345.6 341.6 334.4 324.1 309.0 295.4 293.3 300.3 313.4 329.4 330.1 330.4 331.1 331.5 331.0 330.3 330.0 329.8 330.0 330.4 330.6 331.7 335.1 340.2 345.4 353.7 360.3 362.7 363.5 364.3 366.6 368.4 369.8 371.2 371.7 371.6 371.0 370.1 369.1 368.3 367.3 366.8 366.1 365.3 364.7 363.9 363.7 364.7 366.1 367.7 369.2 370.6 372.2 375.0 378.2 381.4 384.6 387.5 390.5 393.1 395.5 396.7 397.7 398.8 400.0 399.7 399.4 399.2 398.7 397.7 396.8 395.7 394.7 393.6 392.7 391.7 391.2 391.0 390.7 390.4 390.1 389.8 390.2 390.6 391.1 391.6 392.0 392.4 392.6 392.9 393.2 393.4 392.8 391.9 392.0 392.5 392.8 393.2 393.5 393.8 393.3 392.9 392.7 392.8 393.1 393.5 393.9 394.2 394.6 395.0 395.3 395.7 396.0 396.4 396.8 397.1 397.2 397.1 396.8 396.6 396.5 396.2 395.9 395.3 394.7 394.1 393.4 393.1 392.4 392.4 393.1 393.1 393.1 392.0 389.7 388.2 384.0 381.5 376.7 372.8 369.6 369.6 370.4 375.2 381.5 392.2 405.1 414.6 422.8 429.2 434.7 437.7 437.7 438.7 439.7 439.7 439.7 439.7 439.7 440.3 441.5 441.5 441.5 441.5 441.5 441.5 440.4 440.1 439.6 438.9 437.7 436.5 434.9 433.3 431.0 429.1 426.7 424.4 421.8 419.3 416.9 414.5 411.9 409.7 407.2 405.2 403.2 401.4 399.9 398.6 397.5 396.7 396.0 395.7 395.6 395.6 396.2 396.7 397.5 398.4 399.3 400.3 401.3 402.0 402.7 403.2 403.3 403.0 400.6 396.8 391.4 385.0 378.0 370.9 364.0 357.6 352.3 348.0 345.5 344.4 345.3 348.0 353.2 359.2 367.1 374.5 382.5 388.7 394.0 396.7 397.2 396.5 395.8 395.2 395.2 394.6 393.7 395.9 395.9 396.3 396.3 396.3 396.3 395.9 395.9 395.3 394.7 394.3 392.4 391.5 390.6 389.6 388.7 387.8 386.4 385.9 385.9 385.9 385.9 387.5 388.4 389.7 391.1 392.4 394.0 395.9 397.7 399.8 401.2 399.8 402.2 402.6 402.6 402.7 402.8 402.5 400.9 399.5 398.0 396.3 394.8 394.0 393.1 391.7 390.2 388.7 387.8 387.5 387.8 388.4 389.0 389.5 390.0 390.5 391.1 391.4 391.7 391.9 392.2 392.4 393.0 393.2 393.5 393.8 394.0 393.5 392.8 392.3 392.2 392.2 392.2 392.2 392.2 392.2 392.2 391.9 391.4 391.2 390.9 391.1 391.4 391.5 391.7 391.9 392.2 392.5 392.8 392.9 392.9 392.4 391.9 391.9 392.0 392.2 392.2 392.4 392.1 391.7 391.4 391.5 392.1 392.3 392.6 392.5 392.0 391.7 391.4 391.2 390.9 390.6 391.1 391.4 391.8 392.3 392.7 392.7 392.6 392.4 392.4 392.4 392.2 392.2 392.2 392.1 392.0 392.0 391.9 391.8 391.6 391.8 392.1 392.2 392.1 391.7 391.1 390.0 388.8 387.9 386.5 384.8 380.7 376.6 372.2 366.7 360.7 355.0 350.4 347.1 344.2 341.3 340.0 339.1 339.0 339.2 339.7 340.4 341.2 342.4 343.7 345.2 346.7 346.9 347.1 347.5 347.8 348.1 348.3 348.7 349.0 349.2 348.5 347.9 347.8 348.0 348.0 348.1 348.2 348.2 348.4 348.4 348.5 348.6 348.7 349.0 349.4 349.9 350.2 350.7 351.0 351.3 351.3 351.3 351.3 352.1 352.1 352.1 352.1 352.1 352.1 352.1 349.8 345.2 339.5 331.5 315.3 301.1 289.1 287.3 291.0 294.7 300.5 309.5 321.7 335.2 340.1 343.8 344.9 345.2 346.0 347.4 348.2 349.0 350.4 352.1 355.1 358.3 367.7 380.2 390.9 396.6 399.7 401.2 401.9 402.4 402.6 402.0 401.5 401.0 399.7 398.2 396.5 395.1 393.6 391.9 390.2 389.6 388.8 388.3 387.8 388.2 388.5 388.9 389.4 390.4 391.5 392.7 393.8 394.5 394.9 395.2 396.3 397.1 398.0 398.9 398.6 398.1 397.7 397.2 396.7 395.4 394.2 393.0 392.0 390.9 389.7 388.9 388.2 387.7 387.2 387.3 387.6 388.0 388.5 389.2 389.7 390.5 391.1 391.7 392.3 392.9 393.7 394.1 394.3 394.1 394.0 394.0 393.8 393.7 393.2 392.7 392.1 392.3 392.7 393.1 393.4 392.4 391.7 391.5 391.5 391.5 391.3 391.3 
391.3 391.3 391.3 391.3 391.1 391.1 391.1 391.1 391.1 391.0 390.9 390.9 390.9 390.5 390.1 390.0 390.5 391.1 391.0 390.8 390.6 390.6 390.4 391.3 392.1 392.7 392.9 392.9 392.7 392.8 393.2 393.9 394.5 394.2 393.9 393.7 393.4 393.1 392.9 392.8 391.7 387.1 381.3 365.0 346.2 333.1 329.7 338.5 348.4 353.7 357.1 357.4 356.6 355.5 354.9 355.3 356.0 356.8 357.8 358.3 358.2 358.0 358.6 360.0 361.5 362.9 364.2 365.5 366.9 368.2 370.1 373.2 378.4 384.3 390.9 397.9 404.9 409.6 410.5 411.2 411.5 411.5 411.2 411.1 411.8 412.5 412.9 413.2 413.3 413.6 413.7 413.9 413.9 414.0 413.5 413.1 412.6 412.9 413.2 413.4 413.0 412.1 411.5 411.5 411.6 411.7 411.7 411.7 412.0 412.2 412.9 414.1 415.3 416.8 419.6 422.3 425.3 428.5 431.6 434.5 437.6 441.3 444.9 446.8 448.6 450.0 451.1 452.1 452.8 452.4 451.8 450.8 448.5 445.9 443.2 440.8 438.5 436.0 433.6 432.0 431.2 431.3 432.2 433.1 433.7 434.4 435.4 438.2 441.2 443.3 445.2 447.3 449.5 450.7 451.8 452.7 452.7 452.4 452.0 451.8 451.5 450.1 448.2 446.4 443.7 441.2 438.5 435.7 432.9 430.7 429.6 428.6 428.0 429.2 430.4 431.7 433.3 435.2 437.5 439.9 442.3 445.0 447.9 450.4 452.2 453.5 454.5 453.9 453.2 452.3 451.2 449.0 446.7 444.1 441.4 438.7 435.8 432.7 430.8 429.5 429.3 429.6 430.4 431.4 432.6 433.7 435.3 437.2 439.4 441.1 442.9 445.0 446.9 448.9 449.8 450.3 450.9 451.6 450.5 449.5 448.6 447.6 446.4 445.4 444.5 443.5 442.5 441.5 440.3 439.4 438.6 437.5 436.5 436.5 436.5 436.3 436.2 436.2 436.2 436.1 436.0 436.0", - "input_type": "phoneme", - "offset": 168.08 + "f0_timestep": "0.005" }, { + "offset": 174.48, "text": "SP 才 发 现 有 时 候 我 其 实 SP", - "ph_seq": "SP c ai f a x ian y ou sh ir h ou w o q i sh ir ir ir SP", - "note_seq": "rest A#4 A#4 A4 A4 A#4 A#4 F4 F4 G4 G4 F4 F4 E4 E4 F4 F4 A4 A4 A#4 G4 rest", - "note_dur_seq": "0.32 0.4 0.4 0.2 0.2 0.4 0.4 0.1999999 0.1999999 0.4 0.4 0.1999999 0.1999999 0.4000001 0.4000001 0.5999999 0.5999999 1.25 1.25 0.3250003 0.4249997 0.2", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0", - "ph_dur": "0.209 0.1161 0.3019 0.1045 0.0929 0.1045 0.3251 0.0697 0.0697 0.1277 0.267 0.1393 0.1393 0.058 0.2322 0.1741 0.3367 0.2554 1.2539 0.3251 0.418 0.209", - "f0_timestep": "0.005", + "ph_seq": "SP c ai f a x ian y ou sh ir h ou w o q i sh ir SP", + "ph_dur": "0.209 0.1161 0.3019 0.1045 0.0929 0.1045 0.3251 0.0697 0.0697 0.1277 0.267 0.1393 0.1393 0.058 0.2322 0.1741 0.3367 0.2554 1.997 0.209", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#4 A4 A#4 F4 G4 F4 E4 F4 A4 A#4 G4 rest", + "note_dur": "0.32 0.4 0.2 0.4 0.2 0.4 0.2 0.4 0.6 1.25 0.325 0.425 0.2", + "note_slur": "0 0 0 0 0 0 0 0 0 0 1 1 0", "f0_seq": "439.2 439.2 439.2 439.6 438.5 438.3 438.1 438.0 437.8 437.6 437.5 437.3 437.1 436.9 436.8 436.6 436.4 436.3 436.1 435.9 435.8 435.6 435.4 435.2 435.1 434.9 434.7 434.6 434.4 434.2 434.1 433.9 433.7 433.5 433.4 433.2 433.0 432.9 432.7 432.5 432.4 432.2 432.0 431.9 466.2 462.1 458.2 454.7 451.1 447.4 443.6 440.1 436.7 432.9 429.2 425.5 421.8 418.8 415.5 412.0 408.4 405.2 402.1 398.6 395.2 394.0 402.7 411.9 421.3 428.5 434.1 436.6 437.2 437.0 437.0 436.6 434.6 432.5 430.4 428.2 426.1 425.6 425.0 424.6 425.0 426.1 428.2 430.7 434.3 438.5 442.7 446.7 450.3 454.2 457.9 461.3 464.0 466.5 468.9 470.2 471.2 471.9 471.7 471.4 471.0 469.0 467.0 465.2 463.3 462.4 461.6 461.7 462.0 462.3 462.9 463.5 465.1 466.9 468.5 470.5 472.3 473.5 472.9 471.8 470.5 464.0 455.9 448.7 442.1 439.0 445.4 451.6 456.6 459.8 461.9 459.5 457.2 455.0 452.6 451.6 450.3 449.0 447.7 446.6 445.9 439.9 436.3 437.7 441.0 444.9 446.1 446.7 446.1 445.2 444.3 
443.2 442.3 442.8 442.8 443.3 443.8 444.4 444.8 445.2 445.6 445.9 445.4 444.3 441.8 440.0 438.5 437.0 431.7 430.6 430.2 429.6 429.5 430.1 431.2 432.7 434.3 435.7 437.1 439.6 446.4 457.8 469.3 478.1 483.4 487.4 487.8 488.2 488.1 487.3 486.2 484.8 481.6 478.7 475.8 472.4 469.3 466.6 465.4 464.7 463.7 464.3 464.8 465.4 465.8 466.4 467.0 467.5 468.1 468.5 469.1 470.2 471.0 470.9 470.4 469.9 469.5 469.0 468.6 467.7 466.4 465.4 464.4 463.5 462.8 463.7 465.0 465.7 466.2 466.8 467.5 468.2 468.9 469.3 469.8 469.9 469.9 469.4 467.6 465.1 462.4 459.4 453.4 447.8 441.3 431.2 420.4 410.3 399.4 386.5 373.9 362.3 354.9 348.7 342.5 340.3 338.0 335.8 335.9 336.5 338.7 341.4 343.9 344.7 344.5 344.8 346.3 348.8 350.9 351.7 352.1 353.1 353.1 353.1 352.5 352.3 351.5 350.2 349.5 348.4 347.6 346.8 345.6 345.2 344.6 344.6 344.6 345.2 345.6 346.4 348.4 347.2 351.0 354.3 358.2 362.8 368.1 372.7 376.2 379.7 383.5 385.3 391.0 396.4 401.5 407.0 408.2 409.0 409.0 407.8 406.1 403.9 401.7 400.0 398.1 396.3 394.4 392.7 391.0 389.2 388.8 388.6 388.6 388.5 388.4 388.8 389.4 389.9 390.4 390.8 391.1 391.4 391.5 391.7 391.9 392.0 391.9 391.8 391.8 391.8 391.1 390.5 390.2 390.3 390.8 391.1 391.4 391.9 392.2 393.0 393.8 394.3 394.2 393.8 393.4 393.1 392.4 391.0 389.6 387.0 378.9 369.7 362.1 358.6 355.9 353.1 350.2 348.3 346.3 344.2 342.6 340.9 339.1 337.6 336.4 335.4 333.7 332.0 330.6 329.4 331.2 339.9 347.4 351.3 354.5 355.3 356.2 355.5 354.9 354.4 354.1 353.8 353.6 353.2 353.1 352.9 352.3 351.6 351.0 350.5 350.0 349.4 349.4 349.4 349.6 349.8 350.4 350.7 349.9 348.7 347.4 344.7 342.2 341.0 339.8 338.3 336.8 335.6 334.4 333.0 331.3 330.6 330.0 330.2 330.7 331.3 331.2 331.2 331.3 331.5 331.3 331.3 331.2 331.3 331.2 330.6 329.9 329.6 329.3 329.1 328.9 328.4 328.2 328.0 327.8 327.9 328.2 328.4 328.5 328.8 329.4 330.2 330.9 331.5 332.0 332.3 332.6 332.8 332.4 331.7 331.2 330.2 326.8 322.9 316.4 304.5 293.1 287.7 287.0 288.7 290.8 292.9 293.7 294.4 295.3 296.9 298.7 300.4 301.9 303.7 304.7 305.8 307.7 309.4 310.5 312.0 313.2 314.3 315.5 316.7 318.2 319.7 321.6 323.3 324.5 325.7 327.3 328.8 329.9 331.1 332.5 336.3 342.0 346.8 350.5 353.7 353.5 353.3 352.9 352.2 351.5 350.8 349.9 349.1 348.4 347.4 347.1 346.4 345.9 345.7 345.6 345.4 345.3 345.8 346.3 346.8 347.0 347.1 347.3 347.6 347.6 347.9 348.1 348.2 348.5 349.2 349.7 349.8 349.8 349.7 349.6 349.6 349.3 349.0 348.7 348.4 348.0 347.7 347.4 347.1 346.6 346.4 346.0 345.7 345.5 345.8 346.3 346.5 346.6 346.5 346.4 346.5 346.8 347.1 347.4 347.8 348.1 348.5 348.7 349.1 349.4 349.8 349.8 349.8 349.8 350.0 350.0 350.0 349.8 349.2 348.2 347.3 345.2 340.9 334.4 321.0 307.6 299.9 298.2 302.0 310.8 319.7 325.0 328.3 329.5 330.2 330.9 331.7 333.6 336.1 338.7 341.2 343.7 345.6 347.7 349.6 351.4 353.1 354.9 357.1 359.5 361.1 362.8 364.3 366.2 369.1 372.0 374.8 376.1 381.3 383.7 385.5 386.8 388.8 389.7 390.6 391.1 392.0 392.4 393.1 393.6 393.6 393.6 394.0 394.0 394.0 394.0 394.0 394.0 393.6 393.1 392.4 392.0 391.4 390.6 390.2 389.7 389.3 389.3 389.3 389.3 390.3 391.1 393.1 395.4 399.3 402.1 405.6 407.9 411.5 412.9 417.5 421.3 424.2 427.0 429.7 432.7 433.7 435.4 436.3 437.9 439.1 440.6 441.9 443.3 444.3 444.3 444.3 444.3 444.3 444.6 444.9 444.9 445.2 445.4 445.4 445.4 445.4 445.4 444.7 444.1 443.3 442.8 442.8 442.3 441.8 441.8 441.3 441.3 440.5 440.5 440.5 440.0 440.0 440.0 440.0 439.5 438.5 438.6 438.7 438.7 439.0 439.1 439.4 439.9 440.3 440.9 440.8 440.4 440.1 439.4 438.9 438.1 437.5 436.8 436.3 435.9 435.5 435.4 435.4 435.6 436.0 436.3 437.0 437.5 438.2 439.0 439.7 440.5 441.5 442.0 442.8 443.3 
443.8 444.1 444.1 444.2 444.0 443.4 442.7 441.8 440.6 439.5 438.3 437.4 436.3 435.6 434.9 434.5 434.4 434.4 435.2 436.2 437.5 439.0 440.8 442.5 444.6 446.4 447.9 449.5 450.5 451.1 451.3 451.5 450.9 450.0 448.5 446.5 444.3 442.0 439.5 437.2 434.8 432.9 431.6 430.4 430.0 430.0 430.1 430.7 431.7 432.9 434.2 435.6 437.0 438.5 439.9 440.9 441.8 442.5 442.9 442.9 442.6 441.8 440.8 439.7 438.5 437.2 435.7 434.2 433.1 432.3 431.4 430.9 430.9 431.2 431.7 432.7 433.9 435.5 437.1 438.7 440.4 442.0 443.5 444.5 445.2 445.6 445.7 445.4 445.1 444.4 443.6 442.9 442.0 441.0 440.4 439.7 439.3 439.2 439.2 439.2 439.6 440.0 440.6 441.0 441.5 442.2 442.7 443.1 443.5 443.6 442.5 442.5 442.3 442.3 442.3 442.3 442.3 442.3 442.3 442.3 442.3 442.3 443.1 443.1 445.1 446.9 448.7 450.8 451.8 453.7 456.6 457.1 459.6 461.1 463.3 464.0 465.1 466.2 467.0 468.1 469.1 469.1 469.1 469.1 467.3 466.2 464.6 464.6 464.6 463.5 461.6 459.5 457.6 455.5 452.6 450.3 449.2 446.9 445.1 443.1 442.3 441.3 440.3 440.3 440.3 440.3 440.3 440.3 440.3 442.3 443.1 444.1 443.1 444.1 444.3 445.4 445.4 445.4 445.4 444.3 443.3 443.3 442.5 442.5 442.5 442.5 440.3 439.8 438.9 437.4 435.6 433.2 430.9 427.8 424.8 421.3 418.1 414.5 411.1 407.6 404.5 401.2 398.4 395.6 393.5 391.3 389.7 388.4 387.6 386.8 386.9 387.4 388.2 389.3 390.7 392.2 393.6 395.1 396.6 397.9 399.0 399.9 400.6 400.9 401.0 400.8 400.5 399.8 399.0 398.3 397.2 396.1 395.0 393.8 392.8 391.8 390.6 389.8 389.0 388.4 388.0 387.7 387.7 387.8 388.2 388.9 389.7 390.9 391.9 392.9 394.0 395.0 395.7 396.3 396.6 396.7 396.3 395.6 394.7 393.5 392.2 391.0 389.7 388.8 388.1 387.7 387.9 388.6 390.0 391.4 392.3 392.4 391.5 390.1 387.9 385.8 384.4 383.6 383.7 384.6 385.5 387.5 387.6 387.9 388.4 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7 389.7", - "input_type": "phoneme", - "offset": 174.48 + "f0_timestep": "0.005" }, { + "offset": 179.68, "text": "SP 是 你 SP", - "ph_seq": "SP sh ir n i i SP", - "note_seq": "rest C4 C4 D5 D5 D5 rest", - "note_dur_seq": "0.32 0.8 0.8 1.2 1.2 0.8 0.2", - "is_slur_seq": "0 0 0 0 0 1 0", - "ph_dur": "0.0929 0.2322 0.5108 0.2902 1.1958 0.8011 0.1974", - "f0_timestep": "0.005", + "ph_seq": "SP sh ir n i SP", + "ph_dur": "0.0929 0.2322 0.5108 0.2902 1.9969 0.1974", + "ph_num": "2 2 1 1", + "note_seq": "rest C4 D5 D5 rest", + "note_dur": "0.32 0.8 1.2 0.8 0.2", + "note_slur": "0 0 0 1 0", "f0_seq": "395.4 395.4 395.4 395.9 395.9 395.4 395.4 395.0 394.5 394.0 393.9 393.6 393.3 392.7 392.2 391.6 391.1 391.0 390.4 389.7 389.3 388.4 387.9 386.8 386.7 385.9 385.5 385.0 384.2 382.8 381.3 379.1 377.3 375.4 372.8 371.1 366.6 366.6 362.4 358.8 355.5 352.4 349.5 346.0 341.6 337.0 332.6 328.7 324.1 319.5 316.3 313.5 310.4 306.8 302.8 300.7 298.0 294.9 291.0 288.3 284.3 280.0 276.1 271.2 265.7 260.5 257.1 255.1 254.4 254.9 254.3 254.8 256.1 258.0 258.2 258.2 258.8 259.1 258.7 258.2 257.5 256.7 255.8 254.7 253.6 252.5 251.7 251.6 251.9 253.0 254.3 255.6 257.1 258.8 260.6 262.4 264.2 265.8 267.5 269.4 271.0 271.4 271.5 271.6 271.8 270.3 268.9 267.6 266.1 263.7 261.1 258.7 256.5 254.0 251.4 249.3 248.2 247.4 246.8 247.2 248.1 249.0 250.6 252.6 254.8 256.7 258.3 259.5 260.6 261.6 262.8 263.7 264.7 265.7 266.8 266.6 266.3 266.0 265.7 264.7 263.5 262.3 261.3 260.2 259.1 258.5 257.9 257.5 256.9 256.4 256.9 257.3 257.8 258.2 258.2 258.1 258.5 258.8 259.1 259.5 260.0 260.2 260.3 260.6 260.9 261.1 261.2 261.4 261.6 261.6 261.8 261.8 261.9 261.4 261.0 261.0 
260.9 260.9 260.9 260.9 260.7 260.8 261.3 262.0 262.7 262.5 262.5 262.4 262.3 262.1 262.0 262.0 262.2 262.5 262.8 263.2 263.5 264.2 264.9 264.8 264.6 264.0 262.6 260.6 259.2 258.4 257.9 257.4 257.0 256.7 256.4 256.4 256.4 256.4 256.5 258.2 259.8 262.2 264.5 268.4 273.4 277.7 285.6 288.0 291.8 298.7 307.1 312.9 321.9 329.1 337.9 348.1 365.4 413.6 453.8 468.7 479.8 488.1 496.2 503.4 511.9 518.4 524.9 532.9 539.7 548.0 549.7 551.0 552.4 553.1 553.1 553.1 552.4 551.4 550.5 549.9 549.3 548.3 547.7 547.0 546.2 545.5 544.8 544.0 543.3 542.6 542.0 541.4 540.8 540.8 540.8 540.8 542.5 543.9 545.2 546.6 549.1 551.6 553.9 556.6 559.9 563.1 566.2 570.4 574.6 578.8 583.3 584.6 583.8 583.3 583.3 583.3 583.3 583.3 583.3 583.2 582.9 582.9 582.9 582.9 582.9 582.9 583.3 584.4 585.3 586.1 586.7 586.7 586.9 587.0 587.5 588.4 589.5 589.8 589.4 588.9 588.5 588.1 587.8 587.3 587.4 587.9 588.2 587.8 587.3 586.9 586.5 586.5 587.2 587.7 587.9 587.7 587.5 587.1 587.0 586.7 586.5 586.3 586.1 585.6 585.7 586.1 586.5 586.7 587.0 587.4 587.7 587.9 588.3 588.7 588.8 589.2 588.9 587.6 586.3 586.1 587.0 587.9 588.1 587.7 587.2 586.9 586.5 585.8 585.3 584.9 584.5 584.5 584.6 585.0 585.3 585.4 585.8 586.0 586.3 586.4 586.3 585.4 585.2 585.3 585.6 585.8 586.1 586.3 586.7 587.0 587.2 587.1 586.5 586.0 585.2 585.2 586.0 586.6 586.7 586.2 586.0 586.2 586.3 586.7 586.7 586.8 587.0 587.0 587.3 586.8 586.0 585.2 585.2 585.3 585.7 586.1 586.5 586.9 586.7 586.1 585.5 585.5 585.9 586.0 586.2 586.0 585.8 585.6 585.3 585.0 585.1 585.3 585.6 586.0 586.1 586.3 586.5 586.4 586.0 585.6 585.6 586.6 586.7 586.3 585.6 585.2 585.1 585.6 586.0 586.1 586.3 586.1 586.0 586.0 585.6 585.6 585.5 585.3 585.6 586.1 586.8 586.6 585.8 584.6 584.4 584.8 585.1 585.6 586.0 586.4 586.8 587.0 586.7 586.7 586.3 586.2 586.0 586.2 587.0 587.8 588.7 589.4 589.7 589.4 589.3 589.0 589.0 588.8 588.7 588.3 588.3 588.1 588.0 587.7 587.8 587.3 586.1 584.6 582.6 580.2 577.9 575.6 573.7 571.9 570.9 570.6 570.6 571.1 571.9 573.4 575.2 577.2 579.6 582.1 584.6 587.3 590.0 592.6 594.8 596.9 598.6 600.0 601.1 601.4 601.5 600.5 599.1 596.5 593.6 589.8 586.4 582.3 578.8 575.1 572.3 569.6 568.0 567.0 567.0 567.3 568.8 570.6 573.1 575.9 579.1 582.3 585.6 589.0 591.8 594.2 596.0 597.3 597.8 597.5 597.0 595.6 594.2 592.1 590.2 587.6 585.2 582.9 580.7 578.6 576.9 575.4 574.7 574.2 574.2 575.1 576.6 578.2 580.2 582.1 583.6 584.8 583.3 583.0 582.6 581.8 580.8 579.6 578.5 576.6 574.6 572.9 571.3 570.1 569.0 568.1 568.5 569.0 569.8 571.3 572.9 574.4 575.9 577.6 579.2 580.6 582.1 583.0 583.5 584.3 583.5 583.0 581.9 579.5 576.2 573.3 570.3 567.6 565.0 563.4 562.2 561.1 560.3 560.6 561.5 562.8 566.5 570.5 574.9 581.6 589.2 596.7 605.4 615.6 623.7 621.8 619.7 618.2 616.3 614.4 612.5 610.6 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8 609.8", - "input_type": "phoneme", - "offset": 179.68 + "f0_timestep": "0.005" }, { + "offset": 185.8, "text": "AP 想 说 再 见 啊 SP 是 再 见 啊 SP 把 你 和 我 留 下 SP", "ph_seq": "AP x iang sh uo z ai j ian a SP sh ir z ai j ian a SP b a n i h e w o l iu x ia SP", - "note_seq": "rest F4 F4 G4 G4 A4 A4 C5 C5 C5 rest C4 C4 G4 G4 A4 A4 A4 rest C4 C4 E4 E4 F4 F4 F4 F4 C4 C4 A3 A3 rest", - "note_dur_seq": "0.6 0.4 0.4 0.4 0.4 0.4 0.4 0.2 0.2 0.4000001 0.1999998 0.4000001 0.4000001 0.4000001 0.4000001 0.1999998 0.1999998 0.4000001 0.1999998 0.4000001 0.4000001 0.4000001 0.4000001 0.2119999 0.2119999 0.388 0.388 0.5999999 0.5999999 0.4500003 
0.4500003 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.4644 0.1393 0.2902 0.1161 0.2902 0.1045 0.3135 0.0929 0.1974 0.3947 0.058 0.1393 0.2554 0.1509 0.3019 0.0929 0.209 0.3947 0.1277 0.0697 0.3135 0.0929 0.2438 0.1509 0.1393 0.0697 0.3251 0.0697 0.3831 0.2206 0.4412 0.058", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 1 1 2 2 2 1 1 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 G4 A4 C5 C5 rest C4 G4 A4 A4 rest C4 E4 F4 F4 C4 A3 rest", + "note_dur": "0.6 0.4 0.4 0.4 0.2 0.4 0.2 0.4 0.4 0.2 0.4 0.2 0.4 0.4 0.212 0.388 0.6 0.45 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 348.4 349.2 348.2 347.2 346.3 345.4 344.4 343.4 342.5 341.6 340.7 339.7 338.9 337.9 337.1 336.2 335.2 329.8 309.3 299.6 300.7 301.6 302.7 304.2 308.1 314.0 320.6 327.4 333.7 338.4 341.5 344.0 344.5 343.1 342.5 341.8 341.1 340.7 340.7 341.1 341.4 341.6 341.4 341.4 341.4 341.6 342.0 342.3 342.7 343.3 344.1 344.8 345.7 347.3 348.7 350.3 351.9 352.7 353.5 354.2 355.0 355.1 355.1 355.1 355.0 354.3 353.5 352.7 351.8 351.1 350.2 349.4 348.6 347.7 347.1 344.0 338.5 331.2 321.2 309.5 299.9 298.3 301.9 305.5 308.8 312.5 316.2 320.1 324.3 329.2 335.0 340.9 346.9 353.5 359.1 364.4 368.5 370.6 372.7 375.3 378.3 380.0 382.2 389.2 394.8 397.2 398.4 398.7 398.8 398.7 398.6 398.2 397.5 396.7 395.8 394.4 393.1 392.8 392.6 392.2 391.8 391.3 391.0 390.5 390.1 389.8 390.0 390.2 390.5 390.8 391.0 391.3 391.6 391.9 392.1 392.4 392.7 393.0 393.0 392.7 392.1 391.5 391.8 392.0 392.1 392.4 392.7 392.7 393.0 393.2 393.5 393.6 393.9 394.0 394.6 395.0 395.0 395.0 395.0 395.0 395.0 395.0 395.0 394.5 394.0 393.6 392.4 391.7 391.0 390.6 390.1 389.9 389.6 389.3 389.3 389.3 389.1 388.8 390.2 390.6 390.9 391.1 391.1 391.5 391.5 393.3 394.2 395.2 395.9 397.0 398.5 399.8 401.4 403.2 404.9 407.2 411.5 413.9 417.5 419.9 423.5 427.7 432.2 435.2 437.2 438.2 439.0 439.5 439.5 440.0 440.0 440.0 440.0 440.0 439.5 439.5 439.2 439.0 438.5 438.5 438.5 438.5 438.5 439.0 440.3 440.6 441.5 442.2 442.8 443.2 443.1 442.8 442.5 441.9 440.9 439.7 438.5 436.3 430.5 423.2 410.2 395.6 390.8 393.1 398.5 405.1 410.8 416.4 423.6 430.6 437.7 444.6 451.6 458.3 464.3 470.1 476.8 481.8 487.3 493.5 501.4 509.8 519.1 527.5 531.7 532.9 532.7 531.7 531.4 530.9 530.6 530.2 529.5 528.6 527.7 527.3 526.9 526.9 526.8 526.4 525.8 525.1 523.9 522.8 521.6 519.8 519.3 518.9 518.3 517.8 517.2 516.5 516.0 515.4 514.8 514.4 513.9 513.7 513.7 513.7 513.7 513.7 513.8 514.1 514.4 514.6 514.9 515.2 515.4 515.6 516.0 516.4 516.6 516.8 517.0 517.2 517.4 517.7 518.1 518.5 518.8 519.1 519.4 520.0 520.5 520.9 521.5 521.8 522.1 522.3 522.4 522.6 522.6 522.6 522.6 523.3 522.6 522.0 521.1 520.2 519.0 518.2 517.8 517.2 517.2 515.7 515.7 515.7 515.7 516.0 517.5 520.2 524.0 525.7 528.1 527.8 530.9 533.6 535.9 537.7 539.2 541.1 542.3 542.0 541.5 540.9 540.5 537.7 535.1 532.6 529.9 527.2 524.5 521.6 519.5 518.1 516.6 515.4 515.0 515.0 515.6 516.3 517.1 
517.8 518.7 520.6 522.9 525.2 527.1 528.1 529.1 529.9 530.7 531.3 531.3 529.6 527.2 524.7 521.6 518.1 516.0 513.2 509.2 504.0 497.6 492.2 484.0 472.7 454.7 431.9 406.1 383.3 363.2 351.9 333.5 310.1 275.1 228.8 222.0 219.6 220.8 223.8 229.2 237.7 243.1 249.7 254.6 258.3 259.8 260.6 260.4 260.1 259.6 259.1 258.7 258.2 257.7 257.9 257.9 258.1 258.2 258.3 258.2 258.1 257.9 257.8 257.7 257.5 257.5 257.7 257.8 258.2 258.4 258.5 258.6 258.5 258.5 259.1 259.7 260.2 260.9 261.3 261.9 262.4 262.7 263.0 263.1 263.3 263.4 263.4 263.3 263.0 262.6 262.2 262.0 262.2 261.6 261.6 261.6 261.6 261.6 261.6 261.5 261.0 261.0 261.0 260.4 260.4 260.0 260.0 260.0 260.0 260.0 261.0 262.7 264.4 268.4 271.3 275.4 279.6 286.8 295.5 306.5 320.6 341.6 344.2 346.4 347.0 347.6 347.0 346.2 346.0 345.6 344.4 343.8 343.3 342.8 342.6 342.6 342.6 344.7 346.8 349.1 351.9 353.9 357.0 360.4 365.0 370.0 375.6 379.5 382.8 384.8 386.6 388.2 389.7 392.4 393.1 393.4 393.6 394.0 394.0 394.0 394.0 394.0 394.0 393.8 393.4 392.4 392.4 393.8 393.4 393.1 392.4 391.3 390.0 388.6 386.5 381.5 375.2 364.4 352.5 348.5 350.4 354.8 360.0 364.3 368.9 374.1 379.3 384.3 388.8 394.0 399.0 402.5 406.3 410.8 414.1 417.6 421.3 426.4 432.9 440.2 446.4 449.1 449.2 448.2 446.6 446.2 445.6 445.2 444.9 444.0 443.2 442.7 442.4 442.3 442.3 442.2 442.0 441.7 440.8 439.8 438.9 437.9 436.5 434.7 433.4 435.1 434.5 433.8 433.4 433.0 432.6 432.1 431.8 431.3 430.9 430.9 430.9 430.9 430.9 430.9 430.9 430.9 431.1 431.2 431.3 431.5 431.6 431.8 431.9 432.1 432.2 432.3 432.5 432.6 432.8 432.9 433.1 433.2 433.3 433.5 433.6 433.8 434.0 434.2 434.4 434.6 434.8 435.0 435.2 435.5 435.8 436.1 436.5 436.7 436.9 437.0 437.2 437.3 437.4 437.5 437.8 438.0 438.0 437.8 438.0 438.2 438.5 438.7 438.7 439.1 439.7 440.5 443.3 445.6 447.8 449.7 451.7 453.8 455.0 456.0 456.6 456.3 455.8 455.3 452.9 450.6 448.6 446.1 443.8 441.5 439.1 437.2 435.1 433.7 432.6 432.1 431.8 431.7 432.4 433.1 434.1 435.2 436.8 439.0 441.1 442.8 444.1 445.2 446.4 447.1 446.8 446.2 445.1 444.1 442.3 438.5 433.9 422.8 404.2 385.7 371.2 357.5 343.6 331.3 316.2 302.3 290.1 278.0 269.3 261.3 252.0 244.5 237.8 233.4 228.7 228.5 228.4 228.0 228.2 231.9 238.0 244.7 251.4 255.4 258.7 260.9 261.5 260.4 260.0 260.1 260.2 260.3 260.4 260.6 260.6 260.7 260.8 260.9 261.0 261.2 261.2 261.3 261.3 261.3 261.2 261.1 260.6 260.1 259.8 259.6 259.4 259.3 259.1 259.1 258.9 258.9 258.9 259.5 260.4 261.1 261.6 262.0 262.3 262.7 263.1 263.4 263.7 263.6 263.4 263.4 263.4 263.6 264.0 263.7 263.4 263.0 262.7 262.2 261.6 261.1 260.1 258.6 256.8 253.3 250.0 248.8 248.3 248.9 250.1 252.5 255.4 258.1 260.9 264.5 268.1 272.0 276.7 283.5 291.1 296.6 301.0 304.0 306.5 308.4 309.4 308.7 307.5 305.1 301.9 298.6 295.6 293.8 292.1 290.3 291.8 293.3 294.9 298.3 302.4 306.4 310.6 314.3 318.1 322.5 325.6 328.4 330.5 332.8 335.0 334.8 334.3 333.6 332.5 331.3 330.3 329.2 327.9 326.5 325.1 324.0 324.2 324.5 324.8 325.1 325.5 325.9 326.4 327.0 327.7 328.4 329.1 329.1 327.9 326.0 317.8 309.3 298.9 287.0 279.6 280.0 281.3 284.2 287.6 290.1 292.2 294.2 296.0 298.2 300.5 302.8 304.8 306.5 307.7 308.8 310.5 312.7 315.1 319.2 330.4 342.9 350.4 353.4 355.5 357.4 358.6 358.5 358.0 357.2 356.4 355.3 354.3 353.2 351.8 350.4 349.3 348.7 348.1 347.5 347.0 346.5 346.8 347.3 347.7 348.2 348.5 348.8 348.5 347.9 347.4 347.9 348.2 348.5 348.9 349.2 349.7 350.1 350.8 351.9 352.9 352.1 351.5 351.8 352.4 352.9 352.9 352.9 352.9 352.9 352.9 352.8 352.4 352.0 351.6 351.0 350.5 350.1 349.7 349.5 349.4 349.2 349.1 348.9 348.8 348.6 348.6 348.4 348.5 348.8 349.0 349.3 349.4 349.6 
349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.6 349.8 349.8 349.8 349.8 350.0 350.0 349.7 349.5 349.6 347.2 342.8 337.1 329.2 323.6 315.4 306.1 298.5 290.8 284.6 276.9 271.6 265.5 260.1 256.4 252.0 248.2 245.1 242.4 240.3 239.4 238.4 237.8 235.8 233.0 232.8 233.0 233.4 233.3 233.3 233.1 233.3 234.1 235.4 237.3 239.5 239.7 239.9 240.3 240.9 241.4 241.1 240.7 240.7 240.5 240.3 240.2 240.0 239.9 239.8 239.6 239.6 239.9 240.2 240.5 240.9 241.2 241.9 243.0 244.4 246.5 248.9 251.1 253.2 255.6 257.4 259.3 261.1 262.5 263.6 264.5 265.5 266.1 266.0 265.8 265.4 264.3 263.1 262.0 260.9 259.8 259.4 258.9 258.5 258.1 258.3 258.5 258.8 259.0 259.3 259.5 260.2 260.8 261.4 262.1 262.7 263.4 264.0 264.1 264.2 264.4 264.4 264.5 264.6 264.7 264.8 260.8 256.9 252.6 246.0 238.0 231.0 227.3 224.9 224.1 224.1 224.0 224.0 224.0 224.0 224.0 223.8 223.8 223.8 223.4 222.9 222.6 222.1 221.7 221.2 221.3 221.4 221.5 221.5 221.5 221.4 221.3 221.3 221.2 220.9 220.1 220.2 219.8 219.1 217.9 216.6 217.5 218.4 219.6 221.4 223.6 224.5 225.6 226.6 227.2 227.6 226.5 225.4 224.3 222.7 220.8 218.9 217.0 215.0 212.8 210.6 208.5 207.5 206.7 206.2 206.9 208.0 209.0 210.1 212.0 214.4 216.6 218.9 220.9 222.7 224.6 226.5 228.0 228.9 229.4 229.1 228.4 227.4 225.9 224.0 221.9 219.2 216.6 214.3 211.6 209.0 207.3 205.7 204.4 203.3 202.4 201.9 202.5 203.4 204.4 205.4 207.6 209.6 211.3 213.0 214.9 216.7 218.5 220.1 222.0 223.7 225.2 226.2 226.7 226.7 226.4 225.3 224.3 223.3 222.0 220.1 218.2 216.2 214.3 212.2 210.1 208.6 207.6 207.6 208.0 208.4 209.0 209.6 210.1 210.7 211.3 211.8 212.3 212.6 212.6", - "input_type": "phoneme", - "offset": 185.8 + "f0_timestep": "0.005" }, { + "offset": 192.523, "text": "SP 与 自 己 重 叠 SP 定 格 在 一 刹 那 SP 短 暂 交 叉 SP", "ph_seq": "SP y v z i0 j i ch ong d ie SP d ing g e z ai y i ch a n a SP d uan z an j iao ch a SP", - "note_seq": "rest F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 rest C4 C4 C4 C4 A4 A4 A4 A4 C5 C5 C5 C5 rest G4 G4 G4 G4 A4 A4 A4 A4 rest", - "note_dur_seq": "0.289 0.388 0.388 0.4 0.4 0.4 0.4 0.2 0.2 0.3999999 0.3999999 0.2 0.188 0.188 0.2120001 0.2120001 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.4000001 0.4000001 0.2 0.3999999 0.3999999 0.4000001 0.4000001 0.2119999 0.2119999 1.388 1.388 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.209 0.0813 0.2786 0.1161 0.3019 0.0929 0.267 0.1393 0.1393 0.058 0.3947 0.1393 0.0697 0.1393 0.0464 0.1277 0.0813 0.1509 0.0464 0.1161 0.0929 0.1277 0.0697 0.3947 0.1625 0.0348 0.2902 0.1161 0.3135 0.0813 0.1161 0.1045 1.3816 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 1 1", + "note_seq": "rest F4 G4 A4 C5 C5 rest C4 C4 A4 A4 C5 C5 rest G4 G4 A4 A4 rest", + "note_dur": "0.289 0.388 0.4 0.4 0.2 0.4 0.2 0.188 0.212 0.2 0.2 0.2 0.4 0.2 0.4 0.4 0.212 1.388 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "218.0 218.0 218.0 218.5 219.1 219.7 257.1 258.5 259.9 261.3 262.8 264.2 265.7 267.1 268.6 270.1 271.5 273.0 274.5 276.0 277.6 279.1 280.6 282.2 283.7 285.3 286.8 288.4 290.0 291.6 293.2 294.8 296.4 298.0 299.7 301.3 303.0 304.6 347.9 347.5 347.0 346.5 346.0 345.6 345.3 344.8 344.3 343.8 343.3 342.9 342.4 341.9 341.4 341.1 340.6 340.1 339.6 344.8 350.2 352.5 353.4 354.3 355.3 355.5 355.1 354.7 354.2 353.9 353.1 352.1 351.0 350.0 349.4 349.2 348.7 348.1 347.2 346.2 346.3 346.7 346.7 346.6 346.4 346.4 346.2 346.1 346.2 346.4 346.9 347.1 347.3 347.6 348.0 348.4 349.4 350.4 351.3 352.3 353.1 353.2 352.7 352.2 351.7 351.3 348.8 344.5 337.2 323.4 308.2 
298.1 294.4 304.9 321.2 332.1 339.2 340.7 340.7 340.7 340.9 340.9 341.1 342.1 343.4 344.6 346.4 348.0 349.4 350.6 352.1 353.7 355.3 357.0 359.0 359.9 359.2 357.8 358.0 364.9 378.1 389.3 396.7 398.7 399.3 399.1 398.8 398.3 397.4 396.6 395.7 394.7 393.7 392.8 392.2 391.5 390.9 390.1 389.7 389.5 389.5 389.3 389.4 389.7 390.2 390.4 390.7 391.0 391.3 391.5 391.8 391.6 390.9 390.8 391.1 391.1 391.4 391.7 391.9 392.2 392.4 392.4 392.1 391.8 391.5 391.5 391.3 391.0 389.7 387.7 385.4 382.2 376.7 366.3 353.1 342.8 335.5 338.0 350.8 367.4 377.8 381.4 382.0 380.8 378.6 376.9 376.4 377.2 378.2 379.7 381.1 382.3 384.4 386.9 389.7 392.9 398.0 405.9 416.1 426.5 436.0 442.2 447.0 449.8 451.3 451.3 451.0 450.5 449.3 447.4 445.9 444.4 442.4 441.0 440.3 439.5 438.2 437.1 436.9 437.5 437.8 438.1 438.6 438.9 439.5 439.7 440.1 440.6 440.4 439.7 439.2 438.9 438.7 439.2 439.5 439.2 438.5 437.6 437.4 437.5 437.8 438.0 438.4 439.1 439.7 440.5 441.3 442.5 443.6 443.6 443.0 442.2 439.1 432.3 424.5 417.7 408.7 402.9 403.2 407.7 413.2 417.4 422.0 426.4 430.7 434.2 438.7 443.4 448.9 455.3 459.5 462.7 467.1 472.4 477.3 481.8 488.5 495.5 502.3 509.5 517.8 525.3 530.4 531.9 532.7 532.7 531.9 529.9 526.7 524.8 522.9 521.4 519.6 519.4 520.2 520.8 521.3 521.9 522.5 524.2 524.3 523.7 523.6 524.5 523.9 522.6 521.4 518.7 515.2 509.2 502.3 495.1 485.4 479.0 474.9 471.3 473.3 483.9 497.9 507.2 513.1 516.0 517.4 518.5 520.2 522.0 522.9 522.8 522.6 522.0 521.3 520.3 518.9 517.0 515.2 513.7 511.2 508.1 505.1 502.5 500.7 499.0 497.0 496.4 496.7 498.2 500.4 503.4 507.3 511.0 515.8 520.7 525.7 530.2 533.0 534.9 537.1 539.2 539.7 538.9 536.9 534.6 531.2 526.8 522.3 519.0 514.6 510.7 507.0 503.7 500.4 498.6 498.9 500.7 502.8 504.3 507.4 511.4 515.7 519.9 523.1 525.2 527.7 529.4 530.6 530.6 529.8 529.0 528.5 527.8 526.3 524.8 520.8 515.1 507.8 501.1 494.8 490.9 489.8 490.8 491.4 492.1 493.0 493.7 493.0 492.7 492.3 491.3 489.3 486.2 481.2 475.1 465.1 451.4 433.9 416.7 392.6 367.3 343.0 321.4 304.0 291.9 281.1 273.2 267.9 264.8 262.3 260.8 259.8 259.4 259.2 259.0 258.8 259.3 259.7 260.0 260.5 260.9 261.4 262.4 263.7 265.6 268.8 270.7 270.4 269.8 267.7 264.7 262.5 260.9 259.4 258.1 256.5 256.2 256.7 257.1 257.8 258.8 259.9 260.9 261.9 262.2 262.1 262.0 260.7 259.5 258.4 257.5 255.8 253.6 251.6 250.0 249.6 251.5 255.1 257.3 260.2 261.2 260.8 260.9 260.9 261.0 261.1 261.2 261.3 261.5 261.5 261.6 261.7 262.5 263.6 264.6 265.0 264.6 265.0 265.4 265.9 265.0 262.6 255.8 247.3 240.5 237.1 243.4 253.9 260.9 268.3 273.3 276.1 281.1 289.3 298.3 309.9 322.8 338.3 352.9 372.9 393.8 413.2 424.8 431.3 435.3 437.5 437.0 436.7 436.2 435.6 436.6 437.2 437.0 436.6 435.8 435.3 435.4 436.2 436.0 436.4 437.5 438.5 439.5 440.1 440.4 440.9 441.4 442.0 442.3 441.0 438.0 435.0 433.2 431.7 430.7 429.5 429.3 430.2 430.9 434.1 440.0 445.8 449.0 448.8 447.7 446.5 445.4 445.1 445.6 445.6 445.2 444.7 444.3 444.0 443.5 443.2 442.7 440.8 438.5 431.3 420.7 408.8 396.1 390.0 390.1 399.8 420.5 437.7 443.1 443.5 442.0 440.8 439.7 439.4 440.4 442.9 446.2 450.3 453.2 456.7 466.6 486.7 510.4 524.4 531.6 535.9 535.6 534.6 533.3 531.6 530.7 529.1 527.5 525.7 524.7 524.2 523.6 523.3 522.9 522.9 522.9 522.9 521.4 520.8 520.8 521.6 522.0 522.6 522.6 523.3 523.3 523.9 523.9 523.9 523.9 523.9 523.9 523.9 523.9 523.9 523.9 523.7 523.3 523.3 522.6 522.6 522.6 521.7 521.4 521.4 522.0 522.8 523.3 524.8 526.0 527.5 528.4 529.9 530.9 528.4 527.8 527.5 527.2 526.0 525.4 523.9 523.4 522.6 522.6 522.0 522.0 522.3 522.6 524.8 523.6 526.6 529.4 531.8 533.0 534.1 534.7 535.1 534.2 533.1 531.9 530.6 
529.3 526.9 525.1 522.4 520.2 519.0 517.5 516.1 514.9 514.4 514.8 515.5 516.0 517.5 518.7 520.0 521.7 523.9 525.8 527.8 529.9 532.1 532.4 532.3 531.9 531.6 530.6 529.1 528.0 524.5 519.2 514.0 511.9 510.8 509.8 509.1 508.1 507.1 506.5 505.6 504.5 502.8 501.9 500.4 498.6 496.6 494.2 490.6 486.9 481.5 474.2 465.4 456.6 444.0 430.8 417.5 405.4 395.0 387.5 380.5 375.1 371.1 368.5 366.0 364.3 362.7 361.5 360.5 359.6 358.5 357.9 357.2 356.6 356.0 355.5 355.5 355.5 355.5 355.5 355.5 355.5 355.5 355.5 355.5 355.6 355.7 356.1 356.4 356.6 356.8 357.1 357.3 357.7 358.0 358.7 360.6 362.2 365.3 368.4 372.1 376.1 380.0 383.0 387.0 390.0 392.1 394.0 396.0 396.8 396.9 397.0 397.0 397.2 396.4 394.8 393.2 391.8 390.3 389.6 389.2 388.9 388.4 388.6 388.9 389.2 390.0 390.9 391.7 392.7 393.7 393.9 393.6 393.4 391.6 388.6 385.7 378.4 368.7 357.8 342.2 327.4 313.3 306.0 300.7 300.1 304.2 311.3 319.8 327.4 335.7 343.1 350.8 357.4 366.5 375.6 384.9 390.6 391.2 390.6 390.9 392.0 393.1 394.0 395.1 395.5 395.4 395.2 395.0 394.9 394.5 394.1 393.6 393.1 392.8 392.3 391.9 391.3 391.0 390.6 390.5 390.8 390.9 391.1 391.3 391.4 391.7 391.8 391.5 391.5 391.3 391.3 391.1 391.1 390.9 391.0 391.3 391.5 391.8 391.8 392.1 392.6 393.1 393.6 394.2 394.8 395.5 395.4 394.9 394.6 394.2 393.7 393.6 393.8 393.8 392.7 390.6 388.6 386.4 383.7 378.6 372.8 367.2 362.8 360.1 361.6 365.4 369.4 372.7 375.8 380.2 385.7 391.3 396.8 403.9 413.7 426.0 438.5 445.7 447.9 447.9 447.4 447.2 446.9 446.3 445.5 444.5 443.8 442.9 442.3 442.3 442.3 442.3 442.3 442.3 442.3 442.3 439.5 436.7 432.9 427.5 422.8 415.8 406.1 393.1 384.2 380.8 380.6 380.6 380.6 380.6 380.6 380.9 381.5 381.7 383.0 386.3 391.3 398.8 404.9 415.5 421.8 425.3 429.0 431.7 433.7 433.7 433.7 433.7 433.7 433.7 434.4 434.4 434.4 436.5 436.5 437.0 437.5 437.5 438.2 438.2 438.2 438.2 438.2 439.2 440.3 441.0 440.5 440.5 440.5 440.5 440.5 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 441.3 442.0 442.0 442.2 442.8 442.8 443.3 443.3 443.7 444.1 444.1 444.1 444.0 443.8 443.5 443.3 443.3 443.3 443.3 443.3 443.3 443.1 442.8 442.8 442.0 441.3 441.3 441.3 441.3 441.3 441.3 440.9 439.7 439.5 439.0 439.0 439.0 439.0 439.1 438.8 438.5 438.4 438.2 437.8 437.5 437.7 438.2 438.4 438.1 437.8 437.5 437.8 438.3 438.7 439.4 439.7 440.3 440.8 441.3 441.3 441.0 440.5 440.2 439.9 439.6 439.0 438.7 438.4 438.0 437.5 437.5 437.5 437.7 437.7 437.9 438.2 438.5 438.8 439.1 439.4 439.7 440.1 440.3 440.6 441.0 441.3 441.5 441.8 441.9 441.8 441.5 441.5 441.2 441.0 440.8 440.5 440.0 439.7 439.2 438.8 438.2 437.9 437.4 437.0 436.5 436.2 436.0 436.0 436.0 435.8 435.7 435.8 436.1 436.7 437.3 438.0 438.5 438.9 439.9 441.0 442.0 443.2 444.0 444.6 445.2 445.9 445.9 445.2 445.2 446.0 445.9 444.9 443.9 442.9 442.0 441.0 440.0 438.7 437.1 435.7 434.2 433.0 432.1 431.7 431.1 431.4 432.4 433.4 434.3 435.3 436.2 437.8 439.4 441.5 443.6 444.9 445.1 445.8 446.9 448.2 448.5 447.8 447.4 446.9 446.3 445.9 444.1 441.2 439.0 437.9 436.0 433.8 431.9 429.6 428.3 428.0 427.5 427.4 427.8 428.2 428.5 428.9 429.8 431.7 433.6 435.4 437.0 438.2 439.1 440.1 441.0 441.6 442.2 443.1 443.7 444.3 444.1 443.4 443.4 444.4 445.1 445.0 444.5 444.1 443.4 442.3 440.8 438.9 437.7 437.1 436.2 435.3 434.8 434.1 432.8 431.2 430.4 430.1 429.6 429.1 429.2 429.8 430.3 430.9 432.5 434.7 438.2 444.2 449.2 450.4 449.8 449.4 448.9 448.3 447.8 447.2 446.7 446.7 446.7", - "input_type": "phoneme", - "offset": 192.523 + "f0_timestep": "0.005" }, { + "offset": 198.88, "text": "SP 在 另 外 一 个 春 秋 冬 夏 继 续 这 喧 哗 而 我 重 新 出 发 SP", 
"ph_seq": "SP z ai l in w ai y i g e ch un q iu d ong x ia j i x v zh e x van h ua er w o ch ong x in ch u f a SP", - "note_seq": "rest D4 D4 E4 E4 F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 C5 C5 C5 C5 D5 D5 C5 C5 C5 C5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 A#4 A4 A4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.32 0.188 0.188 0.212 0.212 0.2 0.2 0.2 0.2 0.4 0.4 0.2 0.2 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.2 0.2 0.4000001 0.4000001 0.1999998 0.2000003 0.2000003 0.3999996 0.3999996 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.2438 0.0813 0.1277 0.058 0.1509 0.0697 0.1393 0.058 0.1625 0.0348 0.2902 0.1045 0.1509 0.058 0.3483 0.0464 0.1045 0.0929 0.3251 0.0813 0.1045 0.0929 0.1509 0.0464 0.1161 0.0813 0.1045 0.1045 0.3947 0.1277 0.0697 0.1161 0.0929 0.267 0.1277 0.2786 0.1277 0.2438 0.1509 0.3947 0.058", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest D4 E4 F4 G4 A4 C5 C5 C5 C5 D5 C5 C5 C5 C5 A#4 A#4 A#4 A4 G4 A4 rest", + "note_dur": "0.32 0.188 0.212 0.2 0.2 0.4 0.2 0.4 0.2 0.4 0.2 0.2 0.2 0.2 0.4 0.2 0.2 0.4 0.4 0.4 0.4 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "441.0 441.0 441.0 440.6 393.4 391.5 389.7 387.8 386.0 384.2 382.4 380.6 378.8 377.0 375.2 373.4 371.7 369.9 368.2 366.5 364.7 363.0 361.3 359.6 357.9 356.2 354.5 352.9 351.2 349.6 347.9 346.3 344.6 343.0 341.4 339.8 338.2 336.6 335.0 333.4 331.9 330.3 328.7 327.2 293.7 293.4 293.1 292.8 292.5 292.1 291.9 291.6 291.3 291.0 290.6 290.4 290.0 289.9 289.5 289.3 288.9 288.7 288.4 288.0 287.8 288.0 291.0 293.2 293.8 293.8 291.9 290.3 289.3 289.0 289.1 289.1 289.2 289.3 289.4 290.3 291.4 291.8 292.1 292.8 293.7 294.4 295.4 296.0 296.2 296.4 296.6 296.8 296.9 296.6 296.2 293.7 291.6 290.1 288.9 288.6 289.4 293.5 301.7 315.2 330.0 336.3 337.7 336.9 335.5 335.0 334.5 333.8 333.2 332.6 332.1 331.8 331.5 331.7 331.7 331.5 331.3 331.0 330.7 330.3 329.2 328.4 327.9 327.6 327.4 327.2 326.9 326.5 326.2 325.3 324.5 323.9 324.5 325.2 326.3 327.9 330.1 332.3 335.4 339.3 343.2 347.0 349.5 351.0 351.6 351.7 351.4 351.0 350.4 349.8 349.0 348.4 348.4 348.6 348.8 348.8 349.2 349.1 349.0 349.0 348.8 348.8 348.7 348.7 349.1 349.4 350.0 349.8 350.0 350.8 351.7 351.2 351.5 352.8 355.0 357.4 361.1 365.9 371.5 379.7 388.8 393.8 395.1 395.6 396.2 396.8 396.5 396.0 395.3 394.6 393.8 392.7 391.8 390.8 389.6 388.4 388.7 388.9 389.2 389.3 389.5 391.1 392.7 394.2 395.4 395.0 386.2 370.4 354.6 338.6 328.9 328.4 331.3 334.4 338.8 348.0 360.1 374.6 390.2 408.8 428.7 441.3 444.2 444.9 445.5 448.2 450.5 452.3 453.4 454.7 455.0 454.7 454.2 452.7 450.7 448.7 445.0 441.8 438.6 434.9 431.4 428.3 427.5 426.9 426.5 426.0 426.9 427.8 428.8 431.1 433.7 436.3 439.0 441.2 443.8 446.4 447.6 448.6 449.2 448.5 447.2 445.8 443.7 441.7 439.7 437.7 437.7 437.5 437.3 437.2 437.0 437.0 437.0 437.0 437.0 436.7 436.7 436.7 436.7 436.8 437.3 438.5 439.2 439.5 434.6 437.2 439.7 442.1 444.2 446.7 449.2 452.2 454.9 456.0 457.3 459.2 462.1 464.4 468.3 477.3 493.0 510.0 524.8 531.8 536.4 540.1 538.4 536.3 533.8 530.6 529.0 527.5 526.1 525.1 524.1 522.6 521.8 520.7 518.7 514.2 508.9 501.9 485.0 462.6 439.0 418.9 416.7 416.1 419.6 425.0 428.0 430.1 432.4 435.2 440.1 446.1 451.9 455.6 458.8 462.5 466.2 471.0 481.4 491.4 500.8 503.7 504.2 502.2 499.6 497.0 494.2 491.4 488.8 486.3 484.1 484.6 485.9 488.3 490.9 493.9 497.0 500.9 504.7 508.2 511.7 515.7 
519.7 522.7 525.4 528.6 531.8 532.9 534.1 534.6 533.4 531.8 529.9 528.4 526.7 525.1 523.3 522.4 521.4 520.8 519.9 519.0 518.2 518.7 519.2 519.8 520.2 520.7 521.3 521.8 522.3 522.9 523.4 524.0 524.5 525.4 526.6 527.0 526.0 524.8 523.6 522.0 516.3 492.7 468.2 443.1 428.0 427.0 430.4 434.1 437.9 442.0 458.3 473.8 489.1 507.0 525.7 531.0 531.5 531.3 530.9 530.9 530.5 529.9 528.6 526.9 525.1 525.1 525.1 524.9 524.8 524.8 523.8 518.8 513.2 505.9 491.3 471.6 452.8 436.7 421.9 413.6 408.5 409.5 411.8 416.4 421.8 429.8 438.1 445.5 453.3 461.9 470.7 479.1 487.1 496.1 504.0 511.7 519.5 525.6 529.6 533.0 536.5 539.0 539.4 538.7 538.0 537.5 536.9 536.3 535.4 533.0 529.9 527.3 524.8 522.0 520.2 518.7 517.2 515.9 514.8 515.2 515.9 516.8 518.4 520.7 522.6 524.8 526.1 527.1 528.4 529.6 530.9 531.7 530.9 529.3 528.1 526.6 525.1 523.6 521.4 518.7 516.1 514.2 513.7 513.9 514.0 514.2 515.5 516.8 518.1 519.6 521.1 522.3 522.8 522.9 523.3 521.9 511.6 495.9 478.9 464.6 464.1 465.1 470.4 478.4 486.5 495.1 502.7 506.2 509.4 513.4 517.5 518.5 519.8 522.6 529.9 539.4 555.2 569.8 583.5 590.7 594.9 597.7 598.1 597.7 596.6 595.3 593.8 592.5 591.1 589.4 586.9 581.6 575.1 565.0 554.0 554.8 557.6 564.1 570.7 570.9 569.3 567.1 563.4 558.3 551.5 543.9 536.7 531.7 526.3 520.2 513.4 506.7 499.5 490.2 480.7 473.3 477.7 489.3 503.2 513.1 521.6 524.2 524.8 525.0 525.4 525.0 524.8 524.6 524.5 524.2 524.2 524.2 524.3 524.9 526.0 527.0 527.8 528.1 526.7 524.8 519.2 512.5 501.4 486.5 467.8 450.5 441.9 437.6 438.6 441.8 445.0 450.1 460.8 476.1 495.3 513.7 521.4 526.8 531.7 532.1 531.3 530.6 530.1 529.5 528.7 528.0 527.3 526.8 528.4 528.4 527.5 526.6 526.0 525.4 524.5 523.2 522.9 522.5 522.2 521.9 521.6 520.8 520.8 520.8 520.8 520.8 520.8 520.8 520.8 521.0 521.4 521.4 522.0 522.6 522.8 523.3 523.9 524.8 525.6 526.6 529.3 529.2 528.9 528.5 528.1 527.8 527.7 527.1 526.7 526.3 525.6 525.2 524.6 523.0 521.1 519.3 516.2 510.7 502.7 485.7 466.0 442.7 422.7 408.3 397.2 400.4 405.7 415.0 426.6 439.5 453.6 469.7 486.2 504.5 516.3 517.4 519.5 524.4 530.5 537.0 540.5 543.5 545.8 545.7 544.8 543.7 542.6 541.2 538.1 534.2 529.3 525.0 520.7 516.0 511.3 509.5 507.9 506.5 505.1 505.4 506.7 509.0 511.6 514.6 517.5 521.0 524.6 527.6 530.8 533.0 535.0 537.0 538.1 537.3 535.5 533.9 531.8 528.7 525.2 521.1 517.4 514.3 512.5 510.8 509.2 510.3 511.2 512.1 513.1 514.9 516.7 518.6 520.4 522.3 524.2 524.5 524.9 525.1 525.1 524.8 524.1 523.4 521.7 519.5 517.2 514.6 511.6 508.8 505.4 501.9 497.3 492.9 488.2 483.3 478.7 474.2 471.7 470.2 469.1 468.3 467.6 467.0 467.0 467.2 467.8 468.3 468.2 468.1 468.1 467.8 467.8 467.8 467.8 467.8 468.1 468.0 467.6 467.2 466.8 466.4 465.7 464.8 463.9 462.9 461.9 460.6 460.0 460.2 460.7 461.3 461.7 462.2 462.7 463.3 465.4 467.4 468.1 468.6 469.3 469.9 470.4 470.1 469.9 469.7 469.4 468.3 467.1 465.4 461.3 456.3 442.7 430.0 415.9 401.3 396.6 395.0 396.7 398.8 400.9 403.2 403.5 403.8 404.1 404.4 404.6 405.0 405.6 406.1 406.6 407.5 408.0 407.2 406.3 405.5 406.8 410.7 423.5 436.8 449.9 458.4 465.4 471.1 474.2 476.2 475.7 474.7 473.6 472.7 471.6 469.7 467.6 465.9 464.3 462.5 461.6 461.3 460.8 460.4 460.1 459.5 459.1 459.1 459.7 461.1 462.4 463.7 464.8 465.8 466.8 467.8 468.8 469.7 470.6 471.4 471.6 471.9 471.9 472.0 471.9 470.8 469.1 466.4 459.9 449.6 431.4 412.2 396.7 392.2 396.4 403.7 410.8 415.0 418.7 422.6 425.0 426.9 429.1 431.5 434.1 436.7 439.8 442.4 444.9 447.8 450.5 455.4 460.0 464.1 468.1 472.4 473.0 473.2 473.4 473.3 472.9 475.3 478.0 479.3 477.0 473.8 470.8 468.3 465.8 462.5 458.9 455.5 452.4 450.0 447.9 445.4 442.8 441.6 
440.5 439.4 439.0 438.4 438.1 437.8 437.5 437.0 436.7 436.7 436.8 437.0 437.0 437.9 438.7 439.4 440.3 441.3 441.8 441.9 441.8 441.6 441.5 441.8 442.3 442.7 443.0 443.3 443.7 444.0 443.4 442.7 442.0 440.1 433.9 425.0 408.1 387.5 369.3 359.0 352.4 352.1 354.9 361.4 367.5 371.5 374.7 375.8 376.4 377.1 377.6 378.3 380.2 382.0 384.0 386.4 388.6 390.6 392.2 393.8 395.1 395.6 393.6 391.9 394.5 397.5 400.7 400.0 399.4 398.8 398.6 397.5 396.3 395.2 394.4 393.6 392.8 392.0 391.4 390.6 390.1 390.1 390.2 390.2 390.4 390.5 390.6 390.6 390.9 391.0 391.1 391.3 391.3 391.4 391.5 391.7 391.8 391.5 391.5 391.2 391.1 390.9 391.3 391.9 392.4 392.8 393.2 393.8 393.5 393.4 393.2 393.0 392.7 387.5 382.4 376.5 365.4 347.2 330.7 324.3 326.2 334.7 345.0 354.3 356.8 358.0 359.5 360.9 364.7 369.0 373.3 378.3 383.3 388.8 394.0 397.9 402.1 406.8 412.4 416.7 421.0 425.0 429.0 434.0 443.7 451.5 455.9 457.6 458.2 458.3 457.9 457.4 456.6 454.3 452.4 450.3 447.4 444.1 440.9 437.9 434.9 431.8 430.2 429.0 428.3 428.5 429.1 429.7 432.3 434.9 437.8 441.3 444.9 448.1 449.8 451.1 452.4 453.9 454.7 453.2 451.3 449.2 447.2 443.3 439.4 436.1 433.1 429.7 426.9 425.3 423.9 422.7 423.3 424.2 425.1 426.7 429.9 434.2 438.6 442.7 446.3 448.9 450.8 452.8 454.2 454.0 453.1 452.4 449.4 446.9 444.5 441.6 438.7 436.8 434.7 433.0 431.5 430.9 433.4 435.0 435.6 435.7 442.3 442.3 442.8 443.3 443.3 442.6 436.3 436.5 436.5 436.5", - "input_type": "phoneme", - "offset": 198.88 + "f0_timestep": "0.005" }, { + "offset": 204.88, "text": "SP 灰 烬 里 重 新 生 根 发 芽 SP", - "ph_seq": "SP h ui j in l i ch ong x in sh eng g en f a y a a SP", - "note_seq": "rest C4 C4 D4 D4 A4 A4 C5 C5 C5 C5 A4 A4 C5 C5 C4 C4 D4 D4 D4 rest", - "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.8 0.8 0.4 0.4 0.4000001 0.4000001 0.8 0.8 0.4000001 0.4000001 0.5999999 0.5999999 1 1 1 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0", - "ph_dur": "0.1858 0.1393 0.3135 0.0929 0.267 0.1277 0.6153 0.1858 0.2786 0.1161 0.2438 0.1625 0.6966 0.1045 0.2438 0.1509 0.4063 0.1974 0.9985 0.9985 0.0464", - "f0_timestep": "0.005", + "ph_seq": "SP h ui j in l i ch ong x in sh eng g en f a y a SP", + "ph_dur": "0.1858 0.1393 0.3135 0.0929 0.267 0.1277 0.6153 0.1858 0.2786 0.1161 0.2438 0.1625 0.6966 0.1045 0.2438 0.1509 0.4063 0.1974 1.997 0.0464", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D4 A4 C5 C5 A4 C5 C4 D4 D4 rest", + "note_dur": "0.32 0.4 0.4 0.8 0.4 0.4 0.8 0.4 0.6 1.0 1.0 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 1 0", "f0_seq": "439.5 439.5 439.5 439.7 380.2 377.9 375.6 373.3 371.0 368.8 366.6 364.3 362.1 359.9 357.8 355.6 353.4 351.3 349.2 347.0 344.9 342.8 340.8 338.7 336.7 334.6 332.6 330.6 328.6 326.6 324.6 322.6 320.7 318.7 316.8 314.9 313.0 311.1 309.2 307.3 305.4 303.6 301.8 299.9 261.6 261.7 261.8 261.9 261.9 262.1 262.1 262.1 262.2 262.3 262.4 262.5 262.6 262.7 262.7 262.8 262.9 263.0 263.1 263.1 263.1 262.5 259.5 259.3 259.9 259.2 258.2 257.3 256.6 256.4 256.2 256.2 256.3 256.5 256.6 256.8 257.1 257.8 258.2 258.7 259.1 259.4 259.7 259.6 259.4 259.1 258.9 258.7 258.4 258.2 258.3 258.3 258.4 258.7 259.2 260.1 260.8 261.4 261.8 262.3 262.8 263.3 263.7 264.2 264.6 264.7 264.8 264.6 264.1 263.3 262.5 260.6 258.8 256.3 251.5 244.1 237.4 233.6 231.2 231.1 232.3 237.2 242.3 246.2 247.8 246.8 245.4 244.1 242.9 241.7 240.3 239.0 236.4 234.0 232.2 231.9 232.5 233.8 238.3 248.7 264.1 280.0 289.7 293.9 295.4 296.0 296.2 296.3 296.5 296.8 296.9 296.7 296.4 296.0 295.5 295.0 294.4 294.0 293.4 292.8 292.1 291.7 291.4 291.2 291.3 291.5 291.7 291.9 292.1 292.3 292.5 291.8 
291.4 291.4 291.6 291.6 291.8 292.1 292.5 292.9 293.2 293.6 294.0 294.3 294.6 295.0 294.7 294.9 295.1 295.3 295.5 295.8 296.1 296.3 296.5 296.7 296.5 296.3 296.1 295.8 295.0 294.3 293.5 292.8 292.4 291.3 289.8 288.4 286.8 285.6 286.1 287.5 290.2 293.4 297.4 301.9 307.9 315.3 325.6 340.0 357.2 374.5 385.7 392.4 397.4 401.4 405.3 408.0 409.8 411.2 411.7 411.9 411.5 410.8 409.8 408.6 407.8 407.7 407.3 406.9 406.3 405.9 405.5 405.1 404.7 404.2 404.5 405.2 406.2 407.8 409.4 411.1 413.7 416.3 419.2 423.3 427.6 431.6 435.2 438.8 442.3 444.0 445.6 446.6 447.5 448.5 449.1 448.2 446.9 445.6 444.3 443.1 441.7 440.6 440.1 439.5 439.1 438.6 438.2 437.8 437.2 437.5 437.8 438.1 438.4 438.7 439.0 439.3 439.6 439.7 440.0 440.3 440.6 440.9 441.2 441.3 441.3 441.5 441.5 441.5 441.5 441.8 441.8 441.8 441.6 441.3 441.2 440.9 440.6 440.5 440.3 440.6 440.6 440.4 440.1 439.7 439.4 439.1 438.9 438.9 439.2 439.3 439.6 439.7 439.9 440.3 440.3 440.6 440.9 441.0 441.3 441.5 441.7 441.9 442.0 442.3 442.0 441.7 441.5 441.1 439.5 437.6 434.8 429.0 418.7 406.5 395.5 390.2 387.4 388.7 392.0 395.3 399.4 405.0 411.1 416.3 421.2 425.6 428.7 432.0 435.4 438.0 440.5 442.8 445.1 447.7 449.8 452.1 454.2 456.8 459.2 461.9 465.1 467.9 469.9 472.1 475.6 478.8 480.9 483.4 486.2 494.0 502.8 512.4 521.7 526.6 529.1 530.9 532.1 532.6 531.5 529.3 527.2 525.4 524.1 522.6 521.5 520.4 519.0 517.8 517.2 516.9 516.5 516.2 515.8 515.5 516.6 517.5 518.3 519.4 520.5 521.3 522.0 522.7 523.4 524.5 525.2 525.9 526.6 527.3 528.1 528.0 527.8 527.6 527.5 527.5 526.6 523.4 518.9 509.9 499.6 483.6 471.9 471.5 480.2 492.2 503.3 507.8 511.2 514.3 514.3 513.1 512.6 513.1 514.0 515.2 516.0 517.0 518.0 518.9 519.9 522.6 525.2 527.7 530.3 533.0 532.6 532.3 531.9 531.8 532.4 539.4 545.2 546.7 545.6 543.6 541.6 539.7 537.3 534.2 531.5 529.6 527.7 526.0 524.2 522.3 520.6 519.6 518.6 517.5 516.3 515.2 514.3 513.4 512.4 511.3 512.1 513.3 514.7 516.3 518.1 520.0 521.7 523.3 525.0 526.9 528.5 530.2 531.9 533.6 535.5 535.0 534.4 533.9 532.7 528.1 521.2 510.0 499.3 491.6 493.0 496.2 500.6 506.9 513.5 516.3 518.2 519.5 518.6 514.9 510.4 505.7 501.4 497.5 493.2 488.5 484.9 481.4 478.0 474.0 470.5 466.7 462.5 458.7 455.2 451.1 446.3 442.4 438.2 432.8 427.2 424.5 422.9 424.1 426.6 428.7 431.6 434.3 436.0 438.0 440.5 441.5 442.5 443.4 443.2 443.1 442.3 441.2 440.4 439.2 438.2 437.3 436.7 436.3 436.0 435.7 435.6 435.3 435.2 435.0 434.7 434.6 434.3 434.3 434.8 435.4 436.1 436.9 438.0 439.2 440.5 442.0 443.3 444.6 445.7 446.1 446.5 446.8 446.3 445.5 444.9 444.2 443.4 442.4 440.9 438.7 438.2 437.4 435.6 433.2 430.7 430.4 430.1 430.0 430.0 430.9 432.4 433.7 434.9 436.4 437.7 439.0 440.5 441.6 442.6 443.3 444.2 444.9 445.8 446.5 447.4 447.8 446.8 445.5 444.3 442.8 441.5 440.4 438.9 436.1 432.7 430.1 429.7 429.7 429.7 429.7 431.3 432.6 433.8 435.4 437.0 438.5 439.9 441.3 442.7 444.3 445.7 446.8 446.7 446.2 445.6 445.2 444.6 444.2 443.7 443.3 442.7 442.2 441.8 441.3 441.3 441.3 441.5 441.5 441.5 442.0 442.7 443.3 443.7 443.6 443.6 443.3 443.2 443.1 442.9 442.8 442.2 441.8 441.3 440.8 440.3 439.4 437.7 436.2 436.0 432.9 430.2 429.8 427.5 424.5 423.1 422.1 422.1 422.1 422.7 424.8 425.9 430.3 432.9 436.7 447.2 462.9 476.8 483.7 485.8 486.0 485.4 484.0 482.2 479.5 476.5 474.4 472.1 469.9 467.7 465.1 463.3 464.3 465.6 467.0 468.6 470.8 477.2 483.4 490.1 497.3 504.8 511.7 517.8 522.3 526.0 528.7 528.4 527.7 527.0 526.3 525.4 524.5 523.7 522.3 520.8 519.3 518.4 519.0 520.0 521.1 522.3 523.3 524.0 523.5 522.0 520.9 519.8 517.4 510.7 501.9 480.0 458.8 434.3 408.1 395.6 394.8 394.0 393.5 
392.5 391.8 391.1 390.3 389.2 386.9 381.5 374.4 366.1 358.8 352.8 345.2 334.9 323.9 313.3 300.5 287.3 275.5 269.2 265.9 264.1 264.2 262.3 262.3 262.9 262.7 262.7 263.1 262.7 262.4 262.4 262.5 261.5 260.5 260.1 259.6 259.2 258.7 258.2 257.8 257.4 256.8 256.8 256.8 256.8 256.8 256.8 256.8 256.8 257.0 257.6 258.3 258.5 258.8 259.1 259.3 259.7 259.9 260.0 260.1 260.1 260.1 260.2 260.3 260.3 260.3 260.3 260.6 260.9 261.2 261.6 261.9 262.3 262.4 262.6 262.8 263.0 263.0 263.2 263.3 263.4 263.6 263.7 263.8 264.0 264.1 264.2 264.3 264.1 263.7 263.5 263.1 262.9 263.0 263.3 263.6 263.9 264.1 264.4 264.6 264.9 265.1 265.3 265.5 265.7 265.7 265.3 264.7 264.1 263.6 263.1 262.5 260.6 258.7 256.2 253.3 250.4 247.6 245.3 243.4 241.5 240.1 238.7 237.4 236.4 235.9 235.5 235.2 235.4 236.2 237.4 238.8 243.0 247.1 251.5 256.6 262.1 265.7 267.9 269.4 270.6 271.8 272.8 273.6 274.0 274.4 274.6 274.6 274.6 274.6 274.4 273.8 273.1 272.7 272.3 272.1 271.9 271.7 271.6 272.1 272.5 273.1 273.6 274.4 275.6 277.3 278.9 280.6 282.4 283.8 285.4 287.1 289.2 291.0 292.7 294.6 295.9 297.1 298.1 298.7 299.2 299.8 299.0 298.2 297.6 296.7 296.0 295.2 294.3 293.7 293.2 293.0 292.6 292.2 291.9 291.9 292.3 292.5 292.7 292.9 293.1 293.3 293.5 293.7 293.9 294.1 294.3 294.6 294.8 294.9 294.5 294.0 294.0 293.8 293.8 293.7 293.7 293.6 293.5 293.5 293.4 293.3 292.7 292.1 291.6 291.6 291.6 291.8 291.9 292.1 292.1 292.0 291.9 291.8 291.7 291.6 291.8 291.8 292.1 292.1 292.3 292.5 292.5 292.7 292.7 292.5 292.3 292.1 291.7 291.5 291.3 291.6 292.2 292.6 292.9 292.9 292.8 292.8 293.4 294.0 294.6 294.3 294.0 294.2 294.4 294.6 294.9 295.0 294.7 294.3 294.0 293.7 293.3 293.3 293.1 293.0 292.8 292.6 292.6 292.4 292.3 292.5 292.5 292.6 292.7 292.8 293.0 293.0 293.2 293.3 293.5 293.5 293.7 293.7 293.8 294.0 294.0 294.1 293.6 293.1 292.7 292.1 292.0 291.9 291.7 291.6 291.5 291.7 291.9 292.0 292.1 292.3 292.5 292.6 292.7 292.9 293.2 293.4 293.6 294.0 294.4 295.0 295.5 295.9 296.2 296.1 295.7 295.3 294.9 294.7 294.3 294.0 293.6 293.2 292.9 292.6 292.1 291.8 291.5 291.2 291.3 291.3 291.5 291.7 292.1 292.6 293.0 293.5 293.5 293.5 293.5 293.5 293.5 293.6 294.2 294.9 295.4 295.6 295.6 295.2 294.8 294.7 294.8 294.3 293.7 293.1 292.5 291.8 291.2 290.9 291.0 291.5 291.5 291.5 291.6 291.6 291.6 291.7 292.1 292.5 292.9 293.3 293.9 294.3 294.7 295.1 295.0 294.8 294.7 294.5 294.1 293.7 292.8 292.0 291.2 290.3 289.5 288.6 287.9 287.5 287.3 287.1 287.7 288.3 289.4 290.8 292.1 293.6 295.1 296.4 297.7 298.3 298.9 299.5 299.9 300.4 300.4 300.1 299.6 298.7 297.6 296.4 294.6 293.2 292.5 292.1 291.5 290.5 289.5 288.7 288.0 288.1 288.5 288.9 289.3 290.0 291.3 293.0 294.7 296.1 297.2 298.1 299.2 300.1 301.0 301.8 301.4 300.7 300.1 299.5 298.8 297.4 295.8 294.7 293.6 291.8 290.0 289.1 288.4 287.9 287.7 288.0 288.5 288.9 289.3 289.7 290.1 291.2 292.2 293.2 294.0 294.5 294.9 295.3 295.6 295.6 295.4 295.2 295.0 294.8 294.2 293.5 292.7 292.0 291.4 290.6 290.0 290.2 290.6 291.7 293.0 294.5 296.0 297.3 298.4 299.3 300.4 300.8 301.1 300.9 300.7 300.5 300.3 300.0 299.2 298.2 297.1 296.0 294.9 293.9 292.7 291.5 290.2 289.6 289.0 288.4 288.6 289.0 289.7 291.2 293.1 295.0 297.2 299.1 300.7 302.2 303.3 304.6 305.8 306.4 306.0 305.1 302.1 298.0 294.1 290.6 287.5 287.7 287.9 288.1 288.2 288.6 288.8 289.0 289.1 289.1", - "input_type": "phoneme", - "offset": 204.88 + "f0_timestep": "0.005" }, { + "offset": 211.52, "text": "AP 我 曾 做 过 的 梦 啊 SP 光 和 蝉 鸣 SP 装 满 整 个 盛 夏 SP", "ph_seq": "AP w o c eng z uo g uo d e m eng a SP g uang h e ch an m ing SP zh uang m an zh eng g e sh eng x ia SP", - 
"note_seq": "rest F4 F4 G4 G4 A4 A4 A#4 A#4 A4 A4 C5 C5 C5 rest C4 C4 G4 G4 A4 A4 A4 A4 rest C4 C4 E4 E4 F4 F4 F4 F4 C4 C4 A3 A3 rest", - "note_dur_seq": "0.48 0.2 0.2 0.2 0.2 0.2 0.2 0.1999999 0.1999999 0.4 0.4 0.2 0.2 0.4 0.2 0.4000001 0.4000001 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.1999998 0.4000001 0.4000001 0.4000001 0.4000001 0.1999998 0.1999998 0.4000001 0.4000001 0.5999999 0.5999999 0.5 0.5 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.4528 0.0348 0.1393 0.058 0.1393 0.058 0.1741 0.0348 0.1741 0.0232 0.2786 0.1161 0.1974 0.4063 0.1393 0.058 0.2902 0.1161 0.267 0.1277 0.0813 0.1161 0.4063 0.1045 0.0929 0.2902 0.1045 0.3135 0.0929 0.1625 0.0348 0.2322 0.1741 0.3831 0.209 0.4992 0.058", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 1 1 2 2 2 2 1 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 G4 A4 A#4 A4 C5 C5 rest C4 G4 A4 A4 rest C4 E4 F4 F4 C4 A3 rest", + "note_dur": "0.48 0.2 0.2 0.2 0.2 0.4 0.2 0.4 0.2 0.4 0.4 0.2 0.4 0.2 0.4 0.4 0.2 0.4 0.6 0.5 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 340.9 349.2 349.2 349.2 349.2 349.2 349.2 349.4 349.4 349.4 349.4 349.4 349.4 349.4 349.4 349.4 349.4 349.6 349.6 349.6 349.6 349.6 349.5 349.0 348.6 348.0 347.4 348.4 349.1 350.0 350.9 351.9 351.6 351.4 351.1 350.4 347.6 335.4 321.1 311.1 306.8 308.1 313.3 318.3 322.1 325.9 330.0 333.6 336.9 340.2 343.9 346.8 349.7 352.8 355.3 358.2 361.5 365.7 368.5 371.6 375.6 379.7 388.3 395.3 398.5 400.2 400.0 398.8 397.3 395.7 394.7 393.4 391.9 390.8 390.3 389.9 389.3 389.9 390.7 391.3 391.9 392.4 393.2 393.8 394.3 394.8 393.4 391.9 389.1 382.3 370.3 357.4 350.0 344.6 343.5 345.0 351.3 359.9 371.4 383.6 398.3 413.6 428.5 437.1 441.2 444.2 447.2 448.9 449.1 448.6 447.8 446.9 445.6 444.3 443.1 441.6 440.3 439.6 439.0 438.5 437.8 438.0 438.4 439.1 439.9 440.8 442.0 443.0 443.1 439.9 430.0 414.1 399.3 392.8 392.8 395.6 403.7 414.2 427.1 440.6 456.7 473.5 479.6 479.1 477.6 475.6 473.2 472.9 472.5 471.9 471.0 469.9 469.1 468.1 467.6 467.3 466.7 466.6 466.2 465.8 465.4 465.6 466.2 466.6 466.9 467.4 467.5 464.8 460.4 451.1 437.8 421.6 406.2 395.9 387.4 380.1 380.6 383.8 391.8 401.0 413.7 427.7 440.8 446.0 445.3 443.4 442.5 442.2 441.8 441.8 441.8 442.0 441.4 440.8 440.4 439.9 439.5 438.9 438.4 438.1 437.6 437.2 437.8 438.6 439.1 439.2 439.0 439.0 439.4 440.0 440.4 441.0 441.6 442.0 442.6 443.0 443.6 444.0 443.6 443.2 442.7 442.0 441.7 441.1 440.7 440.3 440.8 441.3 441.7 442.3 442.7 443.3 443.9 444.3 444.6 443.8 442.5 441.2 438.6 436.1 433.4 432.7 432.4 432.1 432.1 433.1 434.7 436.6 438.7 440.9 444.3 448.2 453.2 458.4 463.6 469.7 477.1 485.0 491.4 498.2 506.8 516.0 522.8 528.6 531.0 531.7 531.8 532.4 532.2 531.3 529.9 528.1 526.0 524.5 523.3 522.7 522.0 521.4 521.0 521.0 521.1 521.4 522.5 523.4 524.4 525.2 525.7 526.1 526.3 526.4 526.8 527.2 526.1 525.4 524.5 523.5 522.6 520.5 518.6 517.2 517.8 519.0 524.0 528.6 531.2 533.2 533.9 534.0 534.2 534.2 534.5 534.6 534.7 533.9 533.1 532.0 531.2 529.3 527.4 525.5 523.6 521.7 519.8 517.7 516.4 515.6 515.2 514.9 515.3 
515.7 516.3 516.6 517.5 518.7 520.1 521.4 523.9 526.1 528.2 530.1 531.6 532.4 533.2 534.4 534.9 533.9 532.7 531.5 529.9 527.8 525.0 522.3 519.7 516.9 514.9 513.4 511.9 512.6 512.9 513.4 514.1 514.6 517.2 519.8 521.9 524.2 526.9 528.9 530.9 532.2 532.9 533.0 533.4 532.9 530.1 525.5 520.2 515.7 514.6 514.0 513.4 512.8 513.1 513.2 513.4 513.6 513.7 513.6 512.1 511.3 509.9 508.1 505.3 501.1 495.6 487.8 476.0 459.8 441.0 420.5 394.9 368.9 344.7 323.7 308.6 296.1 286.1 279.2 274.8 271.8 269.5 267.9 267.2 266.7 266.6 266.0 266.2 266.6 266.9 267.3 267.7 268.1 268.1 266.6 265.3 264.5 263.7 263.2 262.9 262.8 262.6 262.2 261.9 261.2 260.6 260.6 260.9 260.5 260.2 260.0 259.6 259.2 258.9 258.8 259.2 259.6 259.4 259.2 259.1 259.1 259.3 259.7 259.9 260.0 260.1 260.2 260.3 260.2 260.0 259.7 259.4 259.1 259.7 260.3 260.8 261.5 262.1 262.7 263.3 263.8 264.2 264.5 264.8 264.6 264.4 264.0 262.2 260.2 257.8 254.5 250.4 246.2 244.1 243.0 244.5 249.3 255.8 262.6 269.1 275.6 283.4 290.6 298.1 304.0 309.2 314.4 320.6 328.4 335.6 343.5 354.1 365.7 377.8 388.1 392.9 395.6 396.8 396.6 395.1 392.6 388.9 385.7 382.3 378.5 375.3 371.8 367.9 365.9 364.8 365.1 366.3 369.1 372.1 375.3 377.9 380.8 383.9 387.0 389.1 391.1 393.2 394.3 395.3 396.3 397.1 397.1 396.8 395.6 394.4 393.2 392.0 390.9 389.5 388.4 387.7 387.7 387.9 388.6 389.5 390.5 391.4 392.0 392.8 393.6 394.2 394.5 392.4 389.5 382.3 371.3 358.5 351.0 360.0 368.0 372.4 376.4 381.9 387.2 389.6 390.6 391.5 393.6 396.4 399.0 400.7 403.0 405.4 408.3 411.5 415.1 420.2 429.2 438.7 444.9 448.2 450.4 451.1 450.3 449.6 449.1 448.6 447.7 446.7 445.4 444.4 443.1 442.0 440.6 440.2 440.1 440.3 440.5 440.5 440.6 440.6 440.1 439.2 436.7 434.5 432.1 428.3 424.0 420.3 418.7 417.6 416.7 417.2 417.8 418.5 420.0 424.6 430.7 438.9 446.3 450.2 453.3 454.5 455.3 455.0 454.5 454.0 453.4 452.6 450.3 447.3 444.0 440.5 437.3 434.1 431.3 429.3 427.7 426.2 425.1 425.2 425.6 427.0 428.3 430.1 432.8 436.3 439.7 443.3 446.4 448.5 450.6 452.9 455.0 454.8 454.3 453.6 451.3 448.7 445.6 442.5 439.0 435.2 431.7 428.4 425.4 422.2 418.9 417.5 416.7 417.2 418.2 420.4 423.2 427.6 431.9 437.0 442.0 446.9 450.2 452.7 455.5 457.1 457.8 457.0 456.2 454.8 451.6 447.9 443.4 437.6 430.1 421.6 413.7 409.0 405.9 405.8 407.2 407.8 408.4 408.9 409.5 410.1 410.1 409.6 409.6 409.2 408.6 406.8 404.3 401.1 396.1 389.1 379.0 367.6 354.6 338.3 321.5 305.5 291.5 281.2 272.7 265.7 260.9 257.7 255.7 254.0 252.9 252.5 252.1 252.0 251.7 251.7 252.0 252.2 252.5 252.8 253.0 253.8 256.4 259.0 261.3 261.3 260.8 260.2 259.7 259.2 258.6 258.1 257.5 256.9 256.5 256.7 256.9 257.2 257.4 257.8 258.0 258.4 258.6 258.9 259.1 259.4 259.7 260.0 260.4 260.9 261.5 261.5 261.6 261.7 261.8 261.9 261.9 261.7 261.5 261.2 261.0 260.8 261.0 261.4 261.7 262.1 262.4 262.3 262.2 262.2 262.1 262.1 262.0 261.9 261.9 261.8 261.8 261.7 261.5 261.0 260.3 259.6 258.8 257.7 256.8 255.7 256.0 256.4 256.8 257.0 257.3 257.9 258.8 259.9 261.3 263.1 265.5 268.0 270.9 275.0 281.5 291.0 299.6 304.0 306.3 307.4 309.2 310.2 310.9 311.1 310.9 308.4 305.6 303.1 300.7 299.1 297.9 296.8 296.1 296.9 298.5 301.1 303.7 306.1 308.6 311.3 315.2 318.6 321.4 324.2 327.2 330.2 332.8 334.3 335.4 335.0 334.5 333.8 333.0 331.3 329.4 327.5 325.7 324.2 323.4 322.7 323.4 324.0 324.6 325.3 326.2 327.4 328.5 329.3 329.8 330.0 330.4 330.7 330.6 329.1 327.2 325.2 321.6 315.3 307.1 295.7 285.2 282.6 286.5 295.5 305.4 310.1 315.1 319.2 322.6 326.0 329.8 333.3 336.6 340.1 343.8 348.3 352.8 355.7 356.7 356.8 356.4 355.3 354.1 352.6 351.0 349.3 347.8 346.9 346.0 345.0 345.2 345.3 345.4 345.6 
346.4 347.4 348.1 348.8 349.3 349.8 350.3 349.8 349.1 348.5 347.8 344.1 339.8 334.3 328.0 321.4 321.2 325.7 332.7 341.8 351.0 355.4 358.5 358.8 358.2 357.6 356.8 356.2 355.6 355.0 354.3 353.3 352.4 351.7 351.1 350.6 350.1 349.5 349.0 348.4 347.8 347.3 346.7 346.4 346.6 347.0 347.3 347.7 348.0 348.4 348.8 349.5 350.1 350.7 351.3 352.1 352.6 353.1 352.9 352.2 351.7 351.1 349.8 347.0 343.6 339.9 337.2 338.7 340.9 343.3 345.8 345.8 345.7 345.6 345.6 345.6 342.6 339.6 336.6 332.0 326.8 323.5 320.4 317.6 314.4 310.2 306.2 302.3 298.7 294.4 290.0 287.0 283.9 280.6 276.7 271.8 264.1 261.5 258.5 255.5 254.2 252.1 250.8 249.4 248.5 247.5 246.7 246.4 246.1 246.1 245.8 245.8 245.5 245.5 245.2 245.2 245.2 245.5 246.4 247.2 248.8 250.2 251.2 253.0 254.5 256.0 257.3 259.6 261.3 262.4 262.7 262.8 263.1 263.5 263.9 264.2 264.4 264.5 264.8 264.8 264.8 264.8 264.5 264.2 263.9 263.6 263.3 263.3 262.9 262.7 262.7 262.5 261.9 261.9 261.9 261.9 261.9 261.9 262.4 262.4 262.7 262.7 263.0 263.3 263.6 263.9 264.2 264.2 264.2 264.3 264.5 264.5 264.5 264.5 264.2 263.9 263.5 263.0 262.1 261.0 260.1 258.6 258.3 254.6 250.4 245.5 241.2 240.1 240.3 240.8 241.3 240.8 240.3 239.8 239.3 238.8 238.3 237.3 236.3 235.2 234.3 233.5 232.7 232.0 231.1 230.1 230.1 230.1 229.9 229.8 229.5 228.9 228.3 227.7 227.5 227.2 226.5 225.5 224.1 222.3 221.3 220.7 221.3 222.2 223.6 225.7 226.0 226.6 227.4 228.4 228.2 227.7 227.0 225.6 223.9 222.3 220.4 218.7 216.8 214.7 212.4 210.4 209.3 208.7 209.0 209.6 210.3 211.1 212.4 214.1 216.0 217.7 219.5 221.1 222.8 224.9 227.0 228.4 229.6 230.7 230.8 230.6 230.1 229.0 227.8 226.3 224.0 221.8 219.8 217.5 215.6 213.6 211.6 210.0 208.6 207.3 206.8 206.6 207.0 207.7 208.3 209.9 211.5 212.9 214.5 216.1 217.8 219.5 221.3 223.5 225.7 227.8 229.1 230.0 230.4 230.4 229.1 228.0 226.8 225.4 223.1 220.5 217.8 215.4 213.0 210.3 207.9 206.9 206.1 205.4 205.6 206.2 206.7 207.5 209.2 211.5 213.7 215.8 217.8 219.6 221.1 222.7 224.2 225.7 227.0 227.2 227.4 227.1 226.7 226.7", - "input_type": "phoneme", - "offset": 211.52 + "f0_timestep": "0.005" }, { + "offset": 218.16, "text": "SP 你 望 着 晚 霞 SP 轻 声 和 我 说 话 SP 听 我 的 回 答 SP", "ph_seq": "SP n i w ang zh e w an x ia SP q in sh eng h e w o sh uo h ua SP t ing w o d e h ui d a SP", - "note_seq": "rest F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 rest C4 C4 C4 C4 A4 A4 A4 A4 C5 C5 C5 C5 rest G4 G4 G4 G4 G4 G4 A4 A4 A4 A4 rest", - "note_dur_seq": "0.24 0.4 0.4 0.4 0.4 0.4000001 0.4000001 0.1999999 0.1999999 0.4 0.4 0.2 0.2 0.2 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.2 0.2 0.3499999 0.3499999 0.2750001 0.175 0.175 0.1999998 0.1999998 0.4000001 0.4000001 0.2000003 0.2000003 1.4 1.4 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.1625 0.0813 0.3019 0.1045 0.3019 0.0929 0.2786 0.1277 0.0929 0.1045 0.3947 0.1045 0.0929 0.1277 0.0813 0.1277 0.0697 0.1393 0.058 0.1045 0.0929 0.1045 0.1045 0.3483 0.1393 0.1277 0.1161 0.058 0.1741 0.0348 0.2902 0.1045 0.1509 0.0464 1.4048 0.0464", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest F4 G4 A4 C5 C5 rest C4 C4 A4 A4 C5 C5 rest G4 G4 G4 A4 A4 rest", + "note_dur": "0.24 0.4 0.4 0.4 0.2 0.4 0.2 0.2 0.2 0.2 0.2 0.2 0.35 0.275 0.175 0.2 0.4 0.2 1.4 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "215.8 215.8 215.8 213.6 211.0 209.0 207.1 205.7 259.5 261.1 262.7 264.3 265.9 267.5 269.1 270.8 272.4 274.1 275.7 277.4 279.1 280.8 282.5 284.2 286.0 287.7 289.5 291.2 327.7 328.9 329.7 330.4 331.3 331.7 332.0 332.2 332.4 332.6 
332.9 333.1 333.4 333.6 333.8 334.2 334.5 334.7 334.9 335.1 335.4 336.3 340.4 343.8 345.7 347.0 348.2 348.1 347.7 347.3 347.0 346.4 345.3 344.1 342.9 341.6 341.2 340.7 340.2 339.6 339.1 339.8 340.7 341.6 342.8 343.8 345.1 346.1 346.8 347.5 348.2 348.8 349.3 349.6 349.8 350.0 350.1 350.3 350.4 350.4 350.2 350.2 350.2 350.1 350.0 350.0 349.7 349.3 349.0 348.7 348.2 348.0 347.9 348.4 349.1 348.6 348.1 347.6 347.0 346.2 344.2 341.8 339.6 336.7 331.3 324.7 318.6 313.7 310.1 308.6 308.1 308.7 313.2 317.9 323.7 332.7 343.2 353.4 363.2 372.6 381.5 390.2 396.6 401.1 403.9 406.3 407.6 407.7 407.1 406.2 404.2 401.3 398.6 396.0 392.9 389.7 386.7 385.1 383.7 383.4 383.5 383.5 383.9 384.5 385.5 386.4 388.6 390.6 392.2 393.2 394.0 394.6 395.3 395.9 396.4 396.6 396.0 394.6 393.2 391.7 390.6 389.8 389.0 388.3 388.3 388.6 388.9 389.2 389.4 389.9 390.2 390.5 390.8 391.1 391.5 392.0 392.5 392.9 393.0 391.9 390.4 387.9 381.8 374.5 363.1 349.4 334.8 323.0 315.9 310.7 312.2 319.4 327.0 333.9 341.8 350.6 361.5 372.1 383.4 398.4 414.8 425.7 434.9 440.0 443.6 447.2 447.6 447.8 447.3 446.3 444.9 443.2 441.4 440.0 438.7 437.5 436.0 435.1 434.4 434.0 434.7 435.3 436.1 436.8 437.3 438.2 438.9 439.5 440.1 440.4 440.5 440.8 440.9 441.0 440.6 440.3 439.9 439.6 439.4 439.2 439.5 439.6 439.7 439.7 439.9 440.0 440.1 440.3 440.3 440.4 440.5 441.1 441.5 442.1 442.5 443.1 443.6 443.9 444.1 444.1 444.1 444.1 443.4 441.4 439.0 436.5 432.1 428.4 425.6 425.5 427.0 429.3 434.2 440.5 448.3 457.4 467.0 475.0 483.2 493.5 504.3 514.6 521.7 526.5 530.6 532.7 533.7 533.9 533.6 532.4 530.9 528.9 527.3 526.5 526.1 525.4 523.0 520.5 517.2 511.2 504.0 491.3 480.4 476.1 477.5 481.2 484.4 486.2 487.6 490.1 497.3 506.0 513.4 517.1 519.3 517.5 514.9 512.1 510.3 510.0 510.7 517.0 523.4 528.7 533.1 537.3 538.9 540.3 541.2 540.7 539.8 538.6 535.3 532.3 529.3 526.0 522.6 519.5 516.6 513.4 510.1 508.9 507.6 507.0 507.1 507.8 510.0 512.2 515.3 519.4 523.3 527.4 531.6 535.0 537.7 540.5 542.6 542.1 541.5 540.5 537.0 533.2 529.0 524.8 520.1 515.2 510.3 506.2 504.0 502.2 502.5 503.6 505.0 506.9 509.1 511.6 514.6 517.3 520.1 523.0 525.1 527.0 528.4 528.9 529.3 528.7 528.3 527.9 527.3 526.7 526.3 527.1 528.3 528.7 527.8 526.3 525.1 523.9 523.4 522.3 518.4 517.8 517.2 516.8 516.2 515.5 514.6 512.3 510.9 508.8 506.0 502.5 497.5 491.3 482.9 470.5 453.7 434.7 413.9 388.2 362.0 337.7 316.7 301.5 288.8 278.6 271.5 266.8 263.4 260.9 259.1 257.9 257.1 256.6 255.7 255.5 255.4 255.4 255.4 255.2 255.2 254.8 254.0 255.3 257.7 260.1 260.5 260.0 259.4 258.8 258.0 257.8 258.8 260.0 261.2 262.5 262.7 262.7 262.2 258.9 254.8 246.1 237.9 231.7 227.9 224.8 227.0 229.6 233.2 236.9 240.2 243.4 246.6 249.1 251.6 254.3 257.1 259.5 261.6 263.8 266.0 267.7 266.1 264.4 262.5 260.6 259.3 258.1 257.0 256.0 256.2 256.8 257.6 258.7 260.0 261.5 263.0 264.3 265.5 266.5 267.4 268.2 268.4 267.4 265.9 262.1 257.4 251.2 245.2 239.5 238.9 245.4 253.4 263.6 277.6 295.5 315.8 333.8 351.8 371.2 391.1 410.5 422.6 431.0 438.5 444.6 447.5 448.5 448.2 447.8 447.9 447.5 446.4 445.0 443.2 441.3 440.8 440.8 440.1 439.2 438.2 437.7 437.8 438.1 438.4 438.7 437.8 437.4 437.2 437.2 437.2 437.0 437.0 437.0 437.2 438.5 440.3 441.8 443.3 444.9 444.3 443.6 442.8 442.2 442.2 442.5 442.6 442.9 443.2 443.3 443.6 443.6 442.3 441.2 439.0 432.2 418.7 408.7 406.8 413.8 424.0 433.4 436.8 437.8 438.5 439.5 440.4 441.3 442.4 445.0 450.0 456.0 461.6 465.6 469.6 474.9 480.0 482.3 484.8 489.2 497.3 511.2 523.7 530.4 532.6 533.0 532.7 532.5 531.6 530.5 529.0 527.5 525.8 529.4 527.8 526.6 524.8 522.6 521.4 518.4 515.2 511.6 
507.2 504.8 501.6 495.9 491.0 484.6 480.0 476.0 474.3 472.9 472.4 472.4 476.8 478.2 479.8 483.3 486.8 491.0 495.3 503.6 512.0 519.0 524.6 527.8 530.1 532.0 533.0 533.8 534.6 533.0 531.5 529.9 528.0 525.4 522.8 520.6 518.3 515.9 513.4 511.1 510.0 509.0 508.0 506.9 508.0 509.0 510.4 513.3 516.9 520.6 524.2 527.3 530.7 534.2 537.7 539.4 540.3 540.2 539.5 538.4 535.3 532.1 528.3 524.2 519.7 515.9 512.6 510.4 508.9 507.8 507.0 507.9 509.2 511.0 512.5 514.2 516.6 519.6 522.6 525.4 528.4 530.9 532.5 533.9 535.4 536.1 535.6 534.2 533.0 530.7 527.7 523.4 518.8 513.7 507.0 501.1 496.3 493.6 491.9 491.3 490.8 490.1 489.5 489.1 473.8 471.3 468.7 466.2 463.7 461.1 458.7 456.2 453.7 451.2 448.8 446.4 444.0 441.6 439.2 436.8 434.4 432.1 429.7 427.4 425.1 422.8 420.5 418.2 416.0 413.7 312.0 311.7 311.7 311.7 311.7 311.7 311.7 311.7 312.7 340.3 353.5 362.3 371.1 377.3 382.1 384.7 386.9 389.1 390.2 390.8 391.5 392.0 392.7 393.4 392.8 392.4 392.7 393.1 393.6 394.1 394.3 394.1 393.9 393.6 393.1 392.0 391.9 392.2 392.4 392.8 392.9 392.1 391.3 390.2 390.8 391.3 391.9 393.2 394.7 394.7 394.6 394.5 394.5 394.5 394.3 394.3 394.3 394.1 394.0 394.0 393.9 393.8 393.8 393.6 393.1 392.8 392.6 392.3 392.0 391.7 391.3 391.2 391.1 391.1 390.9 389.8 384.9 374.0 358.2 342.8 331.2 322.6 316.7 318.0 322.4 332.0 342.4 354.8 369.6 383.7 390.0 391.1 390.9 390.9 392.2 393.4 394.6 395.9 397.2 396.9 396.4 396.2 395.7 394.7 393.4 392.3 391.2 390.0 388.8 387.5 386.6 386.5 386.6 386.8 387.7 388.4 389.2 390.0 390.6 391.5 392.3 393.1 393.5 393.8 394.3 394.2 394.0 393.9 393.8 393.6 393.5 393.4 393.0 392.7 392.4 392.0 391.7 391.4 391.1 390.3 388.4 386.3 383.3 374.5 364.9 357.0 352.9 353.4 355.9 358.7 363.3 367.6 371.7 375.8 380.1 384.0 387.9 392.3 396.1 400.2 403.2 406.0 409.5 413.4 416.4 419.5 422.9 427.5 433.4 439.8 444.9 449.0 452.3 452.6 451.4 450.2 448.6 447.4 446.4 444.9 443.4 442.0 440.8 440.3 439.4 439.0 438.7 439.1 439.7 440.2 439.9 439.4 439.0 438.7 433.7 426.8 414.5 398.0 369.1 367.4 366.6 366.2 366.2 367.0 370.4 373.2 376.9 379.5 381.9 383.7 385.5 387.5 388.4 390.2 391.1 392.4 393.6 394.5 395.4 395.9 393.8 393.7 393.7 393.6 393.6 392.9 392.0 391.4 390.9 390.2 389.7 388.8 388.4 387.9 387.5 387.5 387.5 387.5 387.5 389.3 394.0 397.8 401.2 405.6 408.9 412.0 415.4 418.1 421.2 425.0 428.0 431.2 434.2 436.5 438.3 440.5 442.8 444.3 446.4 448.2 446.7 446.3 445.9 445.6 445.0 444.4 443.7 442.9 442.3 441.4 440.6 440.1 440.2 440.3 440.3 440.5 440.5 440.7 440.8 441.0 441.0 441.2 441.3 441.0 441.0 440.9 440.8 440.6 440.5 440.4 440.4 440.8 441.4 442.0 441.7 441.4 441.3 441.1 440.8 440.5 440.6 440.8 440.8 441.0 441.0 441.1 441.3 441.3 441.5 441.5 441.5 441.5 441.3 441.3 441.3 441.3 441.3 441.3 441.0 441.0 441.0 441.0 441.0 441.0 441.0 440.8 440.8 440.6 440.0 439.2 439.0 439.1 439.2 439.5 439.8 440.2 440.6 441.0 441.3 441.6 441.9 442.2 442.5 442.8 443.1 443.4 443.8 444.3 444.6 444.7 444.1 443.5 442.9 442.3 442.4 442.3 441.7 440.7 439.7 438.8 437.8 437.0 436.1 435.2 434.3 433.9 434.7 436.1 437.0 437.8 438.6 439.4 440.1 441.0 441.8 443.2 444.3 445.6 446.4 447.0 447.7 448.3 448.9 448.7 448.1 447.8 447.3 445.6 443.6 441.8 440.5 439.2 438.0 436.0 434.0 433.2 432.8 432.3 432.7 433.6 435.6 437.4 439.3 440.5 441.5 442.3 443.2 444.1 445.1 446.1 447.0 448.1 448.9 448.2 447.6 446.9 446.0 444.5 442.5 440.4 438.9 438.0 437.2 436.5 435.8 435.2 435.3 435.8 436.2 436.0 436.0 435.8 435.5 435.4 435.8 436.2 436.8 437.8 438.5 439.4 440.3 441.7 443.1 443.8 444.5 445.2 446.3 447.5 447.9 448.3 448.6 449.0 449.6 449.2 448.3 447.5 446.5 444.3 442.0 439.7 437.2 435.3 
433.9 432.7 430.8 428.9 427.4 426.5 426.0 426.6 427.6 429.1 431.0 432.9 435.3 438.4 441.8 445.6 449.5 452.8 456.3 459.2 462.5 465.9 467.2 468.2 468.3 464.3 458.9 457.6 456.0 454.9 453.7 452.4 450.8 449.6 449.0 449.0", - "input_type": "phoneme", - "offset": 218.16 + "f0_timestep": "0.005" }, { + "offset": 224.48, "text": "SP 谁 都 想 一 生 浪 漫 无 暇 雪 月 和 风 花 去 思 念 一 个 他 SP", "ph_seq": "SP sh ei d ou x iang y i sh eng l ang m an w u x ia x ve y ve h e f eng h ua q v s i0 n ian y i g e t a SP", - "note_seq": "rest D4 D4 E4 E4 F4 F4 G4 G4 A4 A4 C5 C5 C5 C5 C5 C5 C5 C5 D5 D5 C5 C5 C5 C5 C5 C5 C5 C5 A#4 A#4 A#4 A#4 A#4 A#4 A4 A4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.32 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.4 0.4 0.2 0.2 0.3999999 0.3999999 0.2 0.2 0.4000001 0.4000001 0.2 0.2 0.1999998 0.1999998 0.2 0.2 0.2 0.2 0.4000001 0.4000001 0.1999998 0.1999998 0.2000003 0.2000003 0.3999996 0.3999996 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.4000001 0.05", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.2206 0.1045 0.1741 0.0232 0.1045 0.1045 0.1509 0.0464 0.0929 0.1045 0.3251 0.0697 0.1277 0.0813 0.3251 0.0697 0.0929 0.1045 0.2786 0.1277 0.1277 0.0697 0.1393 0.058 0.1161 0.0813 0.1277 0.0813 0.2786 0.1161 0.0813 0.1161 0.1393 0.0697 0.3135 0.0813 0.3483 0.0464 0.3019 0.1045 0.3947 0.058", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 E4 F4 G4 A4 C5 C5 C5 C5 D5 C5 C5 C5 C5 A#4 A#4 A#4 A4 G4 A4 rest", + "note_dur": "0.32 0.2 0.2 0.2 0.2 0.4 0.2 0.4 0.2 0.4 0.2 0.2 0.2 0.2 0.4 0.2 0.2 0.4 0.4 0.4 0.4 0.05", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "441.0 441.0 441.0 440.6 380.6 378.9 377.1 375.4 373.6 371.9 370.2 368.5 366.8 365.1 363.4 361.8 360.1 358.4 356.8 355.1 353.5 351.9 350.2 348.6 347.0 345.4 343.8 342.2 340.7 339.1 337.5 336.0 334.4 332.9 331.3 329.8 328.3 326.8 325.3 323.8 322.3 320.8 319.3 317.8 293.7 293.4 293.1 292.8 292.5 292.3 291.9 291.7 291.5 291.2 290.8 290.6 290.3 290.0 289.7 289.5 289.2 288.9 288.6 288.3 288.1 287.5 287.0 289.0 293.7 299.5 301.0 300.6 299.7 298.4 296.9 295.5 294.1 292.9 292.1 291.3 290.6 290.7 290.9 291.1 291.3 292.0 292.7 293.4 294.2 295.0 295.8 295.9 295.1 293.3 291.5 289.4 287.3 285.7 284.5 285.3 289.2 295.3 305.2 319.0 333.8 339.6 339.3 338.0 336.1 333.8 331.4 331.2 331.0 330.6 330.0 330.0 330.1 331.0 331.0 331.0 330.5 330.0 329.5 329.0 328.5 328.3 327.6 327.0 326.6 326.5 326.2 325.8 325.8 325.8 325.8 326.2 326.6 327.0 327.7 328.9 330.6 332.9 335.4 337.7 340.7 344.6 347.6 350.6 351.5 352.3 352.7 353.5 353.9 353.9 353.9 353.9 353.5 352.7 348.3 347.8 348.0 348.0 348.0 347.9 347.8 347.8 347.8 347.9 348.6 349.4 350.4 351.3 352.3 353.1 353.1 353.0 352.4 351.7 351.5 351.3 352.0 353.4 356.6 361.5 369.4 377.4 382.6 385.5 387.7 390.0 391.9 393.3 394.3 395.4 396.1 395.2 394.4 397.7 397.7 397.7 397.7 397.7 397.7 397.5 396.8 396.0 395.4 394.5 394.0 393.1 392.4 392.4 392.4 392.4 392.0 392.4 393.1 394.5 395.9 396.8 399.3 401.2 405.6 409.4 405.6 414.1 422.3 429.1 434.3 437.0 438.2 439.2 439.7 440.2 440.3 439.9 439.2 438.7 437.9 437.2 436.1 435.4 434.7 434.4 434.4 434.6 435.6 436.4 437.3 438.2 439.2 440.1 440.9 441.8 441.8 441.5 441.1 440.9 440.6 440.3 438.7 437.4 436.7 436.3 435.7 435.3 436.1 436.9 438.2 439.5 441.1 442.4 443.6 444.5 444.9 445.5 446.1 446.7 447.1 446.7 445.9 444.9 443.6 442.3 441.0 439.9 439.5 439.3 439.0 437.7 435.9 434.0 431.2 428.0 426.5 427.8 430.1 433.4 438.2 445.6 454.7 465.8 478.0 494.4 512.2 526.1 
533.2 537.2 538.7 538.9 539.5 539.4 538.7 537.3 535.5 531.2 527.3 524.7 522.3 519.3 519.3 519.2 518.9 518.5 519.0 519.8 521.1 522.4 521.7 520.2 517.6 515.1 513.1 512.5 512.5 512.3 510.9 509.4 508.1 506.6 504.9 502.6 500.5 498.3 498.5 504.6 508.6 508.5 506.7 504.0 501.0 498.4 496.6 494.6 492.7 490.3 487.8 485.6 483.6 482.6 481.6 481.0 480.8 482.0 484.0 485.7 487.5 489.2 491.2 495.0 499.0 502.9 506.5 510.7 514.9 519.0 521.6 523.7 526.1 528.4 528.4 528.4 528.4 528.4 528.4 527.6 526.6 525.9 525.2 524.2 523.4 522.6 521.9 521.5 521.4 521.1 521.1 521.8 522.5 523.6 524.5 524.6 524.9 525.1 524.5 523.4 522.6 522.3 522.6 522.9 523.6 524.1 524.6 525.2 525.7 525.3 524.9 524.6 524.2 523.9 523.3 522.3 521.3 520.4 521.4 522.6 524.2 525.5 526.7 527.2 527.3 527.5 527.7 527.8 527.2 526.7 526.1 525.6 524.0 515.5 503.5 483.6 463.7 443.3 430.0 420.2 417.7 417.7 420.5 424.5 428.8 432.6 435.7 438.9 442.0 445.2 449.0 453.2 457.9 463.2 468.6 473.1 476.0 478.7 481.5 492.0 502.8 511.4 517.6 523.3 528.6 532.8 533.8 533.4 533.0 532.9 532.6 532.0 531.1 529.6 528.1 526.8 525.7 524.4 523.6 523.1 522.5 522.0 521.5 520.8 520.8 520.8 520.8 520.8 520.8 520.8 520.8 520.8 521.3 521.7 522.1 522.5 522.8 523.2 523.9 524.5 526.1 527.6 528.6 529.3 530.1 530.8 531.5 531.6 526.9 518.6 498.9 480.4 461.8 442.5 430.5 428.3 428.7 432.8 438.2 445.6 453.3 459.8 466.2 472.4 479.2 485.4 491.6 498.8 506.0 514.9 524.3 532.3 539.8 547.4 556.5 564.6 570.3 576.0 582.3 585.6 589.9 595.8 601.7 604.5 605.4 605.7 604.9 603.8 602.1 599.9 597.9 596.2 594.0 591.8 591.4 591.1 590.6 590.1 589.7 589.2 588.5 588.1 587.2 585.6 583.6 581.1 576.4 570.2 564.1 557.5 551.8 549.1 546.4 543.3 544.0 544.1 543.7 542.4 538.6 535.0 532.9 531.7 531.2 531.2 529.6 528.4 527.7 527.3 527.2 527.4 527.0 526.7 526.4 526.0 525.9 525.4 524.6 523.5 522.6 521.5 520.4 518.0 511.3 502.5 479.4 460.1 447.7 442.9 441.3 450.0 459.2 466.8 473.5 480.4 487.4 493.9 500.2 507.9 517.5 528.1 536.6 539.1 539.5 536.7 533.2 529.9 527.3 526.4 526.0 525.6 525.2 525.1 525.1 525.7 526.5 527.6 528.4 529.2 530.2 525.5 507.2 481.0 454.9 444.3 464.3 482.2 494.2 498.1 498.5 503.5 508.6 512.7 515.3 517.2 520.2 525.6 530.9 535.1 538.6 541.3 540.5 539.1 537.9 536.4 533.3 530.6 528.1 525.1 522.0 519.7 519.2 520.1 521.2 522.3 523.8 524.9 526.1 527.3 526.9 526.2 523.3 515.5 504.7 486.0 466.7 451.8 444.7 442.7 441.8 444.1 446.7 449.1 452.6 457.1 461.4 465.0 467.5 470.0 472.4 484.1 496.2 504.7 507.8 508.9 510.0 510.1 507.6 503.8 500.2 496.6 494.7 493.1 490.8 487.9 485.1 483.4 484.3 486.2 488.2 490.3 494.2 497.8 501.6 505.7 510.3 515.0 519.3 522.5 525.1 527.4 528.4 528.7 528.5 528.1 527.7 527.3 526.7 526.4 525.7 525.3 524.9 524.4 523.9 523.6 524.3 524.9 525.4 526.1 526.9 527.6 527.5 526.3 523.6 514.6 495.2 480.7 476.4 484.6 497.3 504.1 507.3 509.6 511.3 511.0 507.0 503.6 500.1 496.3 492.5 488.1 483.6 480.1 477.5 475.1 471.7 468.3 464.4 459.9 456.0 459.7 463.5 466.7 469.2 471.6 471.9 472.1 472.0 471.6 471.6 471.2 466.8 460.1 452.4 443.3 434.7 430.5 430.6 432.1 435.4 439.2 442.5 443.7 443.1 441.8 443.0 444.0 445.0 446.1 447.2 448.4 449.5 450.6 451.7 452.9 454.0 455.5 456.4 455.8 454.5 456.5 460.5 464.5 468.1 469.9 471.3 472.6 473.5 473.4 472.9 472.6 472.1 471.7 471.4 471.0 470.6 470.1 469.8 469.5 468.9 468.4 467.2 465.2 460.9 455.0 454.1 454.9 455.9 456.5 456.8 457.4 457.8 458.2 459.0 460.3 461.3 462.3 463.7 466.2 469.9 473.6 474.9 475.6 477.4 479.5 478.2 477.3 476.6 476.3 476.0 474.6 473.2 471.9 470.5 469.1 467.8 466.4 465.1 463.9 463.2 462.6 462.4 462.4 462.6 462.9 463.3 463.5 463.6 463.9 464.3 465.2 466.0 466.9 
467.6 468.1 468.4 468.7 469.0 468.7 468.3 468.0 467.6 467.4 467.0 467.2 467.7 467.3 466.7 466.2 465.6 465.6 465.4 465.2 465.1 464.8 464.8 464.7 465.1 465.4 464.6 463.3 462.1 461.6 462.4 463.7 465.0 465.4 465.4 465.6 464.8 463.8 462.7 461.7 460.3 458.7 456.4 453.2 449.9 446.7 443.6 440.7 436.7 430.5 423.7 418.2 413.4 409.4 406.4 403.5 402.1 400.9 400.0 399.9 400.2 400.2 400.4 401.2 402.0 402.8 403.7 404.8 405.9 407.7 409.6 411.7 414.3 417.4 420.7 424.7 428.5 431.5 434.2 436.1 437.8 440.0 441.7 442.6 443.6 444.5 444.1 443.4 442.8 442.2 441.8 441.5 441.2 440.8 440.4 440.1 439.7 439.6 440.0 440.6 441.0 441.5 442.1 442.5 442.8 442.0 441.0 432.6 418.3 406.8 400.1 403.0 414.0 423.5 429.5 434.7 437.7 439.6 440.3 440.5 439.7 437.2 433.2 429.5 424.9 417.3 405.4 393.8 387.8 387.0 387.9 389.1 388.8 388.7 389.0 389.4 390.2 390.2 390.4 390.5 390.8 390.6 390.5 390.2 390.2 390.2 390.2 390.0 390.0 390.1 390.4 390.6 390.7 391.0 391.1 391.3 391.5 391.8 391.9 392.1 392.2 392.4 392.6 392.4 392.3 392.6 392.9 393.2 393.6 393.9 394.2 394.5 394.8 394.9 393.9 392.2 387.9 382.8 376.8 372.2 369.9 368.5 364.8 362.6 363.5 368.2 375.2 382.5 389.3 392.6 393.4 393.1 393.3 393.1 393.1 393.3 393.4 394.5 396.0 398.0 400.6 402.1 404.2 406.2 407.7 410.1 417.2 426.2 438.5 445.5 448.6 450.5 451.5 452.2 452.2 451.7 451.1 449.8 448.6 447.4 445.4 443.1 440.7 437.9 434.8 431.6 429.0 426.2 423.8 422.5 421.5 422.6 423.8 425.6 427.8 430.8 433.9 437.0 441.3 445.1 448.7 451.1 453.0 454.8 456.2 456.4 456.0 455.5 454.3 451.8 448.1 443.8 439.6 435.7 432.1 428.0 424.3 420.6 417.6 416.2 415.4 416.5 418.3 420.2 423.5 428.0 432.4 435.9 439.4 442.4 445.7 449.2 451.4 451.5 451.2 450.9 449.2 445.4 441.5 438.2 434.7 430.9 429.1 427.4 426.2 426.7 428.0 428.5 429.0 429.4 429.8 430.4 430.8 431.3 431.4 431.4", - "input_type": "phoneme", - "offset": 224.48 + "f0_timestep": "0.005" }, { + "offset": 230.48, "text": "SP 却 再 也 无 法 完 全 停 下 SP", "ph_seq": "SP q ve z ai y E w u f a w an q van t ing x ia SP", - "note_seq": "rest C4 C4 D4 D4 A4 A4 C5 C5 C5 C5 A4 A4 C5 C5 C4 C4 D4 D4 rest", - "note_dur_seq": "0.32 0.4 0.4 0.4 0.4 0.8 0.8 0.4 0.4 0.4000001 0.4000001 0.8 0.8 0.4000001 0.4000001 0.5999999 0.5999999 2 2 0.2", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "ph_dur": "0.1741 0.1509 0.3019 0.1045 0.267 0.1277 0.685 0.1161 0.2554 0.1393 0.3019 0.1045 0.6037 0.1974 0.2786 0.1161 0.3947 0.209 1.9969 0.1974", - "f0_timestep": "0.005", + "ph_num": "2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 D4 A4 C5 C5 A4 C5 C4 D4 rest", + "note_dur": "0.32 0.4 0.4 0.8 0.4 0.4 0.8 0.4 0.6 2.0 0.2", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "438.7 438.7 438.7 439.3 375.4 372.7 370.1 367.5 365.0 362.4 359.9 357.3 354.8 352.4 349.9 347.4 345.0 342.6 340.2 337.8 335.4 333.1 330.7 328.4 326.1 323.8 321.6 319.3 317.1 314.9 312.6 310.5 308.3 306.1 304.0 301.8 299.7 297.6 295.5 293.5 291.4 289.4 287.3 285.3 261.6 261.0 260.3 259.8 259.2 258.5 257.9 257.2 256.7 256.1 255.4 254.8 254.1 253.5 253.0 252.3 251.7 251.1 250.5 249.9 249.2 248.6 247.5 246.6 245.8 246.7 247.9 250.0 252.1 254.3 255.4 255.8 255.9 255.8 255.8 255.8 255.7 255.7 255.6 255.5 255.5 255.4 255.5 255.9 256.4 257.0 257.7 258.3 258.9 259.7 260.4 261.1 261.2 261.3 261.4 261.5 261.5 261.6 261.3 260.9 261.2 261.4 261.7 261.9 261.9 261.8 261.8 261.7 261.6 261.6 261.5 261.4 261.2 260.8 260.5 260.1 259.3 257.0 253.6 246.9 239.2 230.4 222.6 217.7 215.7 217.1 221.7 226.1 230.3 235.2 240.3 244.4 248.5 252.4 256.8 261.8 267.3 271.7 276.0 280.4 284.8 289.2 293.5 296.7 298.4 298.1 297.0 296.0 295.2 294.3 
293.8 293.2 292.6 292.0 291.4 291.1 290.8 290.5 290.2 290.2 290.5 290.7 290.9 291.0 291.1 291.3 291.8 292.3 292.6 292.4 292.0 291.6 291.7 291.9 292.0 292.1 292.3 292.4 292.5 292.6 292.8 292.9 293.0 293.1 293.3 293.3 293.5 293.4 293.3 293.3 293.2 293.1 293.0 293.0 292.9 292.8 292.0 291.1 290.2 288.7 287.0 285.3 284.1 283.1 282.1 279.8 277.2 275.3 274.4 274.3 276.1 278.7 282.3 287.4 294.7 304.0 315.6 327.0 337.6 350.2 364.3 375.7 386.4 394.0 400.5 405.8 409.1 411.3 413.4 415.4 416.3 416.5 416.3 415.6 414.8 413.9 412.9 412.5 411.6 410.7 409.8 409.5 409.0 408.7 408.6 408.6 408.9 409.1 409.7 410.6 411.5 413.0 414.7 416.2 417.8 420.6 423.4 426.2 429.0 432.1 435.2 436.8 438.5 440.0 441.3 442.0 442.4 442.7 442.8 442.6 442.0 440.4 438.9 438.3 438.0 437.5 437.2 436.9 436.8 437.2 437.5 437.5 437.8 438.1 438.2 438.5 439.1 439.8 440.4 440.1 439.7 439.6 440.0 440.6 441.0 440.8 440.4 440.2 439.9 439.6 439.2 438.9 438.7 438.9 439.2 439.5 439.6 439.8 440.1 440.3 440.5 440.8 441.0 441.2 441.5 441.5 441.8 442.1 442.3 442.5 442.3 442.0 441.7 441.5 441.1 440.5 439.6 439.1 438.9 438.5 438.2 437.8 437.6 438.0 438.6 438.2 437.8 438.0 438.1 438.2 438.5 438.9 439.4 439.6 440.1 440.5 440.8 441.1 441.1 440.8 440.5 440.4 440.1 439.8 439.6 439.5 439.2 438.9 438.7 438.9 439.0 439.2 439.4 439.5 439.9 440.3 439.9 439.7 439.7 439.7 440.0 440.7 441.4 441.8 442.6 444.1 446.6 450.3 454.9 461.1 468.6 476.8 484.7 491.0 496.7 501.4 505.5 510.0 514.1 518.2 521.7 522.9 523.7 523.7 523.3 522.6 522.0 519.9 517.4 514.8 511.9 509.0 507.2 507.1 507.6 508.4 508.8 509.4 510.1 512.0 514.3 516.8 519.0 520.7 522.3 524.2 525.8 527.3 528.1 528.6 528.7 528.4 528.4 528.2 528.1 527.8 527.1 526.8 526.4 526.1 525.7 525.2 523.9 521.1 517.0 507.8 486.9 464.1 446.0 440.5 456.0 484.6 510.3 521.0 520.2 516.0 512.3 510.0 508.5 508.1 508.1 508.1 508.1 508.1 507.8 506.3 504.6 503.1 501.5 500.6 504.3 510.9 524.2 535.1 541.6 546.7 545.2 543.5 540.9 537.7 534.2 530.9 527.7 525.0 523.0 521.4 519.6 518.1 517.4 516.8 516.3 517.1 517.8 518.5 519.2 520.2 521.1 522.1 523.1 524.2 524.8 524.8 525.2 525.4 525.1 525.1 524.7 524.5 524.3 523.9 523.9 523.6 523.4 523.1 522.9 522.6 521.6 520.8 519.8 518.7 517.8 516.7 514.7 511.1 506.2 498.2 489.3 480.4 468.9 455.1 441.5 430.1 426.5 424.3 422.1 419.4 420.5 421.2 421.8 422.4 423.1 423.5 425.7 427.5 428.7 429.7 433.3 436.2 439.0 441.8 444.3 445.0 445.5 445.1 445.2 446.1 445.2 444.9 444.9 444.5 444.1 443.4 442.9 442.7 442.4 442.3 441.7 441.5 441.5 441.5 441.5 441.6 441.9 442.2 442.5 442.8 441.8 441.0 440.1 439.4 439.2 439.0 439.0 439.0 439.0 438.7 438.8 439.2 439.6 439.9 439.7 439.7 439.5 439.3 439.2 439.0 438.7 438.6 438.5 438.3 438.0 437.7 437.3 436.8 435.7 434.4 435.0 435.4 436.0 436.4 437.0 437.7 438.6 439.6 440.8 441.8 443.1 443.9 444.2 444.3 444.3 444.6 444.3 443.6 442.7 441.8 440.6 439.7 438.9 437.9 436.7 435.8 434.8 434.1 433.8 433.4 433.1 433.0 433.4 434.3 436.2 438.4 440.3 441.6 443.0 444.6 445.9 447.0 446.9 446.3 445.6 444.0 442.2 440.9 439.9 439.0 438.4 438.2 438.2 438.4 438.5 438.8 440.6 442.4 444.2 445.6 446.9 448.1 448.9 448.4 447.7 447.0 445.0 441.0 436.2 430.9 419.0 409.1 403.9 405.3 411.2 417.8 423.4 426.0 428.0 430.2 432.4 434.6 437.2 440.4 443.6 447.0 450.3 453.0 455.3 457.9 460.2 462.5 464.8 467.5 470.5 473.9 477.6 481.6 485.7 487.9 490.8 493.5 495.7 498.7 501.6 511.5 521.1 529.0 535.9 540.1 542.3 542.1 541.1 539.3 537.0 534.6 531.8 529.1 526.5 524.2 522.6 520.8 519.0 517.0 514.9 515.6 516.3 517.0 517.7 518.7 520.2 521.5 522.6 524.2 525.4 526.9 527.7 527.0 525.9 525.1 523.9 522.9 521.9 520.8 519.6 520.5 
521.6 522.4 523.1 524.2 525.2 525.6 525.1 524.1 522.9 521.8 520.8 519.3 515.3 510.4 505.6 499.0 487.8 471.0 444.1 419.8 409.9 403.5 396.0 388.2 389.2 389.9 389.4 385.1 379.1 372.0 363.7 355.6 345.5 332.7 320.1 306.5 292.4 275.3 258.3 242.3 231.3 229.6 231.5 235.4 236.1 238.1 240.4 241.3 242.0 243.3 244.7 245.3 245.6 246.2 246.8 247.3 248.2 249.4 251.0 252.8 254.2 255.0 255.5 256.2 256.8 257.5 258.1 258.7 258.9 259.0 259.1 259.2 259.4 260.0 260.5 261.1 261.5 261.6 261.8 261.8 261.9 261.9 261.9 261.9 262.1 262.1 262.2 262.2 262.2 262.4 262.4 262.4 262.3 262.2 262.2 262.1 262.1 262.1 261.9 261.9 261.5 261.1 260.6 260.9 261.2 261.6 261.9 262.3 262.7 263.1 263.4 263.7 263.7 263.7 263.8 263.9 263.9 263.7 262.4 260.7 258.2 254.4 249.2 241.3 233.9 230.8 229.1 229.8 231.7 233.6 235.4 237.1 238.8 240.7 243.0 245.1 247.2 249.5 251.8 254.2 256.5 258.2 260.0 261.9 263.9 265.6 267.5 269.6 271.8 274.1 275.9 277.5 279.6 282.0 283.2 284.4 286.0 288.4 292.6 297.5 301.6 303.4 304.2 305.1 304.8 304.3 303.7 302.8 302.1 301.2 300.3 299.4 298.5 297.6 297.2 296.8 296.0 295.2 294.3 293.6 293.2 292.9 293.1 293.3 293.5 293.7 294.0 294.3 294.5 294.7 295.0 295.3 295.5 295.7 296.0 296.3 296.5 296.7 296.4 296.0 295.6 295.3 294.9 294.5 294.3 293.9 293.5 293.2 293.5 293.8 293.7 293.7 293.7 293.7 293.5 293.5 293.5 293.4 293.3 293.3 293.2 293.2 293.3 293.5 293.5 293.7 293.8 294.0 294.2 294.2 294.4 294.5 294.6 294.9 294.9 295.1 295.1 294.9 294.5 294.2 294.1 294.0 294.0 294.0 293.8 293.8 293.8 293.7 293.7 293.6 293.5 293.6 293.8 294.0 294.4 294.6 294.7 294.4 294.0 293.7 294.2 294.5 294.9 294.9 294.6 294.4 294.2 293.6 293.0 293.6 293.8 293.7 293.7 293.5 293.3 293.1 292.9 292.7 293.0 293.5 293.2 293.1 292.9 292.6 292.4 292.2 292.1 291.8 292.1 292.4 292.8 293.1 293.4 293.7 294.0 294.2 294.3 294.2 294.0 294.0 293.9 293.8 293.7 293.7 293.2 292.9 292.8 292.9 293.0 293.0 293.2 293.2 293.2 293.3 293.3 293.1 292.8 292.5 292.3 292.5 292.6 292.6 292.8 292.8 293.0 293.1 293.2 293.2 292.6 292.1 292.1 292.2 292.3 292.5 292.3 292.2 292.1 292.0 292.0 291.8 291.6 291.5 291.5 291.3 291.3 291.1 291.0 290.8 290.8 290.7 291.0 291.2 291.6 292.0 292.2 292.5 292.8 293.2 293.5 293.9 294.4 294.9 295.4 296.0 296.5 296.9 296.7 296.3 295.9 295.9 295.8 295.0 294.0 293.0 291.9 291.1 290.5 290.0 289.6 289.0 288.8 288.9 289.2 289.5 290.0 290.9 291.9 293.1 294.2 295.4 296.6 297.6 298.7 299.8 300.3 300.7 301.0 300.8 300.5 300.1 299.0 298.0 297.4 296.7 294.8 293.1 291.4 289.6 287.8 286.1 285.2 284.7 284.1 284.3 284.7 285.1 286.2 287.7 289.1 290.6 292.0 292.9 293.7 294.7 295.5 296.3 297.2 298.1 299.0 300.0 300.9 301.5 301.6 301.6 300.7 300.1 299.2 297.8 296.2 294.5 292.9 291.6 290.4 289.1 287.1 285.5 284.5 283.5 282.7 282.0 282.8 283.9 284.9 286.1 286.8 287.5 288.3 290.0 292.0 294.0 295.6 296.9 298.3 300.0 301.5 302.8 303.6 304.4 305.1 304.4 303.5 302.1 300.5 298.8 296.9 295.1 292.6 289.4 286.3 283.2 280.4 278.7 277.8 277.0 276.4 276.7 277.3 279.2 281.5 283.9 285.7 287.5 289.5 292.0 294.3 296.6 298.8 301.0 301.7 302.0 302.4 302.4 301.9 301.1 300.1 298.1 296.2 294.5 292.8 291.3 289.8 288.4 287.1 286.3 285.8 286.1 286.5 287.0 288.3 290.0 291.7 293.4 295.0 296.9 298.2 299.2 300.2 300.8 301.2 301.8 302.3 302.7 302.9 302.4 301.6 300.1 298.6 296.9 295.2 293.1 291.1 289.4 287.5 285.6 283.6 281.8 280.2 279.0 278.0 276.9 275.8 275.0 274.0 273.1 273.8 275.0 276.9 279.6 282.2 283.4 284.5 285.6 286.7 288.0 289.0 290.2 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 290.8 
290.8 290.8 290.8 290.8 290.8 290.8", - "input_type": "phoneme", - "offset": 230.48 + "f0_timestep": "0.005" } ] \ No newline at end of file diff --git "a/samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" "b/samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" index 713bd1572..4a7ce9533 100644 --- "a/samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" +++ "b/samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" @@ -1,16 +1,14 @@ [ { + "offset": 0.047, "text": "AP 好 不 好 嘛 SP AP 求 SP 求 你 啦 SP AP 拜 托 SP 拜 托 SP 行 不 行 呀 AP 我 不 管 嘛 SP AP 你 最 好 啦 AP 我 爱 你 呀 AP 人 家 要 嘛 SP", - "ph_seq": "AP h ao b u h ao m a SP AP q iu SP q iu n i l a SP AP b ai t uo SP b ai ai t uo SP x ing b u x ing y a AP w o b u g uan uan m a SP AP n i z ui h ao l a AP w o ai n i y a AP r en j ia y ao m a SP", - "note_seq": "rest F4 F4 B4 B4 F4 F4 E4 E4 rest rest G#4 G#4 rest C#5 C#5 F#4 F#4 F4 F4 rest rest F#4 F#4 G4 G4 rest G4 G4 F#4 F#4 F#4 rest F4 F4 C5 C5 E4 E4 E4 E4 rest F#4 F#4 A#4 A#4 G#4 G#4 E4 F#4 F#4 rest rest F#4 F#4 C#5 C#5 F#4 F#4 F#4 F#4 rest F#4 F#4 C#5 F4 F4 E4 E4 rest D4 D4 G#4 G#4 B4 B4 D#4 D#4 rest", - "note_dur_seq": "0.187 0.235 0.235 0.293 0.293 0.322 0.322 0.557 0.557 0.06800006 0.4 0.1759999 0.1759999 0.1170001 0.293 0.293 0.4100001 0.4100001 0.411 0.411 0.03899989 0.4 0.2639999 0.2639999 0.1760001 0.1760001 0.1170001 0.1169996 0.1169996 0.2930002 0.4390001 0.4390001 0.1469998 0.263 0.263 0.3230004 0.3230004 0.3799996 0.3799996 0.4690003 0.4690003 0.4099998 0.2049999 0.2049999 0.2350001 0.2350001 0.1760001 0.1760001 0.2930002 0.3799992 0.3799992 0.1280008 0.4 0.2049999 0.2049999 0.2929993 0.2929993 0.3510008 0.3510008 0.4109993 0.4109993 0.4099998 0.2630005 0.2630005 0.5279999 0.2930002 0.2930002 0.2930002 0.2930002 0.3509998 0.3520002 0.3520002 0.2049999 0.2049999 0.5860004 0.5860004 0.4689999 0.4689999 0.5", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.131556 0.055444 0.175781 0.059219 0.189992 0.103008 0.18376 0.13824 0.557 0.000395 0.4 0.067605 0.176 0.029286 0.087714 0.180773 0.112226 0.240001 0.169999 0.411 0.006693 0.4 0.032307 0.149827 0.114172 0.176 0.030542 0.086458 0.117 0.264799 0.116201 0.439 0.012 0.135 0.226852 0.036148 0.224159 0.098842 0.258593 0.121407 0.469 0.314952 0.095048 0.153984 0.051016 0.198867 0.036133 0.176 0.176633 0.116367 0.379999 0.083001 0.4 0.045 0.118855 0.086145 0.205263 0.087736 0.278265 0.072736 0.410999 0.349999 0.06 0.200931 0.47386 0.11621 0.233001 0.059999 0.293 0.291 0.059999 0.27595 0.07605 0.184064 0.049935 0.519243 0.066757 0.469 0.5", - "f0_timestep": "0.005", + "ph_seq": "AP h ao b u h ao m a SP AP q iu SP q iu n i l a SP AP b ai t uo SP b ai t uo SP x ing b u x ing y a AP w o b u g uan m a SP AP n i z ui h ao l a AP w o ai n i y a AP r en j ia y ao m a SP", + "ph_dur": "0.1316 0.0554 0.1758 0.0592 0.19 0.103 0.1838 0.1382 0.557 0.0004 0.4 0.0676 0.176 0.0293 0.0877 0.1808 0.1122 0.24 0.17 0.411 0.0067 0.4 0.0323 0.1498 0.1142 0.176 0.0305 0.0865 0.3818 0.1162 0.439 0.012 0.135 0.2269 0.0361 0.2242 0.0988 0.2586 0.1214 0.469 0.315 0.095 0.154 0.051 0.1989 0.0361 0.3526 0.1164 0.38 0.083 0.4 0.045 0.1189 0.0861 0.2053 0.0877 0.2783 0.0727 0.411 0.35 0.06 0.2009 0.4739 0.1162 0.233 0.06 0.293 0.291 0.06 0.2759 0.0761 0.1841 0.0499 0.5192 0.0668 0.469 0.5", + "ph_num": "2 2 2 2 1 1 2 1 2 2 2 1 1 2 2 1 2 2 1 2 2 2 2 1 2 2 2 2 1 1 2 2 2 2 1 2 1 2 2 1 2 2 2 2 1 1", + "note_seq": 
"rest F4 B4 F4 E4 rest rest G#4 rest C#5 F#4 F4 rest rest F#4 G4 rest G4 F#4 F#4 rest F4 C5 E4 E4 rest F#4 A#4 G#4 E4 F#4 rest rest F#4 C#5 F#4 F#4 rest F#4 C#5 F4 E4 rest D4 G#4 B4 D#4 rest", + "note_dur": "0.187 0.235 0.293 0.322 0.557 0.068 0.4 0.176 0.117 0.293 0.41 0.411 0.039 0.4 0.264 0.176 0.117 0.117 0.293 0.439 0.147 0.263 0.323 0.38 0.469 0.41 0.205 0.235 0.176 0.293 0.38 0.128 0.4 0.205 0.293 0.351 0.411 0.41 0.263 0.528 0.293 0.293 0.351 0.352 0.205 0.586 0.469 0.5", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.4 367.5 367.9 367.3 366.0 363.4 361.2 360.0 357.6 354.9 353.6 352.5 351.6 350.8 350.4 350.0 349.0 348.0 346.7 345.7 345.5 345.1 344.0 344.0 343.9 343.0 342.4 341.8 340.5 338.4 337.7 339.5 342.2 345.7 348.1 348.1 348.9 354.1 359.3 364.7 370.1 375.7 381.3 387.0 392.7 398.6 404.6 410.6 416.7 422.9 429.2 435.7 442.2 448.8 455.4 454.9 451.2 450.2 450.0 453.0 460.3 471.7 478.6 483.6 488.0 492.9 499.6 504.6 507.2 510.3 513.1 520.9 533.6 531.7 529.8 527.9 526.0 524.2 522.3 520.5 518.6 516.8 515.0 513.1 511.3 509.5 507.7 505.9 504.1 502.3 500.6 498.8 497.0 495.3 493.5 491.8 490.0 488.3 486.5 484.8 483.1 481.4 479.7 478.0 476.3 476.5 473.8 469.3 465.0 457.3 447.7 437.3 429.9 423.8 418.0 412.3 407.0 401.2 397.7 394.4 391.4 388.7 386.1 383.7 381.3 379.4 377.2 375.2 373.4 371.6 369.9 368.3 366.7 365.3 363.9 362.8 361.8 361.0 360.3 359.8 359.1 358.3 357.2 356.2 355.5 355.0 354.5 354.2 353.7 353.2 352.6 352.0 351.3 350.6 349.8 349.1 348.2 347.0 345.4 343.5 341.8 340.2 338.4 338.2 339.3 340.4 341.1 341.3 341.3 340.8 340.5 340.4 340.1 339.8 339.3 338.5 337.5 337.1 336.8 336.7 336.4 335.6 335.4 335.3 335.8 336.1 338.0 339.3 337.9 336.6 336.7 336.2 336.3 336.7 337.1 337.7 338.3 339.0 339.2 339.4 339.6 339.6 339.6 340.1 340.2 340.1 340.2 340.1 339.8 339.6 339.4 339.3 339.0 338.8 338.7 338.6 338.4 338.1 338.1 337.5 337.1 336.8 336.6 336.4 336.4 336.2 336.0 336.1 336.0 336.0 336.1 336.4 336.4 336.2 336.3 336.1 336.0 335.8 335.5 334.8 334.6 334.1 333.8 333.7 333.5 333.1 333.3 332.7 332.5 332.5 332.4 332.3 332.2 332.0 331.7 331.2 330.9 330.5 330.1 329.9 329.5 328.9 328.6 327.9 326.8 325.8 325.1 324.3 323.6 322.5 321.4 321.3 321.3 321.4 322.1 324.2 325.7 326.0 326.4 326.4 326.6 327.5 328.5 328.3 328.1 327.7 327.3 327.0 327.1 327.4 326.8 327.6 328.5 329.4 330.3 331.1 332.0 332.9 333.8 334.7 335.5 336.4 337.3 338.2 339.1 340.0 340.9 341.8 342.7 343.6 344.5 345.5 346.4 347.3 348.2 349.1 350.1 351.0 351.9 352.8 353.8 354.7 355.7 356.6 357.5 358.5 359.4 360.4 361.3 362.3 363.3 364.2 365.2 366.2 367.1 368.1 369.1 370.1 371.0 372.0 373.0 374.0 375.0 376.0 377.0 378.0 379.0 380.0 381.0 382.0 383.0 384.0 385.0 386.1 387.1 388.1 389.1 390.2 391.2 392.2 393.3 394.3 395.4 396.4 397.5 398.5 399.6 400.6 401.7 402.8 403.8 404.9 406.0 407.0 408.1 409.2 410.3 411.4 412.5 413.6 414.6 410.0 398.8 395.2 392.2 390.0 389.2 389.5 390.4 391.8 394.0 396.9 401.3 405.9 408.3 410.8 413.2 414.7 415.5 418.4 422.8 427.1 434.9 437.1 429.5 432.4 435.3 438.2 441.1 444.1 447.0 450.0 453.0 456.0 459.1 462.1 465.2 468.3 471.4 474.6 477.7 480.9 484.1 487.4 490.6 493.9 497.2 500.5 503.9 507.2 510.6 514.0 517.4 520.9 524.4 527.9 531.4 536.8 540.0 541.0 541.4 541.4 541.9 542.5 542.9 543.6 544.5 545.3 546.1 547.9 549.6 551.4 552.8 553.7 
554.4 554.3 554.0 554.3 554.4 554.2 553.7 553.0 551.8 550.2 548.1 545.8 543.3 540.2 537.1 535.3 534.6 535.1 535.4 535.0 534.2 533.3 531.7 529.8 527.8 525.5 523.0 520.7 518.4 516.0 512.9 510.7 508.7 506.6 504.0 501.5 499.4 497.0 495.1 492.0 488.5 485.0 481.3 477.6 474.0 469.7 467.0 463.9 459.3 454.2 449.4 445.4 441.8 439.4 436.5 432.6 429.0 426.2 424.4 422.8 418.2 415.6 413.9 409.1 404.4 402.6 400.5 398.4 395.5 393.3 391.3 389.3 387.6 386.6 385.7 384.2 382.6 381.4 380.4 379.1 377.2 376.5 375.7 375.0 373.9 372.9 372.0 371.1 370.4 369.4 367.8 366.6 365.9 365.4 365.1 365.1 364.6 364.0 363.4 362.9 362.4 361.9 361.5 361.5 362.0 363.0 363.8 364.4 364.8 364.9 364.7 363.3 362.7 363.5 365.5 367.4 368.3 368.5 368.9 367.2 365.4 363.6 361.5 359.7 359.0 358.7 358.3 357.8 357.3 356.8 356.5 356.3 356.2 356.2 356.2 356.3 356.4 356.6 356.6 356.6 356.6 356.4 356.1 355.5 355.0 354.5 354.0 353.3 352.9 352.7 352.5 352.1 351.8 351.5 350.9 350.5 350.2 349.8 349.5 349.2 348.8 348.3 348.0 347.6 347.3 347.0 346.4 345.6 344.4 343.4 342.4 341.6 341.0 340.4 339.7 339.1 338.6 338.1 337.8 337.7 337.4 337.0 337.1 337.3 337.3 337.7 338.2 338.4 338.9 340.5 341.8 344.0 346.4 348.6 350.6 353.0 356.8 361.8 361.9 362.5 362.9 363.2 363.4 363.7 363.9 364.2 364.4 364.6 364.9 365.1 365.4 365.6 365.9 366.1 366.4 366.6 366.8 367.1 367.3 367.6 367.8 368.1 368.3 368.6 368.8 369.1 369.3 369.5 369.8 370.0 370.3 370.5 370.8 371.0 371.3 371.5 371.8 372.0 372.3 372.5 372.8 373.0 373.3 373.5 373.8 374.0 374.3 374.5 374.8 375.0 375.3 375.5 375.8 376.0 376.3 376.5 376.8 377.0 377.3 377.5 377.8 378.0 378.3 378.5 378.8 379.0 379.3 379.5 379.8 380.0 380.3 380.6 380.8 381.1 381.3 381.6 381.8 382.1 382.3 382.6 382.8 383.1 383.4 383.6 383.9 384.1 384.4 384.6 384.9 385.2 385.4 385.7 385.9 386.2 386.4 383.6 375.9 368.7 366.6 365.2 363.6 362.4 361.6 361.4 361.5 362.2 363.0 363.8 364.5 365.1 365.5 365.5 365.5 366.2 366.7 367.8 369.0 369.2 367.9 367.2 364.2 360.0 356.0 356.8 361.6 366.9 367.4 368.9 370.4 371.9 373.4 374.8 376.3 377.9 379.4 380.9 382.4 383.9 385.5 387.0 388.6 390.1 391.7 393.3 394.8 396.4 398.0 399.6 401.2 402.8 404.4 406.0 407.9 409.5 407.9 406.3 404.4 401.7 399.0 396.8 394.9 393.1 391.8 390.4 389.1 388.1 387.5 387.1 386.8 386.8 386.8 386.8 386.7 386.4 385.9 385.4 384.8 383.7 382.2 377.6 370.7 372.4 377.4 376.7 370.2 367.7 367.7 369.3 371.7 374.1 376.5 379.0 381.4 383.9 386.4 388.9 391.4 394.0 396.5 399.1 401.7 404.3 403.5 400.6 398.4 397.5 396.8 396.2 395.3 394.4 393.6 393.0 392.2 391.8 391.6 391.4 391.2 391.0 390.8 390.6 390.4 390.0 389.4 388.7 387.9 387.1 386.2 385.3 384.2 382.7 381.0 379.4 378.1 376.9 375.8 375.3 374.7 374.1 373.1 371.6 369.6 367.7 365.7 364.7 364.1 363.4 362.5 361.3 360.4 359.7 359.0 358.6 358.1 356.7 354.6 354.8 353.1 350.9 350.0 347.8 346.7 347.3 347.9 348.6 349.2 349.9 350.5 351.1 351.8 352.5 353.1 353.8 354.4 355.1 355.7 356.4 357.0 357.7 358.4 359.0 359.7 360.4 361.0 361.7 362.4 363.0 363.7 364.4 365.1 365.7 366.4 367.1 367.8 368.5 369.1 369.8 370.5 371.2 371.9 372.6 376.0 379.1 381.5 382.7 382.5 381.3 379.9 377.6 375.0 373.5 372.2 371.1 370.2 369.4 368.5 367.6 366.6 365.4 364.3 363.2 362.0 360.7 359.9 359.5 359.5 359.5 359.5 359.5 359.7 359.7 360.0 360.4 360.6 360.6 360.5 360.5 360.3 360.2 360.1 360.1 360.1 360.1 360.1 360.1 359.9 359.6 359.1 358.5 358.0 357.5 357.0 356.6 356.6 356.7 356.9 357.3 357.9 358.4 358.6 358.8 359.0 359.2 359.0 358.6 358.1 357.4 356.7 356.2 355.5 354.8 354.0 353.5 352.9 352.2 351.5 350.6 349.9 349.6 349.4 348.3 348.2 348.2 346.6 344.9 344.8 347.3 356.6 357.4 356.6 355.9 355.8 355.9 
356.0 356.1 356.1 356.2 356.3 356.4 356.5 356.6 356.7 356.8 356.9 356.9 357.0 357.1 357.2 357.3 357.4 357.5 357.6 357.6 357.7 357.8 357.9 358.0 358.1 358.2 358.3 358.4 357.6 353.5 351.7 347.8 344.6 341.8 339.2 336.7 334.8 334.0 333.7 333.3 333.0 332.7 332.2 331.6 331.0 330.0 329.7 329.2 329.0 328.9 329.1 329.4 330.4 331.3 332.8 335.2 337.8 340.4 343.2 346.2 348.4 351.0 354.1 358.0 362.0 365.1 367.4 369.6 372.3 375.2 378.4 382.1 386.2 389.5 390.1 389.9 388.0 397.4 415.7 434.8 454.8 475.7 489.6 502.6 498.6 487.1 481.7 479.8 479.8 482.6 489.3 495.9 500.7 504.1 507.5 510.9 514.9 519.1 522.9 526.0 528.5 531.2 534.2 536.6 539.5 543.8 548.5 553.2 558.3 563.4 568.3 573.7 577.1 580.7 588.1 597.3 604.5 600.0 595.5 591.1 586.6 582.2 577.9 573.6 569.3 565.0 560.8 556.6 552.4 548.3 544.2 540.1 536.0 532.0 528.0 524.1 520.2 516.3 512.4 508.6 504.8 501.0 497.2 493.5 489.8 486.1 482.5 478.9 471.8 465.0 460.0 456.6 452.6 447.4 442.5 435.5 428.5 422.1 414.6 404.2 397.5 392.7 385.9 381.5 379.6 376.0 370.0 368.1 363.7 360.6 357.0 352.9 349.6 347.5 344.6 342.3 340.4 338.8 336.6 333.6 332.4 331.3 331.3 330.5 327.1 325.1 324.5 324.0 323.5 323.0 322.9 322.7 323.1 324.0 325.1 326.2 326.5 326.0 325.1 324.2 323.3 322.8 322.5 322.5 322.6 323.3 324.0 324.6 324.7 324.2 323.8 323.0 322.3 322.0 322.3 322.8 323.3 324.0 324.7 325.7 326.2 326.1 325.7 325.5 325.3 325.3 325.4 325.5 325.6 325.7 326.0 326.6 327.4 328.5 329.7 330.9 331.5 331.6 331.7 331.5 331.4 331.2 330.9 330.3 329.5 328.7 328.1 327.0 326.3 325.7 325.0 324.4 324.1 323.6 323.1 322.6 322.2 322.0 321.9 321.8 321.9 321.3 321.0 320.5 319.8 319.4 319.2 319.0 319.0 319.0 318.7 318.1 317.7 317.4 317.2 316.9 316.5 316.1 315.6 314.7 313.8 313.3 312.4 311.6 311.4 310.2 309.6 309.7 309.8 310.3 311.3 311.8 311.8 312.2 312.6 312.7 312.6 311.8 310.8 309.6 308.6 308.3 308.8 308.8 309.7 310.6 311.5 312.4 313.3 314.3 315.2 316.1 317.0 317.9 318.9 319.8 320.7 321.7 322.6 323.6 324.5 325.5 326.4 327.4 328.3 329.3 330.2 331.2 332.2 333.1 334.1 335.1 336.1 337.1 338.0 339.0 340.0 341.0 342.0 343.0 344.0 345.0 346.0 347.0 348.1 349.1 350.1 351.1 352.2 353.2 354.2 355.2 356.3 357.3 358.4 359.4 360.5 361.5 362.6 363.6 364.7 365.8 366.8 367.9 369.0 370.1 371.1 372.2 373.3 374.4 375.5 376.6 377.7 378.8 379.9 381.0 382.1 383.3 383.8 378.8 370.7 366.3 366.4 365.7 365.0 362.2 360.7 359.0 355.8 354.3 353.3 354.1 355.3 355.3 352.8 350.1 349.2 348.4 349.2 349.9 353.0 355.9 357.8 358.8 359.9 360.1 359.9 359.3 358.7 358.2 357.9 358.1 358.4 359.3 359.8 359.7 359.5 359.3 359.7 361.1 361.6 362.1 362.2 361.1 359.7 358.7 358.2 357.0 356.1 355.3 354.3 353.4 352.3 350.6 348.6 345.9 348.2 354.0 360.9 366.0 369.3 372.1 374.8 377.4 380.0 383.3 387.5 391.1 394.5 401.8 410.5 420.7 431.9 438.6 438.7 440.6 441.3 445.3 448.4 450.5 451.9 452.6 452.6 452.4 452.4 452.4 452.2 451.8 451.0 450.5 449.7 449.7 451.4 451.7 452.8 452.0 448.4 449.2 449.2 449.1 461.6 470.3 470.4 470.5 470.6 470.7 470.8 470.9 471.0 470.5 467.1 455.8 444.9 435.7 428.3 424.6 423.8 424.3 425.6 427.5 427.5 426.7 424.4 421.5 418.9 416.4 414.8 413.8 413.0 412.2 411.2 409.8 408.4 406.3 403.9 401.6 399.3 397.0 395.0 393.1 390.9 388.2 386.1 383.8 381.5 379.3 377.0 374.3 371.6 368.8 365.6 362.6 359.8 357.2 354.9 352.7 351.0 349.7 348.3 347.0 345.7 344.2 342.8 340.7 338.7 336.7 334.3 331.7 329.6 327.5 326.4 325.5 324.6 323.5 322.8 325.5 327.9 328.1 327.5 327.4 327.4 327.6 327.8 328.2 328.9 329.2 329.4 329.4 329.2 328.7 328.1 327.7 327.6 328.2 329.5 332.0 336.4 342.0 340.7 339.2 338.3 337.7 337.5 337.7 338.0 338.7 339.4 340.0 340.9 341.9 343.1 344.3 
345.8 347.5 349.7 351.9 353.8 355.7 357.5 359.2 361.1 362.9 364.2 365.3 366.4 367.4 368.3 369.6 370.6 371.7 372.7 373.5 374.3 375.3 375.8 376.4 376.8 377.0 377.0 376.8 376.4 376.0 375.6 375.4 374.8 374.2 373.6 372.9 372.4 371.8 371.4 370.7 370.0 369.3 368.6 367.9 367.3 366.8 366.1 365.4 364.6 363.6 362.6 361.5 360.3 359.3 358.5 358.0 358.1 359.3 361.8 363.5 365.4 366.8 367.4 367.2 366.6 365.3 368.5 369.2 369.4 369.5 369.6 369.7 369.9 370.0 370.1 370.2 370.4 370.5 370.6 370.7 370.9 371.0 371.1 371.2 371.4 371.5 371.6 371.7 371.9 372.0 372.1 372.3 372.4 372.5 372.6 372.8 372.9 373.0 373.1 373.3 373.4 373.5 373.6 373.8 373.9 374.0 374.1 374.3 374.4 374.5 374.7 374.8 374.9 375.0 375.2 375.3 375.4 375.5 375.7 375.8 375.9 376.1 376.2 376.3 376.4 376.6 376.7 376.8 376.9 377.1 377.2 377.3 377.5 377.6 377.7 377.8 378.0 378.1 378.2 378.4 378.5 378.6 378.7 378.9 379.0 379.1 379.3 379.4 379.5 379.6 379.8 379.9 380.0 380.2 375.6 372.2 370.7 369.9 369.3 371.3 369.0 369.1 368.7 368.1 367.9 367.8 367.7 368.7 369.8 370.4 370.9 371.8 373.2 373.8 373.6 373.3 372.6 372.1 371.8 371.4 370.8 370.6 370.3 370.2 370.1 369.9 369.5 368.9 365.7 364.5 364.5 365.3 366.7 369.0 371.9 373.0 378.7 384.6 390.5 396.5 402.6 408.8 415.1 421.5 428.0 434.6 441.3 448.1 455.0 462.0 469.1 476.3 477.8 476.8 476.5 476.2 477.1 478.4 479.4 481.8 484.6 488.0 492.5 496.7 501.1 505.4 509.6 513.7 517.3 520.9 525.5 529.3 533.9 538.8 544.0 548.8 552.9 556.5 560.7 563.4 566.9 570.3 572.6 574.0 574.6 573.3 570.9 569.6 568.9 568.0 567.1 566.9 568.3 572.7 577.3 574.7 570.4 566.1 561.8 557.5 553.3 549.1 545.0 540.8 536.7 532.7 528.6 524.6 520.7 516.7 512.8 508.9 503.8 496.7 489.6 484.9 478.0 470.7 463.5 455.7 448.6 443.7 438.2 434.0 429.9 426.2 423.0 419.6 416.0 412.2 409.6 406.9 404.2 401.4 398.5 395.9 392.9 390.4 388.2 386.4 384.6 383.0 382.0 381.3 380.2 378.9 377.8 376.6 375.4 374.5 373.6 372.9 372.1 371.2 370.3 369.5 368.8 368.1 367.7 366.8 365.6 363.8 361.4 359.5 358.5 358.0 357.5 357.0 356.4 355.5 354.9 354.9 355.5 356.4 357.4 358.2 358.8 359.0 359.0 358.8 358.8 358.7 358.6 359.2 360.3 362.1 364.5 366.0 366.3 366.5 366.3 365.3 364.0 363.4 362.9 362.3 362.6 362.9 363.1 363.5 364.0 364.5 365.0 365.8 366.5 367.2 367.9 368.6 369.4 369.9 370.4 370.9 370.9 370.6 370.4 370.1 369.8 369.6 369.3 369.0 368.7 368.5 368.2 367.9 367.7 367.4 367.1 366.9 366.6 366.3 366.1 365.8 366.2 366.5 366.5 366.3 366.1 365.7 365.4 365.0 364.7 364.1 363.4 362.9 362.6 362.4 362.4 363.2 364.7 366.5 368.5 370.9 373.7 379.5 386.7 391.3 393.6 394.9 394.5 393.9 393.4 392.8 392.2 391.7 391.1 390.5 390.0 389.4 388.9 388.3 387.7 387.2 386.6 386.1 385.5 385.0 384.4 383.9 383.3 382.8 382.2 381.7 381.1 380.6 380.0 379.5 378.9 378.4 377.8 377.3 376.7 376.2 375.7 375.1 374.6 374.0 373.5 373.0 372.4 371.9 371.4 370.8 370.3 369.8 369.2 368.7 368.2 367.6 367.1 366.6 366.1 363.1 362.8 364.3 365.5 365.4 365.5 366.1 366.0 365.7 365.1 364.5 364.1 364.3 364.1 363.9 363.7 362.3 360.1 358.6 357.8 356.6 355.7 354.4 353.3 352.5 352.2 351.8 351.3 351.4 352.9 355.4 358.7 362.4 366.0 368.1 367.2 366.9 367.4 368.2 369.0 370.1 372.6 368.9 367.0 364.2 359.0 352.7 347.5 345.8 345.3 345.4 345.5 345.7 346.2 347.1 347.8 348.4 349.1 349.6 349.4 349.0 348.6 347.9 347.4 347.8 347.7 347.1 345.6 342.0 336.1 330.2 329.9 334.4 340.2 346.1 352.1 358.2 363.5 365.1 364.8 365.1 367.5 372.6 379.3 383.0 383.7 380.6 379.1 377.2 374.9 373.0 372.1 365.3 368.4 371.5 374.7 377.9 381.1 384.3 387.6 390.8 394.1 397.5 400.9 404.3 408.5 416.0 420.9 426.9 433.4 439.7 446.5 453.6 459.7 464.6 469.2 473.7 477.3 481.7 486.7 489.9 
492.7 495.6 498.9 502.5 506.5 510.2 513.4 516.3 519.1 522.0 524.9 527.8 531.1 535.2 538.3 541.7 544.2 546.1 548.2 550.4 551.8 553.2 554.0 554.6 554.9 554.8 554.5 553.8 552.4 551.5 549.6 547.4 545.0 542.1 538.4 534.2 531.2 528.1 525.3 523.1 520.8 517.7 513.7 509.2 503.6 499.4 494.6 489.3 484.5 480.3 477.1 472.5 468.5 465.9 464.1 462.1 459.8 457.6 452.6 446.9 443.1 438.6 433.7 426.7 415.3 405.8 405.6 405.4 400.5 395.1 392.0 383.8 384.2 383.0 376.2 372.4 370.8 368.7 367.7 360.7 359.0 357.3 354.8 352.0 349.2 346.8 345.8 344.0 342.7 341.1 339.7 338.8 338.6 338.9 338.9 338.9 338.5 336.8 333.5 329.4 328.5 328.7 329.5 329.5 329.3 329.2 329.2 329.1 329.2 331.0 332.5 335.0 338.1 340.3 341.3 342.2 342.4 342.6 341.9 341.1 340.6 339.3 338.4 338.1 337.0 336.7 336.1 335.1 334.1 332.5 330.6 330.1 330.1 329.6 328.4 326.9 326.8 326.2 325.1 325.8 326.5 325.4 322.8 323.2 321.4 320.7 320.6 320.0 320.9 321.9 321.5 322.5 322.9 323.0 322.9 322.8 322.7 322.6 322.5 322.4 322.3 322.2 322.1 322.1 322.0 321.9 321.8 321.7 321.6 321.5 321.4 321.3 321.2 321.2 321.1 321.0 320.9 320.8 320.7 320.6 320.5 320.4 320.3 320.3 320.2 320.1 320.0 319.9 319.8 319.7 319.6 319.5 319.5 319.4 319.3 319.2 319.1 319.0 318.9 318.8 318.7 318.6 318.6 318.5 318.4 318.3 318.2 318.1 318.0 317.9 317.8 317.8 317.7 317.6 317.5 317.4 317.3 317.2 317.1 317.0 317.0 316.9 316.8 316.7 316.6 316.5 316.4 316.3 316.3 316.2 316.1 316.0 315.9 315.8 315.7 315.6 315.5 315.5 315.4 315.3 315.2 315.1 315.0 314.9 314.8 314.7 314.7 314.6 314.5 314.4 314.3 314.2 314.1 314.0 314.0 313.9 312.7 310.7 307.4 300.5 295.1 290.7 287.0 284.5 282.4 280.6 281.1 282.5 283.2 283.3 283.2 283.0 282.6 281.9 281.2 282.2 281.9 280.9 280.7 282.2 282.6 282.4 281.9 280.5 279.9 280.8 282.4 283.7 284.3 284.0 283.3 282.8 283.4 284.9 286.8 289.5 292.3 294.1 295.9 297.5 299.1 300.6 302.1 303.8 306.0 308.1 310.5 313.1 315.7 318.0 320.8 321.9 322.1 320.6 317.8 314.1 310.6 310.1 317.6 325.4 333.4 341.6 350.0 358.5 367.3 376.3 385.5 389.2 388.3 387.3 388.9 387.9 386.4 384.5 382.1 380.3 379.3 378.8 378.3 377.6 377.2 377.8 379.0 380.4 382.0 383.5 384.6 385.7 386.7 387.8 388.9 389.7 390.9 392.2 394.0 396.1 398.4 400.5 402.2 403.5 404.9 406.2 407.7 409.0 410.2 411.0 411.4 411.2 411.0 411.0 410.4 410.3 410.7 411.5 412.8 414.8 416.8 419.3 421.3 423.5 426.3 429.3 433.7 436.0 436.6 436.5 435.8 434.8 434.4 435.4 436.5 438.1 439.7 441.8 444.1 446.2 448.9 451.1 453.2 455.4 458.3 461.5 464.6 467.2 469.7 471.7 473.6 475.5 477.9 480.6 482.9 485.0 487.0 489.1 491.1 492.7 494.1 495.1 495.6 495.6 495.2 494.5 493.7 493.1 492.7 492.7 493.5 495.1 496.9 498.6 499.8 500.1 499.6 498.4 496.6 494.7 492.6 490.3 487.8 485.4 482.3 479.3 475.8 472.0 468.2 464.4 459.9 455.5 450.1 444.7 441.3 438.8 436.0 433.4 429.5 424.7 415.6 408.6 403.1 399.2 396.5 394.0 391.8 388.6 385.9 383.6 381.6 379.6 377.6 375.9 376.5 376.8 374.1 370.5 368.0 365.7 363.8 361.4 358.3 355.3 352.6 350.4 349.0 347.8 346.3 345.2 343.6 341.6 340.4 340.1 339.3 337.5 336.0 334.8 333.9 332.9 331.2 329.6 328.1 326.5 325.2 324.8 325.2 325.1 322.3 319.5 316.7 314.0 311.3 311.7 316.0 317.5 316.4 314.6 306.4 302.6 303.3 303.4 297.1 297.2 297.7 297.1 295.3 294.2 293.9 293.6 293.3 293.0 292.6 292.2 291.6 291.2 290.9 290.5 290.5 290.3 289.6 289.2 288.9 288.6 288.6 288.3 287.6 287.1 286.8 286.8 286.1 285.4 285.0 285.0 284.6 283.9 283.4 282.8 282.2 282.2 281.6 281.0 280.5 280.4 280.2 279.6 278.9 278.3 277.7 276.4 275.6 275.1 274.4 273.5 272.4 271.0 270.2 269.5 268.3 266.7 265.4 265.4 264.6 263.3 261.3 260.8 259.5 257.1 256.6 255.6 253.9 252.1 251.0 250.2 248.8 247.2 
245.6 244.6 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2 244.2", - "gender_timestep": "0.005", - "gender": "0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0", - "input_type": "phoneme", - "offset": 0.047 + "f0_timestep": "0.005" } ] \ No newline at end of file diff --git "a/samples/\350\203\275\350\247\243\347\255\224\344\270\200\345\210\207\347\232\204\347\255\224\346\241\210.ds" "b/samples/\350\203\275\350\247\243\347\255\224\344\270\200\345\210\207\347\232\204\347\255\224\346\241\210.ds" deleted file mode 100644 index ed83f3270..000000000 --- "a/samples/\350\203\275\350\247\243\347\255\224\344\270\200\345\210\207\347\232\204\347\255\224\346\241\210.ds" +++ /dev/null @@ -1,436 +0,0 @@ -[ - { - "text": "AP", - "ph_seq": "AP n i z ou l e n a m e y van d e l u a SP", - "note_seq": "rest A3 A3 A3 A3 A3 A3 D4 D4 C4 C4 B3 B3 A3 A3 G3 G3 A3 rest", - "note_dur_seq": "0.355903 0.164931 1.118056 0.131944 0.291667 0.125000 0.251736 0.164931 0.258681 0.157986 0.338542 0.078125 0.304688 0.111979 0.291667 0.125000 0.208333 1.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.355903 0.164931 1.118056 0.131944 0.291667 0.125000 0.251736 0.164931 0.258681 0.157986 0.338542 0.078125 0.304688 0.111979 0.291667 0.125000 0.208333 1.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "184.8 189.1 193.7 196.2 199.5 202.3 202.7 202.1 200.5 199.0 197.0 196.6 196.7 197.5 198.7 199.9 201.0 202.1 203.8 204.3 205.0 205.7 206.7 209.0 208.7 208.5 208.0 209.0 209.6 210.1 209.6 219.0 220.9 218.4 222.3 225.7 217.1 217.2 219.1 215.8 215.3 218.7 214.7 214.4 218.1 219.4 223.1 221.3 220.4 219.6 219.6 219.6 219.7 219.7 219.9 220.0 219.7 219.4 219.1 219.1 219.0 218.5 218.6 218.2 218.1 218.5 218.4 219.1 220.0 220.0 219.9 219.9 220.4 220.3 220.3 220.1 219.9 219.2 219.0 218.6 218.6 218.6 220.1 220.5 220.6 220.4 220.0 219.9 219.7 219.7 219.9 220.1 220.4 220.6 220.9 221.0 221.1 221.1 220.6 220.3 219.4 218.2 218.1 218.6 219.2 219.6 221.4 222.9 223.7 223.5 222.8 221.0 218.9 216.6 216.1 216.5 218.2 221.0 221.9 224.0 225.4 225.9 224.5 222.3 219.7 216.6 215.2 215.5 216.0 217.5 218.0 219.7 222.3 223.5 224.8 224.8 224.5 223.5 224.5 223.1 216.0 205.0 205.9 
209.1 209.2 208.9 207.9 206.7 206.0 205.0 204.7 202.0 200.8 202.4 211.6 217.6 219.2 218.6 218.2 217.5 216.1 216.1 217.5 218.5 219.9 221.0 221.4 221.4 221.1 220.1 219.5 218.6 218.4 218.7 219.1 219.6 220.0 220.3 220.3 220.3 219.7 218.9 218.4 217.5 215.0 214.0 213.4 213.9 214.6 218.4 219.4 218.6 218.5 219.2 219.6 219.4 219.0 218.6 218.4 218.4 218.5 218.5 218.7 218.9 219.0 219.6 220.8 220.8 220.8 220.8 220.8 220.8 220.4 220.0 219.1 217.7 215.6 213.0 213.1 214.2 216.5 219.3 223.1 231.9 243.8 253.9 265.0 269.9 271.6 272.7 271.0 269.0 268.8 268.1 270.5 273.7 282.5 289.1 292.6 295.4 296.0 295.9 294.0 292.0 291.6 290.6 290.8 291.6 295.0 296.4 296.4 294.5 287.1 275.7 266.8 257.0 252.6 246.5 245.8 243.9 248.8 261.5 266.2 268.2 267.1 266.5 263.1 259.5 255.9 251.8 251.1 252.1 254.2 257.7 260.4 261.5 263.9 264.5 264.5 263.4 262.2 260.9 260.0 259.5 259.4 259.8 260.4 260.3 259.4 257.3 254.5 251.7 251.1 251.5 251.8 255.1 255.2 253.3 254.8 254.8 254.3 252.4 251.3 249.5 248.5 246.9 245.4 245.5 245.9 246.7 247.2 248.1 248.2 247.9 247.2 246.7 245.9 245.9 246.1 246.5 247.2 248.2 249.5 249.5 248.7 245.8 241.4 236.9 227.0 210.5 202.2 200.1 210.7 214.0 215.2 216.0 217.2 219.0 219.7 220.4 220.9 220.3 220.0 220.3 219.9 219.5 219.4 219.5 219.6 219.7 219.7 219.4 219.2 220.5 220.9 221.1 221.0 220.5 220.3 219.7 217.5 214.7 209.0 203.8 200.9 197.1 194.6 193.6 190.9 189.8 191.3 194.3 196.2 196.7 197.1 197.6 197.4 197.1 196.8 196.7 196.7 196.3 195.9 196.3 196.0 196.7 202.2 208.5 212.8 219.1 223.1 224.9 225.0 224.1 222.7 220.8 218.9 217.7 217.5 217.1 217.2 217.5 218.1 219.4 220.0 219.7 219.2 219.1 218.1 218.4 218.7 219.2 219.0 218.6 219.0 219.2 219.1 220.0 220.1 219.9 219.2 219.2 219.4 219.1 218.9 219.2 220.0 219.6 220.6 220.5 220.1 219.9 219.6 219.0 218.4 218.5 218.5 218.7 218.5 218.4 219.0 219.5 220.1 220.6 220.8 220.6 220.3 219.9 219.4 218.9 218.6 218.2 218.6 219.0 219.5 219.9 219.7 220.9 221.4 221.5 221.8 221.9 221.8 220.3 219.5 219.1 218.1 217.0 216.7 217.2 219.2 219.7 222.0 223.1 224.1 223.6 222.3 220.4 218.2 216.8 216.7 216.6 217.0 217.1 217.3 218.4 218.1 217.9 217.2 216.7 215.7 214.7 215.1 217.5 220.6 222.6 223.7 222.9 222.0 220.1 216.6 213.4 212.3 211.4 212.3 213.2 216.1 219.5 220.5 221.5 220.1 219.7 219.7 219.7 219.7 219.7 219.7 219.7 219.7 219.7", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 6.145833333333334 - }, - { - "text": "AP", - "ph_seq": "AP zh ir w ei l e y i g e d a an SP", - "note_seq": "rest A3 A3 G3 G3 D3 D3 D3 D3 A3 A3 G3 G3 A3 rest", - "note_dur_seq": "0.503472 0.121528 0.305556 0.111111 0.140625 0.067708 1.449653 0.008681 0.294271 0.122396 0.304688 0.111979 0.416667 2.500000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.503472 0.121528 0.305556 0.111111 0.140625 0.067708 1.449653 0.008681 0.294271 0.122396 0.304688 0.111979 0.416667 2.500000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "179.5 176.3 173.7 172.3 170.6 170.7 173.1 173.9 173.9 173.8 173.5 173.5 172.5 170.6 170.6 171.2 169.2 168.5 168.3 166.3 165.4 168.2 169.3 170.3 169.5 169.6 170.6 171.8 172.1 173.1 174.5 174.2 174.6 175.8 176.8 177.2 178.2 178.9 178.9 179.0 179.2 179.5 180.0 181.1 181.5 182.1 183.0 184.4 186.0 186.0 184.9 184.4 185.1 187.3 188.7 202.6 207.5 210.8 207.7 205.4 202.0 197.9 197.7 199.7 203.7 208.1 214.0 217.7 219.5 221.4 221.5 220.1 219.6 217.7 217.1 216.7 217.9 219.6 220.6 220.9 221.1 221.3 219.1 215.6 211.6 206.1 198.9 198.9 200.2 202.8 199.7 196.5 194.0 194.2 194.8 194.6 195.2 197.2 196.9 196.8 195.5 193.0 188.4 177.4 160.7 154.2 
145.1 140.7 138.6 140.7 139.3 139.2 138.7 138.6 140.2 141.6 143.2 144.3 144.5 144.9 145.8 146.2 146.6 147.3 146.7 147.3 145.6 144.9 146.4 146.3 146.2 146.6 147.5 147.4 147.4 147.8 147.5 147.7 147.7 147.9 148.4 148.2 148.2 148.6 149.6 149.5 149.1 148.9 149.1 149.6 149.8 150.0 150.1 150.2 150.0 149.5 149.4 149.3 149.3 148.7 148.3 148.1 147.9 147.9 148.4 148.8 148.9 148.9 149.0 149.0 148.9 147.7 148.2 149.0 148.5 148.0 147.3 146.5 146.2 146.5 146.9 147.7 147.7 147.1 147.4 147.9 148.2 148.5 148.5 148.8 148.9 148.5 148.4 148.3 148.6 148.8 148.5 148.7 149.1 148.4 148.7 149.0 148.3 148.2 148.2 147.5 147.8 147.4 146.8 147.3 147.7 147.9 148.0 148.1 147.9 148.3 148.7 149.4 150.1 150.5 150.9 150.4 149.9 149.0 148.5 147.9 146.2 145.1 144.8 145.0 145.9 147.7 149.0 150.4 147.9 146.3 148.3 149.0 148.7 148.2 147.7 147.6 147.9 148.5 148.9 149.5 149.7 149.6 149.1 147.1 147.3 149.0 146.7 146.0 146.2 146.5 146.7 147.1 148.4 149.3 150.8 147.9 141.3 140.0 144.8 148.7 153.2 165.7 173.0 183.9 195.3 199.8 201.7 202.1 201.5 200.3 198.7 198.5 199.5 202.8 206.9 211.5 216.0 219.1 220.0 220.9 220.8 220.0 219.1 219.1 219.4 220.0 220.5 220.1 219.0 212.0 206.5 209.2 220.9 220.8 216.7 208.7 199.5 189.7 182.9 185.8 192.7 193.6 193.1 193.0 194.1 193.9 193.5 193.4 194.4 194.4 194.2 194.1 194.0 194.0 194.2 194.6 195.3 196.1 196.6 197.0 197.2 196.9 196.5 196.5 196.6 196.0 195.8 195.8 196.0 194.5 194.2 191.9 190.6 191.3 194.1 200.3 205.5 213.6 218.9 222.2 225.0 225.5 225.7 223.8 221.5 219.2 218.7 218.1 218.2 218.6 219.2 220.8 221.4 221.3 221.0 220.8 220.5 220.5 220.6 220.8 220.1 220.6 220.9 220.8 220.6 220.9 220.8 220.6 220.4 220.0 219.9 219.9 219.9 220.0 220.0 220.0 219.9 218.7 219.2 218.7 218.2 218.4 218.7 219.2 219.5 219.1 218.7 218.4 219.0 219.5 219.5 219.5 219.5 219.4 219.4 219.2 219.5 220.0 219.4 219.6 218.6 218.5 218.2 218.1 218.0 218.5 219.5 220.1 220.4 220.6 220.8 221.0 221.0 220.8 220.3 220.8 220.0 219.1 218.7 218.5 218.2 218.7 218.1 218.0 218.5 218.7 219.1 218.6 218.7 219.1 218.9 218.6 218.5 218.2 218.5 218.9 219.4 219.1 218.9 219.2 219.9 219.9 219.9 219.7 219.7 219.0 219.2 219.6 219.9 219.4 220.0 219.5 220.3 220.3 220.0 219.7 219.6 219.9 219.6 219.2 218.9 219.2 219.4 219.4 219.1 218.9 218.5 218.2 218.4 219.0 220.4 221.3 222.2 222.8 223.3 223.5 222.8 221.8 221.1 220.1 218.5 218.0 217.1 215.8 215.1 215.0 217.0 217.5 218.6 220.8 222.2 224.1 225.4 225.1 224.6 223.7 221.1 218.2 213.6 209.5 205.0 204.2 206.6 211.4 214.2 216.2 220.0 225.1 226.4 225.9 224.2 221.1 217.0 213.6 208.5 206.9 205.9 206.8 209.5 215.1 218.6 220.0 222.9 225.7 226.6 226.4 224.4 223.2 218.7 218.0 218.2 218.7 219.0 218.2 219.5 218.6 217.9 217.1 215.1 213.2 213.0 213.5 215.5 220.0 222.9 224.6 225.8 225.0 222.7 219.7 211.5 211.5 211.5 211.5 211.5 211.5 211.5 211.5", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 12.708333333333334 - }, - { - "text": "AP", - "ph_seq": "AP k e sh ir j ie n eng g ei n i sh en m e n e SP", - "note_seq": "rest A3 A3 A3 A3 A3 A3 D4 D4 C4 C4 B3 B3 A3 A3 B3 B3 A3 A3 rest", - "note_dur_seq": "0.354167 0.166667 1.061632 0.188368 0.282986 0.133681 0.251736 0.164931 0.294271 0.122396 0.251736 0.164931 0.228299 0.188368 0.258681 0.157986 0.118924 0.089410 1.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.354167 0.166667 1.061632 0.188368 0.282986 0.133681 0.251736 0.164931 0.294271 0.122396 0.251736 0.164931 0.228299 0.188368 0.258681 0.157986 0.118924 0.089410 1.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "193.2 193.2 193.0 
192.2 191.3 189.6 189.4 190.6 192.4 194.2 195.2 196.3 198.3 200.6 203.7 204.1 205.5 205.1 206.5 208.4 208.4 209.0 209.5 208.0 209.0 209.8 212.9 208.3 204.1 202.8 204.6 203.6 203.7 199.8 197.1 200.0 202.0 199.8 198.8 198.5 198.2 198.0 197.7 197.4 197.2 199.7 206.7 206.9 205.3 204.6 205.5 206.2 206.6 206.9 207.3 207.3 207.2 206.9 206.7 206.6 206.5 206.0 204.9 203.3 202.4 202.9 203.8 206.3 208.4 213.6 220.0 225.8 228.0 227.8 225.1 222.4 216.7 214.5 213.9 214.0 216.3 217.1 220.4 223.7 224.6 225.3 224.4 222.9 220.1 218.5 215.8 214.4 214.9 216.5 220.3 223.5 224.8 226.1 225.5 224.6 223.6 221.3 219.7 218.7 217.1 216.6 217.5 219.1 220.0 221.4 223.2 223.5 223.2 221.5 219.4 219.0 216.7 216.6 217.0 217.6 219.0 220.0 221.8 222.6 222.4 222.2 221.1 219.6 218.6 218.5 218.1 219.0 219.7 221.3 223.6 224.9 224.2 219.4 204.3 193.1 188.4 190.3 192.7 194.3 197.5 198.6 200.5 204.0 205.6 207.5 209.8 213.4 215.0 214.1 213.6 220.9 224.6 223.5 222.2 220.8 219.7 218.6 218.1 218.4 218.6 218.9 219.2 219.5 219.4 218.9 218.5 219.2 220.6 220.3 220.4 221.0 220.5 218.7 217.6 214.1 197.8 181.2 177.4 182.6 189.6 196.7 202.8 211.4 218.1 221.9 221.9 220.3 220.3 220.8 220.4 220.0 219.6 219.1 218.9 218.5 218.0 217.9 217.9 218.4 218.6 219.0 219.5 219.7 220.3 220.4 220.3 219.9 220.5 221.1 221.4 221.5 222.0 219.7 216.5 215.2 215.0 216.4 219.1 225.1 234.8 249.4 266.4 273.1 276.5 278.9 279.1 278.8 278.1 276.2 278.5 281.5 283.7 286.5 289.0 290.1 290.0 289.8 291.0 293.8 295.2 296.7 297.6 298.1 298.8 298.8 297.8 296.4 294.0 291.1 286.0 275.7 257.9 237.9 233.6 238.3 266.6 286.5 287.1 282.7 278.5 276.4 271.3 266.5 262.1 259.7 258.3 257.9 258.0 259.2 260.7 261.5 262.4 262.5 262.8 263.0 263.0 262.8 262.7 262.2 262.1 261.8 261.6 260.7 259.4 257.7 254.5 251.0 249.5 248.1 247.8 246.1 246.6 249.2 248.7 248.4 247.8 247.7 247.4 247.2 246.2 246.4 246.5 246.7 246.7 246.4 246.9 246.9 246.9 247.8 248.4 249.2 250.7 247.9 240.9 238.0 244.1 246.7 246.4 245.7 243.4 240.2 236.5 233.5 231.5 227.6 227.1 222.7 217.7 214.6 213.9 216.8 217.7 218.7 219.1 218.7 217.9 217.3 217.6 218.0 219.1 220.3 221.5 221.1 220.9 219.5 219.1 219.6 220.0 221.0 222.3 222.4 222.7 222.8 222.9 221.8 219.9 216.5 213.6 212.4 211.9 213.6 220.2 225.7 239.8 245.1 247.4 249.4 250.1 250.0 248.9 247.8 247.5 248.8 245.9 242.7 238.8 238.0 238.8 239.0 236.1 233.0 234.3 233.2 229.9 225.0 224.2 223.1 222.4 221.9 221.4 221.1 220.8 220.8 220.8 220.8 220.9 221.4 221.8 221.5 221.4 221.3 221.4 221.4 221.5 221.0 220.5 220.4 220.6 220.8 221.8 221.0 221.0 221.3 221.3 220.9 219.9 219.0 218.7 218.9 219.4 219.4 218.9 218.2 218.2 218.5 218.9 219.1 219.4 219.6 219.4 219.7 219.7 218.9 218.4 218.5 218.2 218.0 217.3 217.3 217.3 217.3 218.1 218.0 217.9 217.7 217.7 218.2 218.5 218.1 218.5 219.0 219.7 220.8 221.1 221.1 220.8 219.9 219.6 220.4 221.8 222.9 222.9 222.9 222.4 221.5 218.2 217.0 215.7 216.2 217.5 218.9 222.8 225.0 226.4 226.7 226.4 225.5 221.9 217.2 215.5 214.2 214.7 216.5 219.9 223.5 224.9 225.9 225.4 224.4 222.0 216.7 214.5 211.9 210.2 211.6 215.1 216.5 218.7 221.7 223.6 224.1 224.8 224.4 224.0 222.3 219.1 215.3 208.9 208.9 208.9 208.9 208.9 208.9 208.9 208.9", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 19.479166666666668 - }, - { - "text": "AP", - "ph_seq": "AP t a y ong y van d ou ch en m o zh e SP", - "note_seq": "rest B3 B3 A3 A3 D3 D3 D3 D3 A3 A3 G3 G3 A3 A3 rest", - "note_dur_seq": "0.345486 0.175347 0.343750 0.072917 0.165799 0.042535 1.346354 0.111979 0.258681 0.157986 0.258681 0.157986 0.295139 0.121528 1.666667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0", - "ph_dur": "0.345486 0.175347 0.343750 0.072917 0.165799 0.042535 1.346354 0.111979 0.258681 0.157986 0.258681 0.157986 0.295139 0.121528 1.666667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "149.8 148.3 148.5 147.8 147.7 147.4 147.3 147.0 145.2 145.0 144.8 144.9 145.2 145.4 146.3 147.4 147.9 150.7 155.2 158.6 163.3 167.2 169.3 172.0 174.1 176.8 179.5 181.4 183.2 186.4 188.8 194.1 197.7 200.2 203.6 206.7 208.9 212.1 214.2 217.7 220.5 222.0 225.0 228.8 231.7 235.7 240.3 240.2 232.7 227.2 224.9 222.7 221.8 222.0 225.4 229.6 233.1 241.0 244.4 246.7 248.1 248.1 247.8 247.4 246.1 245.4 245.5 245.8 245.9 246.4 246.7 246.9 247.2 246.8 243.8 241.7 238.8 233.4 229.8 226.4 225.6 223.8 221.5 221.5 222.0 221.1 221.7 221.1 220.0 218.5 218.4 216.7 210.7 204.2 189.6 178.5 165.6 151.5 142.7 139.7 139.9 141.2 143.6 146.9 149.2 150.3 151.0 150.2 149.6 148.2 147.3 147.3 147.3 146.8 146.4 146.5 146.7 146.8 146.6 146.0 145.6 145.5 145.8 146.1 146.7 147.7 148.5 148.7 148.4 148.0 147.8 147.3 147.3 147.3 146.8 145.8 145.5 145.1 145.6 146.0 146.7 147.2 147.9 149.3 149.3 149.2 149.1 148.6 147.6 145.6 143.8 143.2 143.0 143.4 144.0 144.6 146.9 148.9 149.7 150.4 150.0 149.2 146.9 145.1 144.1 143.3 143.6 143.9 144.8 146.7 147.3 148.3 149.6 149.7 149.3 148.3 147.5 146.2 145.8 146.2 146.7 147.1 147.4 147.6 147.5 147.4 146.6 145.8 145.5 145.5 146.2 148.8 150.6 153.0 153.1 152.6 150.3 148.0 145.2 142.7 143.1 144.1 145.6 147.3 148.8 148.1 149.2 149.7 152.8 153.2 152.7 152.3 150.8 149.7 149.2 149.3 149.6 149.5 148.5 147.3 143.6 132.2 129.5 132.3 141.7 145.8 148.3 147.5 147.4 146.2 145.9 145.6 145.6 145.6 145.7 145.5 145.1 145.3 145.6 145.8 146.4 146.0 145.9 146.0 145.7 146.2 147.3 146.7 142.3 130.7 127.3 132.6 142.1 146.9 156.5 160.5 163.3 169.3 175.2 179.1 185.6 193.7 199.9 204.8 206.5 207.7 207.8 205.4 205.0 203.4 204.1 206.1 209.2 212.1 214.5 215.2 217.5 219.4 219.9 220.9 222.2 222.6 223.2 223.6 224.9 224.4 222.8 220.5 216.1 204.3 196.8 188.4 183.5 180.4 179.4 179.2 182.5 189.2 192.1 193.0 194.5 194.9 194.6 193.7 192.7 192.2 192.0 191.9 191.9 193.1 193.1 193.5 194.1 195.7 195.8 195.9 196.2 196.5 196.3 195.5 194.2 190.5 173.9 164.9 170.4 177.7 179.8 181.0 184.8 187.6 188.0 191.8 197.0 200.8 203.0 204.0 204.6 205.3 205.5 205.6 205.6 205.5 205.5 205.5 205.1 204.8 205.3 206.6 207.7 210.4 212.8 217.0 219.0 220.0 220.8 220.8 220.1 218.9 218.6 218.2 217.2 217.3 217.5 217.7 218.1 218.9 219.6 220.0 220.0 219.6 219.2 218.7 218.5 218.7 219.2 219.9 220.5 219.0 219.4 219.4 218.7 218.0 218.1 218.5 218.2 217.6 217.6 218.6 219.2 220.0 220.1 220.0 220.0 219.2 219.1 219.5 220.0 219.9 219.7 219.4 221.0 221.3 221.9 222.2 222.0 221.8 222.4 222.3 221.9 221.4 221.1 219.5 218.0 216.1 214.9 213.6 213.7 214.4 216.0 216.5 219.7 222.9 224.5 225.4 227.2 227.2 225.7 222.7 219.6 216.7 211.2 209.3 208.0 209.5 213.1 214.7 219.1 221.9 225.9 228.5 229.7 229.1 226.4 222.6 219.0 213.5 204.9 203.0 203.8 207.3 212.4 215.3 221.1 224.4 226.7 229.6 230.4 230.0 226.7 224.1 218.2 211.8 207.2 203.6 202.3 204.2 208.6 216.1 219.5 224.1 225.9 225.9 224.6 223.8 221.3 217.2 208.9 208.9 208.9 208.9 208.9 208.9 208.9 208.9", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 26.145833333333336 - }, - { - "text": "AP", - "ph_seq": "AP n i z ou r u w u y in d e f ei x v a SP", - "note_seq": "rest A3 A3 A3 A3 A3 A3 D4 D4 C4 C4 B3 B3 A3 A3 G3 G3 A3 rest", - "note_dur_seq": "0.355903 0.164931 1.118056 0.131944 0.312500 0.104167 0.407986 0.008681 0.407986 0.008681 0.304688 0.111979 0.251736 0.164931 
0.223958 0.192708 0.208333 1.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.355903 0.164931 1.118056 0.131944 0.312500 0.104167 0.407986 0.008681 0.407986 0.008681 0.304688 0.111979 0.251736 0.164931 0.223958 0.192708 0.208333 1.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "182.0 181.2 193.0 202.7 202.4 202.1 201.5 201.6 201.5 201.3 200.6 199.8 203.3 204.2 205.6 206.6 206.9 206.2 204.9 204.7 204.8 204.8 203.8 204.1 204.9 206.0 208.1 209.9 210.9 209.8 205.0 203.3 200.5 197.8 195.4 190.2 181.4 172.2 170.5 165.4 163.2 163.1 163.4 167.8 176.5 189.1 196.7 197.8 199.0 201.4 202.2 202.7 202.8 202.8 202.9 202.9 203.0 203.0 203.1 203.5 204.2 204.8 205.3 206.2 206.6 206.7 208.6 211.2 212.9 216.2 219.2 221.5 222.2 222.0 221.5 221.1 221.1 219.7 218.7 218.5 219.2 219.7 220.5 220.8 220.8 220.5 220.3 220.1 219.9 219.9 219.9 220.1 220.4 220.5 220.8 221.0 221.1 220.9 220.4 219.6 218.2 217.1 217.1 218.4 219.2 220.3 222.8 224.0 224.6 223.6 222.4 218.7 216.3 215.2 215.1 216.8 219.7 222.0 223.5 225.0 226.4 225.7 223.6 220.9 217.5 214.7 215.0 215.5 216.1 218.2 219.6 221.7 223.2 223.8 224.6 224.9 224.0 224.4 225.3 222.2 214.1 204.7 204.9 207.4 208.1 208.3 208.0 207.7 207.2 206.7 206.6 205.0 205.5 209.1 214.5 215.5 218.7 219.2 219.2 217.9 217.1 217.1 217.1 217.0 217.0 217.0 217.6 218.2 219.2 220.3 220.6 220.5 220.0 219.6 219.6 220.0 220.3 220.6 220.5 220.1 218.4 217.5 216.2 213.0 208.1 205.6 205.3 209.1 213.6 222.3 225.1 224.1 223.5 221.8 219.5 218.1 214.6 212.5 211.8 213.0 215.3 217.3 220.8 222.4 222.8 223.2 221.7 220.6 219.6 218.1 217.6 218.5 219.2 219.2 220.1 221.5 222.4 222.4 221.9 221.0 220.5 224.1 230.6 241.4 254.4 265.0 274.3 279.6 281.4 281.7 277.5 274.3 273.1 273.1 275.0 278.6 285.3 291.5 293.5 296.7 296.9 296.7 294.7 292.5 291.6 291.3 291.5 292.3 292.8 294.7 294.2 293.3 291.6 289.3 282.2 266.8 254.0 242.4 237.2 237.7 243.8 250.5 255.1 260.3 264.4 265.7 264.7 263.0 260.7 257.3 256.7 257.1 257.6 259.8 262.2 263.6 263.9 263.3 262.8 261.8 260.3 259.8 259.8 259.8 259.8 259.8 260.3 260.6 259.8 259.4 258.0 255.5 247.9 236.3 220.9 222.7 239.9 259.2 255.8 253.3 252.4 252.4 251.1 250.1 249.1 248.7 246.9 246.8 246.2 245.2 245.1 245.8 246.2 246.9 247.5 247.7 249.2 249.8 250.1 244.8 232.5 225.3 234.3 240.3 239.9 238.8 235.5 233.1 229.1 226.7 224.0 219.5 213.0 212.1 218.6 220.4 220.0 218.4 215.0 213.7 212.3 213.7 214.7 216.5 219.6 220.6 221.7 222.0 222.3 222.7 221.9 221.8 218.9 210.6 210.6 212.4 215.0 215.3 213.7 210.3 208.1 205.0 203.1 201.0 199.7 198.1 197.2 195.0 192.5 190.5 191.1 192.9 194.1 194.3 194.9 196.0 196.3 197.0 197.4 197.1 196.9 195.7 195.1 195.4 196.5 198.0 204.3 211.8 216.8 220.3 221.1 221.4 221.4 221.4 219.9 219.0 218.2 217.2 217.7 218.0 217.9 217.9 217.7 217.5 217.1 217.3 219.2 220.5 221.7 222.4 222.8 222.6 222.2 221.4 220.8 218.7 217.9 217.5 217.5 217.6 218.4 219.2 218.6 219.1 219.7 220.1 220.4 220.5 219.9 218.4 217.1 216.8 216.5 216.1 215.7 216.0 217.0 217.9 218.2 220.3 221.3 221.3 221.3 221.4 220.9 219.6 219.4 219.2 218.1 216.5 215.8 215.2 213.2 212.3 213.4 213.9 213.9 215.0 218.2 220.5 223.1 224.9 225.3 224.6 221.5 220.3 216.8 211.6 210.6 209.2 209.5 210.1 212.6 217.3 221.8 222.8 225.0 227.2 226.8 225.0 221.8 217.7 213.7 206.5 204.4 205.0 208.9 212.0 216.3 221.1 223.6 227.5 228.0 226.8 221.8 216.7 210.2 203.3 199.8 200.0 201.3 206.7 213.2 218.9 224.0 225.9 226.4 225.9 223.8 220.1 218.0 216.0 215.8 215.8 215.8 215.8 215.8 215.8 215.8 215.8", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 
32.8125 - }, - { - "text": "AP", - "ph_seq": "AP zh ir w ei l e x vn y i d uo h ua SP", - "note_seq": "rest A3 A3 G3 G3 D3 D3 D3 D3 A3 A3 G3 G3 A3 A3 rest", - "note_dur_seq": "0.399306 0.121528 0.305556 0.111111 0.140625 0.067708 1.264757 0.193576 0.407986 0.008681 0.304688 0.111979 0.263021 0.153646 2.500000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.399306 0.121528 0.305556 0.111111 0.140625 0.067708 1.264757 0.193576 0.407986 0.008681 0.304688 0.111979 0.263021 0.153646 2.500000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "174.7 172.1 169.3 169.8 168.3 167.9 170.4 171.0 172.9 175.4 176.3 175.3 169.1 162.9 159.2 159.1 157.9 162.0 165.6 166.4 166.9 167.3 168.3 171.1 172.4 173.1 175.4 178.0 178.5 179.6 181.0 181.2 182.2 185.3 186.6 187.4 189.2 190.4 191.0 192.9 194.2 194.6 195.4 197.1 197.8 199.4 208.3 212.5 212.5 208.0 204.3 199.8 197.8 197.9 201.2 205.7 210.9 215.5 219.0 220.4 222.0 221.0 220.4 218.9 218.0 217.6 217.0 218.1 219.2 220.6 221.0 221.4 221.5 219.4 216.3 213.6 208.6 200.8 199.0 203.8 206.0 202.9 198.6 195.1 194.8 195.1 194.6 194.9 196.7 196.9 197.0 195.9 192.6 187.8 174.6 161.3 156.3 150.7 149.3 148.5 146.4 146.2 144.8 143.8 144.1 144.2 146.0 147.0 149.2 147.3 147.5 148.8 149.0 148.8 148.2 147.7 146.9 146.2 146.1 146.0 145.9 146.1 146.6 147.0 146.7 147.3 147.2 147.1 147.3 147.6 147.4 148.1 149.1 150.0 150.9 150.7 150.5 150.3 149.6 149.0 147.2 146.9 146.7 146.3 146.3 146.0 147.5 147.8 148.5 148.6 148.6 148.6 148.3 147.1 145.5 145.8 148.7 149.1 148.8 148.7 147.9 147.7 147.4 147.2 147.3 147.2 146.7 146.7 146.5 146.3 146.5 146.7 146.9 147.3 147.7 148.1 148.5 148.4 148.4 148.5 148.5 148.5 148.5 148.6 148.8 148.6 148.3 148.4 148.5 148.5 148.4 147.9 147.7 147.4 147.5 146.9 147.0 147.5 147.5 147.3 147.3 147.4 147.7 147.9 148.0 149.1 149.7 150.2 148.7 142.9 138.7 138.8 137.5 138.3 140.5 143.1 144.3 145.6 146.7 148.6 153.3 154.0 155.0 157.5 161.2 160.6 153.9 152.6 151.7 147.7 145.1 144.1 143.6 143.1 143.3 144.0 144.9 145.5 146.1 146.8 147.3 147.5 147.7 147.3 147.0 146.4 145.7 145.3 144.8 144.6 145.2 146.5 146.9 147.0 147.6 147.3 147.7 148.4 149.1 153.3 162.0 174.8 181.6 188.4 193.4 195.0 196.5 196.5 196.0 196.5 199.8 204.3 209.2 214.7 218.9 221.0 221.8 221.5 221.4 220.3 218.7 218.0 218.0 218.6 219.1 219.9 220.5 220.3 217.6 197.8 185.4 206.0 218.5 213.5 205.1 199.0 195.2 191.3 190.0 189.8 194.4 196.0 195.9 195.8 194.6 193.6 192.7 193.0 193.1 193.4 194.4 195.4 195.9 196.1 196.1 195.5 195.1 194.6 194.2 191.9 184.0 175.2 171.9 175.6 178.6 180.9 184.5 187.6 191.2 194.7 203.4 217.2 222.0 222.4 222.8 221.7 222.0 221.5 220.0 219.4 219.5 219.4 219.1 219.5 220.0 219.9 220.8 219.6 221.5 220.3 222.0 221.9 221.9 222.6 223.3 222.8 222.2 221.4 221.0 220.9 220.1 220.3 219.7 219.4 219.4 219.2 219.4 219.9 219.6 219.1 219.2 219.4 219.6 219.7 219.5 219.4 219.5 219.7 219.6 219.5 218.7 219.4 219.0 218.4 218.0 217.9 218.5 218.2 218.0 217.7 218.0 218.5 218.9 219.6 219.9 220.0 220.0 220.0 219.9 218.9 217.5 216.8 216.5 216.1 216.0 216.1 216.5 216.7 217.2 217.6 217.9 218.2 218.4 219.4 220.1 219.7 219.5 219.0 218.4 217.6 216.7 217.1 216.8 216.5 217.1 217.5 218.9 219.2 219.1 219.4 220.0 219.6 219.0 219.2 220.0 220.4 220.0 219.5 218.9 217.5 216.2 216.3 214.9 214.7 215.0 216.0 218.5 220.3 223.1 225.0 225.7 225.1 223.2 221.5 219.0 216.1 214.7 212.8 211.8 212.9 214.6 217.7 219.0 220.1 222.2 223.2 223.3 222.3 220.8 219.1 217.1 216.1 214.2 214.2 214.6 216.3 219.0 221.0 222.6 225.0 226.3 226.3 224.9 222.4 221.3 216.2 214.2 213.5 213.0 215.0 217.6 220.0 222.0 223.5 
224.0 223.2 221.8 219.2 216.8 215.5 214.9 214.7 215.8 217.3 220.5 221.3 222.2 221.8 219.9 218.5 215.8 214.7 213.5 213.4 215.1 216.8 220.0 223.5 224.4 224.1 221.3 218.4 215.1 210.7 208.9 208.6 211.2 213.7 218.9 222.9 226.3 227.5 227.0 225.8 220.9 216.5 211.8 209.5 209.8 211.0 215.2 217.2 220.4 225.0 227.0 226.7 225.3 218.9 218.9 218.9 218.9 218.9 218.9 218.9 218.9", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 39.479166666666664 - }, - { - "text": "AP", - "ph_seq": "AP k e zh e l i q ve sh ir c un c ao b u sh eng SP", - "note_seq": "rest A3 A3 A3 A3 A3 A3 D4 D4 C4 C4 B3 B3 A3 A3 B3 B3 A3 A3 rest", - "note_dur_seq": "0.354167 0.166667 1.128472 0.121528 0.291667 0.125000 0.246528 0.170139 0.228299 0.188368 0.256944 0.159722 0.256944 0.159722 0.302951 0.113715 0.105903 0.102431 1.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.354167 0.166667 1.128472 0.121528 0.291667 0.125000 0.246528 0.170139 0.228299 0.188368 0.256944 0.159722 0.256944 0.159722 0.302951 0.113715 0.105903 0.102431 1.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "185.4 184.4 184.3 183.7 185.2 186.7 187.4 189.3 193.6 198.5 198.2 197.6 197.7 197.0 195.2 194.9 195.4 198.6 200.2 201.0 207.1 209.9 211.4 212.9 211.2 208.9 201.0 198.8 202.7 204.8 202.1 204.1 202.3 197.1 196.9 196.8 196.2 194.9 193.4 196.7 196.9 196.5 196.7 196.6 195.3 197.0 202.7 202.1 199.8 198.5 198.3 198.5 198.7 199.3 201.6 202.8 205.1 208.6 210.1 211.8 212.0 211.5 211.9 210.7 211.2 211.5 212.8 216.0 217.9 220.1 223.5 223.7 222.6 220.8 218.9 217.1 216.1 215.1 215.6 217.0 218.2 218.6 221.0 222.0 222.7 222.6 221.8 221.4 220.8 219.0 219.1 219.6 220.4 220.9 221.7 222.0 221.5 220.8 220.0 219.9 220.0 219.2 219.1 219.0 218.9 218.7 218.6 218.5 218.4 218.2 218.2 218.9 219.4 220.3 221.5 222.3 223.8 223.6 222.9 221.9 221.3 219.7 217.3 216.5 216.3 217.0 218.5 219.5 220.3 221.1 221.4 221.7 222.4 222.0 221.4 219.4 219.2 219.0 218.5 213.0 192.5 184.3 183.9 185.7 188.4 190.9 194.9 199.1 201.6 204.7 206.2 207.8 209.6 221.0 223.7 222.6 221.0 219.4 218.7 217.7 217.2 217.3 216.7 217.3 217.9 218.7 219.6 219.7 219.6 220.5 219.9 220.1 219.1 219.5 219.9 219.4 219.5 219.0 218.4 218.0 217.9 217.7 217.1 216.1 216.2 216.2 215.3 214.6 214.9 217.3 220.1 219.9 219.4 218.2 217.6 217.3 217.0 216.6 216.7 217.1 217.9 218.4 219.0 219.2 219.1 218.9 219.9 220.8 221.7 220.1 217.5 203.6 193.9 191.5 200.5 208.1 211.3 215.2 223.7 226.7 231.7 236.8 237.7 247.5 254.3 256.8 262.5 266.2 268.2 267.0 266.8 265.1 265.9 267.9 271.6 277.0 285.6 291.5 293.7 296.0 296.6 296.4 295.0 293.7 294.0 293.3 291.5 286.5 265.9 282.7 290.3 290.0 284.3 281.9 276.9 273.7 271.8 269.1 267.4 263.8 258.7 258.8 263.6 263.4 260.3 257.4 251.1 247.4 246.5 247.5 251.8 256.1 257.7 261.2 262.5 262.5 261.5 260.4 260.6 260.9 261.9 262.8 262.4 253.7 236.5 223.1 228.2 230.4 232.5 234.6 235.4 238.3 241.9 242.3 246.4 249.7 250.5 259.1 265.1 262.8 258.6 252.6 251.0 248.4 246.4 246.1 246.2 246.2 246.4 245.7 243.8 244.4 245.1 246.1 246.8 248.4 250.7 251.8 251.8 251.5 250.1 248.2 238.9 218.4 220.0 230.8 232.5 231.6 231.2 231.3 231.7 232.0 230.1 229.1 225.3 220.9 217.3 215.5 214.5 214.6 215.2 216.2 216.6 218.6 218.9 219.1 220.6 220.6 220.6 220.5 220.3 220.1 220.1 219.9 219.9 219.9 220.1 220.4 220.8 221.5 222.0 221.3 219.7 214.2 214.4 214.6 215.8 218.3 239.1 244.4 246.7 248.7 249.5 249.1 240.6 227.4 230.8 234.8 235.7 240.7 242.3 237.8 231.7 223.2 216.7 212.4 204.8 204.0 207.1 209.9 215.2 218.0 219.9 219.9 219.2 218.5 217.7 217.2 216.8 216.3 
216.1 216.2 216.5 217.2 218.4 219.9 221.3 221.5 221.3 220.9 220.1 218.6 217.9 217.9 217.7 217.7 217.7 217.6 217.6 218.1 219.0 219.7 219.7 219.9 220.8 220.0 220.1 220.3 220.4 220.6 220.8 220.5 219.7 219.9 220.3 220.5 220.9 220.3 219.6 218.9 219.0 219.2 219.5 219.7 219.5 219.2 220.0 220.5 221.4 222.0 221.9 221.0 220.1 219.9 218.4 217.1 216.0 215.7 216.0 217.0 219.0 220.9 221.5 223.2 223.8 223.8 222.6 221.7 220.0 217.2 216.0 215.7 216.1 216.8 218.4 219.1 219.2 219.9 219.6 220.0 222.7 225.0 226.4 226.7 226.4 225.4 222.8 220.6 216.7 213.5 212.4 212.9 216.0 219.5 225.1 228.3 229.7 230.5 229.5 227.0 224.8 219.5 217.5 217.0 220.3 223.3 226.7 227.1 225.9 222.0 211.0 200.9 198.6 198.6 198.6 198.6 198.6 198.6 198.6 198.6", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 46.145833333333336 - }, - { - "text": "AP", - "ph_seq": "AP r ong b u x ia y i g e z uo m eng SP", - "note_seq": "rest B3 B3 A3 A3 B3 B3 G3 G3 D4 D4 C4 C4 D4 D4 rest", - "note_dur_seq": "0.416667 0.104167 0.302951 0.113715 0.103299 0.105035 1.449653 0.008681 0.294271 0.122396 0.284722 0.131944 0.258681 0.157986 2.500000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.416667 0.104167 0.302951 0.113715 0.103299 0.105035 1.449653 0.008681 0.294271 0.122396 0.284722 0.131944 0.258681 0.157986 2.500000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "167.9 166.5 167.0 167.2 166.9 166.4 166.1 165.8 164.8 164.1 164.1 164.5 164.9 165.1 165.8 165.8 165.8 166.1 166.3 165.8 165.8 166.2 166.8 166.6 166.2 166.4 167.2 166.7 166.2 165.9 166.2 166.7 167.1 167.5 167.2 166.4 164.6 165.3 165.9 166.3 167.0 169.2 175.5 185.2 196.8 212.1 221.3 225.4 227.6 228.8 228.3 227.0 225.5 225.0 225.4 226.6 228.5 231.7 238.1 242.4 246.7 248.8 250.1 249.2 248.2 247.8 245.2 243.5 242.8 243.1 244.0 244.7 244.8 244.2 241.4 239.4 232.7 216.8 204.9 192.3 194.1 209.3 223.3 219.6 220.5 220.9 220.6 219.9 206.0 190.6 186.6 187.5 194.4 198.5 202.1 204.1 206.0 212.2 215.7 218.0 222.6 229.1 230.5 232.3 232.3 233.1 231.6 231.7 230.7 230.7 230.1 231.9 234.0 237.0 243.0 247.1 250.5 253.6 254.9 253.6 252.3 250.4 247.1 245.8 245.4 244.8 244.8 245.2 245.8 246.4 246.8 247.4 247.9 248.1 247.8 247.5 247.9 246.8 247.4 247.9 246.9 246.9 247.2 247.4 247.7 246.8 246.8 246.9 247.1 248.1 247.4 247.5 248.1 247.8 247.2 246.8 246.1 245.7 246.1 246.5 246.9 247.8 248.9 249.5 249.8 249.7 249.4 249.1 249.4 249.2 248.5 247.4 246.7 246.5 246.5 246.5 246.5 246.5 246.5 246.5 246.5 246.7 246.9 247.5 247.5 247.4 247.5 247.2 246.5 246.2 245.5 245.1 245.4 245.5 245.1 244.8 244.8 245.5 244.8 244.7 244.8 244.4 244.0 245.2 245.7 246.2 246.8 246.9 246.1 246.1 246.2 246.5 246.1 245.4 244.5 242.6 240.7 238.8 237.2 233.6 229.2 224.8 220.5 208.7 195.5 191.3 186.5 187.4 187.9 189.0 191.3 192.7 193.7 194.5 194.4 194.1 193.7 193.3 193.0 193.0 194.2 195.2 196.0 196.7 197.0 196.9 196.6 196.3 196.2 195.4 195.3 196.3 196.2 187.8 179.6 183.7 188.7 195.9 208.6 220.5 238.0 250.1 256.5 262.7 267.1 267.3 267.4 263.9 263.1 263.9 266.8 274.3 280.7 287.1 292.1 294.5 295.9 295.2 293.7 291.6 290.8 290.8 291.1 292.8 292.0 283.3 253.3 236.2 235.4 238.8 240.2 241.4 243.8 245.9 249.5 252.4 256.1 257.3 267.7 267.4 266.8 265.7 264.4 262.2 260.7 260.3 259.7 259.4 259.8 260.6 261.3 261.9 261.9 261.5 261.0 261.6 262.5 263.1 263.4 263.1 262.4 261.5 258.5 257.1 257.0 256.7 256.5 255.9 255.5 255.0 255.1 255.5 260.9 265.1 269.9 271.9 273.2 274.3 274.6 274.8 274.5 274.3 274.0 274.0 275.3 275.6 278.3 279.4 281.9 286.1 290.1 292.0 292.8 296.0 297.9 297.8 297.2 296.7 
295.5 293.5 293.0 293.0 293.0 293.0 293.7 294.5 294.5 293.8 293.7 293.8 293.8 293.2 292.5 292.0 292.3 292.0 292.0 292.8 293.0 293.2 293.2 293.3 293.5 293.5 292.1 291.5 291.0 290.6 290.5 291.8 293.0 293.2 293.3 294.5 295.0 294.5 293.0 293.5 293.8 293.3 292.8 292.5 292.5 292.6 292.8 293.0 292.5 292.0 292.0 292.3 292.3 292.3 292.0 292.0 292.3 293.0 292.3 292.0 291.6 291.6 292.3 292.8 292.3 292.5 293.5 293.2 292.8 293.3 293.2 293.0 292.8 292.8 292.6 292.3 291.6 291.1 290.3 290.3 290.6 291.1 291.6 292.0 291.3 291.1 291.8 292.1 291.1 291.6 291.5 290.6 290.6 290.8 291.0 291.1 290.5 291.3 291.5 290.5 290.8 291.3 292.0 292.6 293.2 293.7 292.6 292.6 292.8 293.3 294.2 294.9 294.5 294.9 295.4 294.9 294.0 292.8 290.0 289.6 290.0 291.6 295.2 296.7 298.1 298.1 297.4 296.2 293.7 291.5 288.8 286.1 286.1 286.6 288.8 289.6 293.5 296.9 297.4 297.2 295.7 295.0 292.3 288.6 287.5 288.0 291.0 292.3 294.0 296.9 298.1 298.8 296.7 294.5 290.3 287.1 286.6 288.0 289.6 292.8 294.9 296.6 297.8 297.2 296.0 292.6 290.5 288.3 290.1 292.6 295.0 300.5 302.3 304.7 304.0 303.1 299.1 296.4 295.4 295.0 297.1 298.6 302.6 303.3 302.3 296.9 289.8 280.2 269.0 268.7 268.7 268.7 268.7 268.7 268.7 268.7 268.7", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 52.8125 - }, - { - "text": "AP", - "ph_seq": "d e r en SP", - "note_seq": "C4 C4 D4 D4 rest", - "note_dur_seq": "0.151042 0.312500 0.104167 1.250000 0.100000", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.151042 0.312500 0.104167 1.250000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "261.6 261.6 261.6 261.6 259.8 253.0 253.4 253.9 254.5 254.9 255.1 255.4 255.7 255.7 259.5 264.4 265.1 266.0 265.1 263.4 261.9 261.5 261.6 261.6 262.1 261.8 261.5 262.4 262.8 262.7 262.4 261.8 261.5 261.0 260.4 260.1 259.7 259.2 257.3 254.6 252.9 252.1 251.5 252.1 252.7 254.5 257.2 259.4 264.3 270.1 273.2 274.0 272.9 272.7 272.6 271.6 271.6 272.1 272.6 272.9 274.0 276.7 278.8 282.7 288.0 292.5 298.6 301.4 302.1 299.5 297.2 293.2 289.6 288.5 288.6 289.1 290.3 292.3 293.0 296.2 298.5 299.3 298.8 297.1 295.5 292.5 291.1 290.6 291.0 292.0 292.6 293.5 294.0 294.2 294.0 293.2 292.6 291.8 291.8 292.1 292.5 292.8 293.2 293.5 293.8 294.0 294.2 294.2 294.0 293.8 293.8 295.4 296.4 296.6 296.0 295.5 294.2 291.5 289.6 289.0 290.1 290.8 294.2 298.3 299.8 301.7 302.3 301.9 299.8 296.6 294.0 289.1 287.1 286.5 287.3 292.5 295.7 298.5 300.7 301.7 300.2 296.6 292.3 287.0 286.0 286.0 287.8 292.8 296.7 301.2 303.7 302.8 301.2 295.7 287.8 283.3 270.7 268.7 268.7 268.7 268.7 268.7 268.7 268.7 268.7", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 59.43229166666667 - }, - { - "text": "AP", - "ph_seq": "d an j iu s uan zh e ch en sh ir SP", - "note_seq": "B3 B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest", - "note_dur_seq": "0.111979 0.282986 0.133681 0.232639 0.184028 0.295139 0.121528 0.258681 0.157986 0.138889 0.069444 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.111979 0.282986 0.133681 0.232639 0.184028 0.295139 0.121528 0.258681 0.157986 0.138889 0.069444 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "209.2 210.4 211.3 211.5 211.0 211.0 212.9 215.0 217.7 220.5 222.9 240.3 245.2 245.0 243.0 242.6 242.4 242.6 243.3 243.7 244.2 246.5 247.2 248.2 248.5 248.4 246.4 244.8 244.2 243.7 244.0 245.2 246.8 247.9 248.4 248.2 247.1 237.0 220.5 202.7 204.8 210.8 225.6 236.4 245.8 258.4 267.9 275.9 294.7 297.4 296.4 293.7 291.0 290.3 289.0 288.5 288.8 289.8 291.3 292.3 292.8 293.3 293.7 294.2 
294.0 293.3 294.2 295.2 294.5 284.2 257.1 255.4 263.4 279.6 293.3 302.1 311.3 324.0 330.6 341.9 357.7 363.9 383.0 398.4 399.1 396.3 394.0 394.0 393.1 390.9 389.5 388.8 389.5 390.0 391.1 393.1 393.8 393.4 392.9 391.8 388.4 389.3 390.0 392.2 394.5 394.7 394.7 393.8 391.3 386.6 348.4 318.0 312.7 324.3 346.6 356.3 382.2 410.4 432.2 443.1 447.2 446.4 442.0 437.5 433.9 433.7 433.9 435.2 438.2 441.3 442.5 442.0 440.5 439.5 438.5 436.5 435.7 438.7 441.3 442.3 443.3 436.5 426.5 400.9 405.6 418.0 430.9 435.4 437.7 443.8 449.2 458.2 462.9 469.5 489.3 498.5 499.0 495.9 495.6 495.2 496.2 495.2 482.4 458.7 487.1 515.2 521.4 515.2 508.5 508.3 512.3 520.5 513.4 511.0 514.3 519.3 515.2 512.2 510.1 508.1 501.6 499.0 498.5 498.5 501.4 501.4 503.1 509.2 508.6 511.3 510.7 504.0 496.2 487.9 480.9 478.7 480.7 487.4 493.3 503.1 512.8 515.5 514.6 507.5 501.9 494.2 481.8 477.3 475.4 479.0 484.0 493.6 501.9 506.6 509.5 508.6 506.0 498.5 486.0 481.8 478.4 481.2 485.4 491.6 489.6 478.4 478.4 478.4 478.4 478.4 478.4 478.4 478.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 61.97135416666667 - }, - { - "text": "AP", - "ph_seq": "z ai h an l iang SP", - "note_seq": "A4 A4 B4 B4 B4 B4 rest", - "note_dur_seq": "0.131944 0.263021 0.153646 0.140625 0.067708 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0", - "ph_dur": "0.131944 0.263021 0.153646 0.140625 0.067708 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "440.0 440.0 440.0 399.5 399.3 398.8 398.4 398.2 397.9 397.7 397.7 397.2 400.9 411.5 414.8 409.8 406.1 403.0 403.0 405.6 410.1 420.1 428.7 439.0 443.8 444.6 445.1 442.8 438.5 435.7 434.7 435.4 436.2 437.5 440.5 443.1 444.1 443.6 440.5 423.8 389.5 383.9 399.5 415.1 423.1 433.3 449.5 461.0 475.4 486.8 493.0 498.5 498.8 497.6 497.9 498.8 498.2 498.2 499.6 500.8 504.6 507.8 507.5 503.4 496.2 492.7 490.8 487.1 487.4 488.5 491.3 494.2 496.7 500.2 501.1 501.6 499.0 496.5 496.7 492.7 492.7 491.6 491.0 490.8 492.7 495.6 495.6 497.6 498.2 492.7 491.0 487.1 493.3 495.3 500.5 511.9 514.6 516.6 513.1 507.8 501.4 487.6 482.9 476.8 476.8 477.9 487.6 498.2 502.8 506.3 507.2 504.0 501.9 493.9 490.2 489.9 491.6 494.7 491.6 480.1 480.1 480.1 480.1 480.1 480.1 480.1 480.1", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 64.86805555555556 - }, - { - "text": "AP", - "ph_seq": "z ai l eng m o AP n i x in t ou n a p eng x ve AP y E y i r an y ao w en r e SP", - "note_seq": "A4 A4 G4 G4 G4 G4 rest B3 B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest A4 A4 B4 B4 C5 C5 C5 C5 B4 B4 B4 B4 rest", - "note_dur_seq": "0.131944 0.291667 0.125000 0.122396 0.085938 0.833333 0.460069 0.164931 0.223090 0.193576 0.241319 0.175347 0.251736 0.164931 0.255208 0.161458 0.103299 0.105035 0.625000 0.343750 0.072917 0.407986 0.008681 0.151910 0.056424 0.968750 0.072917 0.305556 0.111111 0.151910 0.056424 1.041667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.131944 0.291667 0.125000 0.122396 0.085938 0.833333 0.460069 0.164931 0.223090 0.193576 0.241319 0.175347 0.251736 0.164931 0.255208 0.161458 0.103299 0.105035 0.625000 0.343750 0.072917 0.407986 0.008681 0.151910 0.056424 0.968750 0.072917 0.305556 0.111111 0.151910 0.056424 1.041667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "440.0 440.0 440.0 395.0 394.5 394.0 393.8 393.6 393.4 393.1 392.7 392.2 396.6 409.6 413.4 408.9 406.5 403.5 403.2 405.1 409.1 416.0 423.1 432.9 436.5 439.7 444.9 444.9 444.3 441.0 439.0 437.0 436.0 437.0 
438.7 440.0 442.8 443.3 442.8 441.5 437.7 434.4 425.0 405.4 397.6 389.7 382.2 385.3 395.9 395.9 395.6 394.7 396.6 395.2 393.6 393.4 393.1 392.9 391.3 389.1 379.5 373.0 371.7 370.2 371.5 375.4 393.1 397.5 397.0 397.5 397.2 396.6 396.1 395.4 393.6 392.4 391.1 391.8 391.3 390.2 390.4 390.9 392.0 392.9 393.4 395.0 396.8 397.2 397.9 398.6 398.4 397.0 395.2 393.8 391.3 386.8 385.7 385.0 385.9 387.5 391.3 395.4 397.7 399.3 399.8 398.6 394.3 387.9 383.9 381.3 382.2 385.9 387.3 392.7 398.2 401.2 403.0 402.1 398.4 390.4 384.8 381.9 379.5 380.6 383.3 388.6 391.3 392.4 396.3 397.9 398.4 398.4 399.3 399.5 400.3 383.9 359.5 304.8 389.6 343.5 313.6 309.3 307.6 304.5 301.9 300.7 300.0 299.1 297.6 294.9 292.1 290.3 289.8 287.0 285.6 285.6 283.5 281.2 277.8 275.1 272.1 271.2 269.9 267.0 266.4 265.0 262.5 261.9 261.3 260.7 260.9 230.5 229.1 229.1 229.1 229.1 229.6 234.4 237.6 239.8 244.7 247.1 247.9 247.9 247.9 247.9 246.2 244.8 244.7 243.4 244.1 245.1 245.4 245.8 242.8 241.9 241.0 241.3 241.4 240.9 241.2 242.4 242.6 242.7 242.8 243.7 244.4 245.1 246.2 248.5 249.7 248.9 240.1 224.6 208.6 209.7 213.9 220.9 228.7 232.1 242.4 247.2 252.6 261.2 266.1 269.7 278.5 288.5 296.6 300.9 299.3 295.5 293.7 291.8 290.0 289.8 289.3 289.3 289.3 289.6 291.0 292.1 293.2 293.8 293.8 293.3 293.0 293.5 294.9 295.9 296.2 296.0 295.2 294.2 288.8 266.8 262.4 275.0 291.3 300.5 315.6 331.4 344.0 367.9 391.1 394.7 400.9 397.9 395.4 394.0 390.9 389.1 388.6 388.4 388.8 389.5 390.4 390.6 390.6 390.6 390.9 390.9 390.9 391.5 392.2 392.4 392.4 392.4 392.4 393.1 393.8 387.5 376.0 373.9 373.4 376.1 380.0 387.7 404.4 420.9 438.7 445.9 446.4 445.9 442.5 436.7 432.4 430.4 430.4 431.9 434.7 438.5 439.5 440.5 440.3 439.5 438.0 436.5 436.5 437.2 438.5 439.2 440.8 439.2 434.4 434.4 437.5 436.7 436.2 442.8 447.4 455.6 464.6 471.5 480.6 494.5 501.1 503.7 497.9 497.3 495.3 491.9 483.2 458.4 459.2 472.7 487.1 485.4 484.3 479.5 478.4 477.3 476.0 481.5 507.8 516.0 514.9 511.3 506.6 498.2 491.3 487.9 486.2 491.3 493.9 496.7 503.7 506.6 507.8 503.1 497.0 487.6 482.9 481.2 482.9 485.7 489.1 495.9 501.4 502.8 503.4 501.6 499.9 493.3 488.2 486.2 486.2 489.3 491.3 493.3 499.6 503.4 504.3 502.5 497.3 493.0 482.0 473.5 472.4 472.7 476.0 479.3 486.5 492.8 493.5 487.3 488.7 537.0 520.0 509.2 506.3 505.7 505.4 505.1 504.8 464.6 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 436.0 432.7 432.4 431.9 431.7 431.7 433.2 436.7 439.7 443.6 444.3 444.6 441.0 439.2 438.7 437.5 437.7 437.0 435.7 436.0 436.2 437.0 437.5 438.0 438.5 439.0 439.0 438.5 437.7 438.5 440.3 442.0 442.3 440.8 439.0 438.0 437.5 436.7 436.5 436.2 432.9 428.2 426.9 431.0 441.3 459.5 479.8 491.9 497.9 502.5 502.2 500.5 494.7 491.9 490.2 489.3 492.2 491.9 483.4 473.5 474.8 487.8 509.3 516.8 526.3 529.3 529.0 528.7 526.3 525.4 522.9 520.5 520.5 521.1 521.4 521.7 521.7 520.8 521.4 522.6 523.9 526.9 527.2 525.4 523.6 522.6 522.0 522.0 519.6 518.4 517.5 518.7 520.5 520.8 521.1 521.4 521.7 521.7 520.2 519.3 519.6 518.1 517.5 518.1 519.3 520.5 522.0 524.2 525.1 525.4 524.8 523.9 522.9 522.0 519.6 518.1 517.5 517.8 518.4 519.9 521.4 522.9 523.9 525.1 526.3 526.6 526.6 526.0 526.6 525.1 522.6 523.6 525.1 526.9 527.8 529.0 527.8 528.1 529.9 529.0 528.7 530.9 533.0 533.6 534.2 535.5 535.8 535.8 538.0 536.1 533.3 530.9 530.9 531.8 529.3 525.4 522.9 516.9 514.3 510.1 509.2 508.1 510.7 512.5 511.6 515.7 517.2 519.3 519.9 522.0 519.6 518.7 517.5 516.9 517.5 519.9 523.3 524.5 525.7 526.0 525.7 525.1 521.1 512.5 491.0 476.8 473.2 471.7 477.2 487.4 492.5 493.3 494.5 
494.2 493.9 492.5 491.3 489.9 489.9 491.3 493.3 493.6 492.7 489.6 490.2 492.2 499.6 502.5 502.2 501.4 500.5 498.2 493.9 491.3 487.9 486.2 485.7 486.0 487.6 491.6 494.2 494.7 496.2 495.9 493.3 492.5 491.6 489.3 488.2 487.9 488.5 489.3 490.2 491.0 492.2 494.2 492.5 491.9 491.6 490.5 487.9 487.1 486.8 486.8 488.5 489.9 491.9 493.9 494.2 493.3 492.2 491.6 486.8 483.7 483.2 482.6 485.7 488.2 492.2 496.7 498.2 499.0 499.0 496.7 492.7 486.0 484.6 482.9 484.8 487.1 490.8 498.2 500.2 502.5 499.0 496.5 488.8 484.8 483.2 483.4 488.2 495.6 499.9 500.8 502.5 501.6 499.0 494.2 491.9 490.8 492.5 493.3 493.0 491.6 489.3 477.9 455.8 455.8 455.8 455.8 455.8 455.8 455.8 455.8", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 66.53472222222221 - }, - { - "text": "AP", - "ph_seq": "z ai c an p o r en j ian p in c ou y i g e SP", - "note_seq": "B4 B4 A4 A4 G4 G4 E5 E5 E5 E5 D5 D5 D5 D5 B4 B4 G4 G4 rest", - "note_dur_seq": "0.131944 0.256944 0.159722 0.255208 0.161458 0.729167 0.104167 0.699653 0.133681 1.088542 0.161458 0.256944 0.159722 0.407986 0.008681 0.294271 0.122396 0.833333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.131944 0.256944 0.159722 0.255208 0.161458 0.729167 0.104167 0.699653 0.133681 1.088542 0.161458 0.256944 0.159722 0.407986 0.008681 0.294271 0.122396 0.833333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "452.9 450.8 450.8 451.1 451.1 449.2 447.4 446.1 442.8 443.1 443.1 561.1 556.3 552.4 553.1 535.5 527.5 520.5 511.3 507.5 498.8 498.8 497.6 499.9 500.2 497.0 493.0 491.9 488.8 488.2 489.6 493.6 494.7 493.6 482.0 466.4 443.8 448.5 451.3 449.0 449.0 450.0 450.0 449.0 448.9 449.2 454.8 455.0 460.3 457.1 456.6 452.9 449.8 448.7 445.6 441.8 441.8 442.0 441.5 441.0 440.3 440.3 440.5 440.8 439.7 438.0 438.2 437.0 435.4 436.2 439.2 441.3 440.8 439.5 437.2 422.3 380.0 356.8 335.2 320.2 326.5 336.9 343.5 349.4 354.3 383.5 395.9 396.8 396.1 394.3 392.9 390.0 387.5 387.7 389.5 392.7 393.4 394.5 393.8 392.2 387.9 384.6 383.5 382.2 382.8 385.9 390.4 394.3 395.0 396.1 395.0 390.4 386.2 378.0 375.2 375.8 378.4 385.7 392.0 401.6 405.8 407.2 406.3 400.7 395.6 384.6 377.6 376.2 378.9 382.4 387.3 392.7 396.3 397.5 395.9 393.6 390.4 388.6 388.4 388.8 389.5 390.9 393.6 396.1 398.6 401.4 402.6 405.4 407.9 411.7 418.0 423.5 441.7 466.6 515.8 546.7 588.7 601.4 607.3 610.2 610.5 607.0 603.8 604.9 606.3 608.4 610.2 611.2 615.1 621.2 629.5 639.7 659.3 667.7 673.9 672.7 670.4 662.7 649.1 644.2 639.4 638.6 640.9 646.1 653.6 663.5 676.6 685.3 684.9 679.7 670.4 657.7 650.6 644.6 643.5 644.2 647.6 656.2 661.9 666.1 668.8 670.0 665.4 660.0 657.0 649.4 644.9 645.3 649.1 655.1 654.3 660.4 672.7 680.9 685.7 663.8 647.9 666.1 669.6 667.7 670.4 671.2 663.8 652.4 652.8 653.6 642.7 651.7 657.7 655.1 644.2 644.6 643.1 639.7 642.3 644.2 644.6 647.2 647.2 651.7 654.3 656.2 658.1 661.9 663.8 665.8 663.8 666.9 667.3 666.5 665.8 662.7 657.4 653.6 653.2 653.2 653.6 654.7 657.0 657.4 657.0 656.6 656.2 655.5 655.1 654.7 654.3 653.9 653.9 654.3 654.3 654.7 654.7 655.1 655.5 655.1 653.2 653.9 653.6 652.8 652.8 653.2 653.9 654.7 655.1 655.8 657.0 657.7 657.0 655.8 653.6 649.8 645.7 644.2 646.1 649.1 654.3 660.8 665.0 669.2 668.1 663.5 657.7 648.7 640.5 639.4 643.1 647.2 652.4 658.5 666.1 670.4 670.4 662.3 653.6 648.7 647.6 648.7 651.7 661.9 671.6 675.1 686.9 680.1 666.9 639.7 650.6 652.4 649.4 650.2 648.3 638.6 624.3 606.7 572.7 558.2 559.5 569.6 580.6 582.6 581.9 580.9 579.6 580.6 581.9 582.9 583.3 583.3 583.3 582.9 584.6 587.3 588.3 589.4 589.7 
589.4 589.0 588.3 584.3 564.1 536.7 543.0 557.9 563.1 560.5 555.0 551.5 548.0 544.5 540.1 536.7 542.0 559.2 563.1 561.1 553.4 548.3 544.8 543.6 546.7 552.1 557.6 565.4 577.6 584.3 587.0 587.7 586.0 585.3 585.0 584.6 584.6 584.6 584.6 585.3 586.3 585.3 586.0 585.6 584.3 580.6 578.2 572.6 560.3 548.8 529.1 513.5 512.8 506.6 500.5 498.2 498.2 498.5 497.3 497.0 496.7 495.6 493.9 493.3 493.9 493.6 493.3 494.2 494.2 493.3 493.6 493.9 493.6 493.0 494.2 495.0 494.5 486.0 458.4 467.5 482.0 487.6 491.6 494.5 505.6 508.6 481.6 415.0 376.7 373.6 385.5 392.2 394.7 395.2 394.5 392.9 389.7 386.6 385.0 385.0 386.4 387.7 389.5 391.1 393.6 397.0 400.5 402.1 402.1 400.2 398.2 394.5 386.8 384.2 382.2 382.2 383.9 387.0 395.0 400.9 403.9 407.2 407.9 406.1 401.2 392.7 383.7 376.2 373.6 374.1 378.2 386.6 392.4 400.9 404.9 405.4 403.0 395.6 392.0 381.9 374.9 374.1 377.8 387.0 390.9 400.0 405.1 406.1 404.6 397.0 393.8 383.5 376.5 374.5 377.3 386.6 393.4 400.7 403.7 396.1 396.1 396.1 396.1 396.1 396.1 396.1 396.1", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 75.70138888888889 - }, - { - "text": "AP", - "ph_seq": "AP w an zh eng d e SP", - "note_seq": "rest E4 E4 F#4 F#4 G4 G4 rest", - "note_dur_seq": "0.409722 0.111111 0.503472 0.121528 0.147569 0.060764 2.083333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0", - "ph_dur": "0.409722 0.111111 0.503472 0.121528 0.147569 0.060764 2.083333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "212.0 209.3 209.6 209.8 209.8 209.6 209.0 208.6 208.0 207.5 207.2 206.9 207.3 208.5 208.9 208.4 208.0 207.9 208.0 208.1 208.3 209.2 210.9 214.0 217.2 219.2 221.0 223.8 225.0 226.7 230.4 232.5 234.2 238.8 245.9 248.2 247.5 249.1 249.8 255.1 268.7 279.4 292.3 302.4 308.6 315.1 320.2 322.5 324.3 325.7 327.2 329.8 331.5 331.9 332.1 332.1 332.1 330.8 329.6 329.1 328.5 328.9 328.9 328.9 328.5 328.1 327.7 328.1 328.5 327.5 328.1 328.9 329.2 330.8 331.2 330.0 328.5 328.5 328.5 329.2 329.6 330.0 331.0 332.1 333.1 332.1 330.4 329.4 328.5 323.2 314.7 285.8 273.5 287.0 306.5 320.2 329.5 341.3 348.1 351.5 371.9 375.8 376.7 371.5 369.4 368.1 367.9 371.3 371.9 372.1 369.6 363.4 355.7 332.9 306.4 309.5 332.8 385.3 391.1 393.6 395.6 396.6 397.7 397.7 397.0 395.6 394.7 392.4 392.4 392.4 392.4 392.4 392.4 392.4 392.7 393.4 394.5 395.6 395.6 395.6 395.6 395.6 395.0 392.9 390.9 390.4 390.2 389.7 390.2 392.0 392.0 391.8 391.5 391.8 392.7 391.8 390.6 390.6 391.1 391.3 392.0 392.2 392.9 393.4 393.6 394.0 393.4 393.4 392.2 392.2 392.4 392.7 392.9 391.8 391.5 392.0 392.4 392.0 391.8 392.2 392.7 392.0 391.8 391.1 390.6 390.6 390.4 390.4 390.2 390.2 392.0 392.2 392.0 391.3 390.9 391.1 390.0 392.0 391.5 391.1 391.5 392.0 390.6 389.5 390.2 391.5 390.2 391.3 392.9 392.0 393.4 393.8 393.1 392.9 392.2 391.3 389.5 389.7 390.4 390.6 389.3 389.7 390.4 391.8 391.1 391.5 392.4 393.1 393.1 392.9 392.7 392.4 392.2 392.0 391.5 391.8 393.1 393.4 392.0 393.8 392.9 392.4 391.5 391.1 391.5 392.0 391.5 391.3 391.8 392.2 394.0 394.7 394.5 392.9 388.8 386.6 385.3 383.9 385.0 387.0 391.1 395.0 397.2 398.6 399.8 399.1 396.3 393.6 390.6 387.3 383.7 384.2 384.6 387.0 392.0 395.4 399.1 401.4 400.2 396.6 392.4 385.5 380.8 378.0 378.6 381.9 389.7 393.6 397.5 404.4 403.5 400.5 394.3 387.0 376.9 374.7 374.5 374.5 374.5 374.5 374.5 374.5 374.5 374.5", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 81.97916666666667 - }, - { - "text": "AP", - "ph_seq": "h un p o SP", - "note_seq": "A4 A4 G4 G4 rest", - "note_dur_seq": "0.153646 0.255208 0.161458 1.666667 
0.100000", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.153646 0.255208 0.161458 1.666667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "440.0 440.0 440.0 440.0 440.0 363.2 362.6 362.4 362.0 361.5 361.3 361.1 360.9 360.9 366.6 390.4 401.6 407.9 409.4 408.6 405.8 403.9 404.4 406.3 411.7 414.3 423.5 430.0 437.0 442.0 444.9 444.3 443.1 441.0 440.8 440.5 440.8 442.0 441.5 435.2 426.2 426.2 426.2 431.7 426.0 407.0 396.4 391.5 383.5 384.1 395.4 402.1 405.6 406.8 406.8 405.1 405.4 403.5 400.7 398.6 396.1 395.0 394.5 393.6 392.7 392.0 391.8 391.5 391.5 392.0 392.9 392.9 392.7 392.4 392.2 392.2 391.8 391.5 391.3 391.1 390.6 390.6 390.6 391.5 392.0 391.3 392.2 394.0 394.5 394.0 393.8 393.4 392.7 392.4 392.0 392.0 392.2 392.7 393.8 394.0 394.0 393.1 392.4 392.2 393.1 393.6 393.6 392.9 392.7 392.9 393.1 392.4 391.8 390.6 389.5 388.8 387.9 387.5 387.5 388.2 389.3 390.2 391.3 391.8 391.5 390.9 390.2 389.7 389.7 389.7 389.7 390.0 389.5 388.8 387.7 387.5 388.6 390.0 393.4 394.3 394.7 394.7 393.8 392.0 390.0 387.5 385.9 386.2 386.8 388.8 391.8 396.1 397.5 398.8 399.3 398.2 394.0 390.2 388.6 388.4 388.4 388.4 390.9 395.2 397.0 401.6 403.2 402.8 400.7 397.2 393.4 389.3 386.2 384.4 384.6 387.0 390.0 392.2 396.6 399.5 403.9 406.1 406.5 404.4 397.7 392.9 387.5 384.8 385.3 388.2 393.1 397.5 396.1 388.6 388.6 388.6 388.6 388.6 388.6 388.6 388.6", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 86.09635416666667 - }, - { - "text": "AP", - "ph_seq": "AP n i b ing f ei zh en d e y i w u s uo y ou SP", - "note_seq": "rest A3 A3 A3 A3 A3 A3 D4 D4 C4 C4 B3 B3 A3 A3 G3 G3 A3 A3 rest", - "note_dur_seq": "0.355903 0.164931 1.136285 0.113715 0.251736 0.164931 0.295139 0.121528 0.304688 0.111979 0.407986 0.008681 0.407986 0.008681 0.232639 0.184028 0.169271 0.039062 1.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.355903 0.164931 1.136285 0.113715 0.251736 0.164931 0.295139 0.121528 0.304688 0.111979 0.407986 0.008681 0.407986 0.008681 0.232639 0.184028 0.169271 0.039062 1.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "181.6 180.8 193.6 204.0 203.4 202.8 201.2 201.0 201.7 200.8 200.3 198.7 201.9 202.6 203.5 204.9 205.5 204.6 204.3 203.6 201.7 199.9 199.2 199.5 200.1 201.0 203.7 207.4 209.9 206.0 207.2 207.7 207.7 207.4 204.8 201.2 198.0 195.7 194.4 193.0 190.6 190.4 190.7 191.4 192.0 193.4 197.7 200.7 203.6 204.2 205.0 205.3 205.0 205.0 204.4 203.5 203.3 203.6 204.2 206.1 209.7 212.4 215.0 218.2 219.1 219.7 219.5 219.2 219.0 218.7 217.0 214.9 213.4 212.3 211.6 212.0 213.7 216.2 218.1 222.3 225.9 227.8 229.3 227.8 226.7 222.8 218.5 215.8 212.6 211.0 212.1 213.7 216.8 221.0 224.6 227.0 228.2 227.8 225.7 224.4 219.7 216.7 215.5 214.5 215.0 216.2 218.2 221.5 222.9 226.1 228.0 228.0 227.2 223.6 219.5 216.7 215.0 214.0 214.1 214.9 216.7 218.9 221.8 224.2 225.8 227.0 225.9 224.4 220.6 219.4 217.9 216.3 216.7 217.1 217.7 219.4 220.9 222.7 222.6 222.4 218.1 206.2 191.0 191.2 203.6 206.9 205.3 206.5 207.2 208.5 209.0 208.6 209.0 211.5 221.5 222.8 222.7 222.0 220.8 218.9 217.9 217.7 217.6 217.5 217.6 218.0 218.2 218.5 219.4 219.7 219.9 220.0 220.1 220.3 220.9 222.6 223.3 221.4 215.0 204.1 204.1 206.5 206.7 206.1 206.1 206.1 205.3 203.8 204.7 209.2 219.2 221.5 221.1 220.6 218.5 217.7 216.5 216.7 216.8 216.8 216.7 216.5 216.7 217.2 218.5 220.5 221.3 221.7 222.8 222.9 222.0 220.5 219.7 217.1 208.3 193.4 194.6 203.8 209.5 211.0 215.0 218.6 219.4 221.3 233.2 252.4 268.5 269.8 271.6 271.8 270.9 270.7 270.4 270.1 271.8 
273.1 274.2 277.3 278.8 281.9 285.1 288.3 293.3 294.9 295.5 295.9 296.0 296.0 297.2 297.2 296.9 295.9 294.3 293.8 290.1 287.3 280.7 266.0 257.9 262.8 281.5 296.7 301.1 301.1 299.3 296.6 288.6 280.7 275.6 269.1 264.7 262.1 262.2 262.2 262.4 262.2 261.9 261.5 261.0 260.6 260.1 260.1 260.0 260.6 261.0 261.0 261.0 261.3 260.6 259.8 259.5 259.2 258.2 257.1 255.7 252.3 251.4 248.4 247.7 248.2 250.0 249.8 249.4 248.9 248.1 247.4 247.5 247.4 247.1 246.5 246.1 246.2 246.4 246.8 247.5 247.4 247.2 247.5 247.9 247.4 248.1 247.7 247.8 248.4 247.9 247.4 247.5 246.7 244.5 242.4 238.9 231.7 222.4 215.5 210.7 208.5 207.9 210.4 212.6 214.5 217.5 219.7 220.5 221.4 221.1 221.0 219.4 219.4 219.4 219.4 219.6 220.0 220.3 220.8 222.3 222.3 220.5 206.2 197.2 209.1 216.5 219.4 217.3 210.1 205.3 201.7 198.6 195.0 191.1 189.9 191.4 194.9 197.7 197.9 198.4 198.5 198.3 197.8 196.5 195.8 189.4 185.4 184.5 184.9 188.2 193.7 204.0 211.0 213.9 216.6 218.4 219.7 219.9 219.9 219.7 219.7 219.0 219.5 218.4 218.0 217.6 217.9 219.2 215.8 218.6 217.6 219.0 218.6 219.5 220.0 219.9 219.9 220.0 219.0 217.2 217.2 217.3 218.6 220.0 220.8 221.1 221.1 220.8 220.3 219.1 219.5 219.2 218.5 218.1 219.4 219.7 220.0 220.0 219.9 219.2 219.1 219.2 219.4 219.0 218.4 218.2 218.6 219.0 219.0 218.9 219.2 219.7 220.1 220.1 220.3 220.3 219.9 219.6 220.4 219.5 221.0 221.3 221.7 223.1 223.3 223.1 222.3 221.0 219.1 216.6 214.5 214.0 215.7 218.1 221.4 223.5 224.6 224.6 224.1 222.0 220.0 215.8 213.7 212.8 213.4 216.8 219.7 222.9 225.1 225.8 225.7 225.4 224.4 220.3 216.2 214.7 212.6 213.1 213.7 214.7 217.0 217.9 221.9 222.8 223.5 222.7 220.1 215.3 208.4 205.5 205.1 206.9 212.3 217.5 224.4 227.8 225.8 221.4 221.4 221.4 221.4 221.4 221.4 221.4 221.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 89.47916666666667 - }, - { - "text": "AP", - "ph_seq": "AP zh ir y ao r eng b ao y ou sh an l iang d e x in a SP", - "note_seq": "rest A3 A3 G3 G3 D3 D3 D3 D3 A3 A3 G3 G3 A#3 A#3 A3 A3 G3 G3 D3 rest", - "note_dur_seq": "0.399306 0.121528 0.343750 0.072917 0.151910 0.056424 1.344618 0.113715 0.343750 0.072917 0.228299 0.188368 0.291667 0.125000 0.304688 0.111979 0.223090 0.193576 0.416667 1.666667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.399306 0.121528 0.343750 0.072917 0.151910 0.056424 1.344618 0.113715 0.343750 0.072917 0.228299 0.188368 0.291667 0.125000 0.304688 0.111979 0.223090 0.193576 0.416667 1.666667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 219.9 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.0 206.6 206.6 206.6 206.5 206.5 206.5 206.5 206.3 206.3 206.3 205.9 204.8 203.4 201.2 199.2 197.6 197.9 198.5 200.5 205.7 210.9 215.1 218.4 220.1 221.5 221.7 220.8 219.0 217.3 216.7 216.2 217.1 218.5 220.0 220.9 220.5 219.7 216.2 207.7 201.0 187.5 180.5 180.2 183.8 193.3 195.5 196.6 196.8 196.7 196.6 196.1 195.5 196.1 196.0 195.8 196.5 194.2 192.5 186.2 176.4 169.9 165.1 159.8 151.3 146.4 143.7 143.1 142.2 142.2 143.0 143.5 145.1 145.6 146.3 145.6 145.1 145.1 145.2 145.2 145.3 144.6 144.7 144.9 144.9 145.0 145.1 145.6 147.1 147.5 148.0 147.9 147.9 147.7 147.2 147.3 147.7 148.2 148.5 148.8 149.7 150.3 150.9 151.0 151.0 150.8 149.7 149.3 148.9 148.2 148.2 147.9 148.1 148.5 148.5 148.5 148.8 149.0 148.8 148.0 148.0 148.2 147.7 147.3 147.9 148.7 149.7 150.4 149.6 148.9 148.0 145.9 144.1 142.0 141.3 140.9 
140.7 142.0 143.1 146.2 147.9 148.6 149.7 149.7 149.5 148.8 148.4 148.2 148.2 148.4 148.5 148.4 148.2 147.9 147.6 146.7 146.9 147.2 147.6 148.5 150.2 148.5 148.3 149.8 150.9 151.2 151.0 150.8 149.9 149.7 149.9 149.5 149.3 149.7 150.7 150.8 151.0 150.8 151.7 153.3 152.5 151.9 150.3 146.5 142.7 132.7 124.2 123.9 126.2 131.4 136.1 143.1 145.1 144.7 144.8 145.1 145.3 145.3 145.3 144.9 144.5 144.0 144.4 144.6 145.4 145.8 146.0 146.1 146.2 146.4 147.0 146.9 146.8 147.0 147.7 148.1 148.6 149.0 149.2 146.9 142.6 142.6 144.0 146.1 150.9 160.1 174.7 180.1 186.6 192.5 194.3 195.5 195.2 195.8 197.4 199.9 207.2 210.9 215.6 219.2 220.5 221.5 220.8 219.5 219.1 218.5 217.6 217.2 213.2 201.3 179.1 175.0 176.1 179.5 180.6 182.5 184.0 187.9 189.7 192.5 197.0 199.5 196.6 197.0 196.9 196.8 197.0 196.2 194.4 193.0 192.0 190.9 192.5 193.6 195.3 196.7 197.6 197.9 197.2 196.5 196.2 196.0 195.9 196.6 197.5 197.9 197.8 197.7 196.6 195.1 193.6 190.7 186.9 182.6 182.5 184.2 190.4 201.7 206.6 212.0 214.5 215.7 216.1 215.2 213.6 211.5 211.2 211.5 215.2 220.0 224.1 230.4 234.4 236.2 236.6 235.2 232.5 231.9 231.1 231.7 232.0 232.0 233.4 233.5 233.5 232.8 232.0 230.4 227.4 223.5 209.2 201.0 204.9 219.1 227.0 227.0 225.3 223.3 222.6 221.1 220.1 219.5 218.5 218.6 219.0 219.4 219.9 220.1 219.7 220.1 219.9 220.8 221.9 221.0 212.1 193.9 195.5 202.9 216.7 218.0 218.0 219.0 220.6 220.0 217.9 217.0 216.1 213.2 208.5 205.4 204.8 200.0 197.7 194.6 193.2 192.2 191.3 191.4 191.5 192.5 192.7 192.9 193.5 194.5 195.2 196.1 196.1 195.9 195.8 196.0 196.1 196.3 196.6 196.6 195.8 194.1 193.4 189.6 182.2 175.4 166.4 152.3 144.7 137.7 133.5 131.6 131.0 131.6 133.3 136.3 139.5 141.6 142.8 144.1 145.3 145.9 145.5 145.0 144.6 144.2 144.0 144.3 144.6 144.6 144.7 144.7 145.5 146.3 147.5 147.3 147.0 146.7 146.7 146.7 145.9 146.2 146.4 147.8 147.9 147.6 147.3 147.1 146.7 145.9 144.6 144.2 144.0 144.1 144.5 144.6 146.2 147.0 147.2 147.4 147.7 147.3 147.3 147.1 146.7 146.5 146.3 146.2 145.8 145.8 145.8 145.8 145.6 145.4 145.0 144.7 145.0 145.7 146.1 146.2 146.1 146.0 145.8 145.2 145.7 145.6 145.6 145.6 145.5 145.6 145.7 145.5 145.0 145.1 145.5 145.6 146.7 147.6 147.9 148.0 147.1 146.5 145.9 145.6 145.4 145.1 144.9 145.1 145.6 146.0 146.3 146.2 146.4 146.0 145.9 146.0 146.1 146.0 145.9 146.2 146.9 147.7 147.3 146.7 146.2 146.3 145.6 145.8 146.4 146.7 146.5 146.3 146.2 146.0 145.9 145.6 145.4 145.5 146.0 146.1 146.0 145.9 146.0 146.6 146.7 146.7 146.9 147.8 146.8 145.9 145.6 145.6 146.0 147.3 146.2 146.2 146.2 146.2 146.2 146.2 146.2 146.2", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 96.14583333333334 - }, - { - "text": "AP", - "ph_seq": "k e w ei l e y i g e x in n ian s i0 q v SP", - "note_seq": "A3 A3 A3 A3 A3 A3 G3 G3 D4 D4 C4 C4 A3 A3 G3 G3 A3 A3 rest", - "note_dur_seq": "0.166667 1.138889 0.111111 0.291667 0.125000 0.407986 0.008681 0.294271 0.122396 0.223090 0.193576 0.251736 0.164931 0.232639 0.184028 0.116319 0.092014 1.458333 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.166667 1.138889 0.111111 0.291667 0.125000 0.407986 0.008681 0.294271 0.122396 0.223090 0.193576 0.251736 0.164931 0.232639 0.184028 0.116319 0.092014 1.458333 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "153.0 150.0 146.0 149.3 153.9 158.3 160.6 162.5 166.2 168.8 173.8 176.8 177.9 180.0 182.7 189.3 194.5 201.5 206.3 208.4 209.7 210.6 210.7 209.5 207.4 206.3 205.0 203.6 203.4 202.3 201.9 203.4 205.6 208.4 213.5 216.7 219.0 221.1 221.8 221.4 220.0 218.9 217.3 216.5 216.1 216.5 
217.9 218.7 220.0 221.0 221.0 220.1 220.5 220.3 219.9 219.5 218.7 218.1 217.9 218.1 218.7 219.4 220.0 220.9 221.3 221.5 221.5 221.1 220.3 218.7 218.0 217.5 217.1 217.7 218.6 219.1 219.9 221.0 220.8 220.9 221.4 221.7 221.5 220.5 219.7 218.5 217.6 217.3 217.7 218.4 218.7 219.4 220.8 221.3 221.8 221.4 220.5 219.2 217.6 217.3 216.8 216.5 217.6 219.1 219.5 219.9 221.5 222.0 221.1 220.1 219.6 219.1 218.7 218.9 219.0 219.0 219.0 218.1 218.1 219.0 219.5 219.5 220.5 220.8 220.0 218.9 219.0 218.7 218.1 218.7 218.9 218.7 219.2 219.9 219.7 220.4 220.1 220.0 219.9 220.5 221.4 221.0 221.1 221.7 222.0 221.9 221.4 220.8 220.4 220.0 218.5 217.0 215.8 213.4 213.1 214.1 218.2 221.5 223.3 225.3 224.2 223.3 221.9 219.1 218.4 217.5 217.2 218.0 218.5 218.7 219.1 220.5 221.0 221.4 221.1 220.8 220.5 220.3 220.5 220.4 219.7 219.6 220.1 220.6 220.8 220.9 220.9 220.4 219.7 218.0 216.1 211.5 207.9 199.6 192.9 189.8 188.3 190.2 191.2 192.0 193.3 194.0 194.8 195.4 195.9 196.0 196.1 196.3 196.3 195.9 195.7 195.3 195.4 195.7 196.0 196.2 196.9 197.5 197.7 197.8 199.1 200.3 197.0 185.9 183.1 188.0 194.1 203.0 214.9 233.7 253.6 263.6 266.8 269.1 270.7 270.4 267.6 266.0 267.1 271.3 276.9 282.5 287.8 293.3 295.4 297.1 297.8 297.2 296.0 294.0 292.6 290.5 275.3 259.5 261.6 275.6 286.1 287.3 287.5 287.6 287.3 286.0 286.3 287.6 288.0 288.9 286.1 282.2 278.3 272.4 266.4 262.2 260.7 258.6 257.7 257.7 258.3 259.7 260.0 261.0 261.8 262.2 263.1 262.7 263.0 263.7 263.6 264.5 264.4 264.7 265.6 264.1 262.2 260.7 256.5 248.1 239.6 226.8 216.8 214.8 212.5 211.5 216.1 221.1 224.4 224.9 226.3 224.9 223.8 221.8 219.1 217.9 217.6 218.0 218.0 218.0 217.9 218.2 219.1 220.5 222.0 223.1 224.2 224.8 223.5 217.3 207.4 204.9 204.0 203.6 203.4 202.3 201.2 199.8 199.3 200.0 199.0 197.5 195.4 194.4 195.2 197.0 197.5 196.1 189.0 178.6 168.3 173.8 178.4 182.0 189.1 196.6 199.0 203.7 209.3 213.1 221.1 223.5 222.7 223.3 223.6 223.1 220.8 219.9 219.1 217.1 216.2 216.5 217.2 218.4 218.7 220.1 221.8 222.9 223.7 223.1 222.0 220.6 219.2 218.7 218.0 216.7 216.3 217.1 218.0 218.6 219.1 220.3 221.3 221.8 222.2 222.0 221.8 221.3 220.1 219.2 218.0 217.2 217.5 218.2 219.0 219.5 220.8 221.4 221.7 222.7 224.2 223.8 222.8 220.5 219.0 217.5 216.7 216.3 216.6 218.1 218.7 219.6 222.4 224.4 224.9 224.2 223.3 220.5 217.5 215.3 213.6 214.0 214.7 216.6 218.9 220.3 223.1 224.1 223.8 223.3 221.4 217.6 216.2 214.6 213.7 214.6 216.5 218.6 221.0 224.2 225.4 224.9 222.9 218.9 216.1 212.1 209.9 210.3 212.3 216.2 217.7 221.3 222.0 221.3 219.7 217.0 215.2 214.0 213.2 214.7 215.5 217.5 218.4 218.2 217.9 216.1 214.4 213.6 214.2 214.1 216.3 218.0 218.6 219.5 222.2 222.7 222.7 222.7 222.7 222.7 222.7 222.7 222.7", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 103.16666666666667 - }, - { - "text": "AP", - "ph_seq": "AP zh e j iu j ing zh ir b u zh ir d e SP", - "note_seq": "rest B3 B3 A3 A3 B3 B3 G3 G3 D4 D4 C4 C4 D4 D4 rest", - "note_dur_seq": "0.399306 0.121528 0.282986 0.133681 0.136285 0.072049 1.336806 0.121528 0.302951 0.113715 0.295139 0.121528 0.304688 0.111979 1.666667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.399306 0.121528 0.282986 0.133681 0.136285 0.072049 1.336806 0.121528 0.302951 0.113715 0.295139 0.121528 0.304688 0.111979 1.666667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "141.2 139.9 140.0 140.4 140.2 139.9 139.5 139.2 139.1 139.0 138.9 138.5 137.6 137.3 137.2 137.2 137.6 138.7 140.5 141.3 142.4 145.1 146.5 149.5 151.7 153.2 154.5 156.9 157.9 160.6 162.2 164.6 209.5 208.7 208.7 
208.7 209.3 211.8 213.7 216.6 217.7 219.4 246.9 248.9 248.9 249.5 249.5 249.5 248.7 246.9 245.7 245.0 245.0 245.0 245.2 245.7 246.7 247.7 247.4 246.9 246.5 245.9 245.7 245.7 245.7 245.4 245.0 244.7 244.5 244.8 245.1 245.1 244.7 241.7 221.0 204.6 192.9 191.7 196.4 201.6 213.8 216.3 220.3 219.0 219.2 219.0 220.1 221.1 222.0 219.0 212.9 191.3 172.5 168.8 179.7 189.7 199.6 210.7 221.1 228.5 232.8 240.6 243.1 245.7 246.9 245.5 244.1 240.7 239.9 238.7 238.5 239.1 240.6 243.3 246.1 247.1 247.8 247.8 247.5 247.2 246.4 245.9 246.2 246.7 246.9 247.7 248.2 248.7 248.7 248.4 248.1 247.8 247.5 247.2 246.9 246.7 246.5 246.4 246.5 246.7 246.9 247.7 248.1 248.1 247.7 247.5 247.2 246.9 246.9 246.4 245.8 245.1 245.2 245.7 246.2 246.7 247.1 246.4 247.8 247.4 246.8 247.5 247.1 247.5 247.4 246.8 246.4 246.4 246.2 246.1 245.9 245.7 245.7 246.1 246.1 245.9 246.5 247.5 247.9 248.1 248.2 248.1 247.8 247.5 247.1 247.2 247.9 248.5 248.9 249.2 248.1 247.7 247.8 247.2 247.7 248.1 246.9 245.5 245.1 245.0 244.8 243.8 243.7 245.1 246.1 247.8 249.4 250.1 250.5 251.0 251.5 251.7 250.1 249.2 245.9 236.1 225.7 209.1 204.3 202.4 200.3 196.6 195.9 189.9 186.1 185.4 186.1 189.4 190.0 192.0 193.2 192.9 192.6 192.3 192.1 192.1 192.6 193.1 193.2 193.3 193.4 193.9 194.8 195.5 196.3 196.9 197.2 197.4 197.4 196.9 196.8 197.4 197.4 192.5 184.9 191.9 200.1 209.8 218.9 229.0 243.8 259.2 261.8 263.3 265.3 266.0 264.4 263.6 262.7 264.2 269.4 273.7 278.1 285.0 290.3 292.5 294.3 294.9 294.3 293.7 291.5 291.1 290.8 290.3 290.5 289.6 281.5 261.0 235.4 232.9 235.0 238.5 240.3 244.1 248.2 253.6 256.9 260.3 267.9 266.0 263.1 260.7 259.5 258.9 258.8 258.8 258.8 258.9 258.9 259.4 259.7 260.1 260.4 260.9 261.2 261.5 261.8 261.9 261.9 261.2 260.7 260.7 260.7 259.5 257.4 241.6 212.6 212.8 221.1 237.9 246.5 266.7 284.0 294.5 297.2 295.9 296.9 297.2 295.4 294.7 293.8 293.5 291.6 291.0 290.5 290.1 290.3 291.0 291.5 292.1 292.1 292.1 293.0 294.2 295.0 295.0 294.7 294.3 293.7 292.5 291.3 291.0 291.1 291.6 292.1 292.6 292.8 292.5 292.1 293.0 292.5 291.6 291.5 291.6 291.8 292.3 292.6 293.3 293.8 294.2 294.7 294.5 293.8 294.2 293.2 292.8 293.0 293.3 292.6 292.1 292.8 293.5 293.7 293.7 294.0 294.5 294.2 293.7 293.3 292.6 292.1 292.0 291.5 290.6 290.0 290.5 292.5 293.0 293.3 293.5 294.5 296.0 297.2 296.9 295.4 293.7 293.0 290.8 286.8 286.5 287.6 289.3 289.8 293.8 296.7 297.8 301.4 302.4 301.4 298.5 295.4 290.6 287.5 284.2 283.5 283.8 285.5 289.1 292.0 296.4 299.5 300.7 301.4 300.5 298.3 293.3 288.6 286.0 284.5 285.5 287.1 291.6 296.7 297.8 300.2 302.1 301.6 299.7 295.9 293.5 287.3 283.3 282.0 282.4 285.0 289.3 295.0 299.1 300.0 299.5 295.5 293.3 287.3 281.7 279.1 277.7 273.4 273.4 273.4 273.4 273.4 273.4 273.4 273.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 109.47916666666667 - }, - { - "text": "AP", - "ph_seq": "d an j iu s uan zh e y i k e AP z ai sh ir w ang AP z ai sh ir l uo AP n i y En zh ong n a t uan h uo AP y E y i r an y ao x ian h uo SP", - "note_seq": "B3 B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest A4 A4 B4 B4 B4 B4 rest A4 A4 G4 G4 G4 G4 rest B3 B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest A4 A4 B4 B4 C5 C5 C5 C5 B4 B4 B4 B4 rest", - "note_dur_seq": "0.111979 0.282986 0.133681 0.232639 0.184028 0.295139 0.121528 0.407986 0.008681 0.118056 0.090278 0.625000 0.284722 0.131944 0.228299 0.188368 0.148438 0.059896 0.625000 0.284722 0.131944 0.228299 0.188368 0.140625 0.067708 0.833333 0.460069 0.164931 0.343750 0.072917 0.295139 0.121528 0.251736 0.164931 0.242188 0.174479 0.125000 0.083333 0.625000 0.343750 0.072917 
0.407986 0.008681 0.151910 0.056424 0.968750 0.072917 0.223958 0.192708 0.187500 0.125000 0.937500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.111979 0.282986 0.133681 0.232639 0.184028 0.295139 0.121528 0.407986 0.008681 0.118056 0.090278 0.625000 0.284722 0.131944 0.228299 0.188368 0.148438 0.059896 0.625000 0.284722 0.131944 0.228299 0.188368 0.140625 0.067708 0.833333 0.460069 0.164931 0.343750 0.072917 0.295139 0.121528 0.251736 0.164931 0.242188 0.174479 0.125000 0.083333 0.625000 0.343750 0.072917 0.407986 0.008681 0.151910 0.056424 0.968750 0.072917 0.223958 0.192708 0.187500 0.125000 0.937500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 236.9 237.2 237.6 237.7 238.1 238.4 238.7 238.8 238.9 238.9 245.7 246.7 245.8 243.4 243.1 243.4 243.8 244.8 245.5 245.5 246.8 247.4 247.8 248.2 248.5 247.1 245.7 245.2 244.5 244.5 245.7 246.9 247.8 248.1 247.7 246.5 238.0 222.2 204.7 206.6 212.1 228.7 242.2 251.1 267.5 277.3 283.0 296.0 296.9 296.6 294.0 291.1 290.3 289.0 288.6 289.0 289.6 291.0 291.8 292.8 293.3 293.7 294.0 294.0 293.5 294.0 295.2 295.2 286.8 261.3 253.9 262.7 279.4 291.5 300.5 310.1 322.5 331.2 342.1 358.9 365.8 384.6 400.2 400.2 397.5 395.6 393.8 392.7 391.8 390.6 388.8 389.1 389.5 390.4 392.0 392.7 392.4 392.0 390.6 389.5 390.2 391.5 393.8 394.5 394.5 394.7 393.1 390.4 373.2 330.6 308.6 305.1 312.0 321.2 326.5 342.2 375.4 404.6 432.7 436.0 436.0 431.7 426.0 424.0 423.8 425.5 428.5 434.4 438.5 440.0 443.1 442.5 442.0 441.0 438.0 437.5 437.5 438.0 438.7 440.0 441.0 440.5 439.5 439.0 439.0 439.0 440.3 442.3 445.9 449.2 457.1 465.9 479.4 494.7 500.2 505.1 503.4 501.6 499.3 495.9 491.0 480.7 481.2 484.0 486.8 493.0 491.6 488.2 483.7 479.0 463.2 470.8 488.2 496.2 497.3 497.3 495.9 494.7 493.9 492.7 491.6 491.0 491.6 491.0 490.2 490.5 490.2 488.8 486.5 486.2 491.3 495.3 503.1 509.2 512.2 512.2 508.9 504.6 495.0 486.8 484.3 480.1 482.0 484.6 491.9 496.7 504.0 509.2 510.1 508.1 501.4 490.2 483.2 478.7 478.7 480.7 489.1 498.2 502.8 509.5 518.1 521.1 510.9 483.7 451.5 479.9 468.4 463.7 461.9 461.6 461.3 461.1 460.5 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 401.9 401.4 400.9 400.7 400.5 400.2 400.0 399.8 399.8 399.8 406.5 414.1 408.4 403.7 400.2 399.8 402.1 409.1 421.6 432.2 437.5 444.3 441.8 440.3 439.5 438.7 438.0 439.2 442.3 444.3 437.7 410.8 386.8 390.9 403.5 419.6 431.2 437.7 444.6 448.2 454.7 460.0 468.9 477.9 480.8 493.6 506.9 505.4 501.6 499.6 495.3 493.3 492.7 493.3 494.2 494.2 493.3 490.8 489.6 489.9 491.6 494.5 496.5 498.2 502.2 504.0 504.8 504.3 503.1 500.5 495.3 492.7 491.3 488.2 486.5 487.6 490.2 492.2 495.9 500.8 502.8 503.4 502.8 500.2 497.6 493.0 489.3 487.9 487.4 487.9 489.6 492.2 493.3 494.7 498.2 500.2 499.9 497.6 492.5 483.2 477.9 473.2 474.6 477.9 485.7 499.6 505.4 511.9 512.5 510.4 502.2 489.6 481.5 475.5 465.3 454.7 444.7 486.4 477.6 467.7 464.0 463.7 463.2 462.7 462.1 461.6 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 396.6 395.9 395.4 395.0 394.5 394.3 393.8 393.6 393.6 394.7 410.8 412.2 405.8 401.2 399.1 398.4 403.0 406.8 420.1 427.2 435.4 441.0 439.7 441.0 443.1 440.5 439.0 441.3 445.6 445.1 434.9 397.9 384.2 392.4 413.6 426.2 436.5 435.7 428.2 419.2 408.6 404.4 400.1 389.5 380.8 390.0 401.4 401.9 401.2 398.2 395.4 394.5 393.1 390.4 388.6 389.1 390.2 390.2 390.0 390.4 391.1 393.6 396.8 398.6 399.8 400.5 
400.9 400.9 398.4 397.9 395.6 392.7 389.7 387.7 387.7 387.7 388.6 389.5 393.4 395.6 397.9 398.4 397.0 394.7 390.0 386.4 383.5 381.9 382.8 388.2 391.8 393.4 398.8 401.2 401.6 400.5 397.7 395.6 387.3 382.2 379.7 380.6 383.0 388.2 395.6 402.3 404.2 405.4 404.4 402.8 395.6 388.2 383.3 378.9 378.0 380.2 386.2 395.9 401.6 403.7 405.4 403.9 399.5 391.5 385.7 384.2 383.9 388.6 392.9 399.3 405.1 406.3 389.6 344.0 306.6 422.6 376.0 349.7 316.4 280.4 258.0 227.8 203.0 193.0 188.6 188.6 188.7 187.9 188.3 188.1 187.5 186.8 186.9 187.3 186.7 186.6 186.8 187.0 187.3 187.4 187.5 187.4 187.3 187.1 187.7 188.0 188.4 208.9 209.5 210.8 213.6 214.9 218.6 222.9 226.6 231.5 237.8 241.4 244.1 244.5 245.4 245.9 244.7 244.1 242.7 242.7 243.0 244.0 245.4 245.4 246.9 246.7 244.4 244.0 243.7 242.8 241.7 241.9 242.4 242.7 243.3 243.8 244.1 244.4 246.2 247.5 247.9 248.1 248.1 248.2 247.5 246.9 245.1 245.4 245.9 246.7 244.1 241.9 238.7 235.7 234.2 240.0 253.3 264.7 280.2 291.1 295.2 297.6 297.2 295.2 290.8 288.6 289.1 289.6 291.3 292.0 291.8 293.0 292.1 291.5 290.5 290.6 291.5 293.7 296.2 296.9 296.9 295.4 294.0 292.8 290.1 280.2 260.7 252.7 254.0 269.0 295.4 308.1 329.8 353.1 369.6 389.7 399.5 399.3 397.7 395.9 392.7 389.7 389.3 389.1 389.7 392.2 393.8 394.0 393.1 392.2 388.4 387.0 387.3 389.3 391.3 392.9 393.8 396.1 395.6 394.7 393.4 389.7 387.0 381.5 377.6 377.3 377.4 379.5 381.3 387.9 403.5 407.5 406.1 398.6 395.0 392.9 393.1 395.9 401.9 415.5 420.6 434.9 442.5 444.6 443.8 442.0 439.0 437.2 437.2 437.5 438.0 439.5 441.0 441.5 443.8 444.6 437.5 412.7 435.2 440.3 438.5 436.2 436.2 443.3 460.8 473.4 495.3 501.6 501.1 502.8 499.9 495.3 494.5 493.3 491.9 490.5 477.9 458.2 415.3 387.5 379.5 395.0 415.5 435.2 473.2 488.8 495.9 503.4 504.0 502.5 499.6 493.6 486.5 484.6 482.9 485.4 488.2 490.5 497.0 501.9 505.7 508.1 506.0 502.2 497.6 491.3 482.6 480.9 481.5 484.8 489.1 494.2 503.7 510.4 511.6 510.4 503.7 499.9 489.1 483.2 480.9 482.0 484.8 489.3 496.5 502.5 504.3 502.2 499.6 494.7 486.2 481.8 480.4 482.0 487.9 486.1 478.3 460.7 500.3 491.7 482.8 480.7 480.7 480.7 480.7 480.7 472.4 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 438.0 430.4 430.7 430.7 430.7 430.7 430.9 430.9 430.9 431.2 431.7 433.7 432.7 429.7 429.0 428.7 430.0 427.5 426.5 428.5 430.4 432.9 436.0 438.0 438.7 439.5 439.0 438.5 437.5 438.5 439.5 441.0 440.8 439.7 438.2 437.7 437.5 437.0 436.7 437.0 434.9 429.5 428.2 431.0 442.7 459.3 480.9 493.6 498.8 503.1 502.2 499.6 495.9 493.0 491.3 490.5 492.7 491.9 485.1 479.3 481.1 488.2 502.8 511.6 517.2 521.1 521.1 520.8 520.5 519.3 518.4 516.3 516.6 517.2 518.4 519.3 519.9 520.8 522.9 525.4 528.7 533.0 533.3 530.6 527.8 526.6 522.6 522.6 519.3 518.1 517.5 518.1 521.7 522.6 523.3 523.9 524.5 524.2 523.9 521.4 519.9 519.0 516.6 516.9 518.1 520.5 524.5 526.6 529.0 530.9 530.6 529.0 527.5 526.3 520.5 517.2 516.0 516.6 518.1 519.0 521.1 523.9 525.7 528.4 529.0 529.3 528.4 526.6 526.6 525.7 522.9 523.9 525.1 526.9 529.0 529.9 529.6 529.9 531.2 530.9 529.9 529.6 528.1 526.6 526.3 528.1 526.9 529.9 535.5 538.9 541.4 544.8 548.3 548.0 550.5 548.0 547.7 546.4 542.3 540.1 535.5 527.8 524.5 526.9 523.6 523.9 522.9 522.9 520.8 522.3 523.6 524.8 525.7 525.1 524.8 523.6 502.2 460.5 460.0 478.7 500.5 497.0 497.3 497.6 496.5 493.3 491.4 489.2 485.4 494.2 502.8 509.5 507.2 504.3 500.8 494.2 490.5 484.8 483.2 483.4 487.6 495.0 498.2 501.1 501.9 501.1 482.3 436.0 401.6 397.7 413.6 427.0 433.9 452.1 463.7 473.2 491.9 502.2 504.3 506.9 505.1 503.7 498.8 495.0 493.9 491.6 491.6 491.9 
493.0 494.2 495.3 496.5 497.3 497.6 496.2 495.3 495.6 493.9 491.6 491.0 491.3 491.9 492.2 493.0 495.6 497.3 499.9 500.5 499.3 498.5 494.5 489.6 487.9 488.2 489.3 490.8 493.6 496.2 497.3 496.7 496.2 494.5 491.0 487.1 486.0 486.5 489.3 491.3 494.7 497.0 497.6 497.6 495.9 493.3 491.3 490.8 491.3 492.2 495.6 497.9 499.0 499.3 498.5 493.9 492.7 491.9 491.3 493.6 495.0 496.2 499.0 503.4 506.3 505.7 501.9 494.5 467.5 467.5 467.5 467.5 467.5 467.5 467.5 467.5", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 115.3046875 - }, - { - "text": "AP", - "ph_seq": "AP z ai c an p o r en j ian p in c ou y i g e SP", - "note_seq": "rest B4 B4 A4 A4 G4 G4 E5 E5 E5 E5 D5 D5 D5 D5 B4 B4 G4 G4 rest", - "note_dur_seq": "0.388889 0.131944 0.256944 0.159722 0.255208 0.161458 0.729167 0.104167 0.699653 0.133681 1.088542 0.161458 0.256944 0.159722 0.407986 0.008681 0.294271 0.122396 1.250000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.388889 0.131944 0.256944 0.159722 0.255208 0.161458 0.729167 0.104167 0.699653 0.133681 1.088542 0.161458 0.256944 0.159722 0.407986 0.008681 0.294271 0.122396 1.250000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 493.9 491.9 491.9 491.9 489.3 489.3 489.3 490.2 492.7 494.5 491.9 488.2 487.9 489.3 489.9 493.3 494.5 495.6 494.7 492.7 490.8 490.8 490.8 490.8 492.7 494.5 495.9 495.9 495.9 491.9 490.8 489.9 491.6 492.7 494.7 494.2 492.2 469.1 445.1 460.5 463.2 459.2 458.9 458.2 454.5 452.2 453.3 451.7 453.8 453.7 455.8 456.0 453.4 449.8 447.7 446.4 442.5 441.3 441.0 441.0 441.0 440.8 440.8 440.5 440.5 441.0 438.7 439.0 438.5 438.0 437.5 440.3 442.3 443.1 444.6 443.3 438.7 414.1 383.9 373.0 360.7 345.5 349.2 347.7 345.1 347.2 358.0 384.4 395.6 397.0 397.0 395.2 393.1 389.5 387.3 387.9 390.4 393.1 394.3 395.9 395.6 394.5 390.9 387.3 385.9 384.2 384.2 387.9 389.5 393.8 394.7 395.9 395.2 394.3 387.0 381.3 379.1 378.2 381.1 387.0 391.8 400.0 404.9 406.1 405.4 400.9 396.8 385.9 380.2 378.6 381.7 384.8 389.1 393.1 396.1 396.1 395.2 393.8 391.8 390.2 390.0 390.4 391.1 392.4 394.5 397.7 399.5 401.4 406.1 408.2 410.8 416.5 420.1 431.6 458.7 493.5 515.5 559.2 592.8 605.2 605.9 609.4 605.9 604.2 605.2 607.7 609.1 609.8 611.6 614.4 619.7 631.7 637.9 655.1 670.0 675.4 675.8 672.3 668.1 653.9 644.2 639.7 638.6 640.9 645.3 652.4 665.4 676.2 685.7 686.9 684.9 674.7 662.3 650.9 644.9 642.3 643.1 646.8 656.2 663.8 667.3 670.4 670.4 670.4 665.4 659.6 655.5 648.7 647.2 647.9 651.7 656.2 659.3 656.6 664.6 670.0 669.2 663.1 659.3 663.8 664.2 662.3 653.9 656.2 649.4 661.5 660.4 660.0 659.3 638.6 657.7 662.3 660.4 665.0 666.9 664.2 662.3 659.6 657.0 653.6 651.7 650.2 643.5 644.2 646.8 643.8 647.6 647.9 651.3 655.5 657.4 659.3 660.0 663.5 659.3 656.2 654.7 654.3 655.1 655.8 658.5 658.5 658.1 657.4 657.0 656.2 655.5 655.1 654.7 654.3 654.3 655.1 655.5 655.8 655.8 655.5 655.1 654.3 653.9 653.6 653.2 653.2 653.9 654.3 655.1 656.2 656.6 656.2 657.0 657.0 656.2 655.5 653.6 649.4 645.7 644.6 646.4 650.6 655.5 660.8 664.2 669.2 666.5 660.8 656.2 647.9 640.9 640.9 645.3 649.1 653.9 661.5 667.7 668.8 664.6 658.9 656.6 650.2 650.2 653.2 659.6 669.2 678.6 682.1 680.9 665.4 639.0 634.2 647.9 648.7 646.1 648.3 645.3 633.8 617.8 602.5 568.0 575.2 585.0 586.3 584.6 583.3 580.6 578.6 578.2 579.9 582.3 583.9 584.3 583.9 583.9 583.6 584.6 586.3 
588.3 589.0 589.7 589.4 589.0 588.0 585.0 555.6 528.4 545.2 555.6 556.9 553.1 547.7 544.2 540.8 534.6 526.9 521.1 542.0 581.9 587.0 586.0 584.3 592.4 591.4 590.0 589.4 588.0 587.0 586.3 585.3 583.9 583.6 583.6 580.6 580.6 580.6 580.6 583.6 583.6 583.6 587.3 589.7 588.0 583.6 581.6 571.3 567.0 556.6 514.1 509.4 492.6 452.3 464.3 471.0 480.4 489.1 490.8 490.8 492.5 493.3 493.3 493.3 493.3 493.3 492.5 490.8 492.5 493.3 495.0 497.3 495.3 493.9 492.2 490.8 490.8 490.8 489.9 471.6 459.2 455.8 454.2 428.5 399.8 397.7 382.3 381.3 381.8 386.3 392.4 393.6 393.6 395.6 395.6 395.0 393.6 392.0 391.1 390.0 390.2 390.4 390.9 391.5 391.8 392.0 391.8 391.3 390.9 391.1 391.3 392.9 393.8 393.8 394.7 393.6 392.9 392.0 392.0 392.7 392.9 393.8 393.4 392.9 392.4 393.4 393.1 392.0 391.1 390.6 389.3 389.7 390.2 392.2 394.3 394.5 394.7 395.2 395.2 394.7 394.3 393.8 390.9 387.7 385.0 384.8 386.4 387.5 390.6 394.3 395.2 396.8 397.9 397.2 397.0 395.4 393.6 389.5 386.2 385.5 384.8 386.6 388.6 390.9 395.4 400.0 401.2 401.6 400.2 398.2 393.8 386.2 383.3 381.7 383.0 385.3 389.7 392.7 395.2 400.0 401.4 400.9 398.8 392.2 384.6 374.7 370.0 370.4 376.2 383.9 390.0 403.2 409.1 413.2 414.1 406.5 394.5 342.4 342.4 342.4 342.4 342.4 342.4 342.4 342.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 128.64583333333334 - }, - { - "text": "AP", - "ph_seq": "w an zh eng d e AP h un p o AP l u zh ong y ou j in t ou AP d an x ing h uo y ong b u zh ui l uo SP", - "note_seq": "E4 E4 F#4 F#4 G4 G4 rest A4 A4 G4 G4 rest G4 G4 A4 A4 B4 B4 C5 C5 C5 C5 rest G4 G4 A4 A4 B4 B4 C5 C5 C5 C5 D5 D5 D5 D5 rest", - "note_dur_seq": "0.111111 0.503472 0.121528 0.147569 0.060764 2.500000 0.263021 0.153646 0.255208 0.161458 1.250000 0.291667 0.125000 0.295139 0.121528 0.343750 0.072917 0.282986 0.133681 0.113715 0.094618 4.791667 0.304688 0.111979 0.223090 0.193576 0.263021 0.153646 0.343750 0.072917 0.146701 0.061632 2.170139 0.121528 0.708333 0.125000 3.750000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.111111 0.503472 0.121528 0.147569 0.060764 2.500000 0.263021 0.153646 0.255208 0.161458 1.250000 0.291667 0.125000 0.295139 0.121528 0.343750 0.072917 0.282986 0.133681 0.113715 0.094618 4.791667 0.304688 0.111979 0.223090 0.193576 0.263021 0.153646 0.343750 0.072917 0.146701 0.061632 2.170139 0.121528 0.708333 0.125000 3.750000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "352.3 347.8 342.6 334.8 327.0 307.9 307.4 309.9 313.7 314.4 315.8 321.5 324.2 324.7 325.3 325.8 326.2 327.9 328.5 328.7 328.7 328.3 326.8 326.0 325.5 325.3 325.5 325.5 325.1 325.7 325.1 325.5 325.7 324.7 324.9 325.3 325.8 327.0 327.9 328.5 327.7 327.4 326.6 325.8 326.4 326.4 327.0 327.5 328.7 329.4 328.9 328.3 327.2 326.4 323.2 312.2 291.1 271.8 288.6 299.1 314.5 327.1 335.2 346.1 358.2 370.0 374.3 372.1 370.4 368.3 366.8 367.9 369.8 370.9 370.0 368.5 365.1 356.2 342.3 315.7 315.8 338.1 367.9 379.5 378.9 380.6 382.2 381.5 380.4 379.3 378.6 377.3 375.8 375.8 375.6 375.4 375.2 375.4 377.8 380.2 382.6 387.3 392.0 396.3 398.4 398.6 397.5 394.0 392.4 390.4 390.0 389.5 389.5 390.6 391.5 391.3 391.3 391.1 391.3 392.4 391.3 390.6 390.6 391.1 391.3 391.8 392.2 392.4 392.9 393.1 393.4 392.4 392.9 392.4 391.5 391.8 392.0 392.2 390.9 390.9 391.3 391.8 392.0 391.1 392.0 392.2 391.3 390.6 390.2 389.7 389.7 389.5 389.5 389.3 390.2 391.3 391.5 390.6 390.0 390.0 389.5 389.1 390.9 390.2 389.7 390.4 390.4 389.5 388.4 389.7 390.6 389.3 390.6 391.5 390.6 392.2 392.2 392.0 
390.6 390.6 389.7 388.8 389.3 390.0 389.7 388.6 389.1 390.0 390.6 390.0 390.6 391.3 391.5 391.3 391.1 391.1 390.9 390.6 390.6 390.4 391.3 392.0 392.0 391.3 392.2 392.0 391.3 390.9 390.4 391.3 391.1 390.4 389.7 389.7 390.2 390.6 390.9 390.6 389.1 388.2 388.8 388.6 388.4 389.5 390.4 390.6 390.9 391.1 391.3 391.5 391.8 390.4 389.3 389.5 389.5 389.1 388.6 388.2 388.8 390.6 392.9 394.3 395.0 393.1 392.7 390.6 387.5 386.4 383.7 383.0 384.6 387.7 390.6 392.0 395.6 396.8 397.2 395.9 392.9 391.5 388.6 385.0 383.5 383.0 384.2 386.2 390.2 396.1 398.6 401.9 404.4 401.6 396.6 390.0 380.0 377.1 374.9 378.0 381.7 389.1 396.6 400.9 403.2 401.2 397.9 392.2 383.7 379.7 378.4 380.2 385.5 394.3 395.9 395.4 392.4 385.7 382.6 363.8 337.9 336.7 335.2 334.6 334.2 333.8 333.1 332.3 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 331.2 329.1 327.2 326.0 324.3 362.8 362.0 361.1 360.5 360.3 382.6 400.7 407.0 409.4 408.4 407.2 403.9 404.4 405.1 409.4 413.9 419.6 427.5 436.2 440.8 444.6 445.6 443.3 442.0 441.0 440.8 440.5 442.3 441.5 439.5 429.0 420.4 423.1 424.5 409.4 401.9 395.4 388.2 381.5 374.4 389.3 398.6 403.0 405.6 406.1 403.9 404.4 403.2 400.5 397.5 395.6 394.5 393.6 393.1 392.2 391.3 391.1 391.3 391.5 391.5 391.8 392.0 392.2 392.4 392.4 391.5 391.5 391.8 391.3 390.6 390.9 391.1 391.5 391.8 391.8 391.3 391.5 393.8 394.3 393.8 393.4 393.1 392.7 392.2 392.0 392.0 392.9 393.6 394.5 394.7 394.3 393.6 391.5 390.2 390.2 390.4 391.1 393.1 395.0 396.3 397.5 396.8 395.9 394.5 391.1 387.5 385.5 381.3 380.0 381.3 384.2 388.2 392.2 396.3 398.6 399.1 398.8 396.8 395.2 392.0 388.4 385.7 384.4 384.2 384.8 385.9 390.4 393.8 397.9 400.9 401.2 399.3 396.8 391.8 387.0 382.8 382.4 383.7 386.8 390.4 397.9 402.6 404.4 404.9 403.2 393.6 384.4 368.1 365.3 365.1 364.9 364.7 364.7 364.5 364.3 364.1 374.7 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 376.7 354.3 354.1 353.7 353.5 353.3 353.1 352.9 352.7 352.7 362.4 369.1 367.7 362.2 354.5 350.6 348.8 351.3 355.7 369.8 379.5 386.4 392.4 395.2 395.9 393.1 390.6 389.7 389.5 389.3 390.2 392.9 394.0 393.8 392.9 381.5 358.2 325.8 333.8 347.6 359.9 370.9 380.9 394.2 406.8 416.5 440.5 453.2 452.4 449.8 447.7 442.5 440.8 439.5 440.0 440.8 442.0 442.5 442.0 440.0 437.5 435.7 436.0 437.0 438.5 443.6 446.1 446.9 443.8 443.1 441.8 436.5 431.2 427.2 417.0 402.3 400.5 398.8 406.4 419.0 443.9 460.5 465.9 473.5 471.0 469.4 461.1 457.1 455.5 457.4 462.9 474.3 479.3 490.8 497.0 498.8 499.6 497.9 494.5 492.7 490.2 489.6 490.8 491.9 492.7 493.9 495.0 494.7 493.3 485.7 466.2 414.8 399.1 412.0 450.9 476.5 492.9 509.5 524.8 527.8 526.3 526.0 524.8 522.9 523.6 524.8 525.7 524.5 520.5 506.6 481.5 491.9 506.0 514.6 529.0 537.7 525.4 516.6 518.4 520.8 522.3 522.3 522.0 521.7 521.4 521.1 520.8 521.4 522.0 522.3 520.1 518.4 522.5 528.7 542.3 556.9 570.6 589.0 598.6 602.8 604.2 603.1 600.4 594.2 591.1 590.0 589.0 589.7 590.4 590.0 590.4 590.4 590.4 590.7 588.7 588.3 588.7 589.4 589.7 589.7 588.7 588.0 587.7 589.4 589.4 588.7 590.0 591.1 591.8 590.7 590.0 589.0 589.4 590.0 590.7 590.0 589.4 590.0 588.3 588.3 589.7 590.7 591.4 592.4 590.7 590.4 592.1 593.5 592.1 591.8 592.1 593.1 593.5 592.1 592.4 591.4 592.8 591.8 591.4 591.8 592.1 592.4 592.1 591.1 590.4 591.1 592.1 591.8 591.1 592.1 592.8 593.1 591.4 590.0 589.7 590.0 590.4 590.7 591.1 591.4 590.7 590.0 589.4 588.7 587.3 588.0 589.4 589.4 589.0 588.3 589.4 590.4 591.1 592.1 591.4 590.7 590.0 590.4 590.7 590.0 589.4 590.0 590.4 589.4 588.7 588.7 589.4 
589.7 590.0 589.7 589.7 590.7 590.4 589.7 589.7 591.1 591.4 591.1 592.1 591.1 590.4 592.1 592.1 591.4 589.7 590.4 589.0 587.3 587.0 589.4 588.0 587.7 588.7 590.0 590.0 589.7 589.4 589.0 588.7 589.7 590.0 589.4 588.7 590.0 589.4 589.0 590.4 589.7 589.7 590.4 590.7 591.4 591.1 590.7 589.4 588.0 587.3 588.3 587.3 588.3 589.4 590.4 589.7 588.7 588.3 588.3 588.3 588.3 588.3 587.3 586.7 587.3 589.0 590.0 590.0 589.7 589.4 588.3 588.0 587.0 587.7 588.3 586.7 586.7 587.7 587.0 589.7 588.3 588.0 588.7 589.0 589.7 590.0 589.7 589.7 590.7 590.0 589.7 590.4 591.1 591.4 591.1 590.7 589.7 588.7 588.0 587.0 586.3 586.3 587.3 587.0 586.3 585.6 587.7 587.3 588.3 586.3 586.3 587.3 586.7 587.3 588.3 588.7 589.4 589.4 590.0 590.4 590.4 588.7 586.7 586.7 587.3 588.0 588.7 591.1 590.4 588.7 586.7 586.7 586.7 586.7 587.0 587.0 587.0 587.3 587.3 586.7 587.3 587.3 586.3 587.0 587.7 588.7 589.4 588.7 588.0 587.0 586.3 586.0 587.0 585.0 586.0 588.0 589.0 589.7 590.0 590.0 590.0 589.7 589.4 589.4 589.0 589.0 588.7 589.7 591.1 590.0 588.7 588.7 588.0 585.6 585.6 586.0 586.3 586.7 587.0 587.3 587.0 587.0 586.7 586.3 586.7 588.7 588.0 589.4 590.0 591.1 592.1 592.1 591.8 591.1 592.1 589.4 590.0 590.7 591.4 591.4 591.4 591.1 590.4 589.4 587.7 588.0 587.7 586.0 585.0 585.0 585.3 585.6 585.6 586.0 586.3 585.6 584.3 583.9 585.0 587.0 588.3 590.7 592.1 593.1 593.5 592.1 590.0 587.0 581.6 576.6 574.2 574.9 577.9 584.3 590.7 600.7 607.7 610.2 611.2 608.0 604.9 596.6 583.9 572.6 563.4 560.2 564.1 572.6 579.9 585.6 595.2 601.8 604.2 602.4 599.7 595.2 584.3 573.6 571.6 570.9 573.9 581.3 589.4 595.5 599.3 601.8 601.8 596.9 594.2 589.0 586.3 585.3 589.7 596.6 600.0 604.9 607.0 606.7 596.2 554.6 505.3 654.8 601.8 572.7 559.2 555.0 549.3 548.3 538.0 533.6 525.7 517.8 515.2 511.6 506.3 503.1 496.7 493.3 487.4 480.4 474.9 469.4 465.9 457.4 451.6 446.4 442.3 432.4 426.7 421.1 412.9 408.4 403.9 402.3 399.3 389.7 379.7 366.6 376.2 386.2 387.3 386.6 386.6 386.8 387.3 387.7 388.6 390.2 391.1 390.9 390.4 390.9 391.5 392.2 393.1 394.5 393.6 388.4 384.2 365.7 359.0 362.4 371.5 375.4 379.7 388.6 394.3 398.2 402.8 408.7 415.7 422.4 429.4 438.7 445.9 450.8 447.7 443.6 440.3 436.0 433.9 433.2 433.7 435.7 438.7 439.2 439.2 438.5 438.7 440.0 443.1 443.6 443.1 442.5 441.3 438.7 432.7 421.3 399.5 364.7 360.3 367.0 374.5 380.0 385.9 395.9 403.4 408.9 415.0 427.5 448.7 457.1 461.1 459.7 457.4 450.8 451.1 452.9 461.1 474.3 484.0 492.2 497.0 498.8 497.6 496.2 493.9 492.5 491.6 491.0 491.3 492.2 492.7 493.6 493.9 493.6 492.5 491.0 487.9 487.6 489.3 491.1 499.1 513.2 522.3 529.0 531.2 531.8 529.6 526.0 522.3 521.7 522.9 523.9 524.5 525.1 521.1 518.1 480.7 446.7 427.0 469.4 520.5 526.3 521.4 517.5 518.1 520.2 520.8 520.5 519.9 521.1 521.4 521.1 520.8 520.5 521.1 519.8 520.1 524.8 535.5 545.2 559.8 574.6 582.3 590.7 595.5 596.2 592.4 589.0 585.3 584.3 584.3 585.0 585.6 587.3 589.4 590.7 592.4 592.8 592.1 591.1 590.0 589.4 589.7 590.7 591.4 589.7 589.4 590.7 590.7 591.4 591.1 589.4 588.7 590.0 589.0 590.0 590.4 589.7 591.8 591.4 591.1 590.0 588.7 590.0 588.0 588.0 589.4 589.4 587.3 588.3 588.7 588.3 588.0 587.7 588.0 588.3 588.7 589.0 589.4 589.7 590.4 590.4 591.1 591.1 591.4 591.4 590.4 589.0 588.3 589.7 588.3 587.7 587.3 588.0 589.7 589.7 589.0 589.7 590.7 590.7 588.3 587.3 587.3 587.3 588.7 588.0 587.0 587.0 587.3 587.7 588.0 587.3 588.0 589.0 589.4 589.4 589.0 586.7 585.6 585.0 585.0 585.6 586.3 587.7 589.0 591.1 590.0 589.4 589.4 590.0 589.0 585.3 582.3 581.6 580.6 580.9 582.6 585.3 588.0 590.0 591.8 591.1 590.4 589.4 586.0 583.6 581.6 582.3 582.9 585.0 
588.3 592.1 593.1 592.8 593.5 595.5 594.8 594.2 591.8 588.7 586.3 585.0 583.9 584.6 585.3 588.0 592.4 594.2 595.9 595.9 595.2 592.1 587.0 585.3 582.6 581.9 583.9 586.7 590.4 593.5 599.7 605.2 603.5 568.0 530.9 524.5 523.9 528.7 526.3 527.8 532.1 535.5 536.4 538.0 541.7 545.2 547.0 548.3 552.1 566.3 581.6 589.7 592.8 591.8 590.7 589.4 588.0 586.7 585.3 584.6 584.6 586.0 583.3 586.1 593.2 606.9 620.1 640.1 670.0 685.3 699.3 708.2 710.3 707.4 699.3 692.8 690.0 688.8 693.2 699.3 704.1 710.8 708.1 696.9 679.0 660.4 627.7 597.3 583.6 582.9 583.3 583.3 586.7 591.8 593.8 592.4 590.4 590.0 589.4 587.3 586.3 586.7 586.7 586.0 587.7 590.0 589.7 589.0 588.0 586.7 585.0 579.6 577.2 571.3 561.8 557.6 554.7 554.0 563.1 566.0 569.6 573.6 576.9 579.6 577.2 577.6 580.9 578.6 576.2 572.9 573.6 573.9 570.9 571.9 572.9 573.3 571.9 574.2 578.9 581.6 582.6 582.6 582.6 582.6 582.6 583.9 585.3 586.3 587.0 587.3 586.7 586.3 587.0 588.0 587.7 587.3 586.7 586.0 586.0 586.3 587.3 588.3 588.7 587.3 587.3 588.3 588.7 588.0 588.0 587.3 586.3 585.3 586.7 588.0 588.7 589.4 589.4 589.7 589.7 588.3 587.7 588.3 589.0 587.7 587.7 588.0 587.7 587.3 588.3 588.7 586.7 586.7 587.7 588.7 588.0 589.0 587.7 588.7 589.4 589.4 588.7 588.3 588.0 587.7 588.3 589.0 588.0 587.0 586.3 586.0 586.0 586.0 585.6 585.6 586.3 587.0 586.3 585.6 586.0 587.0 586.3 585.3 585.0 585.6 586.7 586.7 586.3 588.7 588.3 588.0 587.3 587.3 588.0 588.7 589.0 589.0 588.3 587.7 587.0 586.7 586.3 587.0 587.7 589.0 590.0 590.0 589.0 590.0 590.0 589.4 588.3 587.7 587.0 589.0 589.0 590.0 589.0 588.3 587.7 588.7 587.0 587.0 588.7 589.7 590.4 589.7 589.7 589.0 589.0 590.0 589.7 588.7 589.0 589.4 589.4 588.3 586.7 587.0 587.7 588.7 589.0 588.7 588.3 587.7 588.7 588.7 588.0 589.0 588.0 585.3 585.3 585.3 585.3 586.7 587.3 587.3 587.0 586.7 586.3 586.0 585.6 586.3 587.3 586.7 586.0 588.3 589.0 589.7 590.4 590.4 589.0 588.0 587.3 586.7 587.0 587.3 587.7 587.0 586.3 585.6 584.6 584.6 585.3 586.3 586.3 585.6 585.0 587.0 587.3 587.3 587.3 587.0 587.0 586.7 587.3 587.3 585.6 584.6 583.6 583.9 584.3 583.9 583.3 584.3 586.0 586.3 586.3 585.6 585.3 585.0 584.6 584.3 586.7 587.7 588.0 588.3 590.7 592.8 594.5 595.2 594.2 592.8 591.8 588.0 587.0 584.6 581.6 577.6 576.9 576.6 579.9 581.6 584.6 590.7 595.2 599.3 600.7 600.4 598.3 593.1 587.3 580.2 576.6 576.2 578.2 584.3 587.3 590.4 594.8 596.6 595.5 591.4 588.0 585.3 584.3 583.9 586.0 587.0 587.3 590.0 591.1 593.8 593.5 592.1 588.3 586.0 583.3 576.6 573.9 574.6 578.9 581.6 584.6 592.1 595.5 599.3 606.3 607.3 602.4 594.8 590.4 578.9 572.3 569.6 570.9 572.9 576.2 583.3 590.0 592.8 594.8 596.2 596.2 596.6 596.6 596.2 592.8 589.4 586.3 580.6 576.2 565.4 562.8 562.8 562.8 562.8 562.8 562.8 562.8 562.8", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 135.72222222222223 - }, - { - "text": "AP", - "ph_seq": "AP sh ir j ie sh ir g ua y En d e AP m ao d un d e AP w u j ie d e SP", - "note_seq": "rest B3 B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest A4 A4 B4 B4 B4 B4 rest A4 A4 G4 G4 G4 G4 rest", - "note_dur_seq": "0.384549 0.188368 0.282986 0.133681 0.228299 0.188368 0.294271 0.122396 0.343750 0.072917 0.147569 0.060764 0.625000 0.258681 0.157986 0.305556 0.111111 0.147569 0.060764 0.625000 0.407986 0.008681 0.282986 0.133681 0.147569 0.060764 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.384549 0.188368 0.282986 0.133681 0.228299 0.188368 0.294271 0.122396 0.343750 0.072917 0.147569 0.060764 0.625000 0.258681 0.157986 0.305556 0.111111 0.147569 
0.060764 0.625000 0.407986 0.008681 0.282986 0.133681 0.147569 0.060764 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "227.8 227.4 219.6 208.1 200.8 195.5 195.4 195.3 195.7 195.1 194.6 193.9 193.2 193.1 192.9 192.6 193.1 193.4 193.2 193.4 193.7 194.2 194.0 193.1 192.9 193.0 193.3 193.5 193.7 193.9 194.4 194.9 194.9 194.4 194.0 194.6 194.5 194.3 194.1 193.9 193.9 194.8 194.0 192.4 190.9 189.6 190.0 190.3 191.0 191.2 190.0 210.3 230.3 236.6 237.8 238.7 239.1 238.1 237.8 239.1 240.7 244.2 245.8 246.5 246.5 246.1 245.5 245.7 246.5 246.5 246.8 247.7 247.5 246.9 246.4 245.9 238.1 216.3 204.7 202.9 211.5 221.8 236.9 252.6 261.9 277.7 286.3 292.3 299.7 297.9 297.8 296.6 293.0 291.1 290.1 289.5 289.5 289.5 291.3 292.6 293.7 294.5 295.0 295.4 295.7 295.0 293.3 275.6 255.8 249.1 260.1 267.4 271.2 279.1 287.1 290.6 299.7 306.3 311.8 318.2 327.9 337.4 383.3 397.7 396.1 393.4 392.2 393.6 391.1 389.7 389.5 389.5 388.8 384.6 388.8 390.6 393.4 395.4 394.0 392.2 390.6 389.5 388.8 389.7 391.3 392.4 393.8 396.3 391.3 365.7 343.8 350.0 357.6 378.0 382.7 388.4 400.0 418.7 430.7 440.3 446.4 445.9 443.3 439.0 435.4 434.9 431.9 432.4 434.9 439.0 440.5 442.5 442.3 441.5 440.3 438.2 438.5 439.2 439.7 440.3 441.0 440.5 440.3 440.0 439.0 437.5 438.2 436.7 435.2 436.5 441.1 457.2 467.5 484.6 496.2 500.8 503.1 504.0 500.5 495.0 491.9 491.0 491.6 493.0 494.5 495.0 493.9 491.9 482.6 449.8 449.2 483.4 506.3 505.4 505.4 507.5 506.9 504.6 502.2 501.4 498.5 494.2 492.7 488.5 485.4 485.7 486.5 487.9 488.2 493.6 497.0 505.1 507.5 509.8 506.0 503.7 496.7 488.5 482.6 479.5 480.4 484.3 490.2 499.0 503.4 507.2 508.1 504.8 498.5 492.5 485.1 479.0 477.1 479.5 482.0 493.3 500.8 508.4 515.5 517.5 516.0 505.7 496.1 474.7 444.5 428.7 467.8 457.8 454.2 453.7 453.4 453.2 453.2 453.4 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 410.5 410.8 411.0 411.5 411.7 412.0 412.2 412.2 412.4 412.4 412.4 410.3 407.2 404.4 400.5 398.8 398.2 402.6 408.6 422.8 431.4 437.0 441.5 443.1 443.1 442.0 441.0 438.0 437.2 436.7 436.7 438.2 439.5 441.8 443.3 443.8 443.3 442.5 439.2 406.3 415.3 428.6 431.4 456.4 475.3 499.0 501.4 500.2 500.2 496.5 495.0 493.3 492.5 492.7 494.5 495.9 495.6 494.7 491.0 478.4 459.2 443.3 466.4 494.2 505.4 504.6 505.7 507.5 505.7 504.3 502.5 499.6 496.2 492.7 490.2 487.6 486.0 486.8 487.6 489.1 493.6 494.7 500.2 506.6 508.6 509.5 506.0 503.7 493.9 486.8 480.9 478.7 482.0 486.2 491.6 501.4 504.8 508.1 506.6 502.5 497.3 489.9 480.9 478.7 478.2 480.9 485.7 496.2 507.2 511.0 516.3 517.8 515.7 506.0 483.0 465.9 438.5 471.4 463.4 455.2 453.4 453.2 452.9 452.9 452.6 452.4 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 397.5 397.0 397.0 396.8 396.6 396.6 396.3 396.3 396.3 403.5 416.0 411.2 405.4 397.5 395.0 394.5 397.0 400.2 408.6 417.7 424.0 435.7 442.0 442.5 444.1 443.1 441.5 439.2 438.5 438.0 437.7 440.5 442.3 441.5 432.9 421.3 417.7 428.2 441.3 439.2 443.8 433.9 420.4 392.5 364.5 372.6 388.8 395.6 396.1 395.6 394.7 393.8 392.9 393.6 392.9 391.5 385.5 364.5 328.3 316.4 319.1 338.9 370.6 390.2 395.2 396.1 399.5 403.0 403.5 402.6 400.7 397.0 393.1 390.4 385.7 383.5 383.3 383.9 387.0 389.3 394.5 397.7 401.9 405.8 406.5 403.7 398.4 393.4 386.8 378.6 377.6 378.0 383.3 388.2 395.0 401.9 405.6 406.3 403.2 397.5 393.4 382.4 377.1 374.9 376.7 379.5 386.4 396.8 406.5 408.6 413.2 412.0 406.5 397.0 374.7 366.4 356.4 356.4 356.4 356.4 356.4 356.4 356.4 356.4", - "gender_timestep": null, - "gender": null, - 
"input_type": "phoneme", - "offset": 171.51041666666669 - }, - { - "text": "AP", - "ph_seq": "er n i x in ch eng x ve r e SP", - "note_seq": "B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest", - "note_dur_seq": "0.251736 0.164931 0.223090 0.193576 0.258681 0.157986 0.223090 0.193576 0.151910 0.056424 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.251736 0.164931 0.223090 0.193576 0.258681 0.157986 0.223090 0.193576 0.151910 0.056424 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "240.6 250.5 252.6 248.9 246.7 244.4 243.1 243.3 243.8 246.1 247.5 249.4 251.7 250.8 249.5 245.0 242.1 241.0 241.0 243.5 245.0 245.9 247.8 249.8 249.5 249.1 249.2 247.5 245.9 242.0 237.3 236.1 238.7 245.2 254.0 259.7 276.2 297.9 300.4 298.5 294.2 291.5 288.6 285.0 284.3 284.3 286.0 287.5 289.6 291.5 292.0 292.3 293.2 293.8 294.9 294.9 294.2 288.8 265.4 251.1 248.9 256.1 263.7 275.9 286.0 293.7 304.5 315.3 324.7 334.9 342.4 356.6 371.1 392.7 398.4 396.1 393.4 390.6 388.6 387.3 387.5 387.7 388.4 389.5 390.0 390.2 390.6 391.1 392.2 393.1 392.4 392.2 393.1 393.8 392.9 390.6 385.3 363.6 365.5 373.9 382.6 387.5 393.1 399.8 405.8 408.7 417.1 427.4 436.7 447.7 453.4 449.5 447.7 445.4 440.0 439.0 436.0 435.7 435.7 434.2 434.7 436.2 439.0 440.8 441.5 442.0 442.8 442.8 441.8 439.2 433.9 415.3 411.5 420.1 434.7 438.0 439.0 444.1 446.7 450.3 458.9 464.3 466.8 469.0 492.7 504.0 504.8 503.1 498.5 495.6 491.3 490.5 491.0 491.3 494.5 496.2 496.2 493.3 491.6 486.5 490.8 494.2 499.3 499.9 499.6 499.0 498.5 497.3 495.0 491.9 488.8 487.9 488.2 488.8 489.3 491.0 492.2 492.5 493.9 496.2 497.9 499.3 500.2 500.2 499.0 498.5 495.0 491.9 491.6 491.3 489.3 487.9 489.1 492.2 495.3 497.6 499.3 499.6 499.9 499.0 495.0 491.0 486.8 483.4 481.8 483.7 488.5 492.7 499.0 502.2 505.1 507.5 505.4 501.4 493.6 471.6 471.6 471.6 471.6 471.6 471.6 471.6 471.6", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 178.75 - }, - { - "text": "AP", - "ph_seq": "sh ir zh ong sh ir sh an l iang d e SP", - "note_seq": "A4 A4 B4 B4 C5 C5 C5 C5 B4 B4 B4 B4 rest", - "note_dur_seq": "0.188368 0.295139 0.121528 0.105903 0.102431 0.853299 0.188368 0.291667 0.125000 0.147569 0.060764 1.041667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.188368 0.295139 0.121528 0.105903 0.102431 0.853299 0.188368 0.291667 0.125000 0.147569 0.060764 1.041667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 409.8 409.6 416.5 434.4 436.2 437.5 438.7 438.7 438.5 437.2 438.2 440.8 438.0 437.5 436.2 435.2 436.2 436.2 439.2 442.0 442.8 446.4 448.5 447.9 448.5 442.3 435.7 433.9 432.7 433.2 434.2 437.5 440.5 441.8 443.3 442.3 439.2 404.9 379.1 392.0 428.7 439.2 450.1 464.2 481.5 500.5 509.8 506.6 498.5 493.0 492.2 489.6 482.9 457.9 433.7 442.5 477.9 494.2 512.8 519.6 521.8 528.1 524.6 527.8 527.8 523.9 523.6 524.5 521.7 521.7 521.1 522.3 522.6 524.2 526.9 526.0 530.6 534.9 529.0 531.5 531.2 531.8 534.6 536.1 533.9 530.2 514.9 509.8 510.4 513.7 517.5 523.6 532.4 536.1 536.7 536.1 531.8 528.4 519.6 510.1 507.2 506.9 510.1 516.9 523.9 534.6 541.1 542.0 542.6 538.9 533.6 524.2 513.4 507.5 508.1 509.5 514.0 521.1 526.3 528.7 531.2 531.2 530.9 527.2 522.6 520.5 518.7 518.1 519.9 522.6 524.2 525.1 527.2 529.6 530.6 532.1 533.9 533.6 531.8 512.8 500.2 514.0 525.1 525.4 524.8 522.6 519.9 519.3 518.7 518.1 519.9 521.7 525.4 525.7 529.0 528.1 527.8 526.9 527.5 526.3 525.4 521.1 519.0 519.9 520.8 520.2 521.4 521.4 522.9 523.3 526.0 525.4 
526.9 526.0 524.2 521.7 516.9 512.2 505.1 500.5 497.9 495.0 494.5 494.5 495.6 495.3 494.1 493.6 495.0 493.3 493.0 490.8 491.6 492.7 493.9 493.6 492.7 491.3 491.9 491.3 491.9 487.1 472.4 435.4 423.3 451.8 495.9 506.9 505.4 507.5 507.8 507.2 505.4 501.9 500.8 495.3 492.5 490.8 488.8 489.3 490.8 492.7 493.9 497.0 497.9 501.1 503.4 504.0 502.2 500.2 496.7 490.8 486.5 485.7 485.7 489.3 491.6 496.7 500.8 502.2 502.2 499.3 497.3 495.0 486.2 484.3 482.6 484.8 486.0 492.5 499.3 502.2 505.7 506.9 505.7 503.4 498.8 493.0 485.7 483.2 483.7 486.0 489.6 497.9 502.8 507.2 510.4 509.5 506.6 499.6 493.0 485.1 482.0 482.6 486.8 491.9 497.0 501.1 506.9 508.6 505.7 499.0 492.5 486.2 483.2 485.7 490.8 495.9 502.5 508.4 508.6 506.9 502.2 497.0 491.3 468.9 468.9 468.9 468.9 468.9 468.9 468.9 468.9", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 181.47829861111111 - }, - { - "text": "AP", - "ph_seq": "AP z ai h un an r en j ian zh ao l iang w u sh u AP m o sh eng d e SP", - "note_seq": "rest B4 B4 A4 A4 G4 E5 E5 E5 E5 D5 D5 D5 D5 B4 B4 G4 G4 rest E4 E4 F#4 F#4 G4 G4 rest", - "note_dur_seq": "0.388889 0.131944 0.263021 0.153646 0.416667 0.729167 0.104167 0.699653 0.133681 1.128472 0.121528 0.291667 0.125000 0.407986 0.008681 0.228299 0.188368 1.250000 0.258681 0.157986 0.436632 0.188368 0.147569 0.060764 2.500000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.388889 0.131944 0.263021 0.153646 0.416667 0.729167 0.104167 0.699653 0.133681 1.128472 0.121528 0.291667 0.125000 0.407986 0.008681 0.228299 0.188368 1.250000 0.258681 0.157986 0.436632 0.188368 0.147569 0.060764 2.500000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "388.8 387.0 387.5 388.8 389.7 388.6 387.7 387.9 387.0 387.0 387.7 391.3 392.4 393.1 395.0 395.9 397.7 402.3 402.6 402.1 403.9 410.1 411.0 413.2 415.3 415.3 415.3 415.3 418.2 423.1 427.5 432.4 435.4 444.6 451.8 455.0 460.8 466.7 469.9 474.9 485.4 488.8 496.5 501.6 501.6 496.5 494.5 493.9 496.2 500.5 501.6 496.7 492.2 491.3 491.3 493.3 495.3 496.5 497.9 499.0 496.7 492.2 490.5 488.5 489.1 489.9 490.8 493.3 495.0 492.2 474.9 434.2 425.7 425.5 419.6 416.3 415.3 412.5 415.0 411.4 407.3 406.3 446.9 462.1 465.9 462.7 458.9 453.7 446.4 443.6 440.3 441.0 441.5 442.3 445.4 444.9 444.3 443.3 441.3 440.0 437.0 431.2 421.8 415.5 407.2 401.9 399.5 397.0 395.6 394.3 393.1 392.0 391.3 390.9 393.6 393.3 396.2 399.3 399.5 399.8 399.1 397.9 396.1 395.0 393.8 391.5 389.7 389.7 390.2 390.6 391.1 392.0 393.1 393.1 392.9 392.4 392.2 391.8 391.5 391.3 391.3 391.8 392.2 392.7 393.1 392.2 390.4 390.4 391.5 391.3 391.5 393.4 392.2 392.7 392.9 392.2 391.3 391.5 391.8 391.5 390.0 390.6 390.6 390.2 391.5 392.2 392.0 391.8 392.2 392.9 393.1 393.8 395.9 395.6 395.2 395.9 396.1 397.9 398.4 397.7 397.9 397.0 394.3 390.6 391.8 398.1 415.1 443.7 464.0 520.8 569.6 590.7 598.3 607.3 607.3 606.3 606.6 608.4 609.4 609.4 609.8 614.0 620.8 632.0 643.1 659.6 673.5 678.6 678.6 674.7 670.0 653.6 646.1 642.7 642.3 645.3 648.3 656.2 670.8 680.1 689.2 690.0 687.6 678.6 664.6 657.0 647.6 644.2 645.7 647.6 654.7 665.4 668.5 672.7 675.4 671.9 665.4 659.6 653.9 650.6 649.1 649.4 647.9 645.3 648.7 649.1 652.1 652.8 649.8 663.5 670.0 665.8 673.1 679.4 674.7 682.9 686.5 690.4 698.5 699.7 690.8 668.1 682.9 681.3 669.2 666.1 664.2 658.9 657.4 655.8 654.3 653.6 651.7 649.8 652.4 652.1 654.3 653.9 657.7 659.6 658.9 660.8 664.6 666.5 665.4 663.8 656.2 655.5 655.8 655.5 655.5 657.4 658.5 658.5 658.5 658.5 658.5 658.5 657.7 657.0 655.8 655.1 655.5 
655.8 656.2 657.0 657.0 656.6 656.6 656.2 654.7 657.4 656.6 655.1 655.5 655.8 657.4 657.7 657.7 657.4 655.1 655.1 653.9 655.8 655.1 658.9 659.3 658.5 658.1 658.9 660.8 660.0 656.6 661.5 661.2 660.0 660.0 656.6 659.3 656.2 653.9 651.3 650.6 649.1 652.1 651.3 660.4 655.8 655.5 655.1 652.8 654.3 653.9 652.8 650.9 652.1 653.9 653.9 650.2 653.2 640.5 623.0 603.1 594.5 593.5 585.2 572.8 567.3 548.3 545.5 565.7 579.9 587.3 587.7 586.3 583.6 581.9 580.2 581.3 581.6 583.9 586.7 588.0 588.3 586.7 585.3 583.3 581.6 581.3 581.6 582.6 583.9 583.9 583.6 582.6 581.9 582.3 583.6 583.3 584.6 586.7 586.0 590.4 590.4 587.3 588.0 592.4 592.4 592.1 592.1 590.0 586.7 584.6 585.3 583.3 582.3 581.6 578.6 584.6 585.3 586.7 593.1 592.4 589.4 586.7 586.0 581.9 577.6 577.9 579.2 583.9 588.7 593.8 595.9 596.2 594.5 589.5 583.1 579.4 561.7 527.8 513.1 498.5 491.6 490.5 490.5 489.1 489.6 490.2 487.6 487.4 487.1 487.4 489.6 491.6 492.5 493.3 493.9 498.2 500.8 503.1 493.9 485.7 478.4 476.8 477.6 470.8 456.8 451.1 443.8 434.9 425.0 415.4 406.1 393.2 387.9 375.6 385.7 390.6 396.6 398.8 399.8 400.0 396.3 394.0 392.4 390.9 390.6 390.6 391.3 391.5 392.2 393.4 394.7 395.6 395.0 394.3 393.1 392.4 392.0 391.5 391.3 391.5 391.8 393.6 394.5 394.7 395.0 396.3 395.2 396.3 396.8 396.1 395.6 394.0 392.9 391.3 390.6 390.2 389.3 389.7 390.6 390.9 390.9 391.1 392.4 393.8 394.5 394.0 392.9 392.0 390.9 390.2 390.0 389.5 389.1 388.8 389.3 390.9 392.0 393.4 395.0 396.1 398.2 398.8 398.6 396.6 393.4 390.2 385.3 377.8 376.7 378.0 381.3 387.7 390.9 398.6 403.7 404.6 401.9 397.9 391.5 384.6 373.0 370.6 371.5 374.5 379.7 385.3 393.6 401.4 406.3 408.6 407.5 400.7 387.9 381.3 374.3 373.4 377.8 388.1 398.4 391.2 362.3 346.4 337.6 329.0 327.2 327.2 327.2 327.2 327.4 327.4 327.4 327.4 327.4 327.5 327.5 327.5 327.5 327.7 327.7 327.7 327.7 327.9 327.9 327.7 327.0 326.2 326.2 325.1 324.5 324.5 323.4 314.4 306.8 302.3 297.8 302.1 306.7 316.4 321.4 323.0 324.3 324.2 323.6 322.8 322.3 321.7 321.4 319.7 319.0 319.5 320.4 321.0 321.7 323.2 324.7 325.8 326.4 327.0 327.4 328.5 327.2 326.6 326.2 325.8 325.7 325.3 325.3 325.7 326.0 326.6 326.8 327.7 328.7 329.1 328.7 327.9 326.8 317.7 284.5 270.2 278.6 291.1 296.0 305.6 315.7 320.4 329.2 338.8 342.8 351.9 360.9 370.9 372.8 371.3 368.5 368.3 368.3 369.1 371.5 372.4 371.9 370.2 367.2 359.5 348.2 320.6 313.3 331.2 366.2 386.4 386.6 388.2 390.4 390.4 390.2 388.8 387.7 386.6 383.7 383.3 382.6 381.7 381.3 380.8 383.0 385.3 387.5 388.6 390.9 394.0 396.3 396.1 395.9 394.5 392.7 391.5 390.6 390.2 390.0 390.4 391.5 392.2 392.0 391.5 391.5 392.9 391.8 391.1 391.1 391.3 391.5 392.2 392.7 392.9 393.4 393.6 393.8 393.4 393.6 392.7 392.2 392.2 392.2 392.4 391.8 391.5 392.0 392.4 392.0 391.5 392.0 392.4 391.8 391.1 390.2 390.0 390.0 390.0 389.7 389.7 390.0 391.3 391.8 390.6 390.0 390.2 390.2 389.3 391.3 390.6 390.2 390.9 390.9 390.6 388.8 389.7 390.9 389.5 390.6 391.5 390.9 392.2 392.4 391.5 390.9 390.9 390.9 389.1 389.5 390.4 390.4 388.6 389.1 390.0 391.1 390.6 390.9 391.5 391.8 391.8 391.5 391.5 391.3 391.1 390.9 390.6 391.3 392.2 392.9 391.3 392.2 392.2 391.5 391.1 390.6 391.5 391.5 390.9 390.2 390.2 390.6 390.9 391.1 391.5 390.0 388.8 388.6 388.6 388.6 389.3 390.2 390.6 391.5 391.5 391.8 391.8 392.0 391.1 390.0 389.3 389.7 389.5 388.6 388.2 389.1 391.5 393.4 394.0 395.2 394.5 393.6 392.2 387.7 386.6 384.8 383.5 384.8 387.7 390.6 391.3 395.6 396.8 397.2 396.6 394.3 392.2 389.3 386.6 384.6 383.7 384.4 385.5 390.0 394.7 399.3 401.4 404.4 403.7 399.1 392.4 382.2 378.9 375.4 377.8 381.1 387.9 394.7 400.5 403.7 401.6 399.5 394.7 
385.5 381.1 378.9 380.2 384.6 394.5 396.8 396.6 394.0 387.9 383.0 380.4 380.4 380.4 380.4 380.4 380.4 380.4 380.4", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 185.3125 - }, - { - "text": "AP", - "ph_seq": "g uo k e m ian d ui n a p ang d a d e AP y v zh ou b an SP", - "note_seq": "G4 G4 D5 D5 D5 D5 C5 C5 B4 B4 A4 A4 B4 B4 B4 B4 rest A4 A4 B4 B4 B4 B4 rest", - "note_dur_seq": "0.122396 0.250000 0.166667 1.925347 0.157986 0.304688 0.111979 0.251736 0.164931 0.255208 0.161458 0.304688 0.111979 0.147569 0.060764 0.625000 0.407986 0.008681 0.295139 0.121528 0.146701 0.061632 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.122396 0.250000 0.166667 1.925347 0.157986 0.304688 0.111979 0.251736 0.164931 0.255208 0.161458 0.304688 0.111979 0.147569 0.060764 0.625000 0.407986 0.008681 0.295139 0.121528 0.146701 0.061632 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "392.0 392.0 367.0 366.6 366.2 365.7 365.5 365.3 364.9 364.7 364.7 364.7 381.3 388.8 395.2 395.2 393.1 390.2 387.3 386.8 387.7 388.6 389.1 390.4 392.0 392.2 392.2 391.5 390.9 390.0 390.2 393.4 395.4 397.7 387.3 357.6 335.0 368.9 377.6 380.4 384.6 388.2 408.6 430.0 444.0 467.1 490.8 514.9 542.0 549.6 548.9 549.3 551.2 551.2 550.5 549.9 549.3 548.6 547.7 547.4 548.3 548.3 546.7 545.8 544.2 544.5 545.8 548.6 550.2 560.8 569.6 580.9 585.6 590.4 591.8 592.1 590.0 587.7 586.0 585.3 585.3 585.6 586.0 586.3 586.7 587.0 587.7 587.7 588.0 587.3 585.6 585.0 585.0 585.0 584.6 584.6 584.6 585.6 587.0 587.7 587.3 587.3 588.3 587.7 589.4 588.7 587.3 586.0 586.0 587.0 588.3 589.4 589.0 588.0 587.0 586.0 585.6 586.7 587.3 588.3 589.7 590.0 590.0 589.7 589.7 589.4 589.0 588.7 589.0 590.0 590.7 591.1 590.7 590.4 589.7 589.0 588.7 588.7 589.7 589.4 588.7 587.7 587.0 587.7 587.7 586.0 586.7 587.0 586.0 587.7 587.3 586.7 586.0 585.6 585.6 585.6 586.0 586.3 586.7 587.0 587.3 586.7 585.6 586.7 586.0 584.3 585.0 585.6 586.7 586.3 585.6 584.6 585.6 587.0 585.3 587.7 588.7 588.3 588.0 587.7 587.3 587.0 587.7 589.4 588.7 589.7 591.8 591.8 591.4 591.1 589.0 586.7 586.3 586.0 585.6 585.0 585.6 588.3 588.7 588.7 588.7 588.7 588.3 588.3 588.0 588.0 588.7 589.4 590.4 591.8 592.1 591.4 590.7 588.3 582.9 580.9 581.9 585.0 586.0 586.0 586.3 585.0 582.3 577.2 570.3 567.7 565.7 564.1 558.2 550.5 546.4 537.0 534.6 535.8 539.5 544.8 556.6 570.3 581.3 590.0 595.5 596.9 595.5 591.4 588.0 582.9 581.6 582.3 584.3 586.7 588.3 591.1 592.8 592.8 589.7 584.3 578.2 558.2 513.1 484.3 463.8 475.9 509.0 538.9 542.0 545.5 551.5 550.9 547.0 531.8 522.6 515.5 515.2 517.2 519.9 523.3 526.9 531.2 530.6 527.8 522.3 520.5 517.8 517.2 518.4 520.5 522.6 523.3 522.6 521.4 518.4 510.1 501.9 498.2 495.0 490.7 489.4 487.2 497.3 503.4 506.0 504.6 503.7 502.2 498.2 496.5 494.5 492.7 492.5 492.2 492.2 492.2 492.2 493.0 493.9 494.7 495.3 495.3 495.6 495.9 496.2 496.5 493.0 455.3 450.5 484.8 495.3 496.5 480.7 456.0 442.8 426.6 406.4 389.3 390.0 407.5 425.0 444.3 444.3 442.0 434.9 423.8 420.1 419.4 423.8 428.5 440.0 444.9 450.0 455.0 452.9 445.9 438.5 430.9 429.7 431.7 435.2 439.7 441.0 442.5 442.5 442.3 442.0 441.3 440.5 440.0 433.2 419.5 399.5 452.0 486.5 495.6 500.2 501.4 501.9 497.9 496.7 493.9 492.7 492.5 491.6 490.8 466.7 395.9 367.9 380.4 403.0 442.5 479.3 495.6 496.5 501.9 505.4 506.0 504.8 504.6 501.1 496.7 495.0 491.3 486.2 485.4 484.6 485.7 486.8 491.6 494.7 499.0 507.8 511.6 511.6 508.6 503.4 495.6 487.4 478.7 476.5 478.2 484.6 490.2 501.1 507.2 509.8 508.9 504.6 498.8 490.2 
479.8 476.0 475.7 479.3 484.8 495.9 505.7 513.1 516.3 517.2 511.3 504.3 482.4 456.3 440.1 474.0 462.4 457.2 455.0 454.7 454.7 454.7 454.5 454.2 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 440.0 400.9 400.9 400.7 400.5 400.5 400.5 400.2 400.2 400.2 403.5 413.9 415.3 412.2 404.4 400.5 397.2 397.9 402.8 409.1 418.7 428.7 436.5 441.5 446.1 445.6 442.5 439.5 437.7 436.7 436.5 435.7 436.2 438.7 440.5 442.5 438.2 430.4 395.6 396.6 405.4 441.3 451.3 455.0 470.5 486.2 493.0 496.5 499.6 497.9 496.2 493.6 492.5 492.5 494.5 491.6 485.4 473.5 483.2 482.3 474.6 471.0 466.2 465.6 486.0 494.2 502.5 506.9 505.7 503.1 496.5 491.9 487.4 486.0 485.4 487.1 492.7 495.6 502.8 507.2 505.7 503.1 497.9 492.7 487.9 486.8 487.1 488.8 492.5 498.8 500.2 499.3 498.2 496.7 493.3 490.2 489.1 492.2 498.8 501.1 504.3 503.7 503.4 496.7 488.2 483.2 479.3 476.5 478.2 483.2 494.2 499.3 506.3 510.1 510.1 504.3 488.2 471.0 471.0 471.0 471.0 471.0 471.0 471.0 471.0", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 196.12760416666669 - }, - { - "text": "AP", - "ph_seq": "d e k un h uo SP", - "note_seq": "A4 A4 G4 G4 G4 G4 rest", - "note_dur_seq": "0.111979 0.250000 0.166667 0.125000 0.083333 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0", - "ph_dur": "0.111979 0.250000 0.166667 0.125000 0.083333 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "440.0 394.7 394.0 393.4 392.9 392.4 392.0 391.8 391.5 391.3 391.3 407.2 412.0 410.5 408.2 406.5 403.0 400.7 401.6 404.4 410.3 419.2 428.2 436.5 442.5 444.1 444.6 443.1 441.8 440.3 439.5 439.5 439.7 438.7 436.0 430.2 425.7 430.0 432.4 434.7 439.7 442.0 434.2 420.7 408.1 387.6 378.0 388.8 393.4 395.6 395.2 393.8 394.0 395.4 393.6 389.1 381.3 360.1 323.2 300.2 298.8 314.4 328.1 357.2 385.3 390.9 397.9 402.8 404.2 402.6 399.3 395.4 390.6 385.3 383.7 384.2 387.7 391.1 392.4 398.2 401.6 403.7 403.0 399.8 396.8 390.6 381.7 380.2 380.0 383.5 386.8 392.7 399.1 403.5 407.5 406.8 402.8 396.1 387.9 383.7 379.5 380.6 383.0 387.3 395.2 397.0 401.2 400.7 399.5 394.7 387.3 382.8 381.1 382.2 387.0 389.5 389.1 384.2 384.2 384.2 384.2 384.2 384.2 384.2 384.2", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 203.22135416666669 - }, - { - "text": "AP", - "ph_seq": "AP m i d i q ve r u y in y v SP", - "note_seq": "rest B3 B3 D4 D4 G4 G4 A4 A4 B4 B4 B4 B4 rest", - "note_dur_seq": "0.414931 0.157986 0.304688 0.111979 0.246528 0.170139 0.312500 0.104167 0.407986 0.008681 0.199653 0.008681 0.625000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.414931 0.157986 0.304688 0.111979 0.246528 0.170139 0.312500 0.104167 0.407986 0.008681 0.199653 0.008681 0.625000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "239.2 238.4 224.4 205.6 194.0 185.5 186.1 186.4 186.4 186.0 186.1 186.4 185.6 185.1 185.9 186.2 186.0 185.4 185.0 185.0 185.2 185.7 186.3 186.9 187.3 187.6 188.4 188.7 189.2 189.5 189.5 189.1 188.9 189.3 189.4 189.2 189.5 190.0 190.7 190.9 189.8 189.9 189.9 188.1 190.9 194.2 204.6 210.8 222.2 233.5 238.7 240.2 240.9 241.3 242.0 241.9 241.3 241.4 241.7 242.7 245.0 245.9 247.2 248.5 247.8 247.2 246.2 245.8 245.7 245.5 245.8 246.1 246.7 247.5 247.5 247.7 247.8 249.2 247.7 223.2 205.7 207.9 217.8 240.2 254.0 273.7 292.8 296.0 294.2 294.5 291.1 287.8 285.8 286.1 287.8 291.5 294.9 297.1 297.8 296.9 293.0 290.8 288.6 288.3 289.8 292.5 296.0 286.8 269.4 248.1 245.7 259.5 267.7 277.0 286.1 296.2 304.7 317.1 324.1 334.6 343.4 
350.2 357.0 374.9 385.9 392.7 393.4 391.1 387.7 385.3 382.6 383.7 385.5 388.8 393.1 396.1 396.8 396.8 394.0 392.2 389.5 388.8 389.1 389.3 390.6 392.2 394.5 395.6 395.9 395.9 393.4 385.7 379.7 366.0 367.3 375.4 392.0 423.8 436.7 446.9 449.2 448.5 444.6 439.5 433.2 430.0 429.0 430.0 432.7 438.2 441.3 443.8 443.8 443.3 440.5 437.7 437.2 437.2 437.5 440.0 442.0 442.5 442.8 443.3 442.8 441.5 440.0 434.9 432.4 429.0 433.0 448.8 459.7 478.7 492.7 497.6 501.4 503.4 501.4 498.5 497.0 497.9 497.0 496.2 495.6 494.2 495.6 494.7 493.9 495.3 496.5 497.3 495.9 495.3 494.7 499.6 500.5 501.6 501.9 501.9 502.5 501.9 500.2 498.2 496.2 495.3 495.0 493.3 493.3 494.5 493.9 493.9 496.7 495.0 501.9 509.8 514.9 517.2 516.0 509.2 499.6 494.2 482.6 478.2 479.0 484.8 496.7 504.0 510.4 514.6 512.8 506.0 495.3 489.6 475.7 471.0 471.6 477.9 495.9 503.1 511.9 517.2 515.2 503.4 476.2 467.0 467.0 467.0 467.0 467.0 467.0 467.0 467.0", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 204.84375 - }, - { - "text": "AP", - "ph_seq": "y ong h eng d e h un d un zh e AP z ai n a l u j in t ou y ou w ei zh ir d e SP", - "note_seq": "A4 A4 B4 B4 C5 C5 C5 C5 B4 B4 B4 B4 rest B4 B4 A4 A4 G4 G4 E5 E5 E5 E5 D5 D5 D5 D5 B4 B4 G4 G4 rest", - "note_dur_seq": "0.072917 0.263021 0.153646 0.147569 0.060764 0.888021 0.153646 0.304688 0.111979 0.142361 0.065972 1.458333 0.284722 0.131944 0.251736 0.164931 0.291667 0.125000 0.699653 0.133681 0.657986 0.175347 1.177083 0.072917 0.305556 0.111111 0.295139 0.121528 0.304688 0.111979 1.250000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.072917 0.263021 0.153646 0.147569 0.060764 0.888021 0.153646 0.304688 0.111979 0.142361 0.065972 1.458333 0.284722 0.131944 0.251736 0.164931 0.291667 0.125000 0.699653 0.133681 0.657986 0.175347 1.177083 0.072917 0.305556 0.111111 0.295139 0.121528 0.304688 0.111979 1.250000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "428.7 429.0 429.0 429.7 431.2 433.4 436.0 437.5 439.7 440.0 438.0 440.5 440.3 440.0 438.2 436.2 434.9 434.2 434.2 434.9 433.4 432.9 434.2 434.4 440.0 443.1 445.6 446.4 446.4 444.3 441.3 437.5 431.9 414.8 381.9 365.7 374.5 387.0 397.5 410.8 423.1 432.9 445.7 481.2 499.3 499.0 495.9 494.5 491.3 492.2 493.6 495.6 493.9 492.2 488.2 481.8 472.9 457.7 417.0 416.4 453.5 502.8 514.3 519.3 525.4 528.4 530.2 529.6 528.7 526.6 524.5 523.3 524.2 523.3 528.1 523.3 524.2 522.6 525.7 529.0 529.0 526.6 524.2 521.4 517.5 514.9 515.7 517.5 521.7 522.9 523.9 524.8 526.3 526.9 526.6 524.8 523.6 522.0 521.1 520.5 520.8 522.0 521.7 521.1 521.4 522.0 522.6 523.3 523.6 523.3 520.2 519.0 518.1 517.5 517.8 518.1 519.0 520.8 522.0 523.9 526.6 527.5 525.7 523.9 521.4 518.4 517.5 518.1 519.9 522.9 525.1 528.4 530.6 532.4 544.2 522.0 520.5 513.7 508.6 508.1 505.7 505.1 505.1 506.0 507.8 509.8 511.0 512.5 518.7 522.9 526.6 530.2 533.0 532.7 529.6 527.5 527.5 528.7 527.5 526.0 526.6 524.2 525.4 526.9 522.0 525.7 522.9 522.3 521.7 521.7 518.7 512.8 506.9 504.0 501.9 495.3 492.2 490.5 487.6 485.7 485.7 486.8 484.8 476.9 468.4 466.7 471.0 483.2 487.4 489.6 489.3 491.0 491.3 492.2 493.6 493.9 496.5 500.8 503.7 504.8 488.5 489.9 482.9 469.9 466.2 481.8 504.0 512.5 520.5 519.6 518.1 513.4 511.3 509.8 507.5 508.1 509.2 509.8 509.5 506.6 502.8 497.9 496.2 493.3 492.7 495.6 494.5 493.0 491.9 491.0 492.7 493.9 493.6 493.6 494.7 495.0 494.7 494.5 494.2 493.9 494.7 493.0 492.7 491.9 492.5 493.0 493.9 494.2 493.6 491.9 491.0 491.0 490.8 490.8 490.5 491.0 491.6 491.9 490.5 490.2 
491.3 489.6 488.8 488.8 489.1 489.3 489.6 489.6 489.9 491.0 492.5 493.0 493.3 493.6 493.6 493.3 491.0 489.1 488.5 490.2 492.5 494.7 498.8 501.4 501.4 499.0 493.9 489.3 479.5 475.4 476.0 479.0 486.2 493.6 501.1 505.1 504.8 501.4 493.3 489.6 485.1 477.9 480.4 484.3 492.2 496.2 500.2 503.4 502.8 499.6 493.3 488.5 486.8 485.1 485.7 486.8 489.3 488.5 486.0 481.5 482.3 483.7 487.1 493.3 498.2 499.9 497.3 493.0 479.5 453.9 432.7 460.0 472.1 474.0 471.3 470.5 470.2 470.8 471.9 473.8 476.2 475.1 473.2 474.6 476.2 476.8 477.6 478.7 479.3 478.7 477.1 477.1 477.6 479.3 479.8 477.9 477.1 480.1 480.4 480.7 481.8 481.2 482.3 483.7 484.8 486.2 487.4 493.3 489.3 475.7 471.6 465.6 465.4 467.8 473.5 482.3 485.1 491.3 494.5 495.6 496.7 495.6 494.7 492.2 488.8 488.2 487.9 488.8 489.6 491.6 493.9 494.7 494.7 493.9 486.5 478.7 472.1 470.2 471.9 474.5 470.3 474.8 496.5 501.4 499.9 498.8 492.7 486.8 474.3 458.2 452.4 442.3 438.5 437.2 438.2 439.0 439.7 440.5 440.8 439.7 438.2 437.2 437.0 437.0 437.0 437.7 437.5 435.7 432.7 429.5 420.1 409.6 389.1 366.6 352.8 351.4 351.7 371.3 385.0 389.5 393.1 394.7 394.5 392.4 389.7 385.5 386.4 387.7 389.5 392.0 394.5 395.9 396.8 395.9 393.6 389.7 387.3 385.0 386.8 387.9 391.3 393.6 393.8 392.9 392.2 391.1 389.5 387.7 386.8 386.8 388.2 388.6 390.2 390.0 390.6 390.2 389.3 389.7 389.5 388.6 389.1 389.5 390.4 390.6 390.9 391.8 392.0 392.0 392.0 392.4 393.4 395.2 396.8 397.5 400.2 395.0 375.8 367.7 385.9 391.5 385.3 386.4 389.1 397.0 410.1 419.2 439.4 449.8 461.5 505.1 553.7 582.6 596.9 606.3 613.0 614.0 614.4 613.7 611.6 608.7 608.4 608.7 609.1 609.4 611.6 616.5 623.3 627.3 639.4 649.4 653.9 658.1 660.8 660.4 659.3 657.7 655.5 653.9 653.2 653.2 651.3 653.9 655.1 656.2 657.0 658.5 657.4 659.3 657.7 658.5 656.6 655.5 653.2 650.9 654.7 654.3 653.9 653.6 653.6 653.2 652.8 653.2 656.6 656.2 655.8 655.1 655.5 655.1 655.8 657.0 658.1 646.1 648.3 637.2 620.8 606.6 602.8 611.9 629.8 628.8 652.1 657.7 657.4 661.2 665.4 662.3 665.0 661.9 661.5 658.5 659.6 660.0 657.7 654.3 652.8 655.8 655.5 660.8 661.2 659.3 658.9 660.0 659.3 657.7 656.6 654.3 653.6 653.6 653.9 653.9 652.1 653.9 654.3 654.3 654.3 655.5 657.4 657.0 655.8 654.7 653.6 652.4 652.1 652.1 653.2 654.3 655.5 656.6 657.0 655.5 653.9 652.8 652.4 653.9 653.6 654.3 655.1 653.2 653.6 654.3 654.3 653.9 655.5 657.0 657.4 656.2 654.7 655.5 655.5 653.9 653.9 656.2 655.5 654.7 654.7 655.1 655.5 654.7 653.6 652.8 652.1 652.1 652.1 652.4 653.2 653.9 654.7 653.9 653.2 652.4 652.1 652.4 652.8 653.6 655.1 655.1 652.4 648.7 651.7 652.8 672.3 671.9 660.0 661.2 662.3 657.8 653.9 653.2 638.9 615.5 605.9 605.6 603.5 595.9 582.9 576.2 574.2 575.6 572.6 572.9 576.2 578.2 580.2 584.3 584.6 583.9 583.3 582.3 581.3 580.6 581.6 583.3 586.0 588.3 588.7 588.3 588.7 590.7 589.7 587.7 583.3 582.3 582.6 582.6 582.6 582.9 581.9 580.6 579.6 579.6 577.2 576.9 577.2 577.6 579.6 583.6 585.0 586.3 585.0 583.9 584.3 584.3 583.9 584.6 583.9 581.9 579.6 578.2 576.2 569.3 565.0 556.6 548.6 533.6 524.8 521.4 509.2 498.5 499.3 490.0 491.0 487.1 486.8 488.2 487.1 487.1 489.6 489.9 488.8 491.3 494.2 492.5 494.2 493.0 493.3 493.9 493.9 493.3 493.9 493.3 491.9 491.3 494.2 493.9 493.3 493.0 491.3 488.8 476.5 420.4 436.7 477.3 493.3 492.9 473.9 450.7 410.1 391.1 386.2 390.4 395.6 398.2 399.8 399.5 398.6 395.9 392.9 391.5 389.5 388.8 388.8 389.1 390.0 391.3 391.8 393.6 396.6 399.3 398.6 397.9 396.8 393.8 391.5 387.7 385.9 385.9 387.3 389.7 392.7 394.7 396.6 396.8 396.1 395.4 393.1 389.3 386.8 385.9 385.9 387.7 390.9 393.4 396.1 398.2 399.8 399.3 397.7 396.3 392.4 390.6 389.3 386.8 
385.5 387.0 390.0 393.1 394.7 396.1 397.2 397.2 396.8 396.3 395.0 392.4 390.0 388.4 386.4 385.3 385.0 386.8 389.3 390.9 392.9 394.5 397.5 399.5 402.8 400.9 398.8 395.0 389.3 383.3 379.3 379.7 382.2 386.6 395.0 400.2 403.5 407.9 407.5 404.4 394.0 389.1 378.2 374.5 375.6 378.9 385.3 388.8 394.7 398.4 398.2 392.0 388.6 388.6 388.6 388.6 388.6 388.6 388.6 388.6", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 208.26041666666669 - }, - { - "text": "AP", - "ph_seq": "w u x ian d e AP k e n eng SP", - "note_seq": "E4 E4 F#4 F#4 G4 G4 rest G4 G4 D5 D5 rest", - "note_dur_seq": "0.008681 0.431424 0.193576 0.147569 0.060764 2.500000 0.250000 0.166667 0.251736 0.164931 6.666667 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.008681 0.431424 0.193576 0.147569 0.060764 2.500000 0.250000 0.166667 0.251736 0.164931 6.666667 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "319.9 319.9 328.7 330.0 328.9 327.0 324.7 321.5 320.8 319.5 317.7 316.7 317.3 318.0 321.0 324.2 325.8 325.8 325.8 326.6 325.3 325.3 325.8 327.2 330.6 331.9 333.5 333.1 332.5 330.8 328.5 327.2 326.4 326.6 327.4 327.9 330.0 331.9 332.5 333.1 331.7 330.0 315.8 287.0 294.5 324.9 332.3 330.4 326.4 324.7 326.4 335.3 343.4 351.3 359.2 371.9 378.6 376.9 375.4 375.6 373.4 371.3 370.2 369.1 370.0 370.4 370.2 369.1 366.8 356.7 332.2 314.6 328.3 379.1 388.8 391.5 395.0 396.3 396.1 395.0 395.4 395.9 394.3 392.9 392.4 391.8 391.5 392.0 392.4 392.4 393.4 392.7 393.8 393.6 393.4 393.8 393.8 394.0 393.6 393.1 392.2 392.2 392.4 392.9 393.6 392.7 393.1 392.9 392.7 393.1 393.6 392.7 392.0 392.0 392.4 392.7 393.1 393.6 394.0 394.5 395.0 395.2 394.3 395.0 393.8 393.1 393.4 393.8 394.3 392.4 392.9 393.8 393.6 393.4 392.9 393.4 393.8 393.1 392.2 391.8 391.3 391.3 391.1 391.1 390.9 390.9 392.7 392.9 392.0 392.0 391.5 391.3 390.6 392.7 392.0 391.5 392.2 392.4 391.5 390.2 391.3 392.2 390.9 392.2 393.4 392.2 394.0 393.8 392.9 392.4 392.4 391.5 390.0 390.4 391.3 391.5 390.4 390.4 391.5 392.2 391.3 392.0 392.4 393.1 392.4 392.0 392.2 392.4 392.7 392.4 392.0 392.7 393.4 394.0 392.7 393.8 393.4 392.7 392.2 392.0 392.9 392.9 392.0 391.3 391.3 391.8 392.0 392.4 392.4 390.9 389.7 390.4 390.2 389.7 390.4 392.0 392.2 392.4 392.4 392.7 393.1 393.1 392.7 391.1 391.1 391.5 391.1 390.4 390.0 390.9 392.0 393.4 394.5 395.4 393.6 393.8 392.0 389.5 388.4 386.6 385.9 387.5 389.7 391.5 393.8 396.1 397.0 397.5 396.6 394.0 392.9 391.3 389.7 387.5 387.0 387.3 388.2 392.2 396.1 399.3 402.6 405.4 405.1 399.8 394.5 388.2 381.9 379.5 381.1 385.3 392.0 398.8 402.6 404.9 402.1 400.5 395.9 387.9 384.6 384.4 388.2 391.8 400.5 401.6 399.8 394.7 387.9 382.8 378.4 378.2 378.2 378.2 378.2 378.2 378.2 378.2 378.4 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 392.0 379.5 379.5 379.5 379.5 379.5 379.5 379.5 379.7 379.7 379.7 389.3 396.3 396.1 395.2 393.1 390.2 387.3 385.9 386.6 387.7 389.7 391.1 392.9 393.6 393.4 393.1 392.9 392.9 393.1 394.3 395.0 395.2 395.4 395.4 394.5 392.4 383.0 377.3 374.3 373.4 375.8 381.7 392.9 406.4 439.9 493.9 517.5 541.7 552.4 555.6 556.9 556.3 556.0 554.0 551.8 552.1 552.8 553.4 553.7 556.0 560.2 566.0 578.6 589.7 593.1 596.9 594.2 592.4 589.7 587.0 584.6 580.6 578.9 579.6 580.9 581.9 583.3 583.6 582.6 582.3 583.9 586.3 586.3 586.3 586.3 586.3 586.7 586.7 586.7 586.7 586.7 586.0 585.3 584.3 583.3 582.9 583.6 585.3 586.3 586.7 589.0 589.4 589.4 588.7 587.7 586.0 585.6 587.0 587.0 587.0 587.0 586.7 587.7 588.7 588.0 589.4 588.7 587.7 588.7 588.3 
586.7 584.3 584.3 584.6 583.3 582.9 583.9 582.9 583.9 585.6 587.7 586.7 586.0 586.3 586.7 587.0 588.7 587.3 586.0 584.6 584.6 584.6 585.0 583.6 583.6 583.6 584.6 586.0 587.3 586.7 586.7 585.6 584.3 583.9 583.9 585.3 586.3 586.7 586.0 585.6 586.7 586.7 585.0 586.7 588.0 588.3 588.3 588.3 587.3 586.3 585.6 587.0 587.7 587.3 587.0 586.7 585.6 585.0 585.0 586.0 587.0 586.7 585.3 585.0 585.0 585.0 585.0 585.0 585.0 585.0 585.0 584.3 583.6 583.3 582.6 582.9 583.6 583.9 582.9 583.9 584.6 585.0 584.6 584.6 584.3 583.9 585.0 585.0 584.6 585.0 586.0 586.0 585.6 584.6 583.9 582.6 582.9 584.6 583.9 583.6 585.0 585.3 585.0 586.0 586.7 587.0 586.3 586.0 585.0 582.6 585.0 585.3 587.3 589.7 588.0 587.0 587.3 588.0 586.3 585.3 587.0 586.0 583.9 583.6 584.6 586.7 586.3 585.6 584.6 584.3 584.3 584.6 585.0 585.3 586.0 586.3 586.7 586.0 585.6 582.9 582.9 583.3 583.3 583.6 583.9 583.3 582.3 583.3 584.6 586.0 585.6 584.3 584.3 584.6 585.3 584.6 584.3 585.3 588.0 589.4 588.7 587.3 586.0 585.3 584.3 587.7 587.7 588.0 589.7 590.4 589.7 586.3 587.0 587.7 586.7 586.0 586.7 587.3 586.0 585.0 587.3 585.3 585.0 585.0 585.3 585.6 586.0 586.7 588.0 586.7 587.7 587.3 587.3 587.3 586.7 586.3 586.0 586.3 586.7 587.3 587.3 587.7 587.3 586.0 587.0 585.3 587.7 586.0 583.9 583.9 584.3 584.6 585.3 585.6 586.3 586.7 587.0 586.7 586.3 586.3 586.7 587.0 587.3 586.7 586.0 585.3 583.6 583.6 583.9 584.3 586.3 588.3 588.7 589.7 589.0 587.7 587.0 586.0 584.6 583.6 583.6 584.3 586.3 587.7 589.7 590.7 590.7 590.0 588.7 588.0 587.0 586.0 585.0 583.9 583.9 584.3 585.0 587.7 588.7 588.7 588.0 587.7 587.0 586.3 586.0 585.6 585.3 585.3 585.6 586.7 587.3 588.3 589.4 590.0 590.7 590.7 590.4 589.7 589.4 587.7 587.3 587.7 587.0 587.0 589.0 588.3 587.7 586.7 586.0 587.0 586.7 586.7 587.0 585.6 589.0 589.7 589.0 588.0 588.3 588.7 587.7 587.0 585.6 586.7 586.7 585.3 584.3 585.6 587.3 589.0 589.4 589.4 589.0 589.0 588.7 588.3 588.3 588.3 589.4 590.0 589.7 589.4 590.0 590.0 588.7 587.7 588.0 589.7 588.7 588.3 589.4 590.4 590.4 588.7 588.3 589.0 587.0 587.0 587.0 586.0 584.6 583.9 585.6 586.0 585.3 586.7 588.0 587.3 586.0 588.0 587.7 587.0 587.3 588.7 588.0 586.7 586.7 586.7 586.7 586.7 586.7 586.7 587.0 587.0 587.0 588.3 587.3 588.0 589.0 588.7 588.7 589.7 590.0 588.7 587.0 586.7 586.7 586.7 587.7 589.4 587.7 589.4 587.3 586.7 586.0 585.3 586.3 586.3 584.6 584.6 584.6 585.0 585.6 586.0 586.0 585.6 585.6 585.3 585.0 586.3 585.3 585.3 582.6 582.3 581.9 581.6 582.3 583.9 585.3 585.0 586.0 587.0 587.0 586.7 586.3 587.0 588.7 588.7 588.7 588.3 587.3 586.0 586.0 586.0 585.3 583.9 583.9 585.3 587.7 588.0 588.0 587.7 586.0 583.6 582.9 581.9 580.9 578.2 576.6 577.6 579.9 581.3 585.3 588.0 590.7 592.1 592.1 591.8 589.4 588.0 581.9 578.6 574.9 570.9 570.9 572.3 577.2 580.9 585.6 590.7 600.7 603.1 606.3 603.5 600.7 588.0 576.9 574.9 566.7 565.4 567.7 578.9 586.7 592.8 606.6 611.6 611.6 606.6 599.0 592.8 578.6 573.9 575.6 576.2 582.6 589.4 591.4 594.8 593.5 592.1 587.3 579.9 574.6 567.7 564.7 563.1 557.6 557.6 563.7 587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3", - "gender_timestep": null, - "gender": null, - "input_type": "phoneme", - "offset": 219.15798611111111 - } -] diff --git "a/samples/\350\277\231\344\271\210\345\217\257\347\210\261\347\234\237\346\230\257\346\212\261\346\255\211.ds" "b/samples/\350\277\231\344\271\210\345\217\257\347\210\261\347\234\237\346\230\257\346\212\261\346\255\211.ds" deleted file mode 100644 index 118f18afb..000000000 --- 
"a/samples/\350\277\231\344\271\210\345\217\257\347\210\261\347\234\237\346\230\257\346\212\261\346\255\211.ds" +++ /dev/null @@ -1,1090 +0,0 @@ -[ - { - "text": "那 些 关 于 我 的 事 情 SP", - "ph_seq": "n a x ie g uan y v w o d e sh ir q ing SP", - "note_seq": "D4 D4 D4 D4 F#4 F#4 F#4 F#4 F#4 F#4 E4 E4 E4 E4 A4 A4 rest", - "note_dur_seq": "0.045312 0.092969 0.094531 0.127344 0.060156 0.166406 0.021094 0.133594 0.053906 0.132812 0.054688 0.095312 0.092188 0.104688 0.082812 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.045312 0.092969 0.094531 0.127344 0.060156 0.166406 0.021094 0.133594 0.053906 0.132812 0.054688 0.095312 0.092188 0.104688 0.082812 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 294.0 295.0 296.2 296.7 297.9 297.8 296.9 296.0 294.2 287.1 266.5 250.8 246.4 250.0 258.8 263.4 267.7 273.4 278.5 281.9 284.3 288.6 293.2 295.5 295.5 295.2 294.9 294.5 294.2 294.2 273.5 244.8 225.7 227.5 259.1 297.9 332.9 346.0 352.5 357.4 363.8 365.5 366.6 368.1 370.4 371.3 372.4 372.4 371.7 369.4 364.3 362.6 368.1 373.9 373.2 372.4 371.3 370.6 371.1 370.4 370.0 369.6 370.0 371.1 372.8 374.3 375.6 375.4 374.3 374.7 374.3 373.4 372.8 371.9 369.6 368.3 367.9 368.9 370.0 370.6 361.1 327.4 305.8 307.3 334.0 336.2 333.5 332.7 330.4 325.5 304.0 290.8 299.0 307.6 312.6 314.9 317.1 319.1 323.0 329.4 338.1 336.0 335.0 333.8 331.9 329.2 319.7 295.7 299.1 310.6 326.8 332.9 339.5 348.5 356.3 373.5 401.1 437.7 457.4 460.5 455.8 450.0 439.2 430.2 422.6 418.4 421.3 431.4 442.3 457.6 461.9 458.9 442.8 442.8 442.8 442.8 442.8 442.8 442.8 442.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 13.0796875 - }, - { - "text": "还 请 你 注 意 聆 听 SP", - "ph_seq": "h ai q ing n i zh u y i l ing t ing SP", - "note_seq": "B3 B3 D4 D4 D4 D4 D4 D4 C#4 C#4 C#4 C#4 F#4 F#4 rest", - "note_dur_seq": "0.095312 0.104688 0.082812 0.107031 0.080469 0.128125 0.059375 0.179688 0.007812 0.126562 0.060937 0.102344 0.085156 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.095312 0.104688 0.082812 0.107031 0.080469 0.128125 0.059375 0.179688 0.007812 0.126562 0.060937 0.102344 0.085156 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "322.3 284.5 259.2 246.5 242.3 239.2 237.7 236.6 236.3 237.3 243.8 248.2 250.5 250.5 249.2 243.1 231.6 234.6 242.0 244.1 250.0 259.7 264.4 273.5 280.6 289.5 296.9 295.9 296.6 295.9 294.3 292.8 292.5 292.1 292.0 291.6 290.1 289.5 290.5 289.8 290.3 294.5 298.1 297.1 295.7 294.7 294.7 294.7 293.5 287.5 267.1 249.8 250.0 259.4 264.1 272.1 277.8 286.8 295.2 298.6 297.6 295.4 294.7 292.8 292.0 292.1 292.5 294.2 294.7 294.2 292.7 289.2 285.8 281.4 279.3 277.7 276.9 276.1 275.7 276.5 277.8 279.1 279.9 279.4 278.8 276.5 269.4 266.2 264.1 269.1 279.3 279.1 278.3 278.5 278.8 278.1 277.5 278.0 278.1 277.2 262.7 243.7 244.3 261.6 301.1 347.0 367.2 376.2 379.1 378.0 376.0 367.9 362.6 358.4 353.5 355.3 359.0 366.0 376.0 385.3 387.5 380.6 380.6 380.6 380.6 380.6 380.6 380.6 380.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 14.7171875 - }, - { - "text": "一 定 要 记 得 严 谨 SP", - "ph_seq": "y i d ing y ao j i d e y En j in SP", - "note_seq": "A3 A3 B3 B3 B3 B3 B3 B3 A3 A3 A3 A3 D4 D4 rest", - "note_dur_seq": "0.007812 0.132812 0.054688 0.152344 0.035156 0.121875 0.065625 0.132812 0.054688 0.152344 0.035156 0.139063 0.048438 0.187500 
0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.007812 0.132812 0.054688 0.152344 0.035156 0.121875 0.065625 0.132812 0.054688 0.152344 0.035156 0.139063 0.048438 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "215.3 215.2 218.0 220.9 220.8 220.1 219.6 219.4 219.2 219.4 220.0 220.9 220.5 214.1 191.8 187.3 197.2 223.7 245.4 249.1 250.5 249.7 249.1 249.2 247.4 244.8 244.1 244.0 245.0 246.2 248.5 249.7 249.5 249.4 249.1 248.7 247.9 246.8 246.8 247.1 247.7 247.9 244.4 221.7 205.9 211.9 223.5 229.2 234.3 241.3 248.5 251.5 251.4 250.1 247.9 246.7 245.8 245.4 245.8 246.1 245.2 244.2 231.8 210.6 201.8 207.0 224.8 223.8 221.1 220.6 220.4 220.1 220.1 220.8 221.3 220.8 220.0 218.7 218.5 218.4 218.5 218.9 219.2 219.6 220.0 220.8 220.6 221.0 221.4 220.1 220.1 220.3 220.9 217.3 206.4 206.6 220.6 253.3 284.8 295.7 297.2 298.8 297.8 296.4 293.0 290.0 288.0 287.6 289.0 293.3 297.4 297.6 294.0 284.2 270.1 270.1 270.1 270.1 270.1 270.1 270.1 270.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 16.3046875 - }, - { - "text": "不 然 会 大 吃 一 惊 那 些 负 面 的 声 音 SP", - "ph_seq": "b u r an h ui d a ch ir y i j ing n a x ie f u m ian d e sh eng y in SP", - "note_seq": "D4 D4 G4 G4 F#4 F#4 E4 E4 D4 D4 D4 D4 E4 E4 E4 E4 F#4 F#4 F#4 F#4 F#4 F#4 E4 E4 E4 E4 A4 A4 rest", - "note_dur_seq": "0.114062 0.136719 0.050781 0.112500 0.075000 0.132812 0.054688 0.083594 0.103906 0.179688 0.007812 0.244531 0.130469 0.107031 0.080469 0.092969 0.094531 0.107031 0.080469 0.110156 0.077344 0.132812 0.054688 0.095312 0.092188 0.179688 0.007812 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.136719 0.050781 0.112500 0.075000 0.132812 0.054688 0.083594 0.103906 0.179688 0.007812 0.244531 0.130469 0.107031 0.080469 0.092969 0.094531 0.107031 0.080469 0.110156 0.077344 0.132812 0.054688 0.095312 0.092188 0.179688 0.007812 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "286.1 286.5 286.8 287.3 287.5 287.8 287.8 288.3 290.0 292.1 293.8 301.2 302.1 298.8 294.9 294.5 294.5 293.3 292.3 292.1 291.1 292.6 299.7 303.3 314.7 339.9 364.0 381.3 388.2 392.7 395.4 395.4 394.5 393.1 389.7 388.8 388.2 378.2 351.0 359.4 360.3 356.8 368.8 386.8 383.3 380.0 373.2 370.6 368.3 366.8 367.2 368.3 369.8 369.1 363.6 339.5 314.8 319.8 330.9 339.3 335.4 331.0 331.5 332.7 327.4 302.8 295.7 323.4 333.5 333.1 326.4 319.1 303.7 292.8 277.8 271.9 277.3 287.3 291.8 293.5 295.4 295.4 294.9 294.3 294.3 294.5 294.5 294.7 294.3 292.0 289.3 289.5 292.0 294.5 295.4 295.7 295.7 295.4 295.0 294.7 294.5 294.7 295.9 295.2 294.2 293.2 292.8 292.5 292.6 293.2 294.0 294.9 295.4 295.2 294.5 289.8 271.2 245.4 244.5 261.0 277.5 290.1 302.0 313.1 322.3 331.3 333.8 333.8 333.8 333.5 330.8 329.4 328.3 328.3 327.4 321.0 320.2 321.4 321.5 328.7 336.7 334.6 332.3 332.5 332.3 331.3 326.8 304.4 293.5 289.0 293.2 305.6 314.9 322.0 332.2 348.9 360.3 374.9 378.2 376.2 373.2 372.4 371.7 370.6 358.4 320.2 332.1 358.2 367.7 362.8 360.1 360.5 364.7 374.3 376.0 373.0 371.9 372.6 371.7 370.4 372.6 371.9 367.9 362.6 363.0 365.5 366.8 367.7 372.8 378.2 375.4 372.6 371.7 370.6 368.7 368.7 368.7 369.4 370.6 371.1 369.8 361.5 342.7 325.1 335.8 333.3 332.5 331.7 330.6 328.5 316.7 306.8 309.2 314.2 319.1 320.2 319.5 322.3 326.2 329.6 330.4 330.8 331.2 330.8 330.4 331.5 331.7 329.2 328.9 328.7 330.6 330.4 331.2 338.7 360.1 383.7 406.9 426.7 440.8 447.4 447.9 446.4 439.7 436.5 
432.9 433.4 434.2 442.8 447.2 449.0 443.8 431.9 418.4 418.4 418.4 418.4 418.4 418.4 418.4 418.4", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 17.6984375 - }, - { - "text": "不 必 去 理 会 回 应 SP", - "ph_seq": "b u b i q v l i h ui h ui y ing SP", - "note_seq": "A4 A4 B4 B4 D4 D4 D4 D4 C#4 C#4 C#4 C#4 F#4 F#4 rest", - "note_dur_seq": "0.114062 0.132031 0.055469 0.104688 0.082812 0.126562 0.060937 0.112500 0.075000 0.112500 0.075000 0.179688 0.007812 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.132031 0.055469 0.104688 0.082812 0.126562 0.060937 0.112500 0.075000 0.112500 0.075000 0.179688 0.007812 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "435.4 435.2 435.2 434.9 434.7 434.4 434.2 433.2 427.2 420.1 414.8 427.0 440.0 442.3 443.6 443.6 443.3 443.1 442.8 442.0 439.7 418.7 423.1 433.1 432.9 447.8 471.0 491.0 498.8 500.8 499.6 499.0 490.2 473.2 458.2 472.7 497.0 503.7 481.8 453.9 436.3 394.2 334.7 296.6 288.5 289.0 288.0 288.8 291.3 291.6 293.7 295.9 297.6 297.2 296.7 294.0 292.5 293.8 295.5 295.2 295.5 295.2 294.7 294.2 293.8 293.5 292.6 284.6 269.4 249.1 237.7 231.2 235.0 245.1 264.9 279.6 281.4 281.2 278.9 278.6 278.5 278.0 277.5 273.4 264.5 246.4 231.2 235.7 243.5 251.4 266.0 279.1 281.7 279.4 276.9 276.5 276.7 275.9 275.7 276.7 278.6 281.7 281.9 278.0 277.8 291.0 323.2 355.3 373.9 381.1 384.4 384.4 378.0 368.1 363.4 356.2 352.9 355.1 360.5 374.7 385.0 387.5 380.6 358.6 358.6 358.6 358.6 358.6 358.6 358.6 358.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 20.6984375 - }, - { - "text": "因 为 自 己 的 心 灵 喜 欢 独 自 旅 行 SP", - "ph_seq": "y in w ei z i0 j i d e x in l ing x i h uan d u z i0 l v x ing SP", - "note_seq": "A3 A3 B3 B3 B3 B3 B3 B3 A3 A3 A3 A3 D4 D4 D4 D4 G4 G4 F#4 F#4 E4 E4 D4 D4 D4 D4 rest", - "note_dur_seq": "0.050000 0.133594 0.053906 0.123438 0.064062 0.121875 0.065625 0.132812 0.054688 0.092969 0.094531 0.126562 0.060937 0.182031 0.192969 0.112500 0.075000 0.132812 0.054688 0.123438 0.064062 0.126562 0.060937 0.078125 0.109375 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.050000 0.133594 0.053906 0.123438 0.064062 0.121875 0.065625 0.132812 0.054688 0.092969 0.094531 0.126562 0.060937 0.182031 0.192969 0.112500 0.075000 0.132812 0.054688 0.123438 0.064062 0.126562 0.060937 0.078125 0.109375 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "217.3 214.2 209.1 202.4 198.2 196.0 202.7 209.6 214.5 218.0 220.6 221.1 220.4 219.7 219.1 219.0 215.3 213.5 217.1 226.6 237.2 245.5 249.2 249.1 247.8 246.9 245.2 245.9 246.5 246.7 243.7 235.7 238.8 238.3 241.0 241.9 240.2 238.1 244.7 249.8 248.8 247.8 248.2 247.8 247.4 244.1 230.5 212.0 214.6 222.0 227.6 230.9 236.3 240.5 248.1 251.7 251.1 249.1 246.9 245.8 245.0 244.5 245.1 245.7 246.1 245.1 234.1 213.8 205.3 213.7 222.8 222.4 221.7 221.4 219.1 211.4 202.4 194.2 197.1 202.7 208.3 212.3 216.0 222.7 228.0 231.1 229.9 228.2 223.1 220.8 219.2 218.4 218.4 219.0 220.3 221.7 222.4 221.4 220.5 227.5 238.2 261.8 285.3 297.9 303.8 306.1 304.2 300.5 292.5 283.3 278.3 275.3 276.2 282.7 293.3 304.2 307.9 304.2 300.0 296.7 292.1 289.0 288.5 288.3 288.1 288.0 287.8 287.6 287.3 287.3 287.1 286.1 284.0 281.9 281.2 286.6 295.7 297.4 297.1 296.7 294.9 292.5 291.3 286.6 266.7 252.9 246.0 246.7 255.0 277.8 304.5 319.9 330.8 341.8 
353.3 360.3 370.6 377.3 382.4 390.0 395.2 397.5 397.7 394.1 384.5 380.9 390.2 382.8 377.1 373.4 370.0 369.6 370.2 368.9 359.0 325.3 297.9 293.3 299.8 305.9 310.1 317.1 323.8 332.3 335.8 337.1 334.0 332.7 332.1 331.3 330.2 329.1 329.1 327.9 323.2 314.4 300.4 293.8 291.5 294.7 293.5 295.2 295.2 292.5 274.3 258.6 260.3 268.1 275.9 279.3 282.0 284.0 285.6 289.8 290.1 290.6 298.6 303.8 304.2 301.4 299.7 295.0 285.6 278.5 276.2 278.9 283.8 291.3 296.9 302.8 304.7 303.3 298.6 292.5 283.3 272.6 267.6 269.3 275.3 287.6 298.5 305.8 310.8 309.7 304.2 293.3 275.1 269.6 269.6 269.6 269.6 269.6 269.6 269.6 269.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 22.2625 - }, - { - "text": "化 上 最 精 美 细 致 的 眼 妆 SP", - "ph_seq": "h ua sh ang z ui j ing m ei x i zh ir d e y En zh uang SP", - "note_seq": "A3 A3 D4 D4 D4 D4 D4 D4 D4 D4 C#4 C#4 D4 D4 C#4 C#4 B3 B3 A3 A3 rest", - "note_dur_seq": "0.153906 0.095312 0.092188 0.123438 0.064062 0.121875 0.065625 0.110156 0.077344 0.092969 0.094531 0.128125 0.059375 0.132812 0.054688 0.152344 0.035156 0.128125 0.059375 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.153906 0.095312 0.092188 0.123438 0.064062 0.121875 0.065625 0.110156 0.077344 0.092969 0.094531 0.128125 0.059375 0.132812 0.054688 0.152344 0.035156 0.128125 0.059375 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "277.0 277.8 275.9 271.6 266.5 254.2 240.9 230.0 224.5 222.3 220.9 218.2 214.0 210.6 210.9 219.4 221.9 222.0 222.7 222.8 221.5 213.5 197.5 199.4 202.8 206.7 213.8 216.4 236.4 267.8 290.5 297.1 299.1 298.5 296.7 294.5 291.6 291.6 292.5 292.3 290.6 272.7 260.3 264.8 279.9 293.3 302.4 296.9 296.9 296.0 295.0 294.9 294.7 294.5 291.8 284.2 265.7 261.0 286.0 295.5 294.0 295.0 296.0 295.2 297.2 296.7 296.2 296.6 295.0 292.3 292.1 292.6 292.1 289.6 288.5 288.3 288.6 289.1 292.1 297.1 296.6 295.9 293.8 295.4 294.2 283.7 257.0 251.4 273.1 284.5 279.8 273.8 273.0 271.6 270.4 276.4 279.1 279.4 280.4 279.8 278.8 278.3 277.8 271.5 257.9 238.0 234.0 244.5 260.0 272.9 293.6 301.4 302.4 300.5 296.6 294.5 293.3 293.2 294.0 294.2 293.7 293.2 290.5 270.2 242.8 246.6 272.2 287.3 287.0 284.2 281.7 281.1 279.4 278.9 278.5 277.2 276.9 275.7 270.4 265.2 262.5 257.1 253.0 248.1 245.4 244.7 245.2 246.2 246.8 247.2 248.1 249.1 249.1 246.9 239.4 218.4 196.0 181.3 181.2 200.0 212.0 215.3 218.2 220.8 222.4 222.0 220.3 217.1 213.4 207.9 205.7 207.5 210.7 217.1 223.6 227.2 228.8 228.7 224.0 217.7 208.3 200.0 195.7 196.7 199.8 206.6 218.9 227.2 233.1 233.6 227.2 211.5 211.5 211.5 211.5 211.5 211.5 211.5 211.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 25.15859375 - }, - { - "text": "华 丽 洋 服 裙 摆 微 微 摇 晃 SP", - "ph_seq": "h ua l i y ang f u q vn b ai w ei w ei y ao h uang SP", - "note_seq": "A3 A3 B3 B3 B3 B3 B3 B3 B3 B3 A3 A3 B3 B3 A3 A3 G3 G3 A3 A3 rest", - "note_dur_seq": "0.153906 0.126562 0.060937 0.152344 0.035156 0.107031 0.080469 0.104688 0.082812 0.152344 0.035156 0.133594 0.053906 0.133594 0.053906 0.142969 0.044531 0.100781 0.086719 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.153906 0.126562 0.060937 0.152344 0.035156 0.107031 0.080469 0.104688 0.082812 0.152344 0.035156 0.133594 0.053906 0.133594 0.053906 0.142969 0.044531 0.100781 0.086719 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": 
"220.0 220.0 220.0 220.0 220.0 220.0 220.0 220.1 220.3 220.4 220.3 217.3 212.4 207.7 207.4 214.4 216.6 216.3 216.0 216.1 216.8 217.9 219.7 221.5 222.7 222.0 219.8 221.0 224.3 232.5 242.8 246.7 248.4 248.7 248.4 247.8 246.9 245.9 245.5 245.5 245.5 246.1 247.1 247.8 250.7 252.1 250.5 247.9 246.7 246.7 246.8 245.4 246.4 247.7 248.4 248.5 244.2 228.7 211.2 215.2 224.4 238.1 252.7 253.4 251.4 248.2 246.9 246.9 248.1 246.1 230.1 210.9 208.6 218.7 225.4 233.1 239.9 247.8 260.3 261.0 254.8 250.1 247.7 246.2 246.1 246.2 246.7 247.4 246.7 244.7 240.5 230.7 212.3 201.4 204.8 219.7 220.8 218.9 219.5 219.9 220.4 221.7 221.5 220.9 220.3 219.0 215.2 214.9 220.5 228.9 240.6 248.2 249.1 248.5 248.5 246.5 244.5 244.1 245.0 245.9 246.7 248.4 248.4 247.5 245.1 241.9 238.8 232.8 227.4 222.0 218.5 217.0 216.6 217.5 220.5 222.0 221.9 221.5 217.2 213.6 210.6 208.3 206.0 200.0 195.5 194.2 195.1 196.0 197.6 197.9 196.8 186.9 173.9 165.2 160.7 162.4 165.9 172.5 185.5 198.6 215.6 222.2 225.9 228.2 228.0 225.4 221.0 213.9 209.9 208.0 208.4 210.9 217.2 220.6 225.5 227.2 225.5 220.8 211.8 204.7 200.2 198.5 202.0 208.9 216.0 224.2 230.3 233.5 231.2 221.5 211.9 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 28.15859375 - }, - { - "text": "卷 卷 发 梢 衬 托 出 可 爱 加 倍 的 脸 庞 SP", - "ph_seq": "j van j van f a sh ao ch en t uo ch u k e ai j ia b ei d e l ian p ang SP", - "note_seq": "A3 A3 D4 D4 D4 D4 D4 D4 D4 D4 C#4 C#4 D4 D4 C#4 C#4 B3 A#3 A#3 D4 D4 D4 D4 C#4 C#4 D4 D4 rest", - "note_dur_seq": "0.133594 0.121875 0.065625 0.107031 0.080469 0.113281 0.074219 0.110156 0.077344 0.102344 0.085156 0.110156 0.077344 0.106250 0.081250 0.187500 0.121875 0.065625 0.307812 0.067187 0.263281 0.111719 0.250000 0.125000 0.250000 0.125000 0.750000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.133594 0.121875 0.065625 0.107031 0.080469 0.113281 0.074219 0.110156 0.077344 0.102344 0.085156 0.110156 0.077344 0.106250 0.081250 0.187500 0.121875 0.065625 0.307812 0.067187 0.263281 0.111719 0.250000 0.125000 0.250000 0.125000 0.750000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "220.0 220.0 220.0 220.0 220.0 219.1 218.0 216.8 216.2 216.6 217.3 218.2 218.9 222.2 221.4 219.2 217.3 217.1 217.2 219.9 222.2 223.2 222.4 216.7 206.6 205.7 226.7 249.8 274.0 288.0 292.3 294.2 293.3 293.5 293.0 294.3 295.5 294.0 292.1 275.4 255.4 235.4 238.1 259.4 283.8 298.6 296.6 295.7 295.0 295.0 295.0 292.8 276.4 258.2 260.1 271.3 278.0 283.2 287.6 291.3 292.8 297.9 299.1 296.9 295.5 295.4 295.0 293.8 287.3 264.5 244.1 243.5 255.8 268.2 277.8 283.2 289.1 299.0 301.4 299.1 296.7 295.0 292.1 292.5 293.0 293.7 294.0 293.2 274.3 252.6 239.5 246.4 263.5 282.5 284.0 281.4 278.1 279.1 278.5 275.9 257.7 236.7 232.0 242.0 250.0 255.4 258.6 262.6 278.6 303.1 304.0 296.4 294.5 293.7 293.3 293.8 293.5 282.8 250.1 245.7 259.1 270.7 278.1 278.9 280.7 282.2 292.0 294.9 293.3 288.8 283.3 277.8 274.8 273.8 274.3 275.3 276.7 277.2 275.7 272.0 265.0 255.1 246.5 242.3 240.9 242.7 247.1 249.2 250.1 248.8 229.7 208.0 201.6 202.7 208.6 213.2 220.4 230.6 232.4 230.9 231.5 231.5 231.5 231.6 232.5 232.3 232.0 232.7 232.7 232.0 231.7 231.6 231.5 231.5 232.1 232.9 234.2 235.4 236.3 236.6 236.2 235.4 230.0 211.2 199.1 193.0 204.7 220.1 232.0 247.0 271.3 285.1 291.5 292.5 293.0 292.8 293.0 293.5 294.2 294.5 294.2 293.7 293.3 292.6 292.9 301.6 310.1 320.8 328.9 333.5 335.4 335.4 333.1 331.2 329.4 
322.5 303.1 289.5 289.6 292.5 295.2 296.2 296.9 303.8 304.7 301.6 297.9 296.7 295.7 294.7 294.3 293.0 293.7 294.3 296.2 296.0 294.6 292.3 288.1 283.3 278.8 276.1 274.8 275.9 277.2 278.6 278.9 275.6 274.8 274.3 274.0 274.3 274.8 275.6 277.7 278.1 278.8 278.1 277.5 277.5 277.5 277.7 277.3 276.9 276.7 277.2 277.2 276.4 276.1 277.6 281.8 286.3 291.5 294.2 297.4 299.3 297.6 295.5 290.5 276.2 255.8 246.5 249.8 254.0 259.4 265.1 275.6 285.5 292.3 300.0 301.9 301.6 300.4 298.1 294.7 289.8 287.8 286.5 287.1 288.1 291.1 295.0 298.8 301.9 303.0 302.6 301.1 297.9 293.3 289.3 287.3 286.6 287.3 289.3 291.3 296.0 298.3 299.7 299.8 298.6 297.4 294.9 292.3 291.3 290.1 290.1 291.1 291.5 294.3 297.6 300.5 301.9 302.8 301.6 298.3 296.2 291.1 287.0 285.5 285.0 286.1 289.6 293.5 299.0 301.4 305.3 307.0 305.4 300.0 292.3 283.8 281.1 281.1 281.1 281.1 281.1 281.1 281.1 281.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 31.17890625 - }, - { - "text": "端 详 镜 中 影 像 SP", - "ph_seq": "d uan x iang j ing zh ong y ing x iang SP", - "note_seq": "B3 B3 D4 D4 E4 E4 D4 D4 B3 B3 D4 D4 rest", - "note_dur_seq": "0.111719 0.092969 0.094531 0.121875 0.065625 0.128125 0.059375 0.179688 0.007812 0.092969 0.094531 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.111719 0.092969 0.094531 0.121875 0.065625 0.128125 0.059375 0.179688 0.007812 0.092969 0.094531 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 246.9 246.8 245.7 245.0 243.8 243.1 242.8 242.6 242.4 242.3 247.8 251.7 250.1 248.4 247.8 247.2 246.7 245.1 233.8 222.4 233.2 243.3 254.7 266.8 278.1 288.8 295.5 297.6 296.2 295.9 294.9 294.5 294.2 292.5 289.3 280.6 262.2 253.2 264.7 276.1 297.3 318.8 331.2 333.7 334.4 332.5 330.6 328.3 327.7 328.9 329.1 327.5 321.0 299.5 272.2 275.4 283.2 287.4 299.5 302.4 300.2 297.2 297.2 296.4 295.2 293.0 291.0 287.0 281.9 271.6 264.9 259.3 254.2 251.0 248.1 244.8 245.5 246.4 248.4 249.2 249.7 242.6 222.9 209.6 215.7 230.9 241.0 252.4 262.4 272.6 282.5 295.0 299.1 301.1 301.4 299.5 294.3 288.6 285.3 280.9 279.8 281.5 284.2 290.5 296.7 300.7 302.3 301.9 298.5 291.0 282.2 278.1 273.5 273.1 278.0 284.0 294.9 301.4 306.1 306.1 301.2 286.5 271.9 271.9 271.9 271.9 271.9 271.9 271.9 271.9", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 35.51328125 - }, - { - "text": "搭 配 花 伞 遮 阳 SP", - "ph_seq": "d a p ei h ua s an zh e y ang SP", - "note_seq": "B3 B3 D4 D4 F#4 F#4 E4 E4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.111719 0.108594 0.078906 0.112500 0.075000 0.097656 0.089844 0.128125 0.059375 0.152344 0.035156 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.111719 0.108594 0.078906 0.112500 0.075000 0.097656 0.089844 0.128125 0.059375 0.152344 0.035156 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "264.8 255.7 246.8 238.9 235.5 232.7 231.3 233.8 239.4 247.4 252.6 253.6 251.7 247.7 245.8 246.4 246.8 245.7 236.9 220.5 215.3 218.0 221.1 227.0 239.2 266.3 287.6 300.2 299.7 295.2 292.6 291.5 292.0 292.8 294.2 295.5 279.3 263.1 284.3 305.4 315.1 335.0 355.5 364.1 371.7 373.6 373.0 373.6 373.6 372.1 357.6 336.2 349.4 358.0 371.7 371.1 362.3 349.3 337.9 321.7 325.3 329.6 331.5 330.8 329.6 327.5 327.2 327.0 325.1 319.7 297.9 268.7 255.4 260.8 280.4 292.6 294.5 294.0 293.7 293.5 294.2 295.2 295.9 295.7 295.2 292.0 287.3 281.3 283.6 292.2 309.5 324.9 335.8 339.7 341.1 
338.7 333.1 327.5 320.1 316.7 315.8 318.6 323.2 330.2 334.8 337.3 336.0 334.2 329.8 327.0 326.2 327.5 328.5 327.7 325.7 323.8 323.8 323.8 323.8 323.8 323.8 323.8 323.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 37.01328125 - }, - { - "text": "就 算 没 人 欣 赏 SP", - "ph_seq": "j iu s uan m ei r en x in sh ang SP", - "note_seq": "B3 B3 D4 D4 F#4 F#4 E4 E4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.133594 0.097656 0.089844 0.110156 0.077344 0.136719 0.050781 0.092969 0.094531 0.095312 0.092188 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.133594 0.097656 0.089844 0.110156 0.077344 0.136719 0.050781 0.092969 0.094531 0.095312 0.092188 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 246.9 246.9 246.9 248.5 248.1 247.8 248.2 248.7 247.4 244.0 240.6 238.7 243.1 248.9 248.5 248.1 248.4 247.8 244.2 223.1 218.6 231.6 244.7 255.8 267.2 277.9 290.1 302.3 304.5 297.6 292.8 292.0 292.5 293.0 292.8 291.3 293.3 294.7 291.5 290.8 299.7 311.2 325.0 347.9 365.3 373.9 376.5 374.3 373.2 368.7 367.2 366.8 366.8 367.9 368.1 365.7 357.9 352.5 347.2 340.9 333.8 330.2 327.9 326.4 327.5 331.0 332.1 328.1 310.6 305.3 317.3 327.5 322.0 311.9 303.1 295.2 296.2 298.1 296.0 293.7 292.5 293.7 294.5 292.3 281.4 258.5 248.1 265.9 280.9 294.8 312.5 327.8 342.8 343.6 344.0 339.7 333.1 323.6 318.2 313.3 314.9 324.5 330.8 337.9 342.4 340.9 327.5 307.2 300.7 300.7 300.7 300.7 300.7 300.7 300.7 300.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 38.49140625 - }, - { - "text": "我 也 依 旧 放 光 芒 SP", - "ph_seq": "w o y E y i j iu f ang g uang m ang SP", - "note_seq": "B3 B3 D4 D4 F4 F4 F4 F4 E4 E4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.110937 0.152344 0.035156 0.179688 0.007812 0.244531 0.130469 0.214062 0.160938 0.255469 0.119531 0.250000 0.125000 1.031250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.110937 0.152344 0.035156 0.179688 0.007812 0.244531 0.130469 0.214062 0.160938 0.255469 0.119531 0.250000 0.125000 1.031250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 246.9 246.8 245.9 245.2 244.1 243.4 243.7 244.2 244.8 245.4 245.8 245.8 245.9 246.1 246.1 246.9 247.8 247.5 246.7 242.0 233.1 230.1 234.8 248.4 270.4 286.3 293.7 296.2 294.5 294.5 294.0 293.5 293.7 292.6 291.3 290.6 290.3 288.3 289.0 288.4 291.6 305.1 314.4 321.5 327.2 330.8 330.6 330.6 329.8 330.8 333.5 337.3 343.2 345.2 347.8 349.8 350.8 351.5 351.5 350.4 348.8 347.8 347.4 346.4 334.6 302.1 284.3 295.2 302.1 309.9 312.7 316.9 319.0 319.7 322.7 326.0 331.2 330.6 327.2 324.3 324.9 325.7 327.0 332.7 340.1 346.0 349.0 351.5 352.5 352.3 351.9 350.0 348.2 346.6 346.6 343.0 330.2 296.7 290.3 302.6 315.1 321.2 330.8 333.9 335.3 339.6 347.0 352.7 347.8 342.4 338.5 335.4 331.9 329.2 328.1 329.4 330.2 331.5 333.8 333.8 333.5 331.0 328.5 327.0 321.5 317.5 315.7 317.7 322.5 324.7 324.9 322.8 316.4 296.9 269.3 260.4 262.1 283.5 302.3 296.2 292.3 292.3 293.2 293.8 294.5 293.8 292.6 292.0 292.0 292.0 291.5 292.5 296.1 301.6 309.7 318.6 326.6 331.2 333.8 335.0 334.2 332.3 330.8 327.5 324.0 325.1 326.4 326.0 325.3 326.0 329.6 338.5 338.1 335.4 333.8 333.5 333.3 332.9 331.9 330.6 329.2 329.2 329.1 329.1 328.3 328.5 329.2 330.0 330.8 331.3 331.2 330.6 329.8 329.2 328.3 327.5 328.1 328.7 329.2 329.8 330.6 331.0 330.8 329.1 326.8 325.5 326.0 326.8 327.7 330.0 331.9 333.8 334.0 333.5 331.2 328.1 
326.8 326.0 327.0 328.1 330.4 332.3 333.7 334.0 334.0 332.5 329.1 325.8 324.5 323.6 324.3 326.8 329.6 331.5 333.5 335.4 335.0 334.2 331.5 328.7 328.9 330.0 330.6 330.8 330.6 330.0 329.4 328.9 329.2 330.4 331.3 333.3 335.8 336.6 337.5 340.9 340.1 333.7 321.9 308.4 308.4 308.4 308.4 308.4 308.4 308.4 308.4", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 40.0140625 - }, - { - "text": "啾 SP", - "ph_seq": "j iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.133594 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.133594 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.0 582.3 574.9 567.7 564.1 558.9 545.8 531.8 574.6 579.9 590.0 590.4 589.7 586.0 585.3 582.6 580.2 576.6 578.2 576.9 574.2 572.9 582.6 590.7 597.3 601.8 601.1 598.3 594.2 588.7 579.6 558.2 550.2 550.2 550.2 550.2 550.2 550.2 550.2 550.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 43.36640625 - }, - { - "text": "也 许 我 该 说 声 抱 歉 带 来 困 扰 不 是 故 意 别 讨 厌 SP", - "ph_seq": "y E x v w o g ai sh uo sh eng b ao q ian d ai l ai k un r ao b u sh ir g u y i b ie t ao y En SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.072656 0.092969 0.094531 0.133594 0.053906 0.127344 0.060156 0.095312 0.092188 0.095312 0.092188 0.264062 0.110937 0.208594 0.166406 0.266406 0.108594 0.126562 0.060937 0.106250 0.081250 0.458594 0.103906 0.132031 0.055469 0.095312 0.092188 0.127344 0.060156 0.179688 0.007812 0.132031 0.055469 0.203906 0.171094 0.303906 0.071094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.072656 0.092969 0.094531 0.133594 0.053906 0.127344 0.060156 0.095312 0.092188 0.095312 0.092188 0.264062 0.110937 0.208594 0.166406 0.266406 0.108594 0.126562 0.060937 0.106250 0.081250 0.458594 0.103906 0.132031 0.055469 0.095312 0.092188 0.127344 0.060156 0.179688 0.007812 0.132031 0.055469 0.203906 0.171094 0.303906 0.071094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "283.7 283.2 283.3 283.5 284.0 284.8 285.5 286.8 292.3 294.7 295.5 296.4 289.0 258.6 244.0 251.7 262.2 269.3 278.1 289.1 299.2 309.4 319.0 331.2 340.3 335.8 333.3 330.6 329.6 329.4 329.2 328.1 328.3 328.1 328.1 330.0 332.1 334.0 334.4 333.5 332.7 331.5 330.4 328.5 329.2 329.8 329.6 329.1 328.3 327.4 315.3 284.0 256.7 252.0 263.4 296.7 300.4 296.2 294.3 292.8 292.6 294.3 293.0 281.2 260.0 271.3 287.8 298.8 307.7 317.2 326.3 336.7 330.6 323.2 327.9 331.2 330.2 319.0 276.2 259.2 268.5 281.7 290.8 307.2 325.0 337.2 350.2 360.6 369.6 372.6 370.6 369.4 369.1 369.4 369.1 367.4 367.4 367.9 369.1 370.2 370.4 368.7 368.1 367.9 367.9 368.3 369.6 371.1 372.4 373.9 374.7 375.2 375.2 375.4 376.7 376.5 351.8 337.2 371.6 444.4 510.7 537.3 549.6 553.1 552.4 547.4 545.2 540.1 537.3 538.6 543.6 551.5 555.3 557.9 559.5 559.5 559.5 556.0 550.9 525.4 482.3 500.8 529.6 532.4 524.5 519.6 520.2 522.6 523.3 524.4 530.4 535.1 539.5 559.2 576.6 587.0 589.4 582.3 582.3 581.3 580.6 581.9 585.6 587.0 597.9 603.5 598.3 592.8 590.7 585.6 583.3 581.6 580.9 582.3 587.0 592.1 593.1 591.1 584.3 568.3 538.3 496.5 456.9 431.9 437.3 452.4 441.5 434.2 437.2 439.0 441.0 444.6 445.6 444.6 443.3 437.5 427.0 411.1 393.1 376.5 369.5 372.8 367.9 368.5 369.6 
371.3 371.9 370.4 348.2 324.0 331.0 353.3 365.3 346.9 322.7 298.3 269.8 257.1 267.7 278.9 288.3 297.1 300.0 297.8 294.9 287.6 282.8 276.9 277.5 279.6 283.8 289.8 294.9 302.1 305.4 305.1 300.9 295.7 288.0 281.2 279.6 280.2 284.6 292.6 297.2 301.7 302.3 301.7 301.2 300.0 298.6 297.9 297.9 298.1 297.6 297.8 297.9 295.7 293.7 289.5 285.8 283.0 283.5 285.6 288.3 292.5 295.9 295.7 294.2 292.5 291.8 290.5 290.5 292.3 294.3 298.8 299.0 280.1 265.4 266.6 284.3 310.8 332.1 332.7 331.2 331.5 330.6 315.7 284.3 289.5 307.7 313.5 319.3 324.0 330.2 333.1 337.7 340.1 331.7 337.1 337.1 332.3 329.2 328.5 328.1 327.5 327.0 309.2 271.6 273.2 285.0 296.8 295.3 291.2 286.0 289.0 291.1 292.0 292.6 292.8 292.8 292.6 292.6 293.7 295.5 297.4 298.1 299.6 303.2 309.7 315.8 320.8 325.8 330.4 331.0 331.2 330.6 329.2 329.4 332.1 322.3 282.0 272.1 291.9 318.3 339.9 367.9 379.3 379.1 378.6 376.9 372.1 365.7 362.4 361.1 361.3 362.4 364.5 367.0 371.5 372.8 374.1 372.8 372.1 368.9 367.2 366.2 350.4 318.2 304.0 293.0 292.0 305.6 319.5 330.6 343.4 356.2 368.2 380.8 380.2 388.4 391.5 391.8 390.9 389.5 390.0 390.2 388.6 389.1 389.5 390.0 390.0 389.3 390.9 391.3 391.8 393.4 393.6 393.8 393.8 393.6 392.2 388.4 381.5 368.1 360.5 359.0 361.5 372.4 391.4 409.2 425.0 434.4 434.7 437.0 439.7 439.2 434.7 433.9 433.4 435.4 437.7 439.7 442.8 443.8 444.3 445.4 444.9 443.8 439.7 440.3 434.2 434.9 432.9 432.2 433.4 441.8 444.9 454.5 486.8 524.8 535.5 555.0 555.0 555.0 555.0 555.0 555.0 555.0 555.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 43.98984375 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 582.9 575.2 568.0 563.4 558.9 544.8 544.8 570.6 582.3 586.3 583.9 582.3 581.9 579.2 577.2 576.9 577.6 578.6 581.9 588.3 589.7 589.7 593.8 595.9 598.6 597.3 593.5 591.4 586.7 578.2 559.5 551.2 551.2 551.2 551.2 551.2 551.2 551.2 551.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 49.3296875 - }, - { - "text": "耍 小 心 机 有 点 惊 险 但 我 绝 SP", - "ph_seq": "sh ua x iao x in j i y ou d ian j ing x ian d an w o j ve SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 rest", - "note_dur_seq": "0.188281 0.092969 0.094531 0.092969 0.094531 0.121875 0.065625 0.152344 0.035156 0.132812 0.054688 0.244531 0.130469 0.186719 0.188281 0.339062 0.047656 0.121875 0.053906 0.101562 0.085938 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.188281 0.092969 0.094531 0.092969 0.094531 0.121875 0.065625 0.152344 0.035156 0.132812 0.054688 0.244531 0.130469 0.186719 0.188281 0.339062 0.047656 0.121875 0.053906 0.101562 0.085938 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "331.9 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 283.7 282.4 281.7 281.9 282.0 283.5 285.5 287.1 290.3 298.3 298.8 297.1 296.2 295.2 293.3 281.4 265.7 261.8 279.8 290.5 296.9 308.2 319.4 332.0 342.2 339.5 333.8 330.6 330.0 330.6 330.2 328.1 308.4 290.3 301.6 320.4 324.7 325.1 326.6 329.8 336.6 345.4 340.9 334.6 331.3 329.6 328.7 328.7 326.4 320.8 311.8 293.2 273.7 265.4 261.3 267.1 278.5 292.0 293.8 292.6 293.2 293.7 293.3 293.5 294.2 295.0 295.2 295.0 294.3 288.9 
289.6 298.7 312.7 322.1 327.9 331.2 331.7 330.6 329.6 329.8 331.0 330.8 321.7 285.1 284.2 305.4 327.5 345.7 367.6 382.2 379.7 378.0 378.4 376.9 373.9 370.4 367.7 366.0 366.0 366.4 367.0 368.9 371.5 371.3 371.5 369.6 369.6 369.8 370.2 372.6 373.0 371.5 366.8 358.0 364.1 369.6 385.5 411.7 433.0 454.1 484.7 514.9 541.7 560.5 562.8 564.1 562.4 556.0 547.7 544.8 545.5 548.9 553.7 557.9 558.5 558.5 560.5 559.2 553.4 543.0 516.0 532.7 551.5 550.2 546.4 546.1 546.1 544.2 543.6 545.5 546.2 546.6 544.2 545.5 560.8 567.0 563.1 557.3 553.4 546.7 543.0 544.2 547.7 553.4 565.0 577.6 584.3 587.3 586.7 585.6 585.0 585.0 585.6 586.0 587.3 588.7 592.4 596.2 595.2 588.3 579.2 559.8 512.0 464.3 432.9 424.4 436.5 443.1 442.0 442.0 442.5 444.6 444.9 444.1 442.5 439.0 434.7 423.8 416.3 408.6 399.3 386.7 375.4 372.6 370.2 371.9 372.6 368.9 351.7 313.7 332.7 358.6 365.7 357.6 334.2 313.7 293.2 270.5 256.5 268.2 282.0 290.0 296.6 299.0 297.1 293.8 286.5 279.4 276.5 275.9 278.6 282.0 288.3 296.2 300.0 303.5 302.6 299.8 293.2 285.0 279.3 277.8 278.8 281.4 288.5 295.9 299.1 302.8 303.3 299.7 297.2 297.2 297.2 297.2 297.2 297.2 297.2 297.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 49.87421875 - }, - { - "text": "不 在 人 面 前 装 可 怜 SP", - "ph_seq": "b u z ai r en m ian q ian zh uang k e l ian SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.114062 0.123438 0.064062 0.136719 0.050781 0.110156 0.077344 0.104688 0.082812 0.128125 0.059375 0.212500 0.162500 0.253125 0.121875 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.123438 0.064062 0.136719 0.050781 0.110156 0.077344 0.104688 0.082812 0.128125 0.059375 0.212500 0.162500 0.253125 0.121875 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "294.5 294.7 294.9 295.0 295.0 295.4 295.4 295.2 294.7 294.0 293.2 298.1 299.3 295.0 293.3 292.5 293.8 294.7 294.7 290.0 264.4 251.3 261.9 280.7 289.6 304.6 321.1 333.5 336.7 332.9 331.2 330.4 329.1 330.4 329.2 329.2 328.5 327.4 326.0 327.0 329.2 333.1 336.2 336.4 335.0 333.1 331.3 331.3 331.2 329.2 328.7 328.1 327.2 321.5 313.5 309.2 306.5 299.8 294.6 294.0 293.5 293.5 293.8 294.3 293.0 294.0 295.5 294.7 290.0 265.4 243.7 240.1 249.5 266.4 279.3 287.1 284.5 280.2 277.7 276.7 276.1 275.4 275.6 276.7 277.8 278.6 278.1 265.0 239.2 228.9 242.8 272.3 294.5 298.1 297.2 296.6 295.0 292.8 292.3 291.1 290.1 290.5 291.1 293.3 295.2 294.9 294.7 294.3 294.2 294.3 295.2 295.4 295.0 294.5 290.0 275.3 264.1 278.3 291.6 291.8 294.1 297.0 306.5 315.6 334.0 336.9 333.7 332.5 330.2 326.2 324.0 324.2 324.9 325.5 326.4 327.9 328.9 329.2 329.8 330.2 329.8 329.4 329.8 329.4 328.3 325.8 322.3 316.2 310.4 294.5 276.5 264.5 260.3 262.7 268.6 279.4 287.0 296.7 299.5 300.7 300.0 297.6 294.0 288.1 282.0 280.9 282.7 286.6 292.8 298.6 301.2 300.5 299.0 294.3 287.1 280.9 279.9 283.8 289.3 296.2 301.1 303.3 303.3 299.0 294.0 283.7 275.3 272.1 271.2 271.2 271.2 271.2 271.2 271.2 271.2 271.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 52.9484375 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 583.3 575.6 568.0 566.0 581.9 587.3 
585.0 591.1 593.8 597.6 597.3 591.4 583.6 580.6 578.9 575.9 575.2 576.2 575.9 586.7 593.1 591.4 595.5 600.0 600.0 596.9 593.8 591.4 586.3 578.2 559.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 55.3296875 - }, - { - "text": "想 变 得 可 爱 不 惹 嫌 要 努 力 变 得 像 糖 果 一 样 甜 SP", - "ph_seq": "x iang b ian d e k e ai b u r e x ian y ao n u l i b ian d e x iang t ang g uo y i y ang t ian SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 D4 D4 E4 E4 D4 D4 E4 E4 F#4 F#4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.192969 0.132031 0.055469 0.132812 0.054688 0.106250 0.081250 0.187500 0.132031 0.055469 0.273438 0.101562 0.186719 0.188281 0.332813 0.042188 0.107031 0.080469 0.126562 0.060937 0.448437 0.114062 0.132812 0.054688 0.092969 0.094531 0.102344 0.085156 0.127344 0.060156 0.179688 0.007812 0.303906 0.071094 0.203906 0.171094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.192969 0.132031 0.055469 0.132812 0.054688 0.106250 0.081250 0.187500 0.132031 0.055469 0.273438 0.101562 0.186719 0.188281 0.332813 0.042188 0.107031 0.080469 0.126562 0.060937 0.448437 0.114062 0.132812 0.054688 0.092969 0.094531 0.102344 0.085156 0.127344 0.060156 0.179688 0.007812 0.303906 0.071094 0.203906 0.171094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "455.5 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 294.2 292.5 291.5 291.8 292.1 292.0 291.5 291.0 290.5 290.8 290.3 290.8 292.0 292.0 292.5 293.0 293.2 293.5 293.8 294.0 292.0 275.7 258.6 279.3 323.6 332.5 333.1 332.1 331.7 330.4 328.5 327.2 327.0 327.4 328.1 329.6 330.8 329.1 318.6 304.5 319.5 333.3 332.7 330.8 329.6 330.2 328.5 301.4 276.1 269.4 268.7 272.3 274.5 275.4 276.4 282.5 281.5 280.7 287.3 291.8 294.7 295.2 294.2 293.5 294.0 293.0 292.8 293.3 293.7 294.0 297.2 302.9 310.7 319.1 326.0 330.2 331.5 331.9 331.2 328.7 329.6 333.1 319.1 284.3 275.4 280.6 299.8 323.1 343.8 367.7 383.9 382.2 381.5 378.2 376.0 374.1 368.7 366.0 364.9 364.3 366.2 369.4 369.8 370.4 372.1 371.3 370.4 369.6 368.1 369.6 372.1 374.7 376.7 377.6 378.0 377.3 370.0 364.5 374.0 400.9 454.4 508.4 543.9 559.5 561.5 562.8 557.3 553.4 549.6 545.8 545.5 546.7 551.2 556.3 560.8 562.4 563.4 562.4 555.6 542.3 519.9 513.4 519.3 523.3 525.7 531.8 530.6 531.8 535.5 538.0 542.2 547.5 549.9 552.3 577.9 596.9 600.4 599.7 594.5 583.9 578.9 576.9 577.9 579.9 584.3 587.7 589.0 589.7 585.6 583.9 582.9 583.6 585.0 588.3 591.8 592.8 593.8 592.4 589.7 575.2 567.7 561.8 547.2 521.6 489.5 466.9 456.8 447.4 444.6 443.3 441.8 442.3 442.5 443.3 443.1 441.3 437.7 428.0 408.6 395.1 384.1 376.2 376.2 371.7 368.9 367.9 368.9 372.1 373.6 375.2 373.6 369.1 359.3 336.7 310.8 283.5 266.4 256.4 263.4 273.1 278.8 287.3 295.5 300.0 302.6 300.2 295.0 288.6 278.6 273.5 272.6 277.7 283.2 289.6 296.6 301.2 304.0 305.1 303.5 298.1 294.0 285.1 279.6 275.9 276.2 279.4 282.7 287.5 293.3 299.5 302.3 303.5 300.9 294.9 290.3 285.6 285.5 286.3 283.5 284.5 286.0 287.6 288.3 288.8 289.5 290.3 291.3 289.3 290.8 291.8 292.1 291.0 289.5 289.5 290.1 292.1 294.2 294.9 295.2 292.1 283.0 266.8 268.5 290.8 298.1 296.7 296.0 295.5 292.8 283.5 259.5 261.2 273.2 284.0 295.0 303.2 310.9 319.9 328.1 328.9 335.2 334.4 332.9 331.0 328.3 327.0 327.4 328.1 328.3 326.4 317.5 289.9 272.0 272.5 290.0 301.6 293.7 291.0 291.6 293.0 293.7 294.3 295.7 295.5 293.7 291.1 288.3 285.0 
276.4 274.9 292.0 317.1 326.0 327.4 329.4 330.2 329.8 329.4 329.2 329.1 329.6 330.0 329.1 328.3 326.2 327.7 334.2 345.8 363.4 374.9 379.5 381.1 379.7 377.3 373.0 364.7 360.1 358.0 360.1 363.6 369.8 373.6 375.2 375.8 373.6 370.9 367.2 364.7 364.1 364.7 367.4 370.2 371.5 372.8 373.2 373.6 375.1 379.6 383.9 388.3 391.5 392.9 392.7 393.4 392.0 390.0 388.8 388.4 388.2 388.2 388.6 389.7 390.6 390.9 390.6 391.1 391.8 393.1 394.3 395.6 396.1 395.2 393.6 385.7 361.3 351.0 362.4 376.9 385.5 394.7 404.7 415.3 433.9 445.9 452.1 454.5 455.3 451.1 447.7 439.7 433.9 431.2 432.2 436.5 442.0 444.6 444.3 442.8 440.3 437.5 434.2 435.2 437.7 439.5 438.2 434.4 437.0 436.7 437.7 439.2 450.0 461.9 514.6 543.0 547.7 547.7 547.7 547.7 547.7 547.7 547.7 547.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 55.86953125 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 574.2 571.3 569.0 567.7 572.9 576.9 577.9 583.6 579.6 586.7 588.7 589.4 589.7 591.8 589.4 585.3 583.3 580.9 579.9 572.6 579.6 589.7 591.4 595.2 599.7 600.0 597.3 593.5 591.4 586.7 578.2 559.5 551.2 551.2 551.2 551.2 551.2 551.2 551.2 551.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 61.3296875 - }, - { - "text": "少 女 的 武 器 是 SP", - "ph_seq": "sh ao n v d e w u q i sh ir SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 rest", - "note_dur_seq": "0.188281 0.107031 0.080469 0.132812 0.054688 0.179688 0.007812 0.104688 0.082812 0.095312 0.092188 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.188281 0.107031 0.080469 0.132812 0.054688 0.179688 0.007812 0.104688 0.082812 0.095312 0.092188 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "331.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 279.6 278.3 277.3 277.5 277.5 277.7 277.8 278.1 278.8 288.6 294.0 296.2 295.9 294.7 293.7 292.6 292.6 293.5 293.2 290.1 290.6 295.0 301.3 309.6 325.1 332.9 332.5 332.3 330.0 329.1 328.7 328.9 329.2 329.6 327.7 305.6 272.3 273.4 285.1 307.0 339.3 337.3 333.8 333.1 332.3 331.3 329.8 328.9 327.9 328.3 328.9 329.2 329.2 325.8 319.5 313.1 303.0 296.7 293.2 291.3 292.1 294.5 295.7 291.0 268.4 253.9 265.9 281.1 289.1 293.8 301.0 309.6 320.2 329.4 332.9 334.2 330.8 324.0 303.8 297.4 315.1 324.0 326.4 328.3 335.0 341.2 347.4 352.1 360.3 379.1 389.1 387.0 378.0 372.1 366.0 361.1 359.5 359.7 362.8 369.4 373.2 382.2 384.8 381.7 363.2 363.2 363.2 363.2 363.2 363.2 363.2 363.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 61.87421875 - }, - { - "text": "狡 辩 让 目 光 多 多 驻 足 在 我 身 前 因 为 不 是 谁 都 像 这 般 耀 眼 SP", - "ph_seq": "j iao b ian r ang m u g uang d uo d uo zh u z u z ai w o sh en q ian y in w ei b u sh ir sh ei d ou x iang zh e b an y ao y En SP", - "note_seq": "C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.133594 0.264062 0.110937 0.273438 0.101562 0.110156 0.077344 0.127344 0.060156 0.450781 0.111719 0.132812 0.054688 0.128125 0.059375 0.123438 0.064062 0.123438 0.064062 0.133594 0.053906 
0.232031 0.142969 0.255469 0.119531 0.367188 0.007812 0.133594 0.053906 0.132031 0.055469 0.374219 0.188281 0.095312 0.092188 0.132812 0.054688 0.092969 0.094531 0.128125 0.059375 0.132031 0.055469 0.303906 0.071094 0.303906 0.071094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.133594 0.264062 0.110937 0.273438 0.101562 0.110156 0.077344 0.127344 0.060156 0.450781 0.111719 0.132812 0.054688 0.128125 0.059375 0.123438 0.064062 0.123438 0.064062 0.133594 0.053906 0.232031 0.142969 0.255469 0.119531 0.367188 0.007812 0.133594 0.053906 0.132031 0.055469 0.374219 0.188281 0.095312 0.092188 0.132812 0.054688 0.092969 0.094531 0.128125 0.059375 0.132031 0.055469 0.303906 0.071094 0.303906 0.071094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "351.7 358.0 373.4 402.1 424.5 458.7 481.2 492.5 497.3 499.0 499.6 498.2 497.0 508.9 519.0 517.5 506.3 499.6 498.2 501.4 508.6 522.6 536.1 543.6 552.8 555.0 556.3 555.3 554.0 552.8 553.1 555.3 559.5 559.8 560.2 556.9 544.8 514.0 521.1 535.8 536.1 519.4 519.2 536.9 566.6 581.6 586.7 590.7 591.4 583.9 575.2 569.6 566.7 568.0 570.6 577.2 582.3 591.8 594.8 599.7 597.9 590.4 583.9 581.3 582.9 585.6 588.3 590.4 589.7 588.0 577.2 559.2 531.8 497.7 471.6 449.2 444.1 446.9 442.8 442.3 445.1 444.6 443.3 442.0 440.3 440.0 439.5 432.2 420.9 412.0 401.2 386.8 378.5 370.9 367.9 365.5 364.9 366.6 369.4 371.9 372.1 370.2 353.1 313.3 274.6 258.0 261.3 268.6 277.4 289.1 282.5 280.2 282.5 293.8 301.1 306.0 307.2 305.1 300.2 295.4 290.3 282.0 276.7 273.7 274.8 279.4 284.6 292.8 299.1 305.6 307.7 306.3 299.1 288.5 278.1 266.5 262.2 263.1 269.9 280.9 293.7 304.2 311.7 314.4 309.9 299.3 284.8 274.8 266.8 262.4 270.2 278.8 287.1 289.8 289.5 288.0 286.0 284.3 285.6 291.0 292.3 293.7 293.3 292.3 292.5 292.8 293.7 295.2 295.5 285.0 255.0 257.4 281.9 309.5 332.1 335.4 332.1 331.2 330.8 330.8 331.5 327.5 301.6 266.7 274.2 291.1 301.1 306.1 310.1 314.9 323.2 329.8 332.7 333.1 332.3 331.2 330.0 329.6 324.7 307.2 277.2 255.9 246.5 255.0 266.3 280.2 297.1 297.9 295.0 294.3 294.7 295.4 295.4 295.5 293.3 288.3 273.5 249.7 239.5 241.3 240.8 255.2 276.2 281.7 279.4 276.9 277.3 278.0 278.0 278.8 277.7 278.1 276.1 271.3 266.0 265.0 269.1 279.6 293.0 298.3 299.0 297.9 295.9 293.7 291.6 290.8 291.0 291.5 292.1 292.8 293.5 294.3 295.0 295.5 295.4 294.2 289.3 268.1 244.2 239.4 249.1 257.9 265.1 269.6 277.5 282.8 292.1 302.9 310.7 317.0 325.6 334.8 335.8 335.4 333.8 332.9 331.2 329.6 326.8 324.3 324.0 324.9 326.0 328.1 330.0 331.0 332.3 333.8 332.7 330.2 314.6 292.6 272.9 272.7 278.3 279.8 281.1 282.5 286.1 288.6 291.0 294.6 299.3 298.8 299.0 298.3 297.9 296.2 293.5 290.0 287.5 286.3 287.0 289.8 291.6 293.0 295.7 295.9 296.0 295.0 293.0 291.1 291.6 293.0 295.7 298.1 299.5 300.0 300.2 298.3 284.6 280.1 284.0 295.7 319.8 359.5 389.5 409.6 424.8 439.5 446.4 448.7 447.2 442.8 440.8 437.7 433.2 425.3 420.5 414.3 403.3 385.9 375.4 370.0 367.2 366.2 366.2 368.7 371.3 370.2 360.5 343.6 337.5 336.0 328.6 323.1 310.0 296.3 282.5 278.5 282.7 287.3 293.2 295.7 295.4 294.0 290.6 285.3 282.2 281.4 283.5 287.8 293.5 299.1 304.2 306.3 304.9 302.8 295.0 288.0 284.0 282.8 284.8 287.6 292.6 295.7 298.6 299.5 300.0 299.5 297.6 295.7 295.0 295.2 283.7 264.1 271.8 281.2 288.6 292.6 294.2 295.2 296.7 298.6 299.8 300.7 306.3 302.8 297.8 296.9 295.5 289.0 268.2 259.2 263.1 274.0 280.6 284.8 289.6 294.5 300.6 307.4 320.1 332.7 338.5 337.3 333.3 331.7 328.5 327.7 328.1 327.9 327.7 326.4 309.9 
293.7 300.9 314.0 336.0 339.9 334.4 331.5 331.2 330.8 330.2 326.8 297.2 286.5 290.5 300.2 299.5 298.2 297.1 296.6 288.8 287.6 295.9 293.8 294.5 293.7 293.5 293.7 294.2 294.5 293.8 291.3 280.6 259.3 240.0 240.0 261.5 279.6 280.9 279.1 277.8 278.0 278.9 278.9 278.8 275.3 253.9 220.0 217.7 225.6 226.6 234.9 258.6 284.3 295.4 298.3 299.7 297.9 293.7 291.3 287.8 285.6 285.8 286.1 287.3 292.0 294.7 296.6 297.8 297.4 296.0 294.2 293.3 293.2 293.0 292.5 291.5 291.6 293.2 294.3 295.2 295.7 300.8 315.0 321.2 324.3 326.6 328.7 331.2 330.4 330.8 327.9 326.4 325.7 324.5 325.3 325.3 328.7 329.1 329.8 331.5 331.9 331.5 331.2 330.6 330.0 329.4 326.6 318.8 302.1 283.7 268.1 258.2 257.0 261.7 272.3 284.5 291.0 295.7 297.8 299.7 298.1 295.9 291.8 289.3 289.8 291.6 294.3 295.7 296.0 295.4 291.8 289.3 287.8 287.0 287.3 290.6 294.3 300.0 302.6 301.9 300.7 297.6 290.1 285.1 282.4 283.2 285.0 284.3 283.0 283.0 283.0 283.0 283.0 283.0 283.0 283.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 63.24140625 - }, - { - "text": "在 属 于 我 的 时 间 SP", - "ph_seq": "z ai sh u y v w o d e sh ir j ian SP", - "note_seq": "D4 D4 F#4 F#4 F#4 F#4 F#4 F#4 E4 E4 E4 E4 A4 A4 rest", - "note_dur_seq": "0.091406 0.095312 0.092188 0.179688 0.007812 0.133594 0.053906 0.132812 0.054688 0.095312 0.092188 0.121875 0.065625 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.091406 0.095312 0.092188 0.179688 0.007812 0.133594 0.053906 0.132812 0.054688 0.095312 0.092188 0.121875 0.065625 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "289.3 291.0 291.8 292.3 292.8 292.8 292.1 291.1 290.3 294.7 299.3 298.5 295.9 293.5 289.1 270.4 265.7 293.2 299.5 296.4 301.7 313.3 324.9 335.4 348.5 364.9 375.8 376.0 375.2 374.7 371.9 370.0 368.9 368.1 368.1 369.4 370.4 371.5 371.1 370.4 370.0 371.9 373.2 371.9 371.3 371.3 371.1 371.1 370.2 369.4 370.4 370.4 370.6 371.7 373.4 375.6 375.8 373.4 372.8 372.6 372.4 372.1 371.5 370.0 368.3 367.9 368.5 367.9 367.0 353.0 321.4 307.5 327.2 340.3 335.4 333.1 331.7 328.9 317.5 293.3 289.8 296.7 301.7 304.7 306.8 308.1 309.2 310.6 311.8 308.3 315.3 332.1 335.0 334.4 333.1 331.7 331.2 328.5 318.8 290.5 285.5 310.1 330.3 354.7 385.7 417.7 445.1 453.7 453.4 448.7 440.8 436.7 433.2 429.7 425.0 425.0 433.2 442.0 455.5 462.4 454.5 424.8 424.8 424.8 424.8 424.8 424.8 424.8 424.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 82.22109375 - }, - { - "text": "请 不 要 指 指 点 点 SP", - "ph_seq": "q ing b u y ao zh ir zh ir d ian d ian SP", - "note_seq": "B3 B3 D4 D4 D4 D4 D4 D4 C#4 C#4 C#4 C#4 F#4 F#4 rest", - "note_dur_seq": "0.073438 0.132031 0.055469 0.152344 0.035156 0.128125 0.059375 0.128125 0.059375 0.132812 0.054688 0.132812 0.054688 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.073438 0.132031 0.055469 0.152344 0.035156 0.128125 0.059375 0.128125 0.059375 0.132812 0.054688 0.132812 0.054688 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "268.7 254.6 248.4 245.2 243.1 239.4 235.8 234.4 241.4 246.8 246.2 245.1 243.8 244.8 246.5 248.1 248.8 249.1 248.5 244.5 230.6 221.9 233.8 259.7 282.5 290.0 293.8 295.7 295.5 295.0 293.8 293.0 292.8 292.0 288.5 286.3 288.8 295.2 299.1 299.3 298.6 296.6 294.9 294.0 294.0 294.0 294.2 292.8 282.5 260.4 251.4 262.2 268.8 272.1 274.2 276.1 288.1 294.5 297.2 296.4 295.2 294.9 293.5 289.5 269.1 241.2 238.8 
245.5 252.3 262.3 268.4 279.8 283.3 282.5 278.6 277.0 277.2 277.0 277.7 278.9 278.8 278.5 277.2 272.3 243.8 221.4 224.1 257.1 284.6 280.7 278.8 277.5 278.1 278.6 277.8 277.2 276.4 277.5 279.1 279.8 274.5 260.9 270.3 320.5 363.8 371.9 377.1 378.4 377.8 374.1 367.4 363.4 359.7 357.6 362.0 366.6 377.6 384.2 383.0 350.4 350.4 350.4 350.4 350.4 350.4 350.4 350.4", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 83.7390625 - }, - { - "text": "不 想 在 我 的 身 边 SP", - "ph_seq": "b u x iang z ai w o d e sh en b ian SP", - "note_seq": "A3 A3 B3 B3 B3 B3 B3 B3 A3 A3 A3 A3 D4 D4 rest", - "note_dur_seq": "0.114062 0.092969 0.094531 0.123438 0.064062 0.133594 0.053906 0.132812 0.054688 0.095312 0.092188 0.132031 0.055469 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.092969 0.094531 0.123438 0.064062 0.133594 0.053906 0.132812 0.054688 0.095312 0.092188 0.132031 0.055469 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "320.4 293.3 262.5 236.2 225.1 216.0 212.0 210.9 213.7 219.0 224.1 225.8 221.9 221.5 220.9 219.4 216.0 200.5 192.3 210.2 216.1 216.3 218.6 226.4 232.0 238.3 243.5 248.8 252.7 250.0 249.8 247.9 246.2 246.4 246.5 246.5 245.9 242.7 232.7 218.0 214.0 221.7 240.6 254.8 253.2 249.7 247.5 246.7 246.5 246.7 247.4 245.9 246.5 246.1 245.5 244.4 243.4 244.0 246.2 248.4 249.4 249.4 248.9 248.5 248.1 246.9 245.8 245.2 245.4 245.4 244.4 233.9 213.6 206.2 218.1 227.0 225.1 223.1 221.4 216.2 213.0 211.4 213.6 214.7 216.0 218.0 219.1 219.6 220.6 222.4 224.6 225.4 221.1 220.5 219.9 218.9 219.0 219.5 220.4 220.8 220.8 221.3 222.0 222.6 211.8 201.9 235.5 278.9 292.5 299.1 301.7 302.1 299.5 291.5 286.0 278.3 275.6 277.2 282.5 295.4 306.0 311.3 307.0 288.3 288.3 288.3 288.3 288.3 288.3 288.3 288.3", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 85.1984375 - }, - { - "text": "那 就 不 要 再 相 见 SP", - "ph_seq": "n a j iu b u y ao z ai x iang j ian SP", - "note_seq": "D4 D4 G4 G4 F#4 F#4 E4 E4 D4 D4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.100781 0.121875 0.065625 0.132031 0.055469 0.152344 0.035156 0.123438 0.064062 0.092969 0.094531 0.244531 0.130469 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.100781 0.121875 0.065625 0.132031 0.055469 0.152344 0.035156 0.123438 0.064062 0.092969 0.094531 0.244531 0.130469 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "281.2 282.2 283.2 284.2 284.5 285.0 285.1 286.3 287.3 288.0 290.6 293.2 292.0 292.3 293.2 295.0 296.2 295.2 288.1 285.0 294.0 299.8 302.9 308.0 325.6 357.4 378.9 390.9 395.9 397.5 397.5 395.6 393.8 392.4 389.1 380.6 382.4 384.6 392.8 391.7 385.8 372.5 362.0 370.9 373.2 373.9 373.4 373.0 372.4 370.4 369.6 369.1 368.3 362.0 362.0 360.3 357.8 350.8 339.7 335.6 332.5 330.4 328.5 327.9 328.5 330.2 331.0 329.6 302.4 272.9 269.0 281.1 298.5 308.8 304.4 298.6 296.6 293.7 292.3 290.5 277.8 263.6 258.6 263.4 266.2 268.4 271.6 273.2 274.3 277.2 280.2 295.2 299.0 298.8 298.1 295.5 292.8 291.6 291.3 291.8 292.1 293.0 294.5 295.5 294.9 294.7 294.9 293.8 293.7 292.6 293.7 294.9 295.2 295.5 294.9 287.6 263.0 252.3 267.4 282.2 295.0 313.7 327.7 339.5 342.4 337.5 335.2 330.4 326.4 324.0 321.7 320.4 322.1 327.5 335.4 342.4 340.3 324.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0 309.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - 
"input_type": "phoneme", - "offset": 86.71171875 - }, - { - "text": "眼 神 坚 定 向 前 SP", - "ph_seq": "y En sh en j ian d ing x iang q ian SP", - "note_seq": "F#4 F#4 F#4 F#4 F#4 F#4 E4 E4 E4 E4 A4 A4 rest", - "note_dur_seq": "0.072656 0.095312 0.092188 0.121875 0.065625 0.132812 0.054688 0.092969 0.094531 0.104688 0.082812 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.072656 0.095312 0.092188 0.121875 0.065625 0.132812 0.054688 0.092969 0.094531 0.104688 0.082812 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "360.1 366.6 370.0 369.6 362.8 347.2 332.9 328.7 341.4 356.4 365.3 371.3 375.8 376.5 373.2 352.9 329.2 335.2 341.3 351.5 356.2 365.3 374.5 375.8 372.1 373.2 372.4 371.7 372.4 371.3 371.1 371.1 370.4 365.5 356.2 343.8 352.3 358.6 365.1 378.6 381.3 378.4 371.9 369.8 368.9 369.1 370.9 371.9 370.9 368.3 367.2 363.8 347.8 321.8 318.8 334.6 331.5 327.7 330.0 330.2 330.8 331.7 331.2 326.0 301.6 278.5 285.8 302.1 312.0 319.5 324.2 323.8 331.7 332.5 331.3 330.8 330.2 331.5 331.5 330.2 319.5 294.0 314.9 343.2 359.7 373.3 396.4 420.6 448.5 460.3 455.5 454.2 444.1 437.2 431.4 423.8 422.3 426.0 438.7 448.2 461.3 458.4 435.4 402.1 402.1 402.1 402.1 402.1 402.1 402.1 402.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 88.42734375 - }, - { - "text": "这 就 是 我 的 信 念 SP", - "ph_seq": "zh e j iu sh ir w o d e x in n ian SP", - "note_seq": "A4 A4 B4 B4 D4 D4 D4 D4 C#4 C#4 C#4 C#4 F#4 F#4 rest", - "note_dur_seq": "0.121875 0.121875 0.065625 0.095312 0.092188 0.133594 0.053906 0.132812 0.054688 0.092969 0.094531 0.133594 0.053906 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.121875 0.121875 0.065625 0.095312 0.092188 0.133594 0.053906 0.132812 0.054688 0.092969 0.094531 0.133594 0.053906 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "432.9 432.7 432.4 431.9 431.4 431.2 430.7 430.7 431.4 433.7 435.7 437.7 438.7 439.5 440.5 441.5 442.3 444.3 444.9 440.0 424.3 407.7 416.7 426.5 433.3 445.3 468.6 489.1 498.2 498.2 499.9 497.3 491.0 471.6 455.0 488.2 484.0 478.7 491.0 491.6 473.4 432.3 382.7 326.6 297.2 289.1 288.1 288.8 289.0 290.1 292.1 294.5 296.2 297.1 296.4 295.2 293.5 294.2 296.6 297.6 296.4 296.4 296.2 295.7 295.2 294.9 293.7 292.5 292.1 292.0 291.8 290.1 270.3 248.2 246.3 266.1 278.5 281.2 279.9 279.3 278.5 271.5 253.3 237.8 239.6 249.2 256.8 260.6 268.1 274.8 279.9 284.2 285.5 285.8 282.0 279.3 277.3 275.7 275.3 276.7 278.5 279.3 279.6 275.0 273.5 280.3 296.5 318.8 354.3 370.4 382.2 385.0 385.7 381.7 369.8 358.0 351.9 349.2 351.0 361.3 376.5 388.4 392.0 375.8 361.5 361.5 361.5 361.5 361.5 361.5 361.5 361.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 89.690625 - }, - { - "text": "不 信 你 看 朋 友 圈 SP", - "ph_seq": "b u x in n i k an p eng y ou q van SP", - "note_seq": "A3 A3 B3 B3 B3 B3 B3 B3 A3 A3 A3 A3 D4 D4 rest", - "note_dur_seq": "0.067187 0.084375 0.103125 0.107031 0.080469 0.106250 0.081250 0.135156 0.052344 0.152344 0.035156 0.116406 0.071094 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.067187 0.084375 0.103125 0.107031 0.080469 0.106250 0.081250 0.135156 0.052344 0.152344 0.035156 0.116406 0.071094 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "223.6 216.3 213.2 213.4 215.1 219.5 223.8 225.3 222.2 221.1 220.4 218.0 210.6 188.4 193.9 
208.1 215.1 219.2 226.2 233.4 239.2 246.7 252.4 257.7 256.8 251.8 248.8 246.8 246.2 246.1 246.5 247.2 247.1 245.4 243.5 241.4 241.0 241.4 245.7 250.1 249.1 247.8 247.4 247.4 247.5 247.7 247.9 246.7 234.7 212.5 203.1 205.4 216.7 230.7 248.9 259.2 254.6 248.5 245.2 245.2 245.7 246.2 246.7 247.2 245.9 236.3 218.4 206.3 203.4 207.6 215.8 221.7 222.0 221.7 221.7 222.4 222.6 223.3 222.7 220.0 219.0 217.3 216.0 214.2 213.2 212.9 215.1 217.6 220.4 221.1 220.6 221.0 221.4 221.8 221.5 221.1 211.2 189.4 190.2 200.3 216.8 235.0 257.2 284.2 299.5 302.4 302.1 298.6 293.8 287.5 283.5 282.2 283.5 289.0 294.3 300.0 302.8 298.1 289.1 280.1 280.1 280.1 280.1 280.1 280.1 280.1 280.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 91.2453125 - }, - { - "text": "我 的 自 拍 照 片 SP", - "ph_seq": "w o d e z i0 p ai zh ao p ian SP", - "note_seq": "D4 D4 G4 G4 F#4 F#4 E4 E4 D4 D4 D4 D4 rest", - "note_dur_seq": "0.110937 0.132812 0.054688 0.123438 0.064062 0.108594 0.078906 0.128125 0.059375 0.108594 0.078906 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.110937 0.132812 0.054688 0.123438 0.064062 0.108594 0.078906 0.128125 0.059375 0.108594 0.078906 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "288.1 288.0 287.8 287.6 287.5 287.3 287.3 287.3 287.5 287.6 287.8 290.3 292.1 293.0 293.8 293.8 293.0 292.3 292.8 293.7 295.4 298.8 295.0 281.0 284.6 326.4 365.7 385.7 394.3 396.8 397.2 395.6 388.2 367.0 332.1 368.7 400.2 398.8 392.7 383.6 377.0 369.8 360.6 359.9 372.6 373.0 372.4 371.1 370.6 370.4 370.4 367.4 346.6 310.2 288.1 308.1 324.3 331.0 338.7 332.5 329.6 331.2 330.8 330.6 329.8 331.0 329.2 327.4 316.7 286.6 267.4 263.9 266.3 268.7 282.8 290.5 293.5 296.2 296.4 295.9 296.0 295.5 286.8 260.7 248.1 281.1 290.0 284.5 281.1 279.1 279.8 286.0 289.1 291.3 293.5 295.0 294.2 292.8 290.0 285.3 284.3 286.1 293.0 298.1 302.1 303.5 301.6 297.1 292.5 286.0 280.7 280.1 282.7 288.6 298.1 302.8 305.1 304.5 301.7 292.8 283.5 275.1 272.4 270.7 270.7 270.7 270.7 270.7 270.7 270.7 270.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 92.7015625 - }, - { - "text": "踩 着 独 特 的 酷 酷 厚 底 靴 SP", - "ph_seq": "c ai zh e d u t e d e k u k u h ou d i x ve SP", - "note_seq": "A3 A3 D4 D4 D4 D4 D4 D4 D4 D4 C#4 C#4 D4 D4 C#4 C#4 B3 B3 A3 A3 rest", - "note_dur_seq": "0.084375 0.128125 0.059375 0.132812 0.054688 0.102344 0.085156 0.132812 0.054688 0.106250 0.081250 0.106250 0.081250 0.112500 0.075000 0.141406 0.051562 0.064062 0.117969 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.084375 0.128125 0.059375 0.132812 0.054688 0.102344 0.085156 0.132812 0.054688 0.106250 0.081250 0.106250 0.081250 0.112500 0.075000 0.141406 0.051562 0.064062 0.117969 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "228.7 218.4 213.1 210.8 209.9 209.8 210.3 211.3 211.8 214.7 217.3 218.1 219.7 221.1 222.7 221.8 218.6 208.0 195.4 209.9 222.3 228.3 239.1 256.4 277.0 284.8 287.6 289.0 291.1 292.5 292.8 293.2 293.3 293.3 290.0 268.4 245.5 243.7 255.5 281.1 295.2 301.7 296.4 295.5 295.5 294.5 293.3 292.6 286.1 271.6 263.7 280.7 290.5 293.7 295.2 292.5 288.3 292.8 297.8 296.4 295.4 294.9 293.3 293.8 292.8 292.6 292.5 290.6 280.9 257.0 241.4 260.7 287.3 296.9 296.0 295.0 295.0 295.0 290.1 268.2 255.7 261.2 269.3 272.3 273.1 274.3 274.5 272.8 274.3 279.6 278.5 278.1 278.6 278.8 
278.1 277.5 275.7 263.3 267.7 275.6 278.5 277.0 278.1 285.7 294.2 298.3 297.6 296.7 295.9 295.0 294.2 293.2 292.6 292.1 292.8 287.8 264.4 244.8 243.8 249.3 261.5 277.8 281.5 282.5 280.4 278.6 278.6 277.5 277.3 277.2 276.5 275.3 272.4 258.8 232.4 218.3 228.1 254.0 252.4 250.1 250.2 245.5 229.7 214.9 230.4 246.7 243.3 236.6 234.0 229.5 226.3 222.3 218.4 212.1 211.5 222.9 227.8 228.9 226.4 222.6 215.1 208.5 205.3 206.3 209.5 214.5 220.6 224.8 227.5 227.2 224.8 218.9 212.9 205.7 200.0 198.8 201.9 206.2 213.9 218.4 221.5 223.6 222.8 219.0 215.1 213.7 213.7 213.7 213.7 213.7 213.7 213.7 213.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 94.228125 - }, - { - "text": "登 山 背 包 配 毛 绒 钥 匙 链 SP", - "ph_seq": "d eng sh an b ei b ao p ei m ao r ong y ao sh ir l ian SP", - "note_seq": "A3 A3 B3 B3 B3 B3 B3 B3 B3 B3 A3 A3 B3 B3 A3 A3 G3 G3 A3 A3 rest", - "note_dur_seq": "0.082031 0.095312 0.092188 0.132031 0.055469 0.132031 0.055469 0.108594 0.078906 0.110156 0.077344 0.136719 0.050781 0.152344 0.035156 0.095312 0.092188 0.126562 0.060937 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.082031 0.095312 0.092188 0.132031 0.055469 0.132031 0.055469 0.108594 0.078906 0.110156 0.077344 0.136719 0.050781 0.152344 0.035156 0.095312 0.092188 0.126562 0.060937 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "219.5 218.2 217.0 216.0 216.0 218.0 220.8 223.5 222.6 219.1 218.0 217.6 217.9 219.2 220.8 219.4 211.0 190.3 184.3 192.6 207.1 217.3 228.2 242.6 250.5 250.0 247.8 246.7 247.2 247.2 246.9 246.8 247.4 246.4 245.4 243.4 239.5 230.1 231.7 244.7 252.7 251.5 249.7 248.4 246.8 246.2 247.1 247.1 247.2 247.2 240.3 218.6 205.9 203.1 215.2 234.3 252.1 251.7 249.2 247.2 246.4 247.1 247.4 247.4 247.2 245.5 222.4 207.2 221.0 226.4 236.5 242.7 253.7 257.9 255.7 251.0 249.1 248.2 248.5 247.8 246.8 242.7 236.2 232.0 228.3 223.1 217.6 216.2 218.2 219.7 218.0 218.2 218.2 218.2 218.7 219.1 220.8 222.2 222.7 222.8 221.5 221.9 214.7 217.9 235.5 249.8 252.9 254.3 252.7 249.8 247.5 246.9 246.2 244.2 243.5 242.4 241.4 238.0 235.0 231.8 228.2 221.7 219.5 219.2 219.4 221.7 221.5 214.5 199.8 197.1 198.6 199.0 200.3 200.4 199.9 201.5 201.6 198.5 197.5 196.3 196.1 196.5 196.9 196.8 196.3 194.8 193.9 193.6 193.4 193.0 193.1 194.7 198.2 206.8 216.6 221.1 224.5 225.9 225.5 223.3 219.9 214.6 210.7 209.5 209.8 211.5 214.9 220.1 222.2 223.5 223.5 221.3 218.4 216.6 215.7 216.5 216.7 217.0 216.3 215.8 213.0 210.2 208.4 207.7 207.4 204.7 204.7 204.7 204.7 204.7 204.7 204.7 204.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 97.23046875 - }, - { - "text": "戴 上 可 爱 的 头 饰 扎 起 两 边 马 尾 辫 SP", - "ph_seq": "d ai sh ang k e ai d e t ou sh ir zh a q i l iang b ian m a w ei b ian SP", - "note_seq": "A3 A3 D4 D4 D4 D4 D4 D4 D4 C#4 C#4 D4 D4 C#4 C#4 B3 B3 A#3 A#3 D4 D4 D4 D4 C#4 C#4 D4 D4 rest", - "note_dur_seq": "0.114062 0.095312 0.092188 0.106250 0.081250 0.187500 0.132812 0.054688 0.102344 0.085156 0.095312 0.092188 0.123438 0.064062 0.104688 0.082812 0.126562 0.060937 0.264062 0.110937 0.250000 0.125000 0.367188 0.007812 0.260937 0.114062 0.750000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.095312 0.092188 0.106250 0.081250 0.187500 0.132812 0.054688 0.102344 0.085156 0.095312 0.092188 0.123438 0.064062 0.104688 0.082812 0.126562 0.060937 
0.264062 0.110937 0.250000 0.125000 0.367188 0.007812 0.260937 0.114062 0.750000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "220.0 220.0 220.0 220.3 220.5 220.9 221.4 221.8 224.5 227.6 230.1 220.6 211.8 211.5 214.0 216.2 219.7 220.8 209.1 188.7 187.1 194.8 211.5 224.9 244.0 265.6 288.9 301.7 300.5 299.5 297.4 294.2 293.5 292.8 291.5 287.5 273.1 251.3 262.5 283.2 286.8 290.6 296.0 296.7 296.7 297.6 297.1 296.2 295.4 293.5 293.3 292.3 292.6 293.3 293.2 293.8 295.0 295.5 296.2 295.9 294.2 294.2 293.2 293.7 294.3 293.2 292.6 293.7 293.7 292.6 286.6 263.1 249.7 253.7 275.3 299.0 299.3 295.9 295.0 294.3 293.7 293.8 293.8 294.3 283.5 250.1 238.1 237.2 240.7 253.8 272.4 288.1 288.3 280.6 278.3 277.3 278.0 277.3 273.4 251.5 241.2 254.8 261.6 265.6 274.7 281.8 287.6 293.7 302.4 301.6 298.3 294.2 292.6 291.8 292.5 293.2 294.5 295.7 275.0 245.8 234.0 234.5 243.2 263.6 280.2 279.9 279.6 279.1 277.8 275.6 269.9 243.5 233.8 238.3 241.0 247.1 248.3 250.3 254.0 257.3 253.0 251.1 250.0 249.7 249.5 249.8 250.2 250.1 249.1 247.1 243.7 235.2 230.8 224.3 220.7 218.3 220.9 228.2 230.9 234.2 235.4 235.0 233.6 230.9 227.2 224.5 222.6 221.9 224.2 229.3 233.8 236.6 238.1 238.7 237.2 236.1 234.7 234.2 233.1 232.0 231.1 230.8 229.5 222.8 210.6 204.5 213.1 244.6 274.6 287.6 289.8 293.3 296.0 295.9 294.9 293.7 293.0 292.3 292.1 292.6 293.0 295.1 298.4 304.5 312.4 319.3 326.4 331.0 333.8 335.4 333.5 330.8 329.2 326.4 324.5 325.5 329.4 333.8 336.4 337.4 339.9 345.6 343.0 336.9 331.0 322.7 314.2 304.9 299.7 295.7 294.2 293.7 294.0 294.3 294.7 294.9 294.5 293.8 293.0 292.6 293.2 293.7 293.7 293.0 290.0 286.3 279.6 275.0 272.9 271.3 273.5 277.2 281.1 280.6 278.6 278.0 277.2 277.3 276.9 276.9 278.0 277.7 276.9 276.2 276.1 275.1 275.9 277.9 281.3 286.8 292.0 295.7 298.1 298.3 297.8 289.6 270.4 269.8 275.1 278.9 282.5 282.4 284.3 287.0 290.5 296.2 297.4 299.3 301.4 300.9 298.5 297.4 293.3 289.3 287.1 286.1 286.6 287.8 290.5 294.2 298.8 300.4 301.6 300.5 298.5 293.3 289.1 286.1 286.1 287.6 290.0 294.2 296.6 299.8 300.0 298.5 295.4 292.0 290.1 289.3 290.6 291.8 295.4 297.6 297.8 297.2 295.4 293.3 291.1 288.0 287.1 289.3 295.7 300.5 303.7 303.5 300.5 294.2 288.5 282.7 280.2 282.5 289.0 295.4 303.1 305.6 304.4 299.7 289.1 277.0 277.0 277.0 277.0 277.0 277.0 277.0 277.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 100.1984375 - }, - { - "text": "微 风 清 爽 拂 面 SP", - "ph_seq": "w ei f eng q ing sh uang f u m ian SP", - "note_seq": "B3 B3 D4 D4 E4 E4 D4 D4 B3 B3 D4 D4 rest", - "note_dur_seq": "0.084375 0.107031 0.080469 0.104688 0.082812 0.095312 0.092188 0.107031 0.080469 0.110156 0.077344 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.084375 0.107031 0.080469 0.104688 0.082812 0.095312 0.092188 0.107031 0.080469 0.110156 0.077344 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.5 245.5 244.2 243.3 243.1 243.7 244.4 245.1 245.4 243.8 242.8 242.8 244.2 246.1 249.8 250.5 248.1 232.3 219.9 220.0 229.8 247.0 266.2 287.5 304.5 301.1 299.1 294.0 292.1 293.3 294.5 294.3 292.8 285.6 267.0 260.3 279.1 295.6 306.5 319.9 330.6 336.4 334.2 332.1 331.2 329.4 329.8 330.0 321.7 293.5 282.8 291.5 295.2 295.9 297.5 297.7 297.1 294.5 292.3 292.8 292.3 292.0 292.6 294.7 295.5 294.0 278.1 256.2 239.2 238.0 238.5 242.6 244.8 247.2 247.4 245.9 246.2 248.4 248.9 248.5 248.7 247.8 243.5 244.0 245.0 246.6 251.4 260.5 272.0 288.5 296.7 300.9 301.6 298.8 293.7 290.0 285.5 283.2 282.5 284.8 
287.6 290.0 291.6 293.2 296.9 298.8 301.2 301.1 298.1 294.5 289.5 278.9 270.4 270.4 270.4 270.4 270.4 270.4 270.4 270.4", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 104.540625 - }, - { - "text": "若 有 言 语 相 贬 SP", - "ph_seq": "r uo y ou y En y v x iang b ian SP", - "note_seq": "B3 B3 D4 D4 F#4 F#4 E4 E4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.103906 0.152344 0.035156 0.152344 0.035156 0.179688 0.007812 0.092969 0.094531 0.132031 0.055469 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.103906 0.152344 0.035156 0.152344 0.035156 0.179688 0.007812 0.092969 0.094531 0.132031 0.055469 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 243.7 244.5 245.4 245.9 246.8 247.7 248.4 248.8 249.1 248.5 246.7 246.2 246.5 247.5 247.9 248.4 248.4 246.9 242.7 230.5 222.6 226.3 242.2 260.2 280.0 288.0 291.1 291.0 291.0 291.5 292.0 293.3 294.9 296.2 295.0 288.1 283.7 284.4 291.6 302.0 324.4 344.6 358.6 366.6 371.9 370.4 368.3 367.4 367.0 368.7 369.8 372.1 370.9 369.2 366.1 356.3 346.8 337.3 330.2 326.2 325.8 329.4 331.5 329.6 316.6 286.5 269.6 268.7 274.3 279.8 283.9 290.1 290.9 287.1 293.0 292.6 293.7 293.8 294.2 293.5 294.3 294.9 294.3 293.7 291.0 281.5 261.3 253.2 279.4 317.8 333.8 335.6 337.9 338.9 338.1 333.3 328.7 324.9 320.1 317.8 317.1 317.5 319.1 322.5 327.7 334.6 338.9 342.0 341.8 336.9 330.8 326.4 312.7 303.0 303.0 303.0 303.0 303.0 303.0 303.0 303.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 106.02109375 - }, - { - "text": "只 做 一 种 嘉 勉 SP", - "ph_seq": "zh ir z uo y i zh ong j ia m ian SP", - "note_seq": "B3 B3 D4 D4 F#4 F#4 E4 E4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.121875 0.123438 0.064062 0.179688 0.007812 0.128125 0.059375 0.121875 0.065625 0.110156 0.077344 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.121875 0.123438 0.064062 0.179688 0.007812 0.128125 0.059375 0.121875 0.065625 0.110156 0.077344 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 246.9 246.9 248.1 247.4 246.2 245.8 245.8 246.2 246.9 247.9 248.7 245.4 246.8 246.9 246.7 246.4 247.7 245.8 232.1 208.6 211.3 235.7 248.2 258.3 267.8 277.5 283.8 288.6 290.1 290.1 290.1 292.0 292.5 292.0 292.6 293.5 296.0 298.3 300.5 303.9 314.1 329.1 348.7 360.3 366.6 370.6 372.6 372.1 371.7 371.3 370.4 366.4 347.2 328.7 342.6 363.9 362.2 354.3 340.1 330.0 330.6 329.1 327.9 327.9 329.1 330.0 329.8 328.3 324.2 315.3 292.6 267.8 258.5 261.7 274.8 283.7 293.3 295.0 295.7 295.0 295.2 294.7 294.2 293.5 289.5 289.0 289.0 289.6 294.3 300.9 312.6 328.3 336.0 338.7 337.5 334.8 328.3 325.5 321.0 319.5 319.3 321.4 325.1 326.8 328.1 329.4 330.8 333.8 336.7 337.1 335.4 333.5 329.4 322.8 305.1 301.9 301.9 301.9 301.9 301.9 301.9 301.9 301.9", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 107.503125 - }, - { - "text": "没 有 谁 比 我 更 甜 SP", - "ph_seq": "m ei y ou sh ei b i w o g eng t ian SP", - "note_seq": "B3 B3 D4 D4 F4 F4 F4 F4 E4 E4 D4 D4 E4 E4 rest", - "note_dur_seq": "0.157812 0.152344 0.035156 0.095312 0.092188 0.264062 0.110937 0.266406 0.108594 0.255469 0.119531 0.250000 0.125000 1.031250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.157812 0.152344 0.035156 0.095312 0.092188 0.264062 0.110937 0.266406 0.108594 0.255469 0.119531 0.250000 
0.125000 1.031250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "246.9 246.9 246.9 246.9 246.9 246.9 248.4 246.2 243.8 241.6 240.3 241.0 242.1 243.4 244.2 247.5 248.1 247.5 246.7 246.8 246.4 245.7 244.7 244.2 245.2 247.2 246.5 248.0 255.1 267.4 279.8 290.0 294.5 296.4 296.2 292.3 272.4 260.7 264.2 272.4 281.2 290.0 298.6 310.1 319.6 331.0 340.2 344.4 351.7 355.7 354.9 351.3 348.8 346.8 345.2 344.6 345.0 345.6 346.6 347.8 347.2 347.2 349.8 351.5 351.9 351.3 350.4 349.6 346.4 342.2 330.8 302.1 274.5 282.5 296.7 301.4 307.4 312.9 319.5 329.1 330.8 332.1 330.6 327.2 325.5 325.8 326.8 331.9 335.6 341.8 345.8 349.4 349.8 350.2 350.2 349.8 349.8 350.0 350.2 350.6 350.2 349.4 348.2 346.8 345.0 342.0 336.6 333.6 334.3 338.2 344.3 345.6 346.2 344.8 341.3 337.7 335.8 334.4 332.3 330.2 329.2 329.1 329.2 329.6 329.8 330.0 329.8 329.6 330.4 329.4 329.4 330.0 331.3 332.9 333.7 329.4 302.4 275.4 255.7 245.0 249.8 253.6 262.2 281.7 290.0 292.0 292.0 293.7 294.2 294.7 294.9 293.8 292.5 290.3 288.6 289.0 291.1 295.2 299.7 307.6 315.7 324.0 329.6 333.7 335.2 334.2 320.4 301.2 306.7 316.9 318.8 317.3 315.8 314.7 315.3 320.8 333.1 337.5 338.7 339.5 338.9 337.5 336.6 333.5 331.5 329.6 328.3 327.5 327.2 327.7 327.9 327.7 327.5 327.4 327.4 327.5 327.7 329.6 331.5 332.9 334.8 334.6 334.4 333.5 331.3 328.7 324.2 321.5 320.8 321.5 324.5 326.8 332.3 337.5 340.1 339.9 338.5 335.0 328.7 322.5 320.1 319.3 320.4 324.5 327.5 333.5 337.7 340.1 338.7 334.4 330.0 322.7 320.2 320.6 323.4 327.9 333.7 337.1 339.7 338.5 336.6 332.1 324.7 321.4 320.2 324.0 328.3 334.6 336.6 336.4 333.3 330.2 329.6 328.9 325.1 325.5 326.6 326.0 334.6 340.7 355.5 380.2 388.6 408.4 415.5 415.5 415.5 415.5 415.5 415.5 415.5 415.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 108.9671875 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.375000 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 583.3 577.2 576.6 581.9 597.3 597.6 586.0 583.9 586.7 586.7 586.3 584.6 586.0 589.7 591.8 591.8 595.2 595.5 594.2 590.4 589.7 588.7 590.4 592.1 592.8 594.8 593.8 588.3 586.7 578.6 574.2 573.3 573.9 577.6 583.6 593.1 598.6 600.4 589.7 563.1 563.1 563.1 563.1 563.1 563.1 563.1 563.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 112.3296875 - }, - { - "text": "生 而 可 爱 眼 睛 明 亮 不 管 生 在 什 么 时 代 都 一 样 SP", - "ph_seq": "sh eng er k e ai y En j ing m ing l iang b u g uan sh eng z ai sh en m e sh ir d ai d ou y i y ang SP", - "note_seq": "D4 D4 E4 E4 E4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.117969 0.187500 0.106250 0.081250 0.187500 0.152344 0.035156 0.121875 0.065625 0.220312 0.154688 0.253125 0.121875 0.264062 0.110937 0.127344 0.060156 0.095312 0.092188 0.430469 0.132031 0.095312 0.092188 0.110156 0.077344 0.095312 0.092188 0.150000 0.037500 0.132812 0.054688 0.367188 0.007812 0.303906 0.071094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.117969 0.187500 0.106250 0.081250 0.187500 0.152344 0.035156 0.121875 0.065625 0.220312 0.154688 0.253125 0.121875 0.264062 0.110937 0.127344 0.060156 0.095312 0.092188 0.430469 0.132031 
0.095312 0.092188 0.110156 0.077344 0.095312 0.092188 0.150000 0.037500 0.132812 0.054688 0.367188 0.007812 0.303906 0.071094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "517.5 470.8 408.6 353.9 328.5 307.2 297.8 293.7 290.3 286.8 284.8 283.2 281.9 283.3 286.5 291.5 292.5 292.8 291.8 290.6 289.8 295.9 297.8 299.3 301.0 302.7 314.6 330.2 335.0 335.4 331.9 330.4 328.9 328.7 325.1 313.7 293.3 284.6 289.1 303.0 310.6 320.4 333.8 342.4 342.8 342.2 340.3 336.0 332.7 328.5 327.4 326.4 326.6 327.7 328.1 327.5 324.6 319.9 312.6 302.3 291.6 288.3 287.6 288.5 291.8 294.0 295.0 296.2 295.9 295.2 292.8 281.1 275.3 282.8 301.4 317.7 324.2 328.5 332.1 332.7 331.7 330.0 327.5 325.5 325.3 325.3 315.5 289.6 273.8 275.7 302.7 348.4 368.1 378.0 379.7 380.6 379.3 371.3 363.2 355.5 353.1 355.7 360.7 367.0 376.2 378.9 377.6 373.4 364.3 358.4 357.8 360.9 365.3 372.4 377.6 377.1 375.6 371.7 371.9 376.0 385.7 401.9 429.9 457.2 493.6 525.4 539.8 543.6 538.3 530.9 517.5 513.4 514.6 519.9 532.4 541.4 550.9 558.2 561.1 560.8 558.9 553.4 550.9 550.2 554.0 559.8 561.1 560.8 560.5 559.8 554.7 549.3 552.8 552.5 556.4 562.1 571.6 579.9 580.6 586.3 588.3 588.7 589.4 586.3 584.3 582.6 581.6 582.9 590.0 585.0 589.7 591.4 593.1 592.8 589.7 584.3 582.3 581.9 583.6 585.6 586.7 583.6 577.2 557.9 532.1 481.6 444.1 418.4 433.1 439.2 438.0 437.7 437.7 438.7 442.0 444.9 445.1 443.3 436.0 419.9 430.4 440.0 441.0 417.3 384.0 370.2 364.1 367.9 368.9 370.2 370.6 367.2 352.9 333.3 330.6 340.9 347.4 341.0 322.0 301.1 287.9 268.4 263.0 269.4 278.3 289.8 297.9 301.7 302.1 300.0 294.9 290.8 283.7 277.5 274.2 275.0 277.7 285.0 294.9 302.1 307.0 310.2 308.4 302.1 294.9 280.4 270.4 267.3 269.3 274.3 287.6 295.7 304.0 308.3 308.4 306.5 301.1 291.0 286.0 283.5 282.4 284.2 284.6 267.4 248.2 242.7 248.9 256.4 268.8 292.1 299.8 296.7 294.5 293.3 292.1 290.1 281.9 255.5 248.8 256.7 274.6 288.3 302.3 314.7 328.7 339.5 334.4 333.5 333.1 331.5 330.8 329.6 328.7 328.5 329.4 328.3 324.2 321.4 319.7 318.6 319.0 326.0 335.2 333.1 331.2 330.8 329.6 325.8 300.0 278.6 278.8 281.4 282.7 280.9 279.6 279.1 280.5 279.3 284.0 294.5 293.8 292.0 291.8 292.3 293.0 293.7 294.3 295.0 296.0 295.9 277.7 256.9 252.5 288.3 324.9 326.4 327.4 328.9 329.6 329.8 330.0 331.0 331.5 319.5 290.8 274.6 278.8 283.8 307.4 335.6 365.1 371.5 370.2 367.4 366.2 363.0 362.2 361.8 364.3 367.7 368.7 370.6 373.4 373.9 372.6 371.3 369.8 368.3 366.0 366.0 366.6 367.4 367.4 366.6 365.5 361.1 358.2 350.0 342.2 340.5 341.9 353.3 374.9 393.1 399.5 403.5 403.9 402.3 396.8 390.9 387.7 387.0 387.5 389.5 391.8 392.9 393.4 393.6 393.4 392.7 392.2 392.2 392.0 392.9 393.8 394.0 393.4 391.1 389.1 381.9 380.4 386.3 402.0 416.6 432.1 441.5 448.7 451.3 451.3 445.9 440.3 433.2 426.5 423.3 423.8 427.0 432.4 445.4 445.4 443.3 445.1 442.3 439.7 439.7 436.0 441.5 441.0 443.1 439.0 433.4 441.0 444.1 449.8 469.7 516.0 536.4 543.9 543.9 543.9 543.9 543.9 543.9 543.9 543.9", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 112.94453125 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.375000 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 582.9 575.2 568.0 563.4 557.6 539.5 580.6 576.2 585.6 586.7 584.6 581.3 580.2 578.2 575.2 570.0 566.3 564.4 567.0 573.6 581.3 584.6 583.9 595.5 601.1 601.8 600.4 594.2 585.3 578.6 574.2 
573.3 573.9 577.6 583.6 593.1 598.6 600.4 589.7 563.1 563.1 563.1 563.1 563.1 563.1 563.1 563.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 118.3296875 - }, - { - "text": "要 像 夏 天 一 般 晴 朗 引 人 注 目 不 要 为 我 太 疯 狂 SP", - "ph_seq": "y ao x iang x ia t ian y i b an q ing l ang y in r en zh u m u b u y ao w ei w o t ai f eng k uang SP", - "note_seq": "D4 D4 D4 D4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.072656 0.092969 0.094531 0.092969 0.094531 0.102344 0.085156 0.179688 0.007812 0.132031 0.055469 0.208594 0.166406 0.253125 0.121875 0.367188 0.007812 0.136719 0.050781 0.128125 0.059375 0.404687 0.157812 0.132031 0.055469 0.152344 0.035156 0.133594 0.053906 0.133594 0.053906 0.102344 0.085156 0.214062 0.160938 0.212500 0.162500 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.072656 0.092969 0.094531 0.092969 0.094531 0.102344 0.085156 0.179688 0.007812 0.132031 0.055469 0.208594 0.166406 0.253125 0.121875 0.367188 0.007812 0.136719 0.050781 0.128125 0.059375 0.404687 0.157812 0.132031 0.055469 0.152344 0.035156 0.133594 0.053906 0.133594 0.053906 0.102344 0.085156 0.214062 0.160938 0.212500 0.162500 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "319.0 300.9 293.3 289.8 289.1 289.3 290.5 292.0 296.4 295.0 293.8 294.2 294.0 292.3 280.1 244.0 229.3 231.1 241.3 252.4 263.1 272.3 284.0 287.3 297.1 296.6 295.2 294.5 295.0 294.7 293.7 289.6 274.3 250.7 246.4 257.4 273.1 294.4 312.6 330.8 333.3 331.5 329.6 329.2 328.5 328.7 328.9 328.5 313.3 281.1 272.6 280.4 281.1 281.8 282.9 285.3 288.3 289.8 290.5 291.8 293.3 294.2 294.5 294.5 294.7 294.9 294.9 293.3 288.4 286.7 294.3 310.8 324.7 328.7 331.0 331.9 331.9 330.4 328.9 327.9 327.7 328.3 308.1 280.7 271.6 275.3 295.5 332.2 364.5 374.7 376.9 377.1 374.7 370.0 368.3 366.2 364.7 366.0 367.0 368.1 371.5 370.0 370.4 373.2 373.9 374.3 371.9 358.6 352.7 362.4 372.1 376.5 383.0 401.4 416.3 430.4 445.3 459.0 472.6 483.8 497.9 520.8 531.2 530.9 528.1 527.2 527.8 528.1 533.0 539.5 543.6 551.8 556.0 558.9 558.5 559.5 556.9 554.7 554.0 555.6 557.3 557.6 557.3 557.6 558.5 557.6 551.5 546.4 545.2 546.7 551.0 554.9 574.2 597.6 600.4 600.4 597.3 590.4 583.6 579.6 576.2 575.9 576.9 580.6 584.3 586.3 588.0 590.0 590.4 589.4 588.3 587.3 587.7 588.7 591.1 591.1 588.7 585.0 576.9 559.2 552.1 532.8 500.0 458.9 433.3 419.4 423.5 426.7 432.9 441.0 446.7 447.4 446.1 441.0 435.7 430.9 423.3 409.7 401.8 391.8 382.5 377.6 372.1 369.4 368.5 367.9 366.2 368.3 370.2 370.6 366.4 348.0 305.8 271.3 261.3 253.4 261.5 271.0 280.1 286.0 290.1 297.9 301.9 302.1 299.1 291.8 285.6 282.7 281.1 283.5 286.3 292.0 295.2 297.6 300.5 300.4 297.9 297.1 294.0 292.6 291.6 291.0 290.1 288.3 288.6 289.3 291.3 295.5 299.7 302.4 303.7 303.7 301.7 297.8 295.2 291.6 286.6 282.2 278.8 281.1 283.7 284.0 284.2 284.2 284.5 288.8 293.7 292.6 293.8 293.8 293.2 291.8 292.3 293.2 293.8 295.9 284.2 256.7 241.1 247.6 273.5 304.2 324.0 331.2 333.1 332.1 331.0 329.6 328.3 327.4 327.7 328.1 327.7 324.9 327.5 330.4 335.8 336.4 334.4 332.5 332.7 331.5 329.2 328.3 329.2 328.7 328.9 329.2 328.7 322.1 316.1 313.4 311.4 307.7 301.7 296.9 293.8 293.2 293.5 293.5 294.3 295.7 295.7 295.2 292.6 286.8 282.9 280.5 278.2 276.5 275.3 275.7 276.2 277.7 278.0 278.8 279.8 278.3 271.6 263.0 267.6 271.5 270.6 269.6 271.0 272.4 278.6 293.3 298.1 
297.2 297.6 295.9 294.0 292.8 290.8 290.1 290.6 291.3 292.6 293.8 294.0 294.3 294.9 294.5 294.0 293.5 292.3 286.0 271.6 262.4 264.2 271.6 277.3 280.9 284.3 289.3 295.0 301.9 306.8 311.1 308.6 302.4 300.2 295.7 293.7 295.0 296.7 299.8 306.5 311.5 319.1 326.4 331.2 332.9 333.1 331.9 330.6 329.6 329.8 330.0 329.1 326.2 312.0 295.9 289.6 291.1 292.0 291.9 292.9 289.7 287.9 290.1 290.3 291.6 298.6 302.1 303.8 302.1 297.8 290.5 284.2 279.8 278.6 281.9 288.0 293.7 300.9 303.7 303.1 299.1 292.1 283.2 275.1 271.0 273.8 278.8 287.5 298.1 308.4 311.7 310.1 297.2 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 118.98984375 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 582.9 575.2 568.0 563.4 568.6 576.6 568.3 563.7 568.6 574.6 567.3 566.0 569.3 571.6 571.6 575.9 582.3 586.7 587.7 596.6 600.0 593.8 594.8 600.0 600.0 596.9 593.8 591.4 586.3 578.2 559.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 124.3296875 - }, - { - "text": "嫉 妒 的 言 语 很 夸 张 但 我 会 打 磨 自 己 接 受 表 扬 SP", - "ph_seq": "j i d u d e y En y v h en k ua zh ang d an w o h ui d a m o z i0 j i j ie sh ou b iao y ang SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.133594 0.132812 0.054688 0.132812 0.054688 0.152344 0.035156 0.179688 0.007812 0.112500 0.075000 0.212500 0.162500 0.256250 0.118750 0.266406 0.108594 0.133594 0.053906 0.112500 0.075000 0.450781 0.111719 0.110156 0.077344 0.132031 0.055469 0.121875 0.065625 0.136719 0.050781 0.095312 0.092188 0.264062 0.110937 0.303906 0.071094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.133594 0.132812 0.054688 0.132812 0.054688 0.152344 0.035156 0.179688 0.007812 0.112500 0.075000 0.212500 0.162500 0.256250 0.118750 0.266406 0.108594 0.133594 0.053906 0.112500 0.075000 0.450781 0.111719 0.110156 0.077344 0.132031 0.055469 0.121875 0.065625 0.136719 0.050781 0.095312 0.092188 0.264062 0.110937 0.303906 0.071094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.7 293.7 285.0 283.3 281.9 282.0 282.4 282.7 283.5 284.5 285.0 289.3 295.4 295.9 294.9 292.1 290.8 291.0 291.6 293.5 295.9 295.5 274.8 245.7 247.1 273.0 314.4 332.5 333.8 333.1 332.7 331.0 330.0 329.1 327.4 327.9 329.1 325.3 303.3 266.4 265.0 285.0 316.6 338.5 339.1 336.0 335.4 333.5 331.3 330.6 329.8 329.2 328.1 321.5 316.0 308.7 303.0 298.2 295.6 290.3 289.3 289.5 291.6 292.6 293.0 293.3 293.7 294.9 296.7 298.1 297.4 296.2 293.6 298.7 308.7 323.0 328.5 330.0 330.6 330.0 329.6 330.4 330.6 327.9 312.4 278.1 265.1 266.8 275.2 294.4 323.8 361.3 376.2 378.6 375.6 371.5 364.7 360.3 354.9 353.1 357.6 363.2 368.7 373.9 376.5 376.7 374.5 368.3 364.5 361.5 361.5 364.7 371.5 377.8 373.2 343.8 321.5 323.4 338.5 347.2 363.3 393.4 433.6 468.3 499.3 518.1 520.5 514.6 513.4 508.4 506.0 507.2 512.8 524.5 536.7 548.9 555.3 558.9 559.8 559.2 557.3 555.0 552.8 554.4 554.7 552.4 545.2 518.7 499.0 528.7 
538.3 534.9 522.5 509.3 489.8 485.9 519.3 542.3 549.6 549.6 548.6 542.0 538.6 535.2 539.8 544.8 553.4 565.7 580.9 589.0 594.2 593.1 589.7 584.6 582.6 582.6 583.9 585.0 588.0 590.4 590.0 587.3 584.3 573.9 557.6 505.6 459.4 440.5 444.9 447.2 443.6 442.3 441.3 443.6 446.1 446.4 445.1 440.3 436.2 428.5 415.3 410.4 396.5 386.6 374.5 370.0 371.1 370.6 372.4 371.9 367.9 348.4 325.1 318.4 311.1 302.4 297.4 294.6 287.6 278.2 275.6 280.4 288.6 290.8 294.0 296.7 297.4 297.8 296.6 294.3 291.1 287.8 286.3 286.6 288.8 292.8 299.1 304.9 307.6 306.5 301.6 293.7 284.6 279.3 279.3 281.1 287.5 294.3 299.5 302.4 304.2 303.7 300.4 295.4 290.0 288.1 288.0 289.5 293.7 296.9 298.5 297.1 285.5 260.9 241.6 235.8 230.0 238.7 264.1 290.5 295.7 294.7 293.5 293.0 292.5 293.0 294.5 295.0 292.5 286.8 285.3 288.1 294.8 301.8 318.2 330.4 333.1 333.8 331.3 330.4 330.4 329.4 328.3 325.7 300.4 261.5 246.1 252.6 268.8 286.1 304.0 315.3 327.4 333.3 331.2 330.6 330.2 329.8 328.9 324.9 301.9 274.0 271.3 274.8 278.0 275.2 276.2 277.5 285.6 288.1 291.6 293.5 294.9 295.4 295.7 295.4 294.9 294.5 270.7 248.7 247.4 260.3 287.9 314.9 327.0 332.3 332.3 332.9 324.7 303.0 287.8 290.3 296.2 305.3 313.1 320.6 327.2 337.7 348.8 357.2 373.4 377.6 374.7 374.5 373.0 368.3 366.4 365.5 365.7 367.2 368.5 369.4 371.5 371.3 370.6 369.8 368.9 368.3 369.1 371.1 372.1 374.5 377.1 375.6 354.5 319.5 294.7 294.5 316.4 339.6 359.6 384.6 395.4 396.6 396.1 394.7 394.0 393.8 393.6 393.4 392.7 391.8 390.9 390.0 388.8 389.1 390.6 391.8 392.7 393.1 392.9 392.0 390.9 389.3 387.3 381.5 372.1 364.3 359.5 350.6 347.6 350.2 360.6 377.8 392.7 402.3 407.7 412.2 412.4 411.5 408.2 405.1 403.7 403.9 407.7 415.1 428.0 435.7 447.4 453.7 455.3 451.6 445.1 438.2 428.0 421.3 421.6 425.3 431.2 444.6 453.7 471.3 499.9 524.2 538.6 547.7 550.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 124.92890625 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 583.3 575.6 568.0 567.7 585.6 593.1 600.0 599.3 602.4 597.3 585.6 581.3 578.2 576.6 577.2 580.2 584.6 591.1 592.4 597.6 604.9 599.3 599.3 600.0 600.0 597.3 593.5 591.4 586.7 578.2 559.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5 550.5", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 130.3296875 - }, - { - "text": "尽 管 感 叹 不 必 说 谎 沉 浸 其 中 为 我 着 迷 不 牵 强 作 为 主 角 就 应 该 闪 亮 登 场 SP", - "ph_seq": "j in g uan g an t an b u b i sh uo h uang ch en j in q i zh ong w ei w o zh ao m i b u q ian q iang z uo w ei zh u j ve j iu y ing g ai sh an l iang d eng ch ang SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.133594 0.127344 0.060156 0.145313 0.050781 0.110937 0.076563 0.135156 0.055469 0.138281 0.037500 0.191406 0.183594 0.254688 0.120313 0.271094 0.103906 0.122656 0.064844 0.139844 0.047656 0.440625 0.121875 0.133594 0.053906 0.133594 0.053906 0.128125 0.059375 0.110156 0.077344 0.132031 0.055469 0.208594 0.166406 0.208594 0.166406 0.313281 0.061719 0.133594 0.053906 0.128125 0.059375 0.428906 0.133594 
0.121875 0.065625 0.179688 0.007812 0.127344 0.060156 0.095312 0.092188 0.126562 0.060937 0.266406 0.108594 0.221094 0.153906 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.133594 0.127344 0.060156 0.145313 0.050781 0.110937 0.076563 0.135156 0.055469 0.138281 0.037500 0.191406 0.183594 0.254688 0.120313 0.271094 0.103906 0.122656 0.064844 0.139844 0.047656 0.440625 0.121875 0.133594 0.053906 0.133594 0.053906 0.128125 0.059375 0.110156 0.077344 0.132031 0.055469 0.208594 0.166406 0.208594 0.166406 0.313281 0.061719 0.133594 0.053906 0.128125 0.059375 0.428906 0.133594 0.121875 0.065625 0.179688 0.007812 0.127344 0.060156 0.095312 0.092188 0.126562 0.060937 0.266406 0.108594 0.221094 0.153906 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.7 293.7 285.5 283.5 282.4 282.5 282.8 283.2 283.7 284.3 284.8 290.5 292.3 291.6 291.6 291.6 292.1 292.6 293.2 294.2 294.3 294.0 291.6 280.6 277.7 291.6 317.3 329.8 329.4 328.9 329.4 329.1 328.7 328.7 330.6 330.0 328.1 328.3 327.9 315.1 291.0 286.6 313.1 338.1 336.9 332.5 330.8 329.2 328.1 326.8 327.5 327.9 326.6 324.5 312.4 289.7 271.4 262.0 284.0 293.2 290.0 288.1 290.1 292.5 293.7 294.7 295.4 294.2 293.0 291.8 291.3 289.4 276.0 270.7 286.5 321.2 328.5 330.2 331.2 331.2 330.4 329.4 328.7 327.7 324.0 301.1 278.9 264.5 267.4 284.5 322.5 363.6 376.2 376.0 375.8 373.9 372.6 368.9 365.7 365.3 365.3 365.7 367.2 368.7 370.2 371.3 373.2 375.4 375.2 362.4 338.9 328.7 343.8 363.8 364.7 367.2 375.6 389.7 406.5 424.5 443.1 461.7 482.8 508.1 531.2 551.5 557.9 553.7 551.2 545.8 545.5 544.5 546.1 548.0 549.6 553.7 555.6 557.3 559.2 560.5 560.2 556.9 550.5 538.3 506.9 505.7 517.5 520.2 525.1 527.2 528.4 534.2 539.0 543.1 544.7 547.0 567.7 589.7 597.3 599.3 596.9 591.4 585.3 582.3 581.3 581.6 582.3 582.6 585.3 586.3 587.3 590.7 590.7 590.4 590.0 589.0 586.7 578.6 535.8 557.3 580.9 582.6 588.3 579.6 553.7 518.0 488.1 452.1 432.9 440.5 444.3 444.1 442.0 439.7 439.5 441.3 442.8 441.5 437.5 429.0 407.7 401.2 386.6 375.6 371.7 369.1 368.7 368.3 369.8 371.1 370.2 366.0 351.5 344.2 350.0 348.6 339.9 327.8 318.2 305.1 288.1 284.8 287.1 288.8 290.0 291.0 291.1 291.3 292.6 294.3 295.5 296.2 295.9 292.6 288.5 284.8 283.2 283.2 285.5 288.3 293.5 301.6 305.8 306.5 303.0 296.9 287.8 278.6 272.7 271.9 276.4 285.8 294.2 298.3 304.7 305.1 303.3 297.1 286.6 279.1 275.9 277.2 280.1 283.2 284.3 285.5 287.6 290.5 292.8 295.9 297.1 295.2 294.7 293.8 294.0 295.0 294.3 293.2 291.1 287.0 284.8 289.2 303.6 316.2 327.7 333.8 332.3 331.0 330.4 328.3 325.7 326.4 328.3 329.4 330.4 332.3 331.7 330.4 329.2 330.6 330.6 329.6 330.4 330.4 329.8 330.4 331.2 331.3 330.8 328.5 312.4 293.0 277.5 270.4 264.1 264.8 272.7 288.6 294.9 297.8 295.9 294.2 294.5 295.0 295.0 294.7 293.8 292.1 284.2 277.7 274.8 271.2 268.7 271.8 276.1 276.9 276.9 277.2 278.1 278.6 278.6 278.3 275.6 259.1 232.3 220.6 212.0 212.7 236.1 278.9 297.9 296.2 296.0 296.6 296.0 293.8 292.8 292.0 290.1 290.1 290.8 291.8 293.7 294.9 295.9 296.7 294.9 289.0 273.4 254.3 252.6 261.3 268.8 272.9 279.6 284.0 292.1 298.6 306.4 312.2 317.2 322.2 337.3 343.4 340.7 339.3 336.7 331.7 330.6 328.9 326.8 326.2 326.4 326.4 326.8 328.7 331.3 333.1 334.8 334.2 331.3 322.1 299.0 284.5 289.1 291.0 291.3 290.8 291.1 292.1 293.2 295.6 296.3 299.8 300.9 297.1 294.2 293.2 293.5 292.6 291.6 291.0 290.8 291.3 291.8 293.3 293.7 293.8 293.0 292.0 291.0 291.5 292.5 293.3 293.8 296.7 299.1 300.4 300.9 
291.0 271.8 257.7 265.3 288.8 318.8 354.4 398.8 423.5 433.9 438.5 438.2 439.7 440.5 440.5 440.0 439.5 439.0 437.2 439.0 434.3 425.7 410.8 387.9 372.6 364.7 364.9 367.9 369.1 369.8 368.9 363.8 347.8 326.4 317.3 309.3 297.2 283.9 269.1 260.6 263.1 274.5 284.6 293.2 297.9 300.2 298.8 294.7 286.3 281.4 277.2 277.2 281.1 289.3 297.4 301.6 304.7 305.8 304.5 298.6 293.7 285.6 279.4 277.0 277.3 281.2 286.6 291.3 294.7 297.4 300.4 300.5 299.5 296.9 293.8 288.6 284.6 277.2 269.8 268.4 270.5 275.4 281.2 285.1 286.0 287.3 289.6 291.8 293.5 297.6 295.0 293.2 293.5 294.3 294.9 294.2 291.6 278.8 256.8 264.5 274.2 293.5 301.9 316.2 329.4 334.8 333.8 331.9 331.2 329.8 329.4 329.1 329.1 329.1 329.6 329.4 328.1 326.4 325.7 327.9 334.8 335.6 333.8 331.9 330.4 327.9 325.5 325.1 326.4 327.5 328.9 327.9 326.0 312.1 288.1 278.3 292.0 298.5 295.7 294.5 292.5 292.1 294.0 293.3 286.3 256.2 242.7 251.1 267.3 276.2 284.3 291.9 297.3 282.7 277.0 275.6 275.6 277.7 278.0 278.3 278.5 278.0 277.3 275.4 272.4 271.7 272.2 273.5 278.0 287.5 294.3 296.4 297.8 296.9 294.7 290.8 287.8 286.0 285.1 286.0 287.0 290.5 294.9 296.9 298.3 297.6 296.4 293.7 292.8 293.8 294.0 294.0 293.7 292.8 292.1 291.1 286.5 267.1 258.9 270.0 295.9 327.0 335.0 335.0 335.4 335.8 333.7 332.3 328.5 325.8 324.7 323.8 325.5 327.7 328.9 331.3 331.2 330.4 330.0 330.0 329.2 328.1 318.2 295.7 285.5 285.6 291.3 290.1 286.3 285.6 304.0 335.4 375.4 376.0 356.6 336.2 306.1 303.1 299.0 294.9 293.3 291.6 292.6 288.1 287.6 287.8 288.6 291.5 295.0 297.9 299.5 300.5 298.1 293.3 286.5 283.0 282.4 284.2 288.0 295.2 301.6 305.3 304.9 299.3 285.3 269.0 269.0 269.0 269.0 269.0 269.0 269.0 269.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 130.92890625 - }, - { - "text": "酷 爱 标 新 立 异 SP", - "ph_seq": "k u ai b iao x in l i y i SP", - "note_seq": "D4 D4 D4 D4 D4 D4 D4 E4 E4 F#4 F#4 rest", - "note_dur_seq": "0.166406 0.187500 0.132031 0.055469 0.092969 0.094531 0.126562 0.060937 0.179688 0.007812 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.166406 0.187500 0.132031 0.055469 0.092969 0.094531 0.126562 0.060937 0.179688 0.007812 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "278.1 284.8 288.1 287.6 287.3 286.1 284.6 283.2 281.9 280.6 279.8 279.4 280.1 281.1 282.2 283.8 291.5 295.0 297.1 297.4 296.2 293.8 293.5 293.2 292.3 292.3 293.5 294.7 295.4 297.1 298.5 299.1 297.6 296.0 294.3 293.0 293.5 293.7 292.8 294.0 294.5 283.5 259.8 244.8 247.9 254.5 272.9 291.0 298.6 298.1 296.4 295.7 295.9 293.7 286.6 266.2 258.6 270.9 288.5 286.5 283.2 280.7 279.6 281.5 296.6 298.8 296.7 294.9 292.8 291.6 291.8 292.3 293.7 294.5 293.3 292.8 293.3 296.8 302.0 315.7 326.4 328.3 330.8 331.3 332.1 331.5 330.4 329.8 329.6 329.8 329.8 330.0 326.7 325.9 330.7 340.7 356.8 370.4 378.9 382.8 383.3 380.8 376.9 368.5 362.0 357.0 357.2 359.0 362.2 368.7 371.7 374.3 376.5 377.8 376.9 373.2 370.6 365.1 360.9 353.9 353.9 353.9 353.9 353.9 353.9 353.9 353.9", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 139.14609375 - }, - { - "text": "看 似 独 行 特 立 SP", - "ph_seq": "k an s i0 d u x ing t e l i SP", - "note_seq": "D4 D4 D4 D4 D4 D4 D4 D4 E4 E4 F#4 F#4 rest", - "note_dur_seq": "0.166406 0.097656 0.089844 0.132812 0.054688 0.092969 0.094531 0.102344 0.085156 0.126562 0.060937 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.166406 0.097656 0.089844 
0.132812 0.054688 0.092969 0.094531 0.102344 0.085156 0.126562 0.060937 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.3 293.3 293.8 294.3 295.0 295.7 296.2 296.6 296.7 295.7 291.6 290.5 291.8 293.3 296.6 296.7 282.5 257.7 254.0 259.1 269.4 275.1 284.2 288.0 288.8 290.8 293.3 295.2 295.0 294.3 294.0 294.0 294.2 294.0 292.8 291.0 272.3 247.1 246.2 273.5 294.5 302.4 297.9 296.7 296.0 294.0 285.6 256.2 248.2 268.8 282.2 284.6 286.6 290.1 291.6 296.2 294.9 296.7 296.9 297.1 296.2 294.2 293.3 293.3 293.5 293.7 292.3 282.0 256.1 246.0 249.0 275.5 305.8 325.5 327.7 329.6 331.0 331.2 330.8 330.2 329.6 329.1 328.9 327.9 325.8 327.7 329.7 336.1 348.1 367.9 379.5 384.6 385.7 382.2 377.3 368.5 359.9 355.1 354.5 357.0 364.7 374.5 383.3 385.3 378.9 378.9 378.9 378.9 378.9 378.9 378.9 378.9", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 140.64609375 - }, - { - "text": "绝 不 埋 没 人 群 里 SP", - "ph_seq": "j ve b u m ai m o r en q vn l i SP", - "note_seq": "D4 D4 D4 D4 E4 E4 E4 E4 E4 E4 D4 D4 D4 D4 rest", - "note_dur_seq": "0.133594 0.132031 0.055469 0.110156 0.077344 0.220312 0.154688 0.271094 0.103906 0.208594 0.166406 0.253125 0.121875 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.133594 0.132031 0.055469 0.110156 0.077344 0.220312 0.154688 0.271094 0.103906 0.208594 0.166406 0.253125 0.121875 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "347.0 343.4 335.4 322.1 312.6 299.5 292.1 288.8 287.3 286.5 286.3 286.6 287.0 299.3 302.3 299.1 295.2 293.7 292.3 293.2 292.8 290.0 267.9 239.4 227.6 240.9 256.2 279.1 297.6 298.1 297.1 295.0 294.3 294.2 294.7 294.7 288.6 286.0 286.6 288.1 289.1 292.1 295.8 302.1 315.3 331.5 338.7 340.1 339.5 336.6 331.0 324.9 321.0 319.3 321.2 324.3 328.9 333.7 335.6 335.0 334.0 330.0 327.9 326.2 325.1 325.5 326.6 326.6 324.9 324.5 324.5 324.3 323.0 321.7 321.7 322.3 322.7 330.8 333.7 333.5 332.1 330.8 329.2 328.7 329.2 329.2 328.9 329.4 330.8 331.7 333.4 338.4 345.9 356.2 365.5 370.4 372.8 373.2 371.9 368.9 368.7 368.5 366.8 366.8 368.9 370.4 376.5 378.1 379.2 375.4 370.4 363.8 359.0 349.4 343.2 334.2 329.6 326.8 325.1 325.1 326.2 327.4 328.7 330.8 332.9 334.0 334.4 333.7 326.2 309.3 308.6 316.6 317.7 316.0 312.4 310.1 307.9 306.1 304.0 300.4 296.9 298.1 296.6 294.7 293.0 292.1 291.1 290.1 289.3 288.6 288.6 288.6 288.8 289.0 289.3 291.1 293.0 294.9 295.5 296.0 296.6 297.2 297.6 296.6 295.0 295.0 294.7 293.5 289.6 286.3 285.3 284.6 285.1 289.1 296.2 299.8 301.4 301.7 301.2 298.5 294.3 289.5 281.7 279.1 278.8 280.1 285.0 292.8 298.6 302.6 304.0 300.9 296.2 288.8 281.4 276.1 274.8 276.5 284.6 289.6 297.6 301.2 303.3 300.7 295.9 286.3 286.3 286.3 286.3 286.3 286.3 286.3 286.3", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 141.99140625 - }, - { - "text": "不 想 改 变 自 己 SP", - "ph_seq": "b u x iang g ai b ian z i0 j i SP", - "note_seq": "D4 D4 D4 D4 D4 D4 D4 D4 E4 E4 F#4 F#4 rest", - "note_dur_seq": "0.114062 0.092969 0.094531 0.127344 0.060156 0.132031 0.055469 0.123438 0.064062 0.121875 0.065625 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.092969 0.094531 0.127344 0.060156 0.132031 0.055469 0.123438 0.064062 0.121875 0.065625 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.7 293.5 293.2 292.8 292.6 
292.8 293.5 294.5 295.4 299.7 299.1 298.3 297.2 294.5 288.8 265.1 255.9 281.2 286.6 282.4 281.7 284.8 286.6 288.3 289.8 293.3 298.8 296.7 296.2 295.2 293.0 292.3 293.0 293.8 294.5 295.0 292.6 284.2 275.9 259.8 276.9 300.4 298.5 296.9 294.3 293.5 293.2 292.6 293.5 294.9 295.9 278.1 254.8 253.6 260.7 274.5 297.9 309.0 299.5 294.7 294.0 293.5 292.8 292.3 293.2 294.3 293.8 292.8 279.9 255.5 259.6 282.6 307.2 319.5 325.5 330.8 331.2 331.5 331.3 330.6 326.0 297.2 272.7 289.8 303.0 310.2 316.1 324.9 338.5 355.9 374.7 382.2 381.1 377.1 372.4 369.6 367.7 365.7 367.4 370.4 372.8 375.2 374.5 373.4 369.4 365.3 362.8 363.6 367.0 371.5 376.2 375.2 369.6 359.7 359.7 359.7 359.7 359.7 359.7 359.7 359.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 145.1984375 - }, - { - "text": "无 需 说 服 自 己 即 便 被 他 人 妄 议 SP", - "ph_seq": "w u x v sh uo f u z i0 j i j i b ian b ei t a r en w ang y i SP", - "note_seq": "D4 D4 D4 D4 D4 D4 D4 D4 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 E4 E4 D4 D4 D4 D4 rest", - "note_dur_seq": "0.007812 0.092969 0.094531 0.095312 0.092188 0.107031 0.080469 0.123438 0.064062 0.121875 0.065625 0.241406 0.133594 0.132031 0.055469 0.132031 0.055469 0.203906 0.171094 0.271094 0.103906 0.266406 0.108594 0.367188 0.007812 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.007812 0.092969 0.094531 0.095312 0.092188 0.107031 0.080469 0.123438 0.064062 0.121875 0.065625 0.241406 0.133594 0.132031 0.055469 0.132031 0.055469 0.203906 0.171094 0.271094 0.103906 0.266406 0.108594 0.367188 0.007812 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "291.1 291.5 299.8 299.8 298.6 297.6 295.7 286.5 275.3 256.5 262.8 273.8 277.7 278.9 283.5 289.1 293.5 294.5 294.3 298.8 297.1 295.4 294.7 293.0 279.6 248.9 242.0 256.2 269.0 277.3 283.3 288.0 298.6 302.4 298.5 296.7 297.9 297.9 296.2 294.7 294.2 293.5 291.3 276.2 257.9 251.5 247.9 241.4 254.9 279.8 300.4 299.7 296.4 294.5 295.2 296.0 296.6 296.0 293.7 279.8 258.2 248.2 258.3 281.8 312.6 353.4 390.4 424.0 438.0 443.8 446.1 446.4 439.2 420.6 383.3 405.6 433.4 439.5 429.8 411.7 392.1 372.5 353.1 359.7 371.7 373.2 374.9 375.2 376.9 376.0 374.5 372.6 370.0 367.9 365.7 363.6 364.5 365.3 367.2 370.2 374.3 377.8 380.2 380.8 373.4 361.5 354.1 349.6 342.8 329.1 312.9 300.0 295.0 292.0 291.0 296.7 300.4 297.9 295.4 292.8 290.8 290.5 291.5 292.8 294.9 294.5 272.1 248.4 242.8 257.4 290.3 299.1 295.9 294.3 294.5 294.2 293.5 292.6 292.0 291.8 293.3 293.7 288.1 273.2 269.5 288.8 315.8 328.5 330.6 332.1 331.7 329.2 326.6 326.0 324.2 323.2 324.2 325.1 327.5 329.4 330.4 331.3 332.9 332.7 332.3 330.8 329.2 328.3 325.3 319.5 300.5 297.6 302.3 309.0 311.8 308.4 306.3 303.8 302.1 310.8 327.0 331.0 332.9 333.1 332.7 331.9 330.6 329.6 330.2 329.6 328.5 327.9 329.6 334.1 341.0 351.0 360.7 368.9 373.4 376.5 376.7 374.5 372.6 370.4 369.1 368.1 364.7 358.8 356.5 357.2 356.2 362.2 361.5 357.2 350.8 346.0 342.6 335.8 332.7 330.8 330.6 330.4 330.4 330.0 328.9 327.9 328.9 330.0 331.0 332.9 331.7 332.3 332.7 331.9 330.2 325.8 321.7 314.2 303.3 297.4 290.2 285.9 286.9 290.6 294.0 296.9 298.5 298.8 297.8 296.6 295.2 293.7 292.5 291.6 291.8 292.5 292.8 292.8 293.7 295.0 295.0 295.2 295.4 295.4 293.5 292.6 291.6 291.3 291.0 290.8 290.6 290.1 289.1 288.3 287.8 289.3 295.2 298.1 299.1 298.6 297.9 297.6 295.4 293.5 292.1 287.3 284.2 282.7 284.3 286.3 291.5 295.4 301.1 303.3 303.0 298.1 292.0 283.2 278.8 277.5 280.4 289.5 297.8 303.1 306.0 
303.3 294.3 286.0 283.2 283.2 283.2 283.2 283.2 283.2 283.2 283.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 146.8046875 - }, - { - "text": "想 要 和 自 己 并 肩 作 战 不 被 他 人 定 义 SP", - "ph_seq": "x iang y ao h e z i0 j i b ing j ian z uo zh an b u b ei t a r en d ing y i SP", - "note_seq": "D4 D4 D4 D4 A4 A4 A4 A4 A4 A4 A4 A4 F#4 F#4 E4 E4 E4 E4 D4 D4 D4 D4 D4 D4 E4 E4 D4 D4 D4 D4 rest", - "note_dur_seq": "0.192969 0.152344 0.035156 0.112500 0.075000 0.123438 0.064062 0.121875 0.065625 0.132031 0.055469 0.121875 0.065625 0.123438 0.064062 0.128125 0.059375 0.132031 0.055469 0.132031 0.055469 0.102344 0.085156 0.136719 0.050781 0.132812 0.054688 0.179688 0.007812 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.192969 0.152344 0.035156 0.112500 0.075000 0.123438 0.064062 0.121875 0.065625 0.132031 0.055469 0.121875 0.065625 0.123438 0.064062 0.128125 0.059375 0.132031 0.055469 0.132031 0.055469 0.102344 0.085156 0.136719 0.050781 0.132812 0.054688 0.179688 0.007812 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.5 293.0 292.6 292.3 292.1 292.0 291.6 291.3 295.0 295.7 295.2 295.2 293.5 291.6 292.5 292.6 292.1 291.6 292.0 293.2 295.5 295.7 295.7 296.7 294.7 292.1 291.1 290.8 292.1 295.7 297.9 300.2 300.7 287.1 283.7 302.8 320.2 345.5 378.4 411.6 428.5 443.6 446.1 448.2 446.7 443.6 440.0 429.0 413.2 380.6 407.0 442.0 440.8 436.2 429.5 418.0 416.0 435.7 444.3 445.4 445.9 445.1 444.1 441.3 424.8 386.4 378.0 411.2 431.4 430.0 427.7 425.0 439.5 451.1 449.0 446.1 442.8 441.0 439.2 439.7 441.0 442.3 446.9 438.2 386.2 375.2 402.8 430.7 441.8 447.4 447.7 447.7 446.7 445.4 443.6 439.7 437.2 432.2 424.5 399.8 372.5 380.5 379.9 380.9 376.7 377.3 372.8 367.4 367.4 368.9 369.4 371.5 374.3 373.6 371.3 361.8 330.4 302.1 299.4 303.7 319.3 329.2 330.6 331.3 330.8 331.2 332.5 332.7 332.1 330.8 320.4 289.8 260.0 257.6 268.5 288.0 315.1 331.2 333.5 331.7 330.4 331.0 331.2 331.2 330.2 330.2 327.4 323.4 319.5 310.2 287.0 267.3 270.0 291.5 295.2 293.5 294.0 295.4 295.7 295.2 294.7 294.0 293.8 284.3 257.6 238.4 231.9 245.1 279.3 295.4 296.7 295.4 294.0 292.8 293.3 294.7 295.2 294.5 289.1 270.1 255.8 264.4 277.7 287.1 294.3 296.6 293.3 293.7 294.2 294.7 294.5 294.0 293.3 293.5 294.2 295.5 296.2 295.7 294.0 297.0 309.7 324.5 331.5 333.5 332.9 332.1 330.0 328.9 328.3 327.7 327.4 326.6 324.5 318.8 296.7 270.6 260.3 278.5 294.7 296.0 297.2 295.9 297.1 298.3 297.4 296.6 295.9 290.6 284.2 280.7 280.1 280.2 281.9 289.1 295.0 297.6 297.9 299.3 300.7 299.5 297.4 296.0 291.3 287.8 282.7 280.4 279.1 281.1 285.5 293.5 299.7 305.3 305.3 303.3 298.8 291.5 282.0 275.3 275.3 275.3 275.3 275.3 275.3 275.3 275.3", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 150.93203125 - }, - { - "text": "不 事 事 妥 协 学 会 拒 绝 感 受 在 第 一 SP", - "ph_seq": "b u sh ir sh ir t uo x ie x ve h ui j v j ve g an sh ou z ai d i y i SP", - "note_seq": "D4 D4 D4 D4 D4 D4 D4 D4 D4 D4 F#4 F#4 E4 E4 E4 E4 D4 D4 D4 D4 D4 D4 E4 E4 D4 D4 D4 D4 rest", - "note_dur_seq": "0.114062 0.095312 0.092188 0.095312 0.092188 0.102344 0.085156 0.092969 0.094531 0.092969 0.094531 0.112500 0.075000 0.121875 0.065625 0.121875 0.065625 0.127344 0.060156 0.095312 0.092188 0.123438 0.064062 0.132812 0.054688 0.179688 0.007812 0.281250 0.100000", - "is_slur_seq": "0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.095312 0.092188 0.095312 0.092188 0.102344 0.085156 0.092969 0.094531 0.092969 0.094531 0.112500 0.075000 0.121875 0.065625 0.121875 0.065625 0.127344 0.060156 0.095312 0.092188 0.123438 0.064062 0.132812 0.054688 0.179688 0.007812 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.7 292.6 292.0 291.0 290.1 290.0 290.1 290.3 290.5 293.2 296.6 296.7 296.2 292.8 273.7 255.1 269.1 282.5 287.6 291.0 293.5 298.6 301.2 306.3 309.3 310.4 301.7 296.7 295.7 293.2 281.9 263.1 255.9 265.4 271.6 275.4 278.3 284.5 288.0 291.5 295.0 301.2 302.3 299.0 296.2 295.0 294.3 292.8 293.0 293.8 292.3 269.0 248.7 236.1 244.8 258.5 283.8 301.2 301.9 297.4 296.2 295.0 294.9 293.5 279.6 257.9 256.1 267.0 272.7 277.7 285.1 291.6 299.0 302.6 301.6 298.5 295.7 295.5 295.4 293.8 287.3 268.5 271.6 281.1 285.3 290.6 294.3 300.5 306.5 316.9 325.5 338.1 360.7 370.6 371.9 372.4 371.7 368.3 345.8 320.8 320.2 324.3 329.7 336.4 343.4 350.0 339.7 335.2 331.3 329.2 328.5 328.3 330.0 331.2 330.6 326.0 299.3 287.8 294.5 301.4 309.5 320.2 338.3 338.3 336.7 333.1 331.0 330.4 330.4 330.4 329.8 324.0 295.4 274.8 276.7 281.7 291.8 304.9 309.2 303.0 296.4 294.0 293.3 292.6 292.6 294.3 295.2 295.7 296.4 289.1 258.6 239.5 244.1 272.9 294.7 299.7 297.1 295.7 294.7 294.5 294.5 294.5 292.1 274.5 246.7 244.0 257.0 267.1 278.0 291.0 301.6 296.9 295.5 294.5 293.7 293.7 293.8 294.0 294.9 296.4 296.4 270.9 258.3 271.7 291.1 303.1 321.4 336.9 336.7 335.4 331.5 329.8 327.0 327.4 328.9 329.8 324.7 300.4 270.7 271.4 276.6 286.6 290.5 296.7 295.2 294.5 295.4 295.0 295.0 295.0 295.2 295.4 295.7 295.4 294.9 294.2 293.5 293.0 292.1 291.1 292.3 294.0 295.7 297.4 298.8 297.9 297.1 294.3 291.6 288.1 284.5 283.2 285.3 287.3 291.6 296.0 300.5 303.0 302.4 300.7 295.7 289.0 283.0 283.0 283.0 283.0 283.0 283.0 283.0 283.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 154.1984375 - }, - { - "text": "不 想 忍 受 无 理 要 求 有 什 么 好 稀 奇 SP", - "ph_seq": "b u x iang r en sh ou w u l i y ao q iu y ou sh en m e h ao x i q i SP", - "note_seq": "D4 D4 A4 A4 A4 A4 A4 A4 A4 A4 F#4 F#4 E4 E4 E4 E4 D4 D4 A4 A4 A4 A4 B4 B4 A4 A4 A4 A4 rest", - "note_dur_seq": "0.114062 0.092969 0.094531 0.136719 0.050781 0.095312 0.092188 0.179688 0.007812 0.126562 0.060937 0.152344 0.035156 0.104688 0.082812 0.152344 0.035156 0.095312 0.092188 0.110156 0.077344 0.112500 0.075000 0.092969 0.094531 0.104688 0.082812 0.281250 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.092969 0.094531 0.136719 0.050781 0.095312 0.092188 0.179688 0.007812 0.126562 0.060937 0.152344 0.035156 0.104688 0.082812 0.152344 0.035156 0.095312 0.092188 0.110156 0.077344 0.112500 0.075000 0.092969 0.094531 0.104688 0.082812 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 293.5 292.0 290.6 288.5 286.5 286.8 290.0 293.8 296.7 298.6 294.5 294.5 294.0 293.0 290.6 268.4 252.1 281.7 292.1 292.1 300.5 317.6 331.5 344.5 353.2 379.5 403.9 414.1 421.6 426.2 428.7 430.9 435.2 438.2 440.5 441.5 439.7 438.2 439.7 444.6 447.7 447.7 445.6 444.1 440.0 439.2 442.0 441.5 439.2 409.6 371.3 371.9 391.5 402.6 411.0 422.3 431.7 436.0 448.5 452.1 448.5 443.8 441.0 439.0 437.0 437.0 437.5 440.3 440.0 439.7 443.1 444.3 445.9 446.4 445.4 441.8 440.5 441.0 442.0 443.3 444.1 442.0 439.7 431.4 420.4 402.3 382.0 368.2 354.7 364.3 372.4 373.2 373.0 
373.4 374.1 375.4 376.0 373.4 371.3 366.0 357.8 343.2 338.2 336.4 338.5 332.9 328.9 329.2 329.1 330.4 330.6 332.1 333.7 325.7 288.5 267.1 266.7 274.3 289.0 298.8 317.8 329.1 333.7 340.9 341.3 336.6 334.2 333.1 331.7 330.2 329.6 326.4 322.7 318.4 307.4 299.9 296.0 293.4 292.1 290.8 291.5 293.8 297.2 299.3 292.5 273.4 270.5 277.0 288.1 306.3 336.6 363.9 393.8 418.3 435.4 438.2 443.3 443.8 444.9 441.5 439.7 440.8 441.8 440.5 434.7 427.5 423.1 423.5 423.8 428.5 441.8 443.8 444.1 443.8 443.3 442.8 441.5 439.5 435.4 410.1 386.6 391.5 407.7 436.0 458.1 477.9 498.2 506.9 507.2 501.9 500.2 496.5 493.0 480.7 468.9 477.9 489.1 497.3 498.5 487.3 471.6 458.4 436.2 434.2 443.6 447.9 444.9 431.2 397.0 372.4 373.2 397.5 412.9 422.8 427.7 427.5 426.5 422.6 430.0 450.8 459.2 458.7 452.4 443.8 432.4 421.8 418.9 422.8 429.5 442.3 452.1 456.8 456.8 449.8 438.7 426.7 418.7 416.7 424.0 441.0 454.2 460.3 456.6 456.6 456.6 456.6 456.6 456.6 456.6 456.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 157.1984375 - }, - { - "text": "别 说 我 不 讲 道 理 SP", - "ph_seq": "b ie sh uo w o b u j iang d ao l i SP", - "note_seq": "D4 D4 D4 D4 E4 E4 F#4 F#4 F#4 F#4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.081250 0.095312 0.092188 0.639062 0.110937 0.448437 0.114062 0.121875 0.065625 0.682031 0.067969 0.625000 0.125000 2.343750 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.081250 0.095312 0.092188 0.639062 0.110937 0.448437 0.114062 0.121875 0.065625 0.682031 0.067969 0.625000 0.125000 2.343750 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "294.0 291.0 289.0 288.0 288.0 289.8 292.5 294.9 293.2 292.0 293.7 285.0 272.4 264.8 260.9 261.9 263.6 265.7 271.3 274.5 277.3 279.6 281.9 286.0 288.6 300.0 303.3 304.5 303.1 301.6 300.7 297.4 295.2 294.0 294.9 295.7 295.9 298.1 297.4 295.4 292.3 289.6 286.6 285.5 286.0 290.0 293.8 299.5 301.4 301.7 299.3 295.5 291.1 285.8 284.3 286.0 290.1 293.3 299.0 301.9 303.0 302.1 299.1 293.7 289.3 288.1 288.1 289.3 292.0 294.3 297.2 297.9 298.1 297.9 297.4 296.9 296.7 293.8 288.5 281.4 277.8 274.6 270.9 270.7 275.3 288.0 303.6 317.5 328.5 334.4 336.6 336.2 335.4 330.2 327.4 322.3 321.5 322.5 325.5 329.6 336.4 339.7 339.9 338.9 334.8 330.8 325.5 320.8 321.2 323.6 326.2 329.6 330.8 334.4 336.7 336.2 334.8 332.3 330.0 327.7 327.5 327.9 328.1 328.5 328.9 329.6 330.6 331.3 332.7 330.0 299.5 280.1 284.8 307.9 330.0 344.0 362.2 377.3 377.1 373.9 372.6 371.7 367.9 343.8 306.0 304.4 314.0 318.8 323.6 328.1 333.1 338.3 342.2 352.1 378.9 384.4 385.3 384.4 380.8 375.6 370.4 368.3 367.2 366.0 366.6 367.2 368.3 369.6 370.9 370.9 370.6 370.2 368.9 368.7 368.9 369.4 370.0 370.6 371.3 371.5 371.5 371.5 371.3 370.9 370.0 368.7 368.7 369.1 367.9 367.0 367.7 367.7 367.2 368.5 368.5 368.1 368.7 369.4 368.5 370.0 370.2 370.6 370.9 371.1 371.3 370.4 370.4 370.9 370.6 368.7 368.1 366.2 359.3 330.8 304.5 298.7 300.8 338.9 376.5 387.9 390.2 392.9 393.6 393.4 393.1 391.1 389.3 387.3 387.5 387.5 386.2 388.6 388.8 390.0 391.5 391.1 392.0 392.7 393.1 393.8 393.6 392.9 392.2 391.5 391.1 390.4 390.2 390.2 390.2 390.2 390.4 390.4 390.6 390.6 391.1 391.5 392.0 392.7 393.4 393.8 394.0 393.6 393.8 392.7 392.4 392.2 392.0 391.8 391.3 391.1 390.0 392.0 392.2 390.2 387.3 383.0 378.9 376.2 370.8 367.5 366.6 374.9 390.2 400.2 404.9 408.6 411.5 412.2 411.7 411.2 409.4 408.2 408.4 409.1 409.6 414.3 418.0 424.0 431.2 435.4 439.0 441.0 442.5 442.3 441.5 440.5 439.5 438.5 438.0 438.5 439.0 439.5 440.3 440.8 441.3 441.3 
440.8 440.3 439.7 439.5 439.7 440.3 439.5 439.0 438.5 439.2 440.3 440.8 441.5 441.5 441.3 440.8 440.3 439.7 439.5 439.5 439.5 439.7 440.5 440.8 440.3 439.7 440.3 441.3 442.0 441.3 440.0 439.0 439.0 439.7 439.2 438.5 438.0 438.5 439.2 439.7 440.5 440.5 440.3 440.0 439.7 439.5 439.2 439.5 439.5 440.0 441.0 441.3 440.8 439.7 439.0 438.0 437.2 437.0 437.5 437.2 436.5 436.0 437.0 438.5 437.5 437.7 437.7 438.0 439.0 439.5 439.2 438.7 438.5 438.7 439.2 439.5 439.5 438.5 439.2 440.0 439.2 438.0 437.7 437.5 437.5 437.2 437.2 438.0 438.7 439.2 440.0 439.2 439.2 439.2 438.5 437.5 436.5 435.4 434.7 435.7 438.0 439.2 442.3 442.8 442.3 441.0 440.0 436.7 434.9 433.4 431.2 430.4 430.9 432.4 437.0 441.0 443.1 444.6 445.6 444.9 441.8 440.0 435.4 432.7 431.9 432.7 435.7 441.3 443.8 447.2 449.5 449.2 447.4 442.3 436.0 430.0 425.0 425.3 427.7 434.9 438.7 444.9 449.8 450.5 448.5 443.8 439.5 428.2 418.7 414.3 411.0 415.1 420.4 427.5 435.2 442.0 447.4 451.1 451.3 445.9 435.2 425.3 413.6 408.6 407.7 413.9 421.1 434.2 438.0 438.0 438.0 438.0 438.0 438.0 438.0 438.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 160.23125 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.172656 0.375000 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.172656 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 584.6 577.9 570.9 565.7 585.3 589.0 582.9 586.3 590.7 593.5 593.1 591.1 588.0 585.0 583.3 581.3 577.6 574.9 576.2 576.2 580.6 580.9 586.7 591.4 593.8 602.1 600.7 596.2 586.7 579.6 574.9 573.3 573.6 576.9 581.9 591.4 597.9 600.7 593.1 560.8 560.8 560.8 560.8 560.8 560.8 560.8 560.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 175.32734375 - }, - { - "text": "也 许 我 该 说 声 SP", - "ph_seq": "y E x v w o g ai sh uo sh eng SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 rest", - "note_dur_seq": "0.055469 0.092969 0.094531 0.133594 0.053906 0.097656 0.089844 0.095312 0.092188 0.095312 0.092188 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.055469 0.092969 0.094531 0.133594 0.053906 0.097656 0.089844 0.095312 0.092188 0.095312 0.092188 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "299.7 294.5 292.6 292.3 293.2 294.3 296.9 297.8 296.7 294.5 293.3 293.2 294.2 295.2 294.7 290.8 278.3 276.7 283.9 296.2 308.5 321.7 330.4 334.8 336.2 332.3 330.6 330.0 329.2 328.1 328.1 327.9 327.9 327.7 327.5 327.5 328.3 332.7 335.4 333.1 332.3 331.2 330.2 329.2 327.5 320.4 297.4 289.3 306.5 325.8 334.2 325.9 308.7 296.3 286.8 289.8 291.1 291.3 293.3 293.5 293.8 294.3 295.0 289.0 265.7 263.7 282.5 292.8 308.5 321.6 331.2 333.8 333.8 331.7 330.8 330.6 330.0 329.1 327.5 314.9 292.0 286.0 300.0 314.3 325.6 343.0 372.1 386.2 385.0 384.8 379.3 376.2 368.3 362.6 355.9 354.3 358.4 368.3 377.3 382.2 381.5 371.5 362.2 362.2 362.2 362.2 362.2 362.2 362.2 362.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 176.00703125 - }, - { - "text": "抱 歉 SP", - "ph_seq": "b ao q ian SP", - "note_seq": "C#5 C#5 D5 D5 rest", - "note_dur_seq": "0.064062 0.208594 0.166406 0.187500 0.100000", - "is_slur_seq": "0 0 0 0 0", - "ph_dur": "0.064062 0.208594 0.166406 0.187500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": 
"499.6 508.4 511.0 511.9 512.8 512.2 511.6 519.9 522.3 523.6 523.3 520.5 516.3 512.8 511.0 513.1 516.3 522.9 535.5 545.2 551.2 556.3 559.2 560.2 558.5 555.6 552.4 550.2 549.3 548.6 548.0 550.5 553.4 555.3 556.9 558.4 564.8 574.9 582.3 587.3 588.3 590.0 590.0 589.0 586.0 584.6 583.9 586.0 589.7 590.0 585.6 579.9 567.7 549.3 546.7 546.7 546.7 546.7 546.7 546.7 546.7 546.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 177.3109375 - }, - { - "text": "带 来 困 扰 不 是 故 意 别 讨 厌 SP", - "ph_seq": "d ai l ai k un r ao b u sh ir g u y i b ie t ao y En SP", - "note_seq": "A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 G4 G4 A4 A4 rest", - "note_dur_seq": "0.129688 0.126562 0.060937 0.106250 0.081250 0.458594 0.103906 0.132031 0.055469 0.095312 0.092188 0.127344 0.060156 0.179688 0.007812 0.132031 0.055469 0.203906 0.171094 0.303906 0.071094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.129688 0.126562 0.060937 0.106250 0.081250 0.458594 0.103906 0.132031 0.055469 0.095312 0.092188 0.127344 0.060156 0.179688 0.007812 0.132031 0.055469 0.203906 0.171094 0.303906 0.071094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "545.8 538.0 519.9 492.7 474.3 452.6 441.3 436.5 433.9 434.9 437.5 440.5 440.3 436.0 434.9 435.4 436.0 438.7 441.0 442.5 444.1 442.8 436.7 424.8 403.0 388.9 371.1 361.6 371.7 371.3 371.1 371.3 371.1 368.9 363.8 341.4 342.0 355.7 374.1 370.2 356.2 329.1 304.8 278.0 258.8 261.3 272.9 289.0 298.8 301.9 300.7 295.4 289.6 281.1 277.2 278.0 281.2 285.5 291.6 296.4 303.1 305.3 305.3 301.7 293.8 284.6 280.7 279.3 280.9 286.8 294.2 299.5 303.3 303.8 301.4 299.5 298.6 297.4 297.1 297.1 297.4 298.1 298.8 299.0 297.2 294.9 291.5 287.5 286.3 287.0 287.8 291.3 295.2 297.1 296.6 295.5 293.3 292.3 291.3 291.3 292.5 294.3 297.9 297.9 280.1 262.3 261.9 270.4 300.8 328.7 332.1 331.5 331.7 331.5 326.8 295.5 291.3 307.7 319.7 326.2 329.2 331.7 332.7 334.0 333.7 329.2 333.3 336.7 334.2 330.6 329.2 328.3 328.7 327.4 322.5 287.1 267.9 273.4 283.0 286.2 286.8 290.0 292.5 291.8 291.3 292.6 293.0 292.8 292.8 292.6 293.7 295.0 296.9 297.6 299.6 303.3 309.0 318.4 323.4 327.0 330.8 332.1 332.3 331.3 330.2 329.4 331.2 315.8 279.4 279.9 299.7 319.5 338.7 359.0 377.3 378.6 379.1 377.6 373.6 366.2 362.2 360.5 360.7 362.0 363.0 365.5 369.1 372.8 374.1 373.6 371.9 369.8 367.4 366.4 362.8 330.4 304.0 296.2 288.8 297.2 310.9 325.1 336.6 348.5 365.7 374.3 378.6 387.0 390.6 390.9 390.6 390.4 390.9 390.2 389.1 389.7 389.7 389.3 389.7 389.7 390.2 391.5 392.9 393.8 393.6 393.6 394.0 392.9 392.0 387.5 377.3 364.9 358.4 360.7 368.5 387.0 408.5 431.3 444.9 451.8 451.8 452.1 445.1 438.0 430.7 426.7 429.5 436.5 446.1 445.4 448.7 446.1 445.6 443.3 440.0 439.7 437.5 436.0 435.7 443.3 439.5 440.8 438.5 444.3 450.5 454.2 481.2 491.3 511.9 527.8 526.6 526.6 526.6 526.6 526.6 526.6 526.6 526.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 177.9953125 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.141406 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.141406 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 585.6 580.2 581.3 583.6 594.2 596.2 590.4 588.7 591.8 586.7 586.3 585.0 581.3 578.9 578.2 577.9 576.2 577.6 576.2 579.6 580.9 578.2 583.9 588.7 597.6 602.1 601.4 
599.0 589.4 581.9 576.6 573.9 573.3 573.3 573.3 573.3 573.3 573.3 573.3 573.3", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 181.35859375 - }, - { - "text": "耍 小 心 机 有 点 惊 险 SP", - "ph_seq": "sh ua x iao x in j i y ou d ian j ing x ian SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 rest", - "note_dur_seq": "0.188281 0.092969 0.094531 0.092969 0.094531 0.121875 0.065625 0.152344 0.035156 0.132812 0.054688 0.244531 0.130469 0.186719 0.188281 1.312500 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.188281 0.092969 0.094531 0.092969 0.094531 0.121875 0.065625 0.152344 0.035156 0.132812 0.054688 0.244531 0.130469 0.186719 0.188281 1.312500 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "280.4 559.2 556.9 552.4 548.6 539.5 519.3 479.8 417.7 362.4 320.6 299.3 290.0 285.8 285.1 286.0 287.8 290.8 298.8 299.0 297.1 296.0 295.2 293.5 283.2 265.7 261.3 279.3 290.3 297.6 307.7 320.1 332.4 344.6 340.1 334.2 330.8 330.4 331.0 330.6 328.5 309.5 290.8 302.3 321.4 325.8 326.4 327.2 330.2 337.1 346.4 341.8 335.4 331.2 329.6 329.1 329.1 326.8 321.5 313.1 293.8 275.1 265.8 261.3 266.5 278.5 291.8 294.0 293.0 293.5 294.2 293.8 293.8 294.7 295.5 295.7 295.4 294.7 290.4 290.4 298.7 312.5 322.3 327.7 330.8 332.1 331.2 330.2 329.8 331.2 331.3 320.8 288.0 285.8 309.3 330.0 346.9 367.8 382.6 380.0 378.0 377.1 374.7 373.9 372.4 369.4 366.6 366.6 367.0 367.4 368.5 370.4 370.9 370.9 369.6 369.1 369.6 370.2 372.4 374.3 373.2 369.4 362.6 368.7 375.2 392.0 419.2 430.7 463.2 494.0 525.4 551.2 565.7 564.1 565.0 562.1 558.9 551.5 548.9 549.6 550.9 555.6 558.2 558.9 559.5 556.3 542.3 518.1 530.9 549.9 548.6 543.3 539.5 537.7 533.3 528.4 523.3 518.7 516.3 515.2 510.0 503.7 506.0 538.3 549.3 552.8 555.6 555.6 554.7 552.4 551.5 550.9 550.2 549.9 548.3 547.7 547.7 545.5 543.0 542.3 544.8 548.9 558.5 568.0 576.9 589.0 595.2 600.4 601.4 599.0 592.1 585.3 581.3 579.2 579.9 581.6 584.3 589.0 592.1 593.8 594.8 593.1 589.4 587.3 586.0 583.6 582.9 583.9 585.3 587.0 589.0 592.1 593.8 594.2 594.2 592.8 590.4 587.7 585.6 583.3 582.9 581.6 580.6 581.6 583.9 586.0 588.0 592.1 594.5 596.9 598.6 596.9 593.1 589.7 586.3 584.3 575.2 574.2 574.9 576.6 579.2 585.0 589.0 597.3 601.8 603.5 601.1 594.2 589.4 579.6 570.6 569.0 569.6 571.6 576.9 583.3 592.8 596.9 604.5 606.3 599.3 589.4 580.2 569.3 564.4 564.7 570.3 584.3 594.5 603.8 607.3 603.1 593.1 568.3 543.9 544.2 544.2 544.2 544.2 544.2 544.2 544.2 544.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 181.87421875 - }, - { - "text": "但 我 绝 SP", - "ph_seq": "d an w o j ve SP", - "note_seq": "A4 A4 F#4 F#4 D4 D4 rest", - "note_dur_seq": "0.111719 0.200000 0.081250 0.163281 0.117969 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0", - "ph_dur": "0.111719 0.200000 0.081250 0.163281 0.117969 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "440.0 440.0 410.5 410.5 410.5 410.5 410.5 410.8 411.7 412.7 413.4 430.2 440.3 440.5 438.0 436.2 435.4 434.9 434.9 435.7 435.4 439.7 442.3 443.3 443.8 443.1 439.2 430.7 416.5 402.8 383.9 375.2 369.6 368.2 371.9 374.7 373.2 371.7 370.2 369.8 369.6 370.0 370.9 372.8 375.4 376.0 370.9 355.5 322.7 334.6 371.9 377.6 373.4 362.6 342.6 325.0 311.3 294.7 276.3 267.9 280.1 290.1 295.9 299.0 297.2 291.6 284.6 276.7 273.7 275.1 279.4 287.3 292.8 299.3 302.8 304.2 303.5 297.4 291.1 281.9 276.4 275.3 278.1 
284.6 291.3 299.5 303.8 306.3 304.2 297.8 286.0 286.0 286.0 286.0 286.0 286.0 286.0 286.0", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 185.88828125 - }, - { - "text": "不 在 人 面 前 装 可 怜 SP", - "ph_seq": "b u z ai r en m ian q ian zh uang k e l ian SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.114062 0.123438 0.064062 0.136719 0.050781 0.110156 0.077344 0.104688 0.082812 0.128125 0.059375 0.212500 0.162500 0.253125 0.121875 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.123438 0.064062 0.136719 0.050781 0.110156 0.077344 0.104688 0.082812 0.128125 0.059375 0.212500 0.162500 0.253125 0.121875 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "293.7 293.7 297.2 297.4 297.6 297.9 298.1 298.3 298.3 298.5 298.5 298.8 299.1 295.7 293.7 292.8 294.2 294.7 294.3 289.3 266.8 253.4 265.7 284.3 291.4 305.5 318.2 325.1 328.7 330.0 331.0 330.0 329.1 330.2 329.2 329.2 328.5 327.4 326.0 327.0 329.2 333.1 336.2 336.4 335.0 333.1 331.3 331.3 331.2 329.2 328.7 328.1 327.2 321.5 313.5 309.2 306.5 299.8 294.6 294.0 293.5 293.5 293.8 294.3 293.0 294.3 295.4 294.7 290.0 265.4 243.7 240.1 249.5 266.4 279.3 287.1 284.3 279.6 277.8 276.9 276.1 275.4 275.6 276.7 277.8 278.6 278.1 265.0 239.2 228.9 242.8 272.3 294.5 298.1 297.2 296.6 295.0 292.8 292.3 291.1 290.1 290.5 291.1 293.3 295.2 294.9 294.7 294.3 294.2 294.3 295.2 295.4 295.0 294.5 290.0 275.3 264.1 278.3 291.6 291.8 294.1 297.0 306.5 315.6 334.0 336.9 333.7 332.5 330.2 326.2 324.0 324.2 324.9 325.5 326.0 327.5 328.7 329.1 329.6 330.2 329.8 329.4 329.8 329.4 328.3 325.8 321.9 316.2 310.4 294.5 276.5 264.5 260.3 262.7 268.6 279.4 287.0 296.7 299.5 300.7 300.0 297.6 294.0 288.1 282.0 280.9 282.7 286.6 292.8 298.6 301.2 300.5 299.0 294.3 287.1 280.9 279.9 283.8 289.3 296.2 301.1 303.3 303.3 299.0 294.0 283.7 275.3 272.1 271.2 271.2 271.2 271.2 271.2 271.2 271.2 271.2", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 187.1984375 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 582.9 575.2 568.0 563.4 557.6 579.2 580.6 579.6 587.3 588.3 586.0 584.6 583.3 580.6 579.9 580.9 583.3 582.6 582.6 586.3 585.3 596.2 592.8 594.8 600.4 601.4 599.7 594.8 585.3 578.6 574.2 573.6 573.6 573.6 573.6 573.6 573.6 573.6 573.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 189.5796875 - }, - { - "text": "想 变 得 可 爱 不 惹 嫌 要 努 力 SP", - "ph_seq": "x iang b ian d e k e ai b u r e x ian y ao n u l i SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 rest", - "note_dur_seq": "0.192969 0.132031 0.055469 0.132812 0.054688 0.106250 0.081250 0.187500 0.132031 0.055469 0.273438 0.101562 0.186719 0.188281 0.303906 0.071094 0.107031 0.080469 0.126562 0.060937 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.192969 0.132031 0.055469 0.132812 0.054688 0.106250 0.081250 0.187500 0.132031 0.055469 0.273438 0.101562 0.186719 0.188281 0.303906 0.071094 0.107031 0.080469 0.126562 0.060937 0.375000 
0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "284.6 559.8 559.2 556.3 553.4 546.7 532.7 502.5 450.5 390.9 341.1 312.9 300.2 294.9 292.5 290.5 290.3 290.3 290.8 290.3 290.8 292.1 292.0 292.6 293.2 292.8 293.2 293.8 294.0 292.0 275.7 258.6 279.3 323.4 332.3 332.9 332.3 331.7 330.4 328.5 327.2 327.0 327.4 327.9 329.6 330.6 329.1 318.4 304.5 319.7 333.3 332.9 331.2 329.6 330.0 328.5 301.4 276.1 269.6 268.8 272.4 274.6 275.6 276.5 282.7 281.7 281.1 287.3 291.8 294.9 295.2 294.5 293.5 294.0 293.0 292.8 293.3 293.7 294.0 297.2 302.9 310.7 319.1 326.0 330.2 331.5 331.9 330.4 328.7 329.6 333.1 319.1 284.3 275.4 280.6 299.8 323.1 343.8 367.7 383.9 382.2 381.7 378.2 375.8 373.9 368.7 366.0 364.9 364.5 366.4 369.4 370.0 370.6 372.1 371.3 370.4 369.1 368.5 369.8 372.6 374.3 375.6 377.8 378.2 377.3 370.0 364.5 374.0 400.9 454.4 508.4 543.9 559.5 561.8 562.8 557.3 553.4 549.9 545.8 545.5 546.7 551.2 556.3 560.8 562.4 563.4 562.4 555.6 542.0 519.6 514.0 520.2 524.2 526.3 532.4 530.9 532.7 533.9 538.9 543.7 547.8 549.9 550.7 574.9 597.3 600.7 600.0 594.5 584.3 578.9 577.2 578.2 580.2 584.6 587.3 588.7 589.0 586.0 583.9 582.6 583.3 585.0 589.0 591.8 592.4 593.5 592.1 589.4 574.9 567.0 560.8 546.0 520.7 488.6 466.1 456.6 447.2 444.3 443.1 442.0 443.1 442.5 443.3 442.8 441.0 437.2 427.2 407.9 394.4 383.7 375.8 375.8 371.3 368.5 367.9 368.7 372.6 374.3 375.8 374.1 369.6 359.7 336.4 309.5 282.0 266.1 257.5 265.3 275.4 282.0 291.0 298.5 302.3 304.2 300.0 292.5 280.4 271.3 266.2 267.0 272.7 284.3 293.7 300.0 304.4 305.1 301.4 294.9 287.3 278.0 271.9 271.2 274.0 282.0 291.0 296.4 300.4 302.4 299.7 291.8 291.8 291.8 291.8 291.8 291.8 291.8 291.8", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 190.11953125 - }, - { - "text": "变 得 像 糖 果 一 样 甜 SP", - "ph_seq": "b ian d e x iang t ang g uo y i y ang t ian SP", - "note_seq": "F#4 F#4 G4 G4 G4 G4 F#4 F#4 G4 G4 B4 B4 A4 A4 A4 A4 rest", - "note_dur_seq": "0.114062 0.132812 0.054688 0.092969 0.094531 0.102344 0.085156 0.127344 0.060156 0.179688 0.007812 0.303906 0.071094 0.203906 0.171094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.114062 0.132812 0.054688 0.092969 0.094531 0.102344 0.085156 0.127344 0.060156 0.179688 0.007812 0.303906 0.071094 0.203906 0.171094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "294.3 307.0 324.2 341.4 349.6 357.4 361.1 362.2 361.5 360.3 358.8 358.4 365.1 367.2 368.5 367.4 365.1 364.3 365.7 367.7 371.1 374.1 374.7 371.4 360.6 348.1 369.5 395.2 396.8 395.4 395.6 394.3 385.9 362.2 345.2 355.9 369.6 377.3 380.2 382.2 385.0 390.0 393.6 387.0 397.7 397.2 395.9 395.0 392.2 390.4 390.2 391.1 391.8 390.0 380.8 348.6 333.1 332.4 363.3 379.3 371.5 370.4 370.0 369.8 370.4 371.5 372.4 371.3 369.1 367.0 363.4 356.8 344.0 345.6 366.3 387.7 390.4 391.3 392.2 393.1 394.3 393.8 393.4 392.9 392.4 393.1 391.5 391.3 391.9 395.8 409.3 430.2 449.5 464.8 470.5 471.6 467.8 464.8 458.7 455.5 454.5 459.5 465.4 477.9 490.8 496.7 500.5 500.8 498.5 494.2 491.6 489.3 489.3 489.9 492.2 493.9 494.2 494.2 494.2 493.6 494.0 493.7 485.9 473.5 458.4 447.4 443.1 443.1 440.5 439.0 438.7 438.7 439.0 439.0 439.0 438.2 437.5 437.0 437.7 439.2 441.0 442.8 444.3 444.3 444.1 443.3 439.2 418.7 400.0 388.6 400.7 415.3 421.8 423.5 423.5 423.3 432.4 444.3 451.8 455.0 455.3 452.6 448.5 440.3 434.7 431.4 432.2 436.0 440.5 447.2 448.7 448.7 446.1 441.3 432.7 429.0 429.2 433.7 440.5 445.1 449.5 451.3 448.5 444.9 441.8 
432.2 425.0 414.8 407.7 407.7 407.7 407.7 407.7 407.7 407.7 407.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 193.1984375 - }, - { - "text": "秋 SP", - "ph_seq": "q iu SP", - "note_seq": "D5 D5 rest", - "note_dur_seq": "0.170313 0.281250 0.100000", - "is_slur_seq": "0 0 0", - "ph_dur": "0.170313 0.281250 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "587.3 587.3 587.3 587.3 587.3 587.3 587.3 587.3 588.0 587.0 585.6 592.1 596.9 599.0 591.8 590.7 593.1 596.9 596.2 594.5 592.4 589.4 587.0 583.9 581.3 579.9 576.6 578.9 575.2 584.6 585.3 595.5 601.1 601.8 600.4 594.2 585.3 578.6 574.2 573.6 573.6 573.6 573.6 573.6 573.6 573.6 573.6", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - "input_type": "phoneme", - "offset": 195.5796875 - }, - { - "text": "少 女 的 武 器 是 狡 辩 让 目 光 多 多 驻 足 在 我 身 前 因 为 不 是 谁 都 像 这 般 耀 眼 SP", - "ph_seq": "sh ao n v d e w u q i sh ir j iao b ian r ang m u g uang d uo d uo zh u z u z ai w o sh en q ian y in w ei b u sh ir sh ei d ou x iang zh e b an y ao y En SP", - "note_seq": "D4 D4 E4 E4 E4 E4 D4 D4 E4 E4 F#4 F#4 C#5 C#5 D5 D5 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 A4 A4 F#4 F#4 D4 D4 D4 D4 E4 E4 E4 E4 D4 D4 C#4 C#4 D4 D4 E4 E4 D4 D4 rest", - "note_dur_seq": "0.188281 0.107031 0.080469 0.132812 0.054688 0.179688 0.007812 0.104688 0.082812 0.095312 0.092188 0.244531 0.130469 0.264062 0.110937 0.273438 0.101562 0.110156 0.077344 0.127344 0.060156 0.450781 0.111719 0.132812 0.054688 0.128125 0.059375 0.123438 0.064062 0.123438 0.064062 0.133594 0.053906 0.191406 0.183594 0.208594 0.166406 0.367188 0.007812 0.133594 0.053906 0.132031 0.055469 0.374219 0.188281 0.083594 0.103906 0.132812 0.054688 0.092969 0.094531 0.128125 0.059375 0.132031 0.055469 0.303906 0.071094 0.303906 0.071094 0.375000 0.100000", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.188281 0.107031 0.080469 0.132812 0.054688 0.179688 0.007812 0.104688 0.082812 0.095312 0.092188 0.244531 0.130469 0.264062 0.110937 0.273438 0.101562 0.110156 0.077344 0.127344 0.060156 0.450781 0.111719 0.132812 0.054688 0.128125 0.059375 0.123438 0.064062 0.123438 0.064062 0.133594 0.053906 0.191406 0.183594 0.208594 0.166406 0.367188 0.007812 0.133594 0.053906 0.132031 0.055469 0.374219 0.188281 0.083594 0.103906 0.132812 0.054688 0.092969 0.094531 0.128125 0.059375 0.132031 0.055469 0.303906 0.071094 0.303906 0.071094 0.375000 0.100000", - "f0_timestep": "0.011609977722167968", - "f0_seq": "280.4 558.9 557.6 552.1 548.0 539.2 519.0 479.3 419.2 362.4 319.9 298.6 289.1 285.0 283.0 281.5 281.2 281.5 286.8 293.7 296.0 295.7 294.5 293.3 292.3 292.5 293.5 293.2 290.3 290.8 295.5 300.4 309.2 325.3 332.9 332.3 332.1 329.8 328.9 328.3 328.5 328.9 329.4 328.7 306.3 270.7 266.8 277.5 300.9 338.7 339.3 334.6 333.7 332.9 331.5 329.6 328.7 327.7 327.9 328.3 328.9 329.6 326.0 319.9 312.2 303.1 296.2 292.1 290.1 291.6 294.5 295.9 292.3 266.8 248.4 257.7 270.5 282.7 290.2 298.4 307.8 320.0 329.1 332.9 334.0 331.5 323.0 299.0 286.6 299.1 307.2 311.5 314.9 320.2 326.0 330.3 333.3 337.3 342.4 367.4 381.3 381.3 378.9 373.4 366.8 362.2 360.7 362.2 366.4 368.5 371.7 371.7 371.5 370.9 368.5 366.6 367.4 369.1 371.1 375.6 373.6 354.7 337.7 353.1 362.8 368.9 374.9 390.9 416.5 446.5 495.6 520.2 524.5 523.3 517.2 508.1 504.3 506.0 513.1 524.2 536.1 547.7 555.3 
557.9 559.2 556.3 553.4 552.1 551.5 553.1 557.3 560.2 563.1 562.4 553.7 523.3 502.5 493.3 484.8 479.6 500.0 521.7 545.8 549.6 556.9 561.5 561.1 554.7 551.2 545.5 544.5 548.0 555.3 565.4 577.6 588.0 594.2 598.6 595.5 588.0 583.6 581.6 582.3 586.3 588.0 589.4 590.0 590.4 582.3 566.7 545.2 508.1 482.3 453.4 442.9 446.9 445.9 443.6 446.7 446.1 443.8 441.5 439.7 440.3 438.5 429.2 419.4 411.2 402.1 385.1 373.3 369.8 367.2 365.5 365.7 368.5 371.1 372.4 372.8 370.4 359.0 323.6 279.1 261.8 262.2 268.6 279.5 288.3 281.1 277.8 283.2 295.4 300.9 306.7 306.8 304.2 299.7 295.7 289.6 281.1 276.4 273.4 275.6 278.5 287.0 291.8 300.9 305.8 307.9 306.7 299.8 287.6 277.7 266.8 262.2 262.1 266.7 276.9 286.3 300.7 309.0 313.7 313.3 304.7 292.3 277.2 268.7 267.3 271.8 278.6 284.8 286.3 286.1 285.1 284.0 283.5 287.3 291.5 292.1 293.8 293.2 292.5 293.0 293.8 294.9 295.7 293.8 274.8 251.0 266.5 292.5 310.2 332.1 335.6 332.5 331.5 331.2 331.0 331.5 327.4 301.6 267.4 282.4 299.8 306.3 309.9 313.3 318.4 326.8 331.0 333.1 332.7 331.9 330.8 329.8 328.5 322.1 296.7 268.4 253.2 248.7 260.8 273.2 287.7 299.8 296.4 294.3 294.5 295.0 295.5 295.7 295.5 293.5 287.8 271.8 249.4 240.7 243.8 246.8 264.0 279.9 281.4 278.8 277.0 277.8 278.0 279.1 278.3 278.1 278.0 274.6 270.4 266.5 267.1 272.4 284.3 295.9 298.6 298.6 297.1 295.4 293.2 291.6 291.1 291.3 291.8 292.3 293.0 293.7 294.3 294.9 295.5 295.4 293.8 285.3 261.0 240.9 246.2 256.4 263.6 269.9 275.0 283.3 287.6 297.9 306.6 314.0 320.4 330.0 335.2 335.8 335.2 333.1 332.3 330.4 328.7 326.0 324.0 324.2 324.9 326.0 328.1 330.2 332.1 332.9 333.8 332.9 330.4 315.1 284.6 279.3 282.0 285.3 286.1 287.0 287.8 290.3 292.2 295.2 297.4 299.9 299.0 297.9 297.4 296.7 294.9 292.6 289.5 287.6 287.1 288.1 290.1 292.1 293.8 295.5 295.9 295.9 294.5 292.8 291.6 292.1 294.0 296.6 298.1 299.1 300.5 300.7 294.5 284.2 284.0 290.1 304.2 335.7 365.1 392.0 409.1 423.8 437.2 445.6 447.4 447.2 443.6 439.5 436.2 429.5 422.3 418.0 409.1 395.0 381.5 373.0 370.9 368.3 367.7 367.4 368.9 371.3 369.8 357.4 345.8 342.2 337.7 323.3 309.4 294.9 276.7 267.4 278.3 289.5 298.1 303.0 305.3 303.1 297.1 290.8 280.1 274.6 273.8 277.5 283.3 291.3 298.1 303.7 304.9 302.1 295.2 285.1 278.8 274.0 275.1 281.2 290.1 299.3 307.2 307.7 302.4 290.3 275.0 276.7 278.6 280.7 281.7 282.0 282.4 282.7 283.3 284.2 284.8 285.3 285.6 285.6 285.6 285.8 285.8 293.3 298.5 298.1 297.6 293.2 277.5 263.0 273.1 281.5 285.1 287.5 289.6 293.7 297.2 303.6 309.2 320.2 333.1 338.9 336.4 332.5 330.0 327.9 328.3 328.3 329.1 327.0 318.4 298.1 295.2 310.2 324.5 340.1 337.3 332.7 331.3 331.2 331.0 329.2 314.6 288.6 289.5 299.5 306.3 302.4 299.9 298.1 295.7 286.3 288.8 295.9 295.4 294.5 293.7 293.5 293.7 293.8 294.0 293.0 287.5 269.8 248.6 237.6 244.1 272.1 281.5 279.9 278.1 277.7 277.8 278.6 278.6 277.3 266.5 236.6 215.1 221.9 228.4 231.9 245.5 269.3 285.6 295.5 298.3 299.3 298.1 294.5 290.3 288.0 286.3 286.3 286.8 289.6 292.8 295.2 296.7 297.9 296.9 295.5 294.3 294.2 294.0 293.5 292.3 291.0 290.0 290.8 290.8 287.6 285.8 286.4 289.5 296.5 302.8 303.3 303.5 299.7 296.7 294.2 293.5 295.0 298.1 303.0 312.0 319.7 326.4 330.2 331.9 333.5 332.3 331.0 330.8 330.8 330.4 328.9 323.6 313.1 293.3 274.5 261.2 254.5 256.5 264.2 275.5 289.1 294.5 297.4 299.0 299.1 297.1 294.5 289.8 288.8 289.5 291.1 294.9 295.7 296.6 295.7 292.3 289.8 288.0 286.8 287.3 291.5 295.7 301.1 302.3 301.7 299.8 296.4 289.5 284.3 282.7 284.0 284.2 282.5 293.7 293.7 293.7 293.7 293.7 293.7 293.7 293.7", - "gender_timestep": null, - "gender": null, - "velocity_timestep": null, - "velocity": null, - 
"input_type": "phoneme", - "offset": 196.12421875 - } -] \ No newline at end of file diff --git "a/samples/\351\200\215\351\201\245\344\273\231\357\274\210\347\224\267key\357\274\211.ds" "b/samples/\351\200\215\351\201\245\344\273\231.ds" similarity index 89% rename from "samples/\351\200\215\351\201\245\344\273\231\357\274\210\347\224\267key\357\274\211.ds" rename to "samples/\351\200\215\351\201\245\344\273\231.ds" index 39a25dbaa..21e98b28f 100644 --- "a/samples/\351\200\215\351\201\245\344\273\231\357\274\210\347\224\267key\357\274\211.ds" +++ "b/samples/\351\200\215\351\201\245\344\273\231.ds" @@ -1,146 +1,146 @@ [ { + "offset": 1.569, "text": "AP 啊 啊 啊 啊 啊 AP 啊 啊 啊 啊 啊 SP", - "ph_seq": "AP t ian sh ang b ai y v j ing ing ing AP sh ir er l ou w u ch eng SP", - "note_seq": "rest A#3 A#3 G#3 G#3 F#3 F#3 A#3 A#3 G#3 G#3 F#3 F3 rest D#3 D#3 F#3 F3 F3 C#3 C#3 A#2 A#2 rest", - "note_dur_seq": "0.6 0.7229999 0.7229999 0.7220001 0.7220001 0.7229998 0.7229998 0.723 0.723 1.085 1.085 0.3609996 0.5420003 0.9039998 0.723 0.723 0.723 0.7219996 0.7219996 0.7230005 0.7230005 2.169 2.169 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.494578 0.105422 0.533241 0.189759 0.612061 0.10994 0.573903 0.149096 0.522699 0.200301 1.085 0.361 0.542 0.708216 0.195784 0.61306 0.671795 0.161144 0.606036 0.115964 0.58294 0.14006 2.169 0.4", - "f0_timestep": "0.005", + "ph_seq": "AP t ian sh ang b ai y v j ing AP sh ir er l ou w u ch eng SP", + "ph_dur": "0.4946 0.1054 0.5332 0.1898 0.6121 0.1099 0.5739 0.1491 0.5227 0.2003 1.988 0.7082 0.1958 0.6131 0.6718 0.1611 0.606 0.116 0.5829 0.1401 2.169 0.4", + "ph_num": "2 2 2 2 2 1 2 1 2 2 2 1 1", + "note_seq": "rest A#3 G#3 F#3 A#3 G#3 F#3 F3 rest D#3 F#3 F3 C#3 A#2 rest", + "note_dur": "0.6 0.723 0.722 0.723 0.723 1.085 0.361 0.542 0.904 0.723 0.723 0.722 0.723 2.169 0.4", + "note_slur": "0 0 0 0 0 0 1 1 0 0 0 0 0 0 0", "f0_seq": "233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.0 232.5 231.8 230.6 229.0 227.1 225.1 223.0 220.7 218.6 216.5 214.5 212.3 210.5 209.0 207.6 206.7 206.1 206.2 206.8 207.7 209.9 211.1 211.5 212.0 212.4 212.8 213.0 213.9 215.2 216.5 217.2 217.5 218.3 219.1 219.9 220.1 220.5 221.1 221.9 222.6 223.1 223.5 224.0 224.6 224.9 225.7 227.0 228.0 228.8 228.9 229.5 230.1 230.8 231.1 231.4 231.9 232.9 233.7 234.3 234.4 234.2 234.1 233.9 233.8 233.7 233.5 233.4 233.0 232.6 232.3 231.9 231.8 231.6 231.4 231.1 230.8 230.5 230.2 230.1 230.5 231.0 231.5 231.9 232.2 232.5 233.0 233.1 233.3 233.4 233.6 233.8 233.9 234.4 234.8 234.8 234.7 234.5 234.5 234.3 234.1 234.0 234.0 233.8 233.4 233.1 232.9 232.6 232.7 232.9 233.1 233.3 233.5 233.6 234.1 234.9 235.4 236.4 237.4 238.1 238.7 239.0 237.8 235.8 232.8 228.2 223.2 218.6 216.1 214.8 214.6 215.2 215.8 216.3 216.2 215.9 215.6 215.5 215.5 215.6 215.8 215.9 216.1 216.4 216.5 216.5 216.3 216.5 217.1 217.6 217.7 217.3 217.1 216.6 216.1 216.0 215.9 215.7 215.5 215.7 216.5 217.2 217.5 217.1 216.0 214.9 213.6 212.7 
211.8 210.9 210.2 209.5 208.9 208.4 208.0 207.5 207.2 206.7 206.0 205.6 205.3 204.9 204.9 204.9 204.9 204.9 205.0 205.1 205.1 205.1 205.1 205.1 205.2 205.3 205.5 205.7 205.8 206.0 206.2 206.3 206.5 206.6 206.6 207.1 207.5 207.5 207.6 207.7 207.9 208.0 208.0 208.1 208.4 208.7 209.1 209.4 209.6 209.9 209.9 209.9 209.8 209.8 209.8 209.7 209.6 209.6 209.5 209.4 209.3 209.3 209.3 209.3 209.0 208.9 208.7 208.6 208.6 208.5 208.4 208.3 208.1 208.0 207.9 207.7 207.6 207.2 206.7 206.4 206.1 205.8 205.5 205.4 205.5 205.7 205.8 206.0 206.1 206.4 206.9 207.4 207.6 207.8 208.0 208.2 208.4 208.5 208.8 209.2 209.2 209.3 209.5 209.5 209.6 209.7 209.9 209.9 209.9 209.8 209.6 209.5 209.2 209.1 209.0 208.4 207.8 206.8 206.4 205.6 204.0 202.7 201.1 199.0 196.1 191.9 185.6 180.5 174.0 165.6 160.6 157.0 155.2 157.1 160.1 164.8 171.6 177.2 180.7 181.5 181.7 181.3 180.9 180.5 180.1 181.2 182.1 182.0 182.5 183.3 184.0 184.6 184.8 184.7 184.7 184.7 184.6 184.3 184.0 183.8 183.6 183.5 183.3 183.2 183.0 182.8 182.7 182.5 182.3 182.2 182.1 182.1 182.2 182.3 182.5 182.6 182.7 182.8 183.2 183.6 183.8 184.1 184.5 184.8 185.2 185.5 186.0 186.0 185.9 185.9 185.8 185.7 185.6 185.5 185.5 185.1 184.8 184.7 184.5 184.4 184.2 184.2 184.1 184.1 184.1 184.2 184.3 184.3 184.4 184.5 184.5 184.6 184.7 184.8 184.8 184.8 184.9 185.1 185.3 185.9 186.3 186.7 186.8 186.9 187.1 187.5 187.5 187.5 187.0 186.7 186.5 186.0 185.6 185.2 185.1 184.9 184.7 184.4 184.1 183.9 183.8 183.7 183.7 183.9 184.0 184.0 184.2 184.3 184.4 184.4 184.6 185.1 185.8 186.5 187.3 187.4 187.6 187.8 187.9 187.8 187.5 186.9 186.5 186.3 186.2 185.8 184.9 184.2 183.6 183.1 182.1 181.3 180.8 179.8 179.2 179.4 180.2 181.4 183.1 185.0 187.4 190.4 195.1 199.5 202.7 206.4 209.5 211.5 213.3 214.9 216.4 217.6 218.7 219.9 220.7 221.4 221.8 221.4 220.7 219.9 219.4 219.1 218.8 218.2 217.7 217.8 217.6 217.3 216.9 216.7 216.6 216.9 217.3 217.9 218.7 219.8 220.9 221.4 223.0 225.2 227.5 229.0 230.2 232.0 233.1 233.9 234.6 234.8 234.7 234.5 234.3 234.1 234.1 233.8 233.4 233.1 232.5 231.9 231.9 232.0 232.2 232.2 232.4 232.5 232.7 233.0 233.2 233.4 233.5 233.6 233.8 233.9 233.9 234.5 234.7 234.7 234.5 234.3 234.2 234.1 234.0 233.9 233.8 233.7 233.6 233.5 233.4 233.4 233.3 233.2 233.1 233.1 233.0 232.7 232.4 232.4 232.7 233.1 233.3 233.2 232.9 232.5 232.1 232.0 232.0 232.4 232.4 232.1 231.5 231.9 232.4 233.0 233.5 233.6 233.3 232.4 231.5 230.7 227.8 223.1 216.5 206.4 198.4 194.3 193.1 193.2 194.2 195.9 197.5 198.8 199.3 199.8 200.4 201.4 202.1 202.2 202.1 202.0 202.2 202.3 202.4 202.6 202.7 202.6 202.4 201.9 201.2 200.4 202.2 204.5 205.7 206.8 207.5 207.0 206.9 206.8 206.5 206.2 205.8 205.6 205.4 205.4 205.4 205.3 205.3 205.2 205.1 205.0 204.9 204.9 204.9 204.9 204.9 204.9 204.9 205.0 205.0 204.9 205.0 205.0 205.0 205.1 205.1 205.1 205.1 205.2 205.2 205.3 205.4 205.6 205.7 205.7 205.9 206.0 206.0 206.2 206.5 206.7 206.7 206.7 206.7 206.8 206.8 206.8 206.9 206.9 206.9 207.0 207.1 207.2 207.2 207.2 207.3 207.4 207.4 207.4 207.5 207.6 207.7 207.7 207.7 207.8 207.8 207.8 207.8 207.8 207.7 207.7 207.6 207.5 207.3 207.2 207.1 207.1 207.1 207.3 207.6 207.6 207.5 207.4 207.3 207.1 206.9 206.7 206.5 206.5 206.4 206.3 206.3 206.4 206.5 206.6 206.6 206.7 206.8 206.9 206.9 206.9 206.9 207.0 207.1 207.3 207.4 207.5 207.6 207.7 208.1 208.2 208.3 208.3 208.2 208.1 208.1 208.1 208.0 207.9 207.9 207.9 207.8 207.7 207.7 207.7 207.6 207.5 207.5 207.5 207.5 207.6 207.8 207.9 208.1 208.2 208.4 208.5 208.4 208.3 208.1 208.0 207.9 207.9 207.8 207.8 207.8 207.7 207.7 207.7 207.6 207.5 207.4 207.4 207.4 207.3 
207.3 207.3 207.2 207.2 207.2 207.1 207.1 207.1 207.1 207.2 207.4 207.8 207.9 207.9 207.7 207.5 207.3 207.1 207.1 207.1 207.1 207.2 207.3 207.4 207.4 207.5 207.6 207.7 207.9 207.9 207.9 208.0 207.9 207.9 207.9 208.0 208.0 208.0 208.2 208.3 208.3 208.2 208.2 208.4 208.1 207.7 207.2 207.3 207.1 206.3 205.4 204.4 203.5 202.0 200.5 199.0 197.6 196.0 194.2 191.8 189.2 186.5 184.1 182.1 180.7 179.7 179.2 179.3 179.4 179.4 179.4 180.1 180.7 181.1 181.7 182.2 182.5 182.6 182.7 182.7 182.7 182.8 182.9 182.9 183.0 183.0 182.8 182.5 182.4 182.3 182.4 182.4 182.5 182.6 182.6 182.7 182.8 182.8 182.8 182.9 183.0 183.0 183.0 183.3 183.5 183.8 184.0 184.1 184.2 184.4 184.5 184.5 184.6 184.6 184.5 184.4 184.3 184.1 183.9 183.6 183.1 182.4 181.6 180.7 179.6 178.4 177.3 176.3 174.9 173.3 172.1 170.4 168.3 167.1 166.5 166.3 166.6 166.9 167.2 168.7 169.7 170.5 171.8 173.0 174.2 175.3 176.0 176.3 176.5 176.5 176.2 176.0 175.4 174.3 173.9 173.1 171.9 171.4 170.3 168.7 167.9 167.5 167.4 167.6 167.8 168.0 168.5 169.1 169.6 170.9 172.3 173.6 174.0 174.7 175.8 176.3 176.5 176.3 176.1 175.9 175.8 175.4 174.9 174.2 173.1 171.7 170.2 169.4 168.6 167.9 168.0 168.4 168.6 169.2 170.0 170.8 172.4 173.8 174.6 175.3 175.9 176.3 176.3 176.3 176.1 175.9 175.7 175.6 174.8 174.0 173.3 172.3 171.1 169.6 168.9 168.3 167.2 167.0 166.9 167.1 167.3 167.4 167.6 168.3 169.3 170.4 171.2 171.7 172.1 172.9 174.0 174.2 174.4 174.4 174.4 174.4 174.4 174.4 174.5 174.5 174.5 174.5 174.5 174.5 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.6 174.4 174.2 174.1 173.9 173.7 173.5 173.3 173.1 172.9 172.8 172.6 172.4 172.2 172.0 171.8 171.7 171.5 171.3 171.1 170.9 170.7 170.6 170.4 170.2 170.0 169.8 169.7 169.5 169.3 169.1 168.9 168.8 168.6 168.4 168.2 168.0 167.9 167.7 167.5 167.3 167.1 167.0 166.8 166.6 166.4 166.3 166.1 165.9 165.7 165.6 165.4 165.2 165.0 164.8 164.7 164.5 164.3 164.1 164.0 163.8 163.6 163.4 163.3 163.1 162.9 162.8 162.6 162.4 162.2 162.1 161.9 161.7 161.5 161.4 161.2 161.0 160.9 160.7 160.5 160.3 160.2 160.0 159.8 159.7 159.5 159.3 159.2 159.0 158.8 158.6 158.5 158.3 158.1 158.0 157.8 157.6 157.5 157.3 157.1 157.0 156.8 156.6 156.5 156.3 156.1 156.0 155.8 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.5 155.3 155.2 155.0 154.7 154.4 154.1 153.8 153.4 153.1 152.9 152.5 152.3 152.1 151.9 151.8 151.9 152.2 152.0 151.4 150.3 147.8 145.9 144.7 144.5 144.5 144.8 145.2 145.6 145.8 146.7 147.8 148.7 149.3 149.9 150.6 150.9 151.3 151.7 151.9 152.0 152.1 152.2 152.3 152.4 152.7 153.1 153.4 153.7 153.9 154.0 154.1 154.2 154.3 154.4 154.4 154.6 154.6 154.5 154.3 154.1 153.8 153.6 153.7 153.9 154.3 154.7 155.1 155.2 155.5 155.8 156.1 156.5 156.8 157.2 157.6 158.0 158.1 158.4 158.8 158.8 158.8 158.7 158.6 158.5 158.4 158.2 158.2 158.1 157.8 157.4 157.0 156.6 156.1 155.6 155.2 155.0 154.9 154.7 154.4 154.2 154.4 154.6 154.7 155.0 155.2 155.5 156.3 156.9 157.3 157.5 157.7 157.9 158.0 158.3 158.6 158.6 158.6 158.5 158.3 158.2 158.2 158.1 157.9 157.6 156.3 155.3 154.5 155.1 155.7 156.3 155.6 155.0 154.8 154.5 154.2 154.0 154.0 153.8 153.3 152.9 152.1 150.5 149.6 148.7 147.8 147.3 147.0 146.8 146.8 147.1 147.9 148.8 149.4 149.4 148.7 148.4 149.0 150.4 153.8 160.2 166.6 172.2 176.5 179.5 181.7 183.4 184.8 185.9 186.5 186.8 187.0 187.2 186.9 186.6 186.2 185.3 184.2 183.5 182.6 181.6 180.7 180.2 179.8 179.5 179.5 179.7 179.9 180.0 180.3 181.0 181.5 182.0 182.9 
183.2 183.4 183.7 183.9 184.2 184.6 185.2 185.6 185.6 185.5 185.4 185.3 185.2 185.1 184.9 184.7 184.4 184.3 184.1 183.9 183.8 183.8 183.8 183.9 184.1 184.3 184.3 184.4 184.5 184.4 184.2 184.0 184.0 183.8 183.6 183.4 183.3 183.2 183.0 182.8 182.7 182.7 182.7 182.8 182.8 182.9 183.0 183.1 183.1 183.2 183.3 183.5 183.7 183.9 184.0 184.1 184.3 184.4 184.5 184.5 184.6 184.8 184.9 185.0 185.0 185.0 185.0 184.9 184.8 184.8 184.7 184.6 184.5 184.5 184.4 184.3 184.3 184.2 184.1 184.0 184.0 184.0 184.0 183.9 183.6 183.4 183.3 183.1 182.7 182.3 181.9 181.6 181.1 180.6 180.3 179.3 178.4 177.8 176.6 175.2 173.5 172.0 170.8 170.4 170.3 170.3 170.2 170.4 170.8 171.5 172.7 174.4 177.1 178.1 178.5 178.6 178.4 178.2 178.1 177.8 177.4 176.9 176.3 175.7 175.2 174.8 174.3 173.8 173.4 173.1 172.9 173.0 173.0 173.0 173.1 173.1 173.2 173.3 173.3 173.2 173.1 173.0 172.7 172.6 172.6 172.7 172.7 172.7 172.8 172.9 172.9 173.0 173.0 173.0 173.1 173.1 173.1 173.3 173.6 173.8 174.2 174.5 174.7 174.7 174.7 174.6 174.5 174.4 174.3 174.2 174.2 174.2 174.1 173.9 173.7 173.6 173.5 173.4 173.4 173.4 173.3 173.3 173.3 173.2 173.1 173.1 173.0 172.9 172.9 172.8 172.7 172.7 172.6 172.5 172.5 172.4 172.4 172.4 172.3 172.3 172.3 172.3 172.4 172.4 172.5 172.6 172.7 172.8 172.9 172.9 173.0 173.1 173.1 173.2 173.3 173.3 173.4 173.7 173.9 173.9 173.9 173.8 173.7 173.7 173.6 173.4 173.2 173.1 173.1 173.0 172.8 172.6 172.4 172.2 172.0 172.0 172.0 172.0 172.0 172.0 172.2 172.1 172.0 172.0 172.7 173.5 173.9 173.9 173.7 173.7 172.9 171.8 170.5 168.5 165.8 161.0 156.8 153.0 150.6 148.3 145.9 143.6 141.6 139.8 138.2 136.5 134.9 134.3 133.7 133.2 133.1 133.1 133.2 133.3 133.4 133.6 134.3 134.9 135.4 136.3 136.8 137.2 137.3 137.5 137.7 137.4 137.3 137.2 136.7 136.4 136.3 136.6 136.8 136.8 137.0 137.1 137.2 137.1 137.0 136.8 136.8 136.9 137.0 137.1 137.2 137.3 137.5 137.6 137.6 137.7 137.9 138.5 138.8 139.2 139.6 139.8 140.0 140.0 139.9 139.7 139.6 139.5 139.3 139.0 138.8 138.5 138.2 138.1 138.0 137.9 137.8 137.8 137.7 137.5 137.3 137.1 136.8 136.7 137.0 137.7 138.3 138.8 139.4 139.8 139.9 140.3 140.6 140.8 141.1 141.3 141.3 141.5 141.7 142.0 142.2 142.5 142.7 142.9 143.0 142.9 142.5 142.0 141.5 140.9 140.0 138.0 136.8 135.7 134.0 133.4 133.4 134.5 134.6 134.4 134.1 133.8 133.4 132.5 131.8 131.2 130.6 129.9 129.3 128.9 128.7 128.5 128.5 128.0 127.3 126.5 125.1 123.6 123.1 123.0 123.1 123.2 124.7 126.8 127.3 126.5 125.2 124.7 124.0 123.1 122.5 122.1 122.0 122.2 122.2 122.0 122.2 122.5 122.7 122.4 122.0 121.6 121.1 120.8 120.5 119.8 119.2 118.7 117.2 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.4 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.3 117.2 117.2 117.2 117.2 117.2 117.2 117.2 117.2 117.2 117.2 117.2 117.2 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.1 117.0 117.0 117.0 117.0 117.0 117.0 117.0 117.0 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.9 116.8 116.8 116.8 116.8 116.8 116.8 116.8 116.8 116.8 116.8 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.6 116.6 116.6 116.6 116.6 116.6 116.6 116.6 116.6 116.6 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.4 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 
116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 119.3 116.5 116.7 116.8 117.0 117.3 117.6 118.0 118.3 118.6 118.9 119.1 119.2 119.3 119.2 119.1 118.8 118.4 117.9 117.3 116.6 115.7 115.0 114.1 113.4 112.9 112.2 111.8 111.5 111.3 111.2 111.2 111.4 111.7 112.1 112.6 113.2 114.0 114.8 115.5 116.4 117.4 118.2 119.0 119.7 120.5 121.0 121.5 121.9 122.1 122.2 122.2 122.0 121.7 121.1 120.6 120.0 119.2 118.3 117.4 116.6 115.7 114.8 114.0 113.4 112.6 112.0 111.6 111.3 111.0 111.0 111.0 111.1 111.5 111.9 112.4 113.1 113.7 114.6 115.5 116.3 117.2 118.2 119.1 119.9 120.6 121.3 121.8 122.1 122.4 122.5 122.5 122.3 121.9 121.3 120.9 120.1 119.2 118.3 117.5 116.6 115.8 114.9 113.9 113.1 112.5 111.9 111.3 111.0 110.8 110.6 110.7 110.9 111.2 111.7 112.3 112.9 113.7 114.6 115.5 116.4 117.4 118.4 119.3 120.1 120.8 121.4 122.0 122.5 122.7 122.8 122.7 122.5 122.2 121.7 121.0 120.3 119.5 118.6 117.5 116.6 115.7 114.7 113.8 113.0 112.2 111.6 111.1 110.8 110.5 110.4 110.5 110.7 111.0 111.4 112.0 112.8 113.6 114.4 115.5 116.5 117.4 118.3 119.4 120.3 121.0 121.7 122.3 122.8 123.0 123.1 123.0 122.8 122.4 121.9 121.3 120.4 119.6 118.6 117.7 116.6 115.6 114.7 113.7 112.8 112.1 111.4 110.9 110.4 110.3 110.1 110.2 110.4 110.8 111.3 111.9 112.7 113.5 114.4 115.4 116.3 117.4 118.5 119.4 120.4 121.3 121.9 122.5 123.0 123.3 123.4 123.3 123.1 122.7 122.2 121.5 120.2 119.1 118.2 117.2 116.5 116.0 115.7 115.6 115.6 115.9 116.2 121.7 122.1 122.1 121.9 121.6 121.3 120.9 120.5 120.0 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7 119.7", - "input_type": "phoneme", - "offset": 1.569 + "f0_timestep": "0.005" }, { + "offset": 13.157, "text": "AP 啊 啊 啊 啊 啊 AP 啊 啊 啊 啊 啊 SP", - "ph_seq": "AP x ian r en f u w o d ing ing AP j ie f a sh ou ch ang sh eng SP", - "note_seq": "rest A#3 A#3 G#3 G#3 F#3 F#3 A#3 A#3 C#4 C#4 G#3 rest F#3 F#3 A#3 A#3 C#4 C#4 C#3 C#3 D#3 D#3 rest", - "note_dur_seq": "0.578 0.723 0.723 0.7229999 0.7229999 0.723 0.723 0.723 0.723 1.445 1.445 0.723 0.723 0.723 0.723 0.723 0.723 0.7229996 0.7229996 0.7230005 0.7230005 2.167999 2.167999 0.144", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.398784 0.179216 0.638663 0.084337 0.598 0.125 0.598 0.125 0.578421 0.144579 1.445 0.723 0.588963 0.134037 0.573905 0.149096 0.482037 0.240963 0.557337 0.165663 0.498603 0.224398 2.167999 0.144", - "f0_timestep": "0.005", + "ph_seq": "AP x ian r en f u w o d ing AP j ie f a sh ou ch ang sh eng SP", + "ph_dur": "0.3988 0.1792 0.6387 0.0843 0.598 0.125 0.598 0.125 0.5784 0.1446 2.168 0.589 0.134 0.5739 0.1491 0.482 0.241 0.5573 0.1657 0.4986 0.2244 2.168 0.144", + "ph_num": "2 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest A#3 G#3 F#3 A#3 C#4 G#3 rest F#3 A#3 C#4 C#3 D#3 rest", + "note_dur": "0.578 0.723 0.723 0.723 0.723 1.445 0.723 0.723 0.723 0.723 0.723 0.723 2.168 0.144", + "note_slur": "0 0 0 0 0 0 1 0 0 0 0 0 0 0", "f0_seq": "116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 117.4 118.6 119.7 120.9 122.0 123.2 124.4 125.6 126.8 128.0 129.3 130.5 131.8 133.1 134.3 135.6 136.9 138.3 139.6 140.9 142.3 143.7 145.1 146.5 147.9 149.3 150.7 152.2 153.7 155.1 156.6 158.1 159.7 161.2 162.8 164.3 165.9 
167.5 169.1 170.8 172.4 174.1 175.8 177.5 179.2 180.9 182.6 184.4 186.2 188.0 189.8 191.6 193.5 195.3 197.2 199.1 201.0 203.0 204.9 206.9 208.9 210.9 213.0 215.0 217.1 219.2 221.3 223.4 225.6 227.8 230.0 232.2 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 231.0 228.2 223.2 216.3 208.1 199.4 190.9 183.1 176.4 170.9 167.6 166.0 165.9 167.2 169.5 172.9 177.2 182.1 187.4 192.7 197.7 202.2 205.5 207.6 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.5 208.7 209.6 211.4 214.1 217.3 221.0 224.4 227.4 230.3 232.5 233.6 234.7 235.2 235.9 236.3 236.6 236.4 236.2 236.0 235.8 235.2 234.5 233.8 233.4 233.0 232.5 232.4 232.4 232.4 232.4 232.4 232.5 232.5 232.6 232.6 232.7 232.7 232.8 232.8 232.8 232.9 232.9 233.0 233.0 233.1 233.1 233.2 233.2 233.2 233.2 233.2 233.3 233.6 233.9 234.0 234.0 234.1 234.2 234.3 234.3 234.4 234.6 234.8 234.8 234.9 235.0 235.0 235.2 235.6 236.2 236.3 236.3 236.2 236.2 236.1 235.9 235.8 235.7 235.7 235.6 235.5 235.5 235.3 235.1 234.8 234.9 235.0 235.1 235.2 235.3 235.4 235.5 235.5 235.5 235.1 234.7 234.4 233.8 232.7 230.7 229.3 226.8 222.9 219.3 215.5 211.2 204.7 197.7 190.0 185.0 181.0 177.3 174.9 173.6 173.5 174.5 176.2 178.5 183.3 189.3 196.3 200.7 203.6 205.4 207.4 208.7 208.6 208.5 208.6 208.8 208.7 208.5 208.3 207.9 207.4 206.7 206.2 205.8 205.7 205.7 205.7 205.6 205.7 205.8 205.8 205.9 206.0 206.0 206.0 206.2 206.5 207.0 207.5 208.0 208.5 208.9 209.2 209.2 209.2 209.2 209.2 209.1 209.1 209.0 209.0 208.9 208.9 208.8 208.8 208.7 208.6 208.6 208.6 208.5 208.5 208.5 208.5 208.7 208.8 209.0 209.2 209.3 209.3 209.2 209.0 208.7 208.4 208.1 207.8 207.6 207.5 207.4 207.1 206.6 206.1 205.8 205.7 205.7 205.8 205.9 206.0 206.1 206.1 206.2 206.3 206.4 206.5 206.5 206.5 206.6 206.8 207.0 207.3 207.5 207.8 208.1 208.2 208.3 208.4 208.4 208.6 208.7 208.7 208.7 208.7 208.7 208.5 208.4 208.2 208.1 208.0 207.9 207.7 207.7 207.7 206.8 204.5 200.1 195.4 191.1 187.1 184.8 184.9 187.9 191.3 194.2 194.9 194.8 194.5 194.4 194.4 194.5 194.5 194.5 194.6 194.9 195.0 194.8 194.7 194.6 194.6 194.6 194.5 194.1 193.4 192.8 192.3 191.6 191.0 190.6 189.5 188.2 186.6 186.0 185.6 185.6 185.3 184.9 184.7 184.5 184.1 183.5 183.1 182.7 181.8 180.8 179.9 179.5 179.0 178.6 178.7 178.8 178.9 179.1 179.3 179.5 180.4 181.7 183.2 184.1 184.7 185.3 186.5 187.8 189.0 189.8 190.3 190.7 190.4 190.2 190.1 189.5 188.7 187.9 186.3 185.1 184.1 182.7 181.7 181.0 180.1 179.5 179.2 179.4 179.6 179.8 181.2 182.1 182.9 184.7 186.0 186.9 187.8 188.6 189.2 189.7 190.1 190.4 190.1 189.8 189.4 188.7 187.9 186.7 185.1 183.7 182.7 181.7 181.0 180.4 180.1 179.8 179.4 179.5 179.8 180.3 181.1 181.8 182.3 183.1 183.8 184.1 184.4 184.7 185.1 185.5 185.7 185.9 186.1 186.5 186.9 187.3 187.6 187.9 188.0 188.0 187.8 187.7 187.5 187.4 187.3 187.2 187.2 187.0 186.9 186.8 186.7 186.5 186.2 186.1 186.1 186.0 185.5 184.9 184.6 183.8 182.8 182.0 181.9 182.0 182.6 183.9 185.4 187.6 190.6 194.1 198.7 203.7 208.5 211.5 216.0 221.3 225.8 229.8 233.2 235.2 237.4 239.3 240.6 240.7 240.0 239.5 239.5 239.4 237.6 235.4 233.3 232.1 231.2 230.4 228.9 228.2 228.0 227.5 227.4 227.8 228.4 229.0 229.5 229.8 230.2 230.7 231.7 232.6 233.4 234.2 234.8 235.2 235.3 235.5 235.8 235.9 235.8 235.7 235.5 235.3 235.2 235.1 234.7 233.9 233.5 233.1 232.7 232.3 232.1 232.0 231.8 231.5 231.2 231.2 231.2 231.3 231.5 231.7 231.9 232.0 232.0 232.0 232.3 232.8 233.4 233.7 233.9 234.0 233.9 233.9 233.8 233.7 233.6 233.5 233.4 233.4 233.4 233.2 232.9 232.5 232.0 231.7 231.6 231.7 
231.7 231.7 231.9 232.0 232.1 232.2 232.3 232.5 232.6 232.7 232.8 232.8 232.9 233.0 233.3 233.5 233.8 234.2 234.5 234.5 234.2 234.0 233.8 233.6 233.5 233.5 233.3 233.0 232.7 232.5 232.2 231.7 231.5 231.3 230.2 226.7 220.9 211.7 204.7 199.4 195.7 190.4 184.8 181.5 179.4 178.5 178.8 179.2 179.7 180.7 182.2 184.0 185.7 189.0 194.2 202.4 208.5 213.1 219.6 226.2 232.8 236.2 231.5 232.7 234.8 238.0 242.0 246.3 250.2 253.9 255.7 257.3 256.9 256.5 255.8 254.9 253.8 252.5 251.1 249.7 248.4 247.4 246.4 245.6 244.9 244.5 244.3 244.4 244.7 245.2 245.9 246.6 247.6 248.7 250.0 251.4 253.0 254.5 256.2 257.9 259.8 261.5 263.3 265.2 267.0 268.7 270.3 271.7 273.1 274.4 275.6 276.6 277.5 278.3 278.8 279.1 278.8 277.7 277.0 276.5 275.9 275.7 275.6 275.7 275.8 275.9 276.0 276.2 276.2 276.2 276.3 276.4 276.4 276.3 276.2 276.0 275.7 275.6 275.7 276.1 276.4 276.4 276.3 276.2 276.0 275.8 275.7 275.6 275.5 275.4 275.6 275.9 276.2 276.1 276.0 275.9 275.6 275.5 275.4 275.3 275.3 275.5 275.7 275.9 276.1 276.2 276.1 275.8 275.3 275.1 275.1 275.2 275.3 275.4 275.5 275.7 275.8 275.9 276.0 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.1 276.2 276.2 276.2 276.2 276.2 276.3 276.4 276.4 276.4 276.4 276.5 276.5 276.5 276.5 276.6 276.7 276.7 276.7 276.7 276.8 276.9 276.9 276.9 276.9 276.9 277.0 277.0 277.0 277.0 277.0 277.0 277.0 277.0 277.0 276.9 276.8 276.5 276.2 276.0 275.9 275.9 275.9 275.7 275.5 275.4 275.3 275.2 275.1 275.2 275.4 275.7 276.0 276.2 276.2 276.2 276.3 276.4 276.5 276.6 276.7 276.8 277.0 277.0 277.0 277.0 277.0 276.9 276.8 276.6 276.4 276.3 276.2 276.1 276.0 275.9 275.9 276.1 276.4 276.7 276.7 276.7 276.8 277.0 277.2 277.3 277.4 277.5 277.5 277.4 277.3 277.2 277.0 276.8 276.6 276.4 276.2 276.0 275.8 275.6 275.5 275.4 275.3 275.3 275.3 275.3 275.3 275.3 275.4 275.4 275.5 275.6 275.6 275.7 275.7 275.7 275.8 275.9 276.0 276.1 276.1 276.1 276.1 276.2 276.2 276.2 276.2 276.3 276.5 276.7 276.7 276.7 276.7 276.6 276.6 276.7 276.7 276.8 276.9 277.1 277.3 277.5 277.7 277.6 277.2 277.6 278.0 278.3 277.5 276.9 276.7 276.6 276.0 274.6 272.3 269.8 267.3 263.9 260.0 255.4 249.9 244.1 237.8 231.7 226.5 222.3 218.5 215.1 212.4 211.0 210.0 209.6 209.3 209.3 209.5 209.6 209.6 209.6 209.7 209.9 210.2 210.2 209.9 209.6 209.4 209.1 208.9 208.7 208.5 208.3 207.6 206.9 206.3 205.5 204.6 204.0 203.2 202.5 202.3 202.4 202.6 202.9 203.0 203.2 203.4 204.5 205.6 206.2 207.2 208.1 208.5 209.1 209.6 209.5 209.0 208.4 207.1 205.4 203.6 201.8 200.9 200.3 199.3 198.9 199.0 199.3 199.9 200.8 202.1 203.7 205.2 205.9 207.1 208.6 209.6 210.3 210.6 210.5 210.1 209.5 208.5 207.8 207.3 206.7 206.1 205.4 203.6 202.5 201.7 200.5 199.7 199.1 198.4 198.4 198.8 199.9 200.7 201.2 202.5 203.9 205.4 206.2 207.8 209.8 210.6 211.3 211.9 211.7 211.6 211.4 211.3 211.1 210.8 209.4 207.7 205.9 204.2 202.5 200.6 197.9 195.9 195.0 193.5 192.5 192.1 192.5 193.0 193.5 195.0 196.9 199.3 201.3 203.4 205.5 207.7 209.9 212.1 213.3 213.8 213.2 212.6 211.2 208.4 205.4 203.3 202.7 202.7 202.9 203.0 203.3 203.6 204.0 204.4 204.9 205.3 205.7 206.1 206.5 206.8 207.1 207.4 207.5 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.7 207.6 207.3 206.9 206.6 206.3 205.9 205.6 205.3 204.9 204.6 204.3 204.0 203.6 203.3 203.0 202.7 202.3 202.0 201.7 201.4 201.0 200.7 200.4 200.1 199.8 199.4 199.1 198.8 198.5 198.2 197.9 197.5 197.2 196.9 196.6 196.3 196.0 195.7 195.3 195.0 194.7 194.4 194.1 193.8 193.5 193.2 192.9 192.5 192.2 191.9 191.6 191.3 191.0 190.7 190.4 190.1 189.8 189.5 189.2 188.9 188.6 188.3 188.0 187.7 187.4 
187.1 186.8 186.5 186.2 185.9 185.6 185.3 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 184.9 184.9 184.8 184.7 184.6 184.5 184.4 184.3 184.3 184.1 184.0 183.9 183.9 183.9 183.8 184.6 185.6 187.3 187.0 186.4 185.9 185.4 185.0 185.3 185.5 185.7 185.8 185.8 185.7 185.7 185.5 185.4 185.3 185.3 185.2 185.0 184.9 184.8 184.8 184.7 184.7 184.7 184.6 184.6 184.5 184.4 184.3 184.3 184.2 184.1 184.1 184.1 183.9 183.6 183.4 183.2 183.0 182.8 182.7 183.1 183.5 183.9 184.2 184.6 185.1 185.3 185.4 185.5 185.7 185.8 186.0 185.7 185.6 185.4 185.3 185.3 185.2 185.1 185.1 185.0 184.9 184.9 184.9 184.8 184.7 184.6 184.4 184.3 184.3 184.4 184.6 184.9 184.8 184.7 184.4 184.3 184.3 184.3 184.3 184.4 184.5 184.5 184.5 184.6 184.6 184.7 184.8 184.9 184.9 185.0 185.0 185.1 185.2 185.6 186.1 186.3 186.6 186.8 187.0 187.0 186.9 186.7 186.5 186.4 186.3 185.8 185.2 184.7 183.1 180.5 176.8 170.7 165.5 162.5 162.0 163.1 165.4 167.6 169.8 172.1 174.2 176.2 177.3 179.2 181.1 182.9 184.6 186.4 188.4 190.5 192.5 194.1 196.1 198.2 200.5 202.3 204.0 205.9 207.8 209.5 210.5 210.4 210.8 214.5 219.5 224.7 226.1 226.9 227.2 227.8 228.1 228.0 227.1 225.8 224.3 223.4 222.9 222.4 220.8 219.7 219.0 218.2 217.3 216.6 216.9 217.3 218.0 218.2 218.8 219.9 221.1 221.8 222.2 224.0 225.9 227.9 228.8 230.2 231.9 233.1 234.1 235.0 235.0 234.9 234.8 234.5 234.3 234.2 234.0 233.8 233.6 232.9 232.5 232.3 231.9 231.7 231.6 231.8 231.9 232.0 232.2 232.3 232.4 232.6 232.8 233.1 233.1 233.0 232.9 232.8 232.6 232.5 232.5 232.4 232.4 232.6 232.7 232.9 233.0 233.0 233.0 232.6 232.4 232.3 232.4 232.4 232.5 232.6 232.7 232.8 232.9 233.0 233.1 233.1 233.2 233.4 233.7 234.0 234.4 234.7 234.7 234.2 233.8 233.1 231.3 227.8 223.3 217.1 213.1 210.1 208.5 207.7 207.4 207.7 208.0 208.6 210.6 212.9 215.2 217.5 218.5 219.7 222.4 225.4 228.3 230.2 231.6 233.2 236.0 239.2 242.3 244.1 246.1 248.5 251.4 253.8 255.8 257.8 260.5 263.5 266.2 269.1 272.3 275.9 278.4 280.1 281.6 283.2 284.6 285.5 285.9 286.0 284.8 283.5 282.0 280.9 280.6 280.9 280.4 279.7 278.8 277.5 276.7 276.2 276.2 276.1 275.9 275.8 275.8 275.9 276.0 276.2 276.4 276.6 276.8 276.9 277.0 277.0 277.0 276.9 276.8 276.7 276.5 276.3 276.2 276.2 276.1 275.7 275.1 274.7 274.3 274.5 274.9 275.4 276.1 276.8 277.3 277.6 278.4 279.8 280.2 280.5 280.8 280.8 280.7 280.6 280.4 280.2 280.0 279.8 279.6 279.3 278.4 277.4 276.8 276.6 276.4 276.0 275.5 275.1 274.7 274.4 274.2 274.3 274.6 275.0 275.5 275.8 276.2 276.7 277.3 277.7 277.7 277.6 277.5 277.4 277.3 277.2 277.0 276.9 276.9 276.9 276.8 276.7 276.6 276.5 276.5 276.4 276.2 276.0 275.5 275.2 274.9 274.8 272.8 269.5 266.6 262.1 256.1 248.1 242.9 239.4 235.7 234.5 235.1 235.6 234.7 232.7 229.4 225.3 220.8 216.7 212.7 209.1 207.0 204.4 201.5 196.8 192.4 188.1 183.8 179.7 175.7 170.8 166.7 163.1 159.4 155.7 151.8 147.8 145.2 143.4 140.2 139.1 139.2 140.5 140.0 138.4 137.5 136.5 135.7 136.5 137.4 138.2 137.8 137.8 138.4 138.8 139.0 138.8 138.7 138.7 138.8 138.8 138.7 138.4 137.8 137.4 137.2 137.7 138.1 138.4 137.8 137.4 137.1 136.9 136.8 136.8 136.8 136.8 136.8 136.8 136.9 136.9 136.9 136.9 137.0 137.0 137.0 137.1 137.1 137.1 137.1 137.2 137.2 137.2 137.2 137.2 137.2 137.4 137.6 137.9 138.1 138.3 138.6 139.1 139.7 140.7 141.1 140.9 139.0 138.1 137.7 138.3 138.6 138.7 139.0 139.3 139.5 139.7 139.8 139.8 139.9 140.0 140.1 140.1 140.1 140.0 139.9 139.7 139.5 139.5 139.7 140.2 141.2 142.1 142.6 142.1 141.8 141.6 141.5 141.3 141.1 140.9 140.8 140.5 139.4 137.3 134.8 133.7 133.1 132.8 133.5 
134.6 136.1 136.8 137.9 139.4 140.1 140.9 141.7 142.2 142.7 143.1 143.9 144.6 145.3 145.8 146.5 147.3 148.4 149.2 149.7 150.2 150.7 151.3 152.0 152.8 154.0 155.1 156.4 158.0 160.5 162.2 162.8 160.5 158.4 156.7 156.3 156.1 156.2 156.3 156.4 156.6 156.4 156.0 155.2 155.0 154.4 153.4 153.2 153.0 152.8 152.5 152.3 152.1 151.6 151.3 151.2 151.5 151.7 152.0 152.2 152.6 153.4 154.0 154.6 155.2 155.6 155.9 156.1 156.5 156.7 156.7 156.7 156.6 156.5 156.4 156.3 156.2 156.1 156.1 156.1 156.0 155.9 155.8 155.7 155.6 155.5 155.3 155.1 155.0 155.1 155.1 155.2 155.2 155.3 155.4 155.5 155.6 155.6 155.7 155.7 155.7 155.8 155.9 156.0 156.1 156.1 156.4 156.6 156.8 156.9 157.0 157.2 157.3 157.2 157.1 156.9 156.7 156.6 156.4 156.1 155.7 155.5 155.4 155.4 155.2 155.0 154.8 154.7 154.5 154.5 154.4 154.1 153.9 153.9 153.9 154.0 154.1 154.2 154.3 154.4 154.5 154.8 155.2 155.6 155.8 156.0 156.1 156.2 156.3 156.4 156.6 156.7 156.7 156.7 156.6 156.4 156.2 156.1 156.0 155.8 155.7 155.5 155.3 155.1 154.9 154.7 154.4 154.1 153.9 153.7 153.6 153.5 153.4 153.3 153.2 153.1 152.9 152.7 152.7 152.6 152.5 152.6 152.7 152.8 152.9 153.0 153.1 153.2 153.4 153.6 154.0 154.4 154.8 155.5 156.1 156.3 156.7 157.0 157.2 157.6 157.8 157.8 157.6 157.3 156.7 156.4 156.0 155.3 154.7 154.2 153.9 153.6 153.2 152.7 152.6 152.7 152.9 153.0 153.1 153.4 153.6 153.9 154.1 154.6 155.1 155.4 155.6 155.9 156.3 156.5 156.7 156.8 157.0 157.3 157.6 157.7 157.7 157.7 157.6 157.5 157.5 157.4 157.4 157.3 157.2 157.1 156.8 156.5 156.1 155.5 155.1 154.8 154.6 154.3 154.0 153.7 153.4 153.2 153.0 152.9 152.9 153.1 153.4 153.6 153.9 154.3 154.8 155.5 155.9 156.2 156.4 156.6 156.7 156.8 157.1 157.5 157.7 157.9 158.2 158.1 158.0 157.8 157.7 157.6 157.6 157.2 156.5 155.6 155.2 154.7 154.0 153.7 153.0 152.1 151.6 151.0 150.4 150.0 149.7 149.5 149.2 148.9 148.7 148.9 149.2 149.6 150.0 150.3 150.5 151.2 152.0 152.7 153.2 153.8 154.7 156.0 157.1 157.8 158.8 159.5 159.9 160.0 160.1 160.0 159.8 159.6 159.6 159.3 158.9 158.4 157.8 157.1 155.9 154.8 153.8 153.2 152.1 150.9 150.5 149.6 148.7 148.0 148.0 148.2 148.5 148.7 149.0 150.1 150.6 151.1 152.1 153.4 154.7 155.3 156.1 157.1 157.9 158.9 160.1 161.6 162.5 163.1 164.0 164.4 164.4 164.0 162.7 161.0 160.1 158.9 157.2 154.8 152.4 149.9 147.9 146.7 146.0 144.8 144.2 144.0 144.3 144.8 145.3 146.6 147.8 148.8 150.4 151.4 151.9 152.7 153.7 155.0 155.6 156.3 157.2 158.2 158.7 158.9 158.7 158.5 158.3 157.5 156.8 156.1 154.5 153.0 151.6 150.5 149.5 148.8 148.6 148.4 148.2 148.1 148.2 148.8 149.1 149.7 150.5 151.4 152.1 152.5 153.0 153.7 155.0 156.5 157.7 158.2 159.2 160.0 160.4 161.2 161.7 161.8 161.6 161.4 161.2 160.9 160.6 160.4 159.7 158.9 157.8 157.4 157.0 156.4 156.1 156.1 156.3 156.4 156.5 156.7 157.1 157.6 157.6 157.5 157.3 157.1 157.0 156.7 155.9 154.9 153.6 151.5 150.6 150.2 150.3 150.5 150.8 151.1 151.5 151.9 152.4 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5 152.5", - "input_type": "phoneme", - "offset": 13.157 + "f0_timestep": "0.005" }, { + "offset": 24.651, "text": "AP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", - "ph_seq": "AP w u y ve x vn x ian b u c i0 y van w ei ch ang sh eng r ir y v y E f ang l e m ing sh an d uo sh ao b ian SP n a j ia t ong z i0 x iao y En w o sh ir c ai y ao y i q v q ian n ian sh en en z ai b ai y vn j ian SP r u c i0 m en q ian x v m ing y En d ao y ao r en ch uan j ian y ao sh ou f an sh ir j iang g e y van SP", - "note_seq": "rest D#3 D#3 
F3 F3 F#3 F#3 F3 F3 F#3 F#3 F3 F3 F#3 F#3 F#3 F#3 G#3 G#3 F#3 F#3 F#3 F#3 G#3 G#3 F#3 F#3 F3 F3 F3 F3 F3 F3 F#3 F#3 F3 F3 C#3 C#3 D#3 D#3 rest A#2 A#2 C#3 C#3 D#3 D#3 C#3 C#3 D#3 D#3 F3 F3 F#3 F#3 F#3 F#3 G#3 G#3 F#3 F#3 G#3 G#3 G#3 G#3 G#3 G#3 B3 B3 A#3 A#3 B3 A#3 A#3 A#3 A#3 G#3 G#3 A#3 A#3 rest F#3 F#3 G#3 G#3 A#3 A#3 G#3 G#3 A#3 A#3 G#3 G#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 D#4 D#4 A#3 A#3 G#3 G#3 G#3 G#3 A#3 A#3 G#3 G#3 rest", - "note_dur_seq": "0.289 0.18 0.18 0.181 0.181 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.1799999 0.1799999 0.362 0.362 0.1800001 0.1800001 0.181 0.181 0.362 0.362 0.1800001 0.1800001 0.181 0.181 0.181 0.181 0.1799998 0.1799998 0.1810002 0.1810002 0.181 0.181 0.1809998 0.1809998 0.3610001 0.3610001 0.1810002 0.1810002 0.9029999 0.9029999 0.362 0.1809998 0.1809998 0.1800003 0.1800003 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.1799998 0.1799998 0.3620005 0.3620005 0.1809998 0.1809998 0.1799994 0.1799994 0.1810007 0.1810007 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.1809998 0.1809998 0.1809998 0.1809998 0.1800003 0.1809998 0.1809998 0.3620005 0.3620005 0.3610001 0.3610001 1.084 1.084 0.1809998 0.1810007 0.1810007 0.1799994 0.1799994 0.3620005 0.3620005 0.3610001 0.3610001 0.3619995 0.3619995 0.1810007 0.1810007 0.3610001 0.3610001 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.1809998 0.1809998 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.3610001 0.3610001 0.1809998 0.1809998 0.3619995 0.3619995 0.5420017 0.5420017 0.7229996 0.7229996 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.243819 0.045181 0.134819 0.045181 0.100567 0.080433 0.215916 0.146084 0.300758 0.060242 0.22194 0.14006 0.119758 0.060242 0.316819 0.045181 0.110723 0.069277 0.110216 0.070784 0.301758 0.060242 0.134819 0.045181 0.155398 0.025602 0.122265 0.058735 0.134819 0.045181 0.110217 0.070784 0.111723 0.069277 0.135818 0.045181 0.22094 0.14006 0.111722 0.069279 0.903 0.318326 0.043674 0.110218 0.070782 0.093658 0.086342 0.288205 0.073795 0.22094 0.14006 0.271637 0.090363 0.106205 0.073795 0.22194 0.14006 0.110218 0.070782 0.121264 0.058735 0.126782 0.054218 0.102856 0.078144 0.110722 0.069279 0.120758 0.060242 0.111721 0.069279 0.181 0.095661 0.084339 0.146362 0.034637 0.297242 0.064758 0.22094 0.14006 1.084 0.120758 0.060242 0.095071 0.08593 0.11341 0.066589 0.22194 0.14006 0.22094 0.14006 0.271637 0.090363 0.125279 0.055721 0.309795 0.051205 0.128399 0.052601 0.116066 0.063934 0.110092 0.070908 0.150879 0.030121 0.129795 0.051205 0.110722 0.069279 0.120914 0.060085 0.229977 0.131023 0.111721 0.069279 0.325855 0.036144 0.451639 0.090363 0.723 0.072", - "f0_timestep": "0.005", + "ph_seq": "AP w u y ve x vn x ian b u c i0 y van w ei ch ang sh eng r ir y v y E f ang l e m ing sh an d uo sh ao b ian SP n a j ia t ong z i0 x iao y En w o sh ir c ai y ao y i q v q ian n ian sh en z ai b ai y vn j ian SP r u c i0 m en q ian x v m ing y En d ao y ao r en ch uan j ian y ao sh ou f an sh ir j iang g e y van SP", + "ph_dur": "0.2438 0.0452 0.1348 0.0452 0.1006 0.0804 0.2159 0.1461 0.3008 0.0602 0.2219 0.1401 0.1198 0.0602 0.3168 0.0452 0.1107 0.0693 0.1102 0.0708 0.3018 0.0602 0.1348 0.0452 0.1554 0.0256 0.1223 0.0587 0.1348 0.0452 0.1102 0.0708 0.1117 0.0693 0.1358 0.0452 0.2209 0.1401 0.1117 
0.0693 0.903 0.3183 0.0437 0.1102 0.0708 0.0937 0.0863 0.2882 0.0738 0.2209 0.1401 0.2716 0.0904 0.1062 0.0738 0.2219 0.1401 0.1102 0.0708 0.1213 0.0587 0.1268 0.0542 0.1029 0.0781 0.1107 0.0693 0.1208 0.0602 0.1117 0.0693 0.2767 0.0843 0.1464 0.0346 0.2972 0.0648 0.2209 0.1401 1.084 0.1208 0.0602 0.0951 0.0859 0.1134 0.0666 0.2219 0.1401 0.2209 0.1401 0.2716 0.0904 0.1253 0.0557 0.3098 0.0512 0.1284 0.0526 0.1161 0.0639 0.1101 0.0709 0.1509 0.0301 0.1298 0.0512 0.1107 0.0693 0.1209 0.0601 0.23 0.131 0.1117 0.0693 0.3259 0.0361 0.4516 0.0904 0.723 0.072", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D#3 F3 F#3 F3 F#3 F3 F#3 F#3 G#3 F#3 F#3 G#3 F#3 F3 F3 F3 F#3 F3 C#3 D#3 rest A#2 C#3 D#3 C#3 D#3 F3 F#3 F#3 G#3 F#3 G#3 G#3 G#3 B3 A#3 B3 A#3 A#3 G#3 A#3 rest F#3 G#3 A#3 G#3 A#3 G#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 D#4 A#3 G#3 G#3 A#3 G#3 rest", + "note_dur": "0.289 0.18 0.181 0.362 0.361 0.362 0.18 0.362 0.18 0.181 0.362 0.18 0.181 0.181 0.18 0.181 0.181 0.181 0.361 0.181 0.903 0.362 0.181 0.18 0.362 0.361 0.362 0.18 0.362 0.181 0.18 0.181 0.181 0.18 0.181 0.181 0.181 0.18 0.181 0.362 0.361 1.084 0.181 0.181 0.18 0.362 0.361 0.362 0.181 0.361 0.181 0.18 0.181 0.181 0.181 0.18 0.181 0.361 0.181 0.362 0.542 0.723 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "155.3 155.3 155.3 155.5 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.7 155.8 155.9 156.1 156.3 156.6 156.9 157.2 157.5 157.8 158.1 158.3 158.6 158.8 159.0 159.1 159.0 158.1 156.3 154.7 154.3 154.2 154.2 154.3 154.4 154.4 154.6 154.7 154.8 155.0 155.1 155.2 155.9 156.4 156.8 156.9 156.9 156.7 156.8 156.9 157.0 157.2 157.4 157.5 157.7 157.5 157.3 158.1 159.1 160.5 162.6 166.5 171.1 173.5 174.5 174.7 174.6 175.2 175.2 175.2 175.2 175.2 175.2 175.2 175.4 175.4 175.4 175.6 175.6 175.3 174.6 174.2 173.0 171.7 169.8 168.6 167.6 166.6 166.3 167.1 169.4 171.8 174.4 175.9 177.4 179.0 181.3 183.4 185.4 186.7 188.1 189.5 190.0 190.4 190.5 190.3 189.8 189.1 188.6 188.1 187.5 187.2 186.6 185.6 185.2 184.9 184.7 184.0 183.2 182.4 182.2 182.2 182.3 182.5 182.6 182.6 182.8 183.0 183.3 184.1 184.8 185.3 185.7 186.0 186.4 187.0 187.5 187.7 187.5 186.7 184.5 180.9 177.0 172.5 171.3 171.8 176.3 179.3 181.4 181.7 181.5 181.0 180.3 179.0 177.4 175.6 174.3 173.2 172.7 172.0 171.3 170.8 170.0 169.0 167.6 166.4 165.2 164.5 163.4 162.4 163.0 164.7 167.0 168.6 170.3 171.6 171.8 171.9 171.9 171.6 171.3 171.0 170.7 170.4 170.3 170.3 170.3 170.3 170.4 170.4 170.4 170.5 170.9 171.4 172.3 172.8 173.0 173.8 174.3 174.6 174.8 175.0 175.1 175.2 175.3 175.4 175.5 175.6 175.7 175.7 175.7 175.6 175.5 175.3 175.1 175.1 175.0 174.8 174.8 174.7 174.5 174.2 173.9 173.5 173.4 173.1 172.5 172.1 171.5 170.6 169.1 167.1 164.3 157.6 151.8 147.3 146.5 146.6 148.3 153.2 159.3 165.8 173.4 179.5 182.9 184.3 184.9 184.9 185.2 185.5 185.6 185.7 185.8 185.8 185.9 185.7 185.1 184.4 183.9 183.7 182.8 181.8 180.9 180.4 180.1 180.1 180.3 180.6 181.0 181.2 181.6 182.5 183.4 184.3 185.3 185.8 186.1 186.4 186.8 187.0 186.9 186.6 186.2 185.9 185.7 185.5 184.6 182.8 180.0 173.9 167.6 161.6 157.3 155.1 154.5 154.8 156.7 159.3 160.2 161.4 162.8 164.6 166.6 168.3 169.2 170.1 171.1 172.5 173.4 174.2 
175.9 177.7 179.4 180.1 181.5 183.2 182.1 180.7 179.2 177.8 177.0 176.6 176.5 176.4 176.3 176.5 176.9 177.5 177.1 176.7 176.3 175.9 175.5 175.2 175.1 174.4 173.4 169.6 166.9 164.9 161.5 158.9 157.5 158.3 159.7 161.9 164.5 167.7 171.6 176.8 181.6 185.6 187.9 189.4 190.0 190.1 190.3 190.5 189.9 189.2 188.2 188.0 187.4 186.2 184.4 182.9 181.8 181.2 180.6 180.1 180.4 181.0 181.9 182.8 183.9 185.3 186.8 188.0 188.7 189.6 190.3 190.7 190.5 190.1 189.4 188.1 186.6 185.1 182.9 180.8 179.7 178.5 177.7 178.1 178.5 179.1 179.9 181.5 183.0 183.8 184.7 185.7 187.2 188.0 188.4 188.4 188.2 188.0 187.7 187.3 186.8 186.1 184.8 183.6 183.1 182.6 182.2 182.5 182.7 183.1 183.8 184.5 185.3 186.5 186.9 186.8 186.5 186.3 186.1 186.0 185.9 185.7 185.6 185.8 186.2 186.3 186.3 186.0 185.7 185.5 185.3 183.3 180.4 176.7 169.5 165.3 162.8 160.9 159.5 158.4 159.9 163.1 167.4 172.0 174.9 176.8 178.8 180.7 182.5 188.2 195.1 202.9 207.7 209.9 210.3 209.4 208.8 208.7 208.6 208.3 207.9 208.0 207.8 207.3 206.4 204.9 202.4 197.3 191.7 185.4 182.7 181.7 183.3 186.1 188.1 188.9 190.2 191.1 191.4 191.9 192.4 193.0 193.5 194.1 194.9 195.2 194.6 192.7 190.0 187.1 184.1 182.7 182.2 182.4 182.3 182.5 183.3 183.4 183.4 183.4 183.2 182.8 182.5 182.2 182.0 181.8 181.7 181.5 181.2 180.9 180.5 180.3 180.5 180.8 181.2 181.9 182.8 184.1 184.7 185.1 185.9 186.7 187.3 187.4 187.6 187.8 188.1 188.2 188.2 188.1 187.7 187.3 187.0 186.7 186.6 186.4 186.2 186.0 185.7 185.4 184.9 184.5 183.6 182.6 182.0 181.8 181.8 181.9 182.0 182.1 182.2 182.3 182.5 182.8 183.1 183.2 183.7 184.1 184.3 184.9 185.3 185.5 185.7 185.7 185.7 185.7 185.7 185.6 185.6 185.5 185.4 185.4 185.4 185.4 185.4 185.4 185.4 185.3 185.3 185.3 185.4 185.5 185.6 185.9 186.3 186.7 187.1 187.5 188.0 188.4 189.3 191.1 193.0 195.7 199.5 201.7 204.0 206.6 207.4 208.2 209.0 209.4 209.9 210.4 210.6 210.3 209.8 209.6 209.4 209.4 208.6 207.9 207.7 207.3 207.0 206.9 206.9 206.9 206.7 206.6 206.8 207.6 207.9 207.9 207.7 206.4 204.8 203.4 201.7 199.7 196.4 194.0 191.9 189.8 188.2 186.8 185.7 185.2 184.7 184.2 183.8 183.6 183.9 184.5 185.1 185.2 185.3 185.1 183.8 181.6 178.8 174.5 169.8 165.5 162.8 161.6 161.5 161.9 163.2 164.9 166.3 169.1 172.4 174.5 176.5 178.6 181.7 183.7 184.4 183.0 180.6 178.0 176.9 176.4 176.1 176.0 175.9 175.8 175.9 175.9 175.9 175.7 175.5 175.3 175.2 175.0 174.9 174.8 174.6 174.3 173.8 173.3 172.8 171.8 171.4 171.3 171.5 171.7 171.9 172.1 172.4 172.9 173.6 174.7 175.9 176.2 176.3 176.1 176.0 175.9 175.8 175.8 175.8 175.7 175.6 175.5 175.3 175.2 175.1 174.9 174.8 174.7 174.6 173.5 172.6 172.1 172.0 171.8 171.7 171.4 171.3 171.1 171.2 171.3 171.5 171.7 172.0 172.1 173.5 174.9 175.9 176.4 176.6 176.4 176.0 175.7 175.4 175.1 175.0 174.8 175.1 175.4 175.6 175.5 175.2 174.9 174.7 174.2 172.2 168.9 164.9 159.4 156.1 153.9 153.8 154.6 156.0 159.3 162.3 164.9 166.3 168.3 170.8 174.1 177.1 190.4 191.7 191.4 190.8 190.4 189.4 188.2 187.8 187.3 186.9 186.3 185.7 185.0 184.7 184.3 184.1 184.1 184.2 184.4 184.5 184.6 184.7 185.0 185.3 185.5 185.3 185.1 184.6 183.4 181.1 178.1 171.9 169.1 168.5 173.1 178.4 173.8 177.9 180.0 180.7 180.4 180.1 179.8 179.5 179.1 178.5 178.1 177.7 177.3 176.9 176.5 176.0 175.6 175.1 174.5 174.1 173.9 173.7 173.5 173.3 173.1 172.9 172.8 172.9 173.0 173.2 173.3 173.4 173.5 173.8 174.3 174.7 175.0 175.4 175.8 176.2 176.2 176.2 176.0 175.7 175.2 174.6 173.7 172.7 171.6 168.1 165.1 163.4 162.6 162.2 161.8 161.4 160.8 160.1 159.1 158.0 156.9 155.7 154.7 153.8 152.4 151.1 149.8 148.9 148.0 146.9 145.6 144.2 142.8 141.5 140.3 139.3 138.6 138.0 138.0 138.2 138.2 
137.6 137.6 137.7 138.3 138.9 139.5 139.9 140.2 140.4 140.8 140.7 140.2 138.5 135.0 130.5 126.7 124.6 124.0 125.2 126.8 129.2 133.3 136.5 138.8 140.7 143.3 146.2 147.9 150.2 152.6 153.8 154.5 154.7 154.9 154.5 153.8 153.8 153.6 153.2 152.6 152.3 152.3 151.9 151.4 150.7 150.3 149.9 149.6 149.4 149.4 149.6 149.8 150.0 150.2 150.4 150.5 150.6 151.2 151.6 151.9 152.7 153.6 154.4 154.8 155.7 157.0 157.5 157.9 158.5 158.6 158.8 158.9 158.5 158.1 157.7 156.8 155.9 155.2 153.8 152.8 152.4 151.5 150.8 150.4 150.5 150.7 150.8 150.9 151.3 152.0 152.9 153.7 154.3 155.7 157.2 158.6 159.5 160.1 160.7 160.6 160.4 160.2 159.9 159.7 159.6 158.4 157.2 156.4 154.2 151.8 149.7 148.3 147.0 145.0 143.9 143.4 143.8 144.2 144.7 146.1 147.3 148.6 150.4 152.1 153.6 154.3 155.7 157.4 159.2 160.1 160.9 161.8 162.1 162.1 161.9 161.8 161.5 160.6 159.0 157.0 155.9 154.9 153.8 151.6 149.2 146.8 145.5 144.4 143.5 142.8 142.6 142.8 143.2 144.0 145.0 146.1 147.7 149.4 150.1 151.1 152.2 152.7 153.9 155.6 156.4 157.6 159.1 159.9 160.6 161.2 161.6 161.6 161.3 160.7 160.1 159.6 158.4 157.3 156.3 153.7 152.1 151.0 149.0 147.8 147.1 145.9 145.3 145.1 145.7 146.3 146.8 148.1 149.8 152.0 153.3 154.8 156.4 157.7 159.1 160.8 161.5 162.1 162.5 162.1 161.6 161.2 159.8 158.0 155.7 155.0 154.6 154.7 154.7 154.7 154.7 154.9 155.0 155.0 155.1 155.2 155.3 155.3 155.4 155.5 155.5 155.6 155.6 155.6 155.6 155.6 155.5 155.3 155.2 155.0 120.5 120.2 119.3 118.4 116.9 115.4 113.4 111.4 109.2 106.8 104.4 102.0 99.6 97.2 94.9 92.7 90.7 88.8 87.1 85.6 84.3 83.1 82.3 81.6 81.3 81.0 81.1 81.2 81.5 81.8 82.4 83.0 83.8 84.7 85.7 86.8 88.0 89.3 90.6 92.1 93.7 95.2 96.9 98.5 100.2 101.8 103.5 105.1 106.6 108.1 109.5 110.8 112.0 113.0 114.0 114.7 115.3 115.6 115.9 117.1 117.6 118.2 118.6 118.9 119.1 119.5 120.0 119.8 119.3 118.6 118.3 118.0 117.6 115.6 114.4 113.7 115.2 117.8 121.1 123.3 126.0 129.6 132.2 134.9 137.6 138.5 139.4 140.1 140.1 140.3 140.6 140.3 140.1 140.2 140.3 140.1 139.6 139.9 140.1 140.1 140.3 140.4 140.4 140.2 140.0 140.0 140.1 140.1 140.2 139.3 138.2 137.1 135.8 134.1 132.3 132.7 134.0 135.8 140.2 145.8 152.1 155.8 157.8 158.3 158.2 157.5 156.0 155.2 154.6 154.0 153.7 153.4 152.8 152.1 151.4 150.8 151.0 151.4 151.7 153.1 154.7 155.9 156.9 157.5 157.2 156.7 156.5 157.0 157.9 158.8 159.0 158.8 158.7 159.1 159.5 159.8 159.8 159.0 157.8 157.5 157.3 157.1 156.9 156.6 156.4 156.7 157.6 158.9 159.9 159.8 159.1 158.3 156.3 153.9 152.5 151.8 151.5 151.2 150.9 150.5 149.8 148.7 147.3 146.6 146.3 146.2 145.8 145.7 145.9 145.9 145.7 145.4 144.8 144.2 143.5 142.5 141.6 140.7 140.0 139.2 138.4 138.3 138.2 138.0 137.6 137.1 136.6 136.3 136.1 136.1 136.2 136.4 136.5 136.5 136.7 137.2 137.7 138.6 138.8 139.0 139.0 139.2 139.2 139.4 139.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.5 141.4 141.3 141.2 141.2 141.1 141.1 141.1 141.0 141.0 141.0 141.0 140.9 140.6 140.4 140.1 139.8 139.5 139.2 138.9 138.5 137.2 135.3 133.5 132.6 132.0 131.8 132.8 134.7 137.7 139.4 141.3 142.5 143.9 144.7 145.6 146.0 146.6 147.7 148.2 148.8 149.4 149.9 150.3 151.1 151.3 151.9 152.3 152.7 153.1 153.2 153.5 153.8 154.1 154.5 154.8 155.0 155.6 155.5 155.4 155.3 155.2 155.1 155.0 154.9 154.9 154.8 154.7 154.5 154.6 154.7 154.9 155.0 155.2 155.3 155.5 155.9 155.9 155.7 155.4 154.9 154.5 154.1 153.9 152.8 151.1 148.8 147.8 147.6 147.5 147.5 147.5 147.4 148.1 150.2 153.0 156.1 159.8 163.6 167.3 171.0 172.4 173.5 174.0 174.5 174.9 175.2 175.4 175.6 175.7 175.9 175.8 175.3 175.1 174.9 174.7 174.3 174.1 174.0 173.4 172.6 171.7 171.3 170.9 170.4 170.0 
169.5 168.7 169.2 170.2 171.4 173.9 177.3 181.4 184.2 185.9 186.6 187.0 187.2 187.3 187.5 187.7 188.0 187.8 187.5 187.1 187.0 186.8 186.1 185.7 185.5 185.3 184.9 184.5 184.1 183.9 183.7 183.4 183.2 183.1 182.9 183.1 183.4 183.7 184.2 184.6 184.8 185.0 185.2 185.7 186.4 186.9 187.0 186.9 186.7 186.6 186.4 186.2 186.1 184.9 182.6 179.2 174.0 168.4 165.2 163.7 163.6 164.4 165.5 166.5 167.1 167.6 168.2 169.6 171.0 172.3 173.0 173.4 173.7 174.6 175.5 176.4 177.2 177.6 177.8 178.1 179.0 180.3 182.0 184.1 186.5 186.5 186.3 186.1 186.1 186.0 185.9 185.7 185.5 185.3 185.3 184.9 184.1 181.8 178.3 173.1 170.7 169.2 168.7 169.2 170.5 173.0 177.4 181.2 183.9 186.1 188.7 191.6 193.2 195.0 197.1 200.0 202.5 204.2 206.9 209.2 210.6 211.5 212.0 212.4 212.1 211.6 211.3 210.5 209.9 209.9 209.9 209.7 209.5 209.4 209.0 208.1 207.5 206.9 206.2 205.9 205.5 204.9 204.2 203.4 202.6 201.9 201.1 200.3 199.4 198.5 197.3 196.2 195.1 193.8 192.7 191.5 189.7 188.7 187.9 187.2 186.7 186.2 185.5 185.1 184.7 184.5 184.4 184.2 184.1 184.1 184.3 184.3 184.4 184.6 185.0 185.2 185.5 185.8 186.3 186.9 187.5 188.2 188.9 189.8 191.1 192.9 194.5 196.2 198.0 200.5 202.5 204.1 206.0 207.2 207.9 208.5 208.4 207.9 208.0 207.7 207.3 207.4 207.3 207.1 206.7 206.2 205.6 203.9 201.6 198.8 199.5 202.0 205.7 205.5 204.6 203.4 203.7 204.0 204.4 206.1 207.9 210.1 210.8 212.3 214.9 216.1 216.3 215.0 213.5 212.1 210.9 209.9 209.1 208.7 208.5 208.2 207.8 207.4 207.1 206.8 206.4 204.8 201.7 196.3 189.4 181.2 175.1 171.5 170.7 171.5 173.6 176.9 181.2 184.4 185.7 189.1 193.3 197.8 201.6 204.7 207.0 210.3 212.5 212.6 210.7 208.5 207.4 206.9 206.8 207.1 207.4 207.7 207.7 207.5 207.4 207.2 207.1 207.1 207.3 207.3 207.3 207.4 207.6 207.8 208.1 208.1 208.2 208.4 209.2 210.0 211.2 212.5 214.0 215.9 219.1 223.2 227.0 231.9 237.1 240.4 242.9 244.7 246.1 247.3 248.2 248.2 248.0 247.5 247.0 246.7 246.7 247.0 246.7 246.2 245.9 244.1 241.2 233.6 224.3 214.1 202.9 197.0 194.3 194.9 196.2 198.2 200.5 203.9 208.3 213.2 218.4 223.8 230.0 236.1 242.3 244.6 244.6 243.1 240.6 238.6 236.9 236.3 235.6 234.7 234.0 233.5 233.4 233.4 233.4 233.4 233.2 233.0 232.8 232.7 232.6 232.5 232.5 232.6 232.8 233.1 233.2 233.2 233.0 233.0 233.2 233.8 234.4 235.1 236.0 237.0 237.8 240.0 242.4 244.5 245.8 246.8 248.0 248.9 249.5 249.8 249.9 250.0 250.1 250.3 250.3 250.0 249.6 249.2 248.9 247.8 246.3 243.8 238.3 231.6 223.2 219.7 217.9 218.1 219.3 221.4 225.7 228.0 229.8 231.9 234.1 236.1 237.2 238.9 240.5 240.9 241.2 241.0 239.3 237.5 235.9 234.8 234.1 233.6 233.0 232.4 231.8 231.2 230.7 230.3 229.9 228.8 226.9 223.7 216.9 208.8 204.1 201.0 199.7 201.9 205.5 209.7 212.2 216.5 221.5 224.0 229.3 236.7 242.1 245.0 246.4 244.5 243.0 241.7 241.8 242.1 242.6 242.0 240.9 239.5 238.6 238.0 237.7 236.8 235.9 235.0 234.0 233.4 232.9 232.6 232.3 232.0 232.2 232.4 232.7 232.8 233.0 233.5 233.5 233.4 233.2 233.1 233.0 232.8 232.7 232.7 232.7 232.8 232.9 233.1 233.3 233.4 233.4 233.4 233.6 233.7 233.9 234.0 233.9 233.6 233.2 232.8 231.8 230.6 229.0 227.5 225.4 222.4 219.5 217.3 216.1 213.6 210.9 208.2 206.4 205.4 205.3 207.3 207.3 207.3 207.3 207.3 207.3 207.3 207.3 207.3 207.3 207.2 207.2 207.2 207.2 207.2 207.2 207.2 207.2 207.2 207.1 207.1 207.1 207.1 207.1 207.0 206.9 206.9 206.9 206.9 206.9 206.9 206.9 206.8 206.8 206.8 206.8 206.8 206.8 206.8 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.7 206.6 206.1 205.4 204.2 202.6 200.8 198.7 196.5 194.0 191.6 189.2 187.0 184.9 183.2 181.6 180.6 179.6 179.3 179.3 179.7 180.6 181.6 183.2 184.8 186.7 188.7 191.1 
193.5 196.0 198.4 200.7 202.8 204.8 206.2 207.4 208.1 208.5 208.5 208.5 208.6 208.6 208.6 208.6 208.7 208.7 208.7 208.8 208.9 208.9 208.9 208.9 209.0 209.1 209.1 209.1 209.1 209.1 209.2 209.2 209.2 209.8 210.5 211.8 213.3 214.8 216.1 217.1 217.5 218.7 220.6 222.6 223.6 225.1 227.8 229.5 231.1 232.8 235.1 237.7 240.3 241.6 242.3 242.5 242.0 241.3 240.8 239.4 237.8 236.4 233.4 230.3 228.1 226.4 225.1 224.3 223.6 223.2 224.1 224.7 225.5 227.1 229.1 231.0 232.2 233.5 235.2 237.7 239.6 240.9 241.5 241.8 241.8 241.2 240.3 239.1 238.0 235.9 233.3 231.6 229.9 228.2 227.1 226.6 226.4 226.2 226.2 226.3 227.3 228.5 230.0 231.1 232.5 234.2 234.9 236.4 238.3 239.1 240.1 241.0 241.0 240.9 240.6 240.2 239.9 239.8 238.7 237.3 235.7 234.1 232.7 231.5 229.4 227.8 227.0 227.3 227.7 228.3 228.5 229.4 231.3 232.3 233.3 234.4 235.7 236.8 237.6 238.7 239.6 240.3 240.5 240.8 241.2 240.9 240.5 240.0 238.0 236.2 235.3 233.3 231.4 229.8 228.8 227.8 226.7 226.0 225.7 225.9 226.1 226.3 226.4 227.0 227.9 229.0 230.4 231.7 232.7 233.8 235.2 237.3 238.5 239.5 241.1 241.9 242.3 242.4 242.1 241.8 241.5 240.8 239.7 237.1 235.3 233.7 231.9 230.2 228.6 227.3 226.6 226.3 226.9 227.6 228.5 229.4 231.1 232.9 233.8 234.9 236.4 238.5 240.2 241.4 241.9 242.1 241.8 241.0 238.9 236.3 234.0 230.4 226.6 225.7 225.4 225.7 225.9 226.2 226.6 226.9 227.1 227.3 227.2 226.8 226.2 224.5 222.4 220.0 216.0 211.9 207.7 202.8 198.8 195.2 192.2 190.0 188.3 187.1 186.2 185.4 184.9 184.5 184.1 183.9 183.7 183.4 183.3 183.3 183.2 183.5 183.8 184.1 184.3 184.6 184.8 185.1 185.2 185.3 186.0 186.7 187.4 189.3 190.1 189.2 186.2 183.9 182.7 182.4 182.1 181.7 181.6 181.2 180.5 178.3 175.4 172.0 170.8 171.0 172.5 176.6 181.0 184.6 192.4 199.9 205.0 208.7 210.9 211.3 210.2 209.3 209.3 208.3 207.5 207.5 207.3 206.9 206.3 205.9 205.6 205.6 205.5 205.3 204.9 204.9 204.9 204.9 204.9 205.0 205.3 205.3 205.1 205.0 205.3 205.6 205.3 205.0 204.7 204.9 205.6 206.5 207.3 208.0 208.7 209.6 209.9 209.8 209.1 208.3 207.7 207.8 207.6 207.0 206.4 206.0 205.8 205.6 205.8 206.2 206.5 207.4 208.9 211.9 214.4 217.1 221.7 225.0 227.4 229.6 231.8 234.0 235.8 237.0 238.0 238.5 238.5 238.1 237.8 237.3 236.7 234.8 233.1 231.5 228.1 223.0 216.8 213.6 213.6 215.8 221.0 225.8 230.4 231.9 231.7 230.3 228.9 227.9 227.2 226.3 225.7 225.5 225.4 225.1 224.8 224.3 223.9 223.3 223.3 223.2 222.8 222.3 222.3 222.9 222.2 221.1 219.2 214.2 210.2 207.5 206.4 205.8 205.4 205.2 205.3 205.8 205.7 205.7 205.7 205.5 205.4 205.3 205.2 205.2 205.3 205.3 205.2 205.2 205.6 206.1 206.8 207.1 207.6 208.3 208.6 208.7 208.6 208.6 208.5 208.3 208.2 208.1 208.0 207.1 205.5 201.9 196.0 190.2 186.1 185.3 185.5 187.0 189.3 191.7 193.1 195.0 196.9 197.8 199.4 201.3 202.9 204.2 205.2 206.7 207.9 209.0 210.7 212.4 214.0 215.4 217.6 220.0 221.6 223.0 224.4 225.8 227.2 228.9 231.6 234.5 237.4 240.0 241.1 241.0 240.2 239.4 238.7 236.6 234.7 232.8 231.3 229.1 226.6 225.3 224.4 223.8 224.2 224.5 224.8 225.6 226.8 228.3 229.5 230.3 230.8 232.3 233.5 234.4 235.0 235.1 234.8 234.6 234.3 233.9 233.3 232.1 230.1 229.6 229.4 229.6 229.7 229.8 229.9 230.7 231.6 232.7 233.3 233.5 233.2 232.6 232.1 231.6 228.5 225.4 222.3 219.3 216.4 213.8 211.5 209.4 207.5 205.1 202.6 200.3 198.8 198.0 197.7 197.6 198.8 202.1 203.8 205.1 206.3 207.3 207.8 208.0 208.1 208.3 208.8 209.3 209.7 209.7 209.9 210.1 209.9 209.5 209.1 208.9 208.4 207.9 207.2 206.7 205.9 203.7 199.1 194.1 189.5 186.7 185.3 187.1 190.1 194.0 200.0 207.7 215.4 220.8 225.5 229.6 232.8 235.4 237.4 238.6 239.7 240.7 241.1 241.1 240.6 239.2 238.4 237.5 235.3 232.3 228.9 226.1 
224.2 223.0 222.0 221.9 222.3 222.7 223.1 223.3 225.0 227.1 229.6 230.9 233.0 235.5 237.1 238.5 239.9 240.4 240.5 240.3 239.2 238.4 237.8 236.0 234.2 232.5 231.8 230.5 228.8 226.8 225.7 225.0 225.2 225.3 225.5 225.7 226.0 226.6 226.4 226.1 225.5 225.0 224.6 224.2 222.3 218.9 213.4 209.9 206.2 202.2 202.1 204.4 210.7 223.0 232.6 237.5 238.3 237.8 237.2 236.4 235.8 235.6 235.1 234.5 233.9 233.6 233.4 233.0 232.9 232.8 232.9 233.0 233.1 233.3 233.4 233.5 233.5 233.3 233.1 232.9 232.7 232.5 232.1 232.3 232.8 233.4 233.9 234.5 235.7 236.3 236.7 236.5 236.0 235.4 235.1 234.6 234.1 233.9 233.8 233.7 233.5 233.4 233.4 233.4 233.3 233.2 233.1 233.0 232.9 232.8 232.8 232.7 232.6 232.5 232.5 232.3 231.9 231.2 230.5 230.1 229.7 228.8 228.5 228.7 229.3 231.6 234.5 235.5 235.6 235.1 234.8 234.7 234.7 234.6 234.5 234.3 234.2 234.1 234.0 233.9 233.9 233.9 232.9 232.0 231.2 228.4 225.2 221.9 214.8 209.7 205.9 205.9 207.1 209.0 213.9 216.8 218.2 221.0 223.5 225.5 227.3 229.8 233.4 236.9 239.9 241.9 241.5 240.7 239.1 237.6 236.3 235.1 234.8 234.4 234.0 233.6 233.3 233.2 233.0 232.9 232.7 232.5 232.3 232.2 232.1 232.0 231.9 231.4 230.3 228.5 221.2 214.6 210.9 209.4 209.9 212.7 217.8 224.1 231.2 235.8 238.2 238.1 237.4 236.7 236.4 235.9 235.4 234.9 234.2 233.6 233.5 233.4 233.2 233.0 232.9 233.0 233.1 233.2 233.4 233.5 233.6 233.7 234.1 234.3 234.5 234.5 234.6 234.6 234.7 234.8 234.9 234.9 235.0 235.1 235.2 235.2 235.2 235.0 234.7 234.4 234.4 234.5 234.6 234.6 234.5 234.4 234.4 234.5 234.6 234.7 234.3 233.4 231.3 227.0 221.9 220.2 219.2 218.6 221.5 226.9 234.0 239.0 245.7 253.6 261.9 267.8 272.1 280.7 289.0 297.2 303.7 308.4 312.0 311.5 311.5 311.5 311.5 311.5 311.4 311.3 311.3 311.2 311.1 313.0 309.4 303.4 295.2 285.3 273.7 261.4 248.9 236.5 224.7 213.5 203.5 194.7 187.3 181.3 176.6 173.4 172.0 172.1 173.5 176.3 180.7 185.9 191.4 196.7 201.2 204.6 205.7 206.0 205.9 205.8 205.7 205.5 205.3 205.1 205.0 204.9 204.8 204.9 205.4 206.8 208.9 211.6 214.6 217.9 221.3 224.4 226.9 228.9 230.1 233.1 233.4 233.4 233.4 233.4 233.4 233.4 233.4 233.4 233.4 233.4 237.4 237.2 237.0 236.7 236.4 235.7 233.2 229.4 223.8 212.5 206.2 203.0 207.2 213.3 220.3 224.8 229.2 232.9 233.8 233.2 231.6 229.4 227.6 226.1 224.5 223.0 221.8 221.1 220.5 219.9 219.1 218.2 217.2 216.5 215.7 215.0 214.4 214.2 214.1 213.1 212.3 211.6 211.0 210.3 209.7 209.4 209.1 209.0 209.0 208.9 208.9 207.7 205.8 203.5 196.8 189.2 181.2 176.6 173.3 170.8 171.0 171.4 172.0 175.1 178.5 182.1 183.3 185.5 189.3 193.7 197.5 200.1 201.9 203.7 205.9 207.2 208.0 208.1 208.1 208.0 207.9 207.7 207.6 207.5 207.5 207.4 207.4 207.3 207.1 206.9 206.5 206.2 206.0 206.0 206.1 206.2 206.3 206.4 206.4 206.5 206.6 206.6 206.7 206.9 207.0 207.2 207.4 207.5 207.9 208.3 208.9 209.4 209.9 210.2 210.4 210.4 210.1 210.0 209.9 209.4 209.0 208.5 208.1 207.9 207.6 206.5 205.3 204.0 203.2 202.3 201.1 198.9 194.8 190.2 185.0 181.3 178.4 179.8 182.0 185.9 190.9 196.5 202.0 206.8 210.2 211.6 211.8 211.8 211.6 211.6 211.6 211.6 211.5 211.4 211.4 211.3 211.3 211.2 211.0 211.0 210.9 210.8 210.8 210.7 210.6 210.6 210.6 210.5 210.4 210.4 210.4 210.6 211.3 212.5 214.1 216.1 218.3 220.7 223.2 225.5 227.6 229.5 230.8 231.6 232.0 232.0 232.0 232.0 232.0 232.0 232.0 232.0 232.1 232.1 232.1 232.1 232.1 232.3 232.3 232.3 232.4 232.4 232.4 232.4 232.5 232.5 232.5 232.5 232.5 232.6 232.7 232.7 232.7 232.7 232.5 231.1 227.5 222.9 218.0 213.8 210.9 209.8 209.8 209.8 209.7 209.6 209.5 209.5 209.3 209.2 209.0 208.9 208.7 208.5 208.3 208.1 207.9 207.9 207.7 207.5 207.3 207.2 207.1 206.9 206.8 206.7 206.7 206.7 
206.7 210.7 214.1 215.5 216.1 215.7 215.5 215.2 214.1 213.1 212.1 211.1 210.3 209.6 209.0 208.2 207.2 206.4 205.6 204.8 204.2 203.9 203.9 203.9 204.1 204.2 204.2 204.7 205.4 206.2 206.7 207.0 207.9 208.8 209.7 210.0 210.5 210.9 210.8 210.7 210.6 210.4 210.1 209.7 209.1 208.4 207.8 206.3 204.9 203.4 202.1 201.3 200.8 200.3 200.4 200.8 201.2 201.8 202.6 203.6 205.1 206.9 207.8 208.8 209.9 211.1 211.9 212.4 213.1 213.5 213.5 213.2 212.9 212.6 211.6 210.3 208.5 207.8 206.3 203.5 201.4 199.7 198.3 197.3 196.4 195.3 195.6 195.9 196.3 197.1 198.1 199.5 201.0 202.6 204.5 206.1 207.5 208.7 210.9 212.7 213.6 214.3 214.9 215.2 214.9 214.4 214.0 213.5 212.8 211.8 210.7 209.0 206.3 203.9 201.6 199.4 197.5 195.7 193.6 192.8 192.5 192.8 193.2 193.6 193.8 194.8 196.1 197.5 199.5 201.6 203.3 205.6 207.9 209.1 209.7 210.1 210.5 210.2 209.7 208.6 207.2 205.3 202.2 200.2 199.1 198.2 198.1 198.4 198.6 198.7 198.9 199.3 199.8 200.3 200.9 201.6 202.3 203.0 203.3 203.3 203.3 203.3 203.3", - "input_type": "phoneme", - "offset": 24.651 + "f0_timestep": "0.005" }, { + "offset": 42.001, "text": "AP 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", "ph_seq": "AP ch ir r en x in x in n ian n ian SP y i sh ir er d u l i y v h ua er d eng x ian SP y i b ai sh ir m en q ian sh ou w o k ou j ve q i l ian ch en d an t ian SP", - "note_seq": "rest D#3 D#3 F3 F3 F#3 F#3 F3 F3 F#3 F#3 A#3 A#3 rest C#4 C#4 A#3 A#3 G#3 F#3 F#3 F#3 F#3 F3 F3 F#3 F#3 F3 F#3 F#3 D#3 D#3 rest D#3 D#3 D#3 D#3 A#3 A#3 F#3 F#3 F3 F3 D#3 D#3 C#3 C#3 D#3 D#3 A#2 A#2 G#2 G#2 F#2 F#2 G#2 G#2 A#2 A#2 A#2 A#2 rest", - "note_dur_seq": "0.288 0.181 0.181 0.181 0.181 0.3610001 0.3610001 0.181 0.181 0.3609999 0.3609999 0.362 0.362 0.1800001 0.362 0.362 0.181 0.181 0.3610001 0.3609998 0.3609998 0.181 0.181 0.3620002 0.3620002 0.1800001 0.1800001 0.362 0.5419998 0.5419998 0.9039998 0.9039998 0.1800003 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.3609996 0.3609996 0.3610001 0.3610001 0.3620005 0.3620005 0.3610001 0.3610001 0.3619995 0.3619995 0.3610001 0.3610001 0.3620005 0.3620005 0.3610001 0.3610001 0.7229996 0.7229996 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.152456 0.135544 0.120758 0.060242 0.110218 0.070782 0.229977 0.131023 0.131302 0.049698 0.330879 0.030121 0.362 0.119758 0.060242 0.217423 0.144577 0.110218 0.382084 0.049698 0.311302 0.049698 0.150879 0.030121 0.286698 0.075302 0.151386 0.291218 0.099396 0.343204 0.198795 0.904 0.121265 0.058735 0.282181 0.079819 0.22094 0.14006 0.211396 0.150604 0.236 0.125 0.22194 0.14006 0.302265 0.058735 0.272022 0.088978 0.267121 0.094879 0.22094 0.14006 0.276157 0.085842 0.226963 0.134037 0.298928 0.063072 0.263546 0.097454 0.723 0.072", - "f0_timestep": "0.005", + "ph_dur": "0.1525 0.1355 0.1208 0.0602 0.1102 0.0708 0.23 0.131 0.1313 0.0497 0.3309 0.0301 0.362 0.1198 0.0602 0.2174 0.1446 0.1102 0.3821 0.0497 0.3113 0.0497 0.1509 0.0301 0.2867 0.0753 0.1514 0.2912 0.0994 0.3432 0.1988 0.904 0.1213 0.0587 0.2822 0.0798 0.2209 0.1401 0.2114 0.1506 0.236 0.125 0.2219 0.1401 0.3023 0.0587 0.272 0.089 0.2671 0.0949 0.2209 0.1401 0.2762 0.0858 0.227 0.134 0.2989 0.0631 0.2635 0.0975 0.723 0.072", + "ph_num": "2 2 2 2 2 2 1 2 2 1 2 2 2 2 1 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D#3 F3 F#3 F3 F#3 A#3 rest C#4 A#3 G#3 F#3 F#3 F3 F#3 F3 F#3 D#3 rest D#3 D#3 A#3 F#3 F3 D#3 C#3 D#3 A#2 G#2 F#2 G#2 A#2 A#2 rest", + "note_dur": "0.288 
0.181 0.181 0.361 0.181 0.361 0.362 0.18 0.362 0.181 0.361 0.361 0.181 0.362 0.18 0.362 0.542 0.904 0.18 0.362 0.361 0.362 0.361 0.362 0.361 0.361 0.362 0.361 0.362 0.361 0.362 0.361 0.723 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "207.5 207.5 207.5 207.6 207.7 207.7 207.7 207.6 207.4 207.2 207.0 206.7 206.2 205.5 204.7 203.2 201.3 199.2 195.5 191.5 187.4 181.9 176.9 172.4 168.1 164.8 162.2 160.5 159.0 157.8 157.2 156.7 156.3 156.1 155.9 155.7 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.7 155.7 155.8 155.9 156.0 156.1 156.2 156.2 156.3 156.4 156.5 156.6 156.6 156.6 156.5 156.3 155.1 154.2 154.3 154.4 154.6 154.7 154.8 155.1 155.6 155.8 155.7 155.5 155.3 155.1 155.2 155.4 155.8 156.3 156.8 157.3 157.8 158.8 159.7 159.9 160.0 160.2 160.4 160.9 161.6 162.5 163.3 164.5 166.5 169.2 171.5 171.8 171.7 171.8 172.7 174.4 175.9 176.1 176.4 176.4 175.1 174.0 173.3 174.3 174.2 172.9 170.1 165.8 160.9 157.1 156.1 156.9 158.4 161.5 165.4 166.9 168.6 170.5 173.1 176.2 179.7 182.5 184.8 186.7 188.7 190.7 192.9 192.3 192.0 192.0 191.7 191.5 191.3 189.8 189.0 188.6 187.4 185.8 184.0 182.8 181.7 180.6 178.7 177.7 177.3 176.9 176.8 176.9 177.3 177.5 177.8 179.3 180.7 181.8 182.8 183.9 185.2 185.5 185.9 186.4 186.9 187.3 187.6 187.4 187.3 187.0 187.0 186.7 186.0 185.4 184.3 182.5 179.8 176.3 171.9 168.7 167.0 167.3 168.0 168.8 169.5 169.9 170.1 170.2 170.3 170.5 171.0 171.5 171.8 171.8 171.8 171.7 171.7 172.0 172.3 172.6 172.6 172.7 173.9 174.9 175.7 176.2 176.3 176.2 175.9 175.6 175.2 174.9 174.6 174.4 174.1 173.8 173.6 173.4 173.3 173.2 173.1 173.1 173.0 173.1 173.1 173.1 173.3 173.4 173.3 173.1 173.2 173.6 174.0 174.6 175.1 175.6 176.2 177.0 178.5 180.2 181.8 182.7 183.3 183.7 184.0 184.3 184.6 184.8 185.0 185.1 185.3 185.3 185.2 184.9 184.7 184.6 184.4 184.2 184.0 183.8 183.6 183.4 183.6 183.7 183.9 184.1 184.3 184.5 184.8 185.3 185.7 186.1 186.4 186.5 186.4 186.4 186.3 186.2 186.1 186.0 186.0 186.0 186.0 185.8 185.6 185.6 185.2 184.8 184.4 184.4 184.2 183.9 183.9 183.9 183.7 183.5 183.1 182.8 182.2 181.8 181.9 182.8 184.0 185.5 187.5 190.0 193.1 196.1 199.3 203.0 209.6 216.7 223.2 227.8 231.0 233.0 234.8 236.4 237.7 238.5 238.6 237.9 236.7 235.6 234.7 232.3 229.5 226.7 225.0 223.6 222.2 220.9 220.0 220.4 220.8 221.3 221.8 223.9 226.7 230.4 232.6 234.3 235.9 238.0 240.0 241.0 241.2 241.1 240.6 239.4 238.1 236.8 234.0 230.7 229.2 226.7 223.4 220.2 218.0 216.8 216.5 216.9 218.0 219.9 221.5 223.4 226.9 230.5 233.8 235.4 237.9 240.5 241.3 241.3 241.0 240.7 240.0 238.9 236.8 234.1 231.1 227.7 225.6 224.4 224.1 224.1 224.2 224.8 225.5 226.2 227.1 228.1 229.1 230.5 232.1 233.8 236.1 238.8 241.7 245.8 250.0 254.2 258.1 261.7 265.0 266.3 267.1 267.0 265.9 264.5 262.7 260.5 258.3 255.9 254.0 252.1 250.2 249.0 247.9 246.9 247.0 247.4 248.1 249.2 250.3 251.5 251.4 251.1 250.8 250.8 251.1 251.3 251.7 251.9 252.1 253.5 255.2 256.8 259.3 262.6 266.6 268.9 271.0 273.2 275.1 276.9 278.6 279.8 280.6 281.3 281.3 281.2 280.9 280.7 280.5 280.0 278.9 277.1 272.7 268.0 264.1 263.8 264.1 264.7 265.7 267.7 269.4 269.4 268.9 267.9 265.5 264.1 263.0 261.7 261.0 260.5 259.9 259.1 258.1 257.2 256.1 255.0 254.0 253.0 252.1 251.2 250.0 248.9 248.5 247.8 246.8 245.3 243.9 242.5 240.7 239.0 237.3 236.1 235.0 233.8 232.9 232.2 231.6 231.9 232.1 232.3 232.9 233.1 233.2 233.1 233.1 233.1 233.8 234.3 234.7 234.4 234.2 234.0 233.5 233.3 233.1 232.1 231.0 229.7 228.2 226.9 225.7 223.7 221.6 219.4 217.0 214.2 210.7 209.2 207.6 206.1 205.6 205.2 204.9 204.6 204.5 204.8 205.1 
205.2 205.1 205.3 205.6 206.0 206.2 206.3 206.3 206.5 206.7 207.1 207.3 207.5 207.8 208.1 208.2 208.1 208.1 208.0 207.9 207.8 207.7 207.7 207.6 207.6 207.6 207.9 208.2 208.2 208.3 208.4 208.4 208.5 208.6 208.7 208.6 208.4 208.0 206.5 204.4 200.9 194.2 187.7 184.5 182.7 181.5 181.6 182.4 183.2 183.2 183.9 184.8 186.2 186.6 187.1 189.4 190.3 190.3 188.7 186.4 184.0 182.7 182.1 181.9 182.0 182.0 182.0 182.0 182.5 183.2 183.8 184.0 184.0 184.0 183.9 183.9 184.0 184.2 184.4 184.4 184.5 184.5 184.5 184.6 184.7 184.8 184.8 184.8 184.6 184.3 183.9 183.9 184.0 184.0 184.1 184.1 184.1 184.2 184.3 184.4 184.4 184.5 184.5 184.7 184.9 185.2 186.1 186.6 186.8 186.8 186.7 186.5 186.4 186.3 186.2 186.0 185.9 185.9 185.5 184.8 183.8 184.3 184.8 185.3 185.6 185.9 186.4 186.7 187.0 187.5 188.3 189.4 190.9 191.4 191.5 191.1 190.8 190.3 189.6 188.7 188.0 187.6 187.0 186.3 185.7 185.2 184.8 184.4 184.2 184.2 184.2 184.3 184.4 184.5 184.3 184.3 184.4 184.9 185.3 185.2 185.0 184.6 183.9 183.2 182.5 181.8 180.7 179.6 178.4 177.5 176.7 175.9 175.1 174.3 173.8 173.3 172.8 172.5 172.4 172.3 172.2 172.3 172.3 172.3 172.3 172.3 172.4 172.5 172.5 172.7 173.0 173.3 173.5 173.7 173.8 174.0 174.1 174.2 174.4 174.7 175.1 175.4 175.7 175.8 175.8 175.6 175.5 175.4 175.2 175.1 175.0 174.7 174.2 173.6 172.4 170.7 166.9 163.2 159.6 158.0 157.3 157.1 159.6 161.9 164.0 166.8 169.6 172.3 174.2 176.4 178.7 180.5 182.6 185.0 188.4 191.4 193.7 195.4 196.2 196.0 194.9 193.6 192.0 190.8 189.5 188.1 186.8 185.6 184.8 184.7 184.6 184.3 184.2 184.2 184.5 184.7 184.9 185.2 185.3 185.4 185.4 185.4 185.5 185.6 185.7 185.6 185.4 184.9 184.4 183.8 183.2 182.6 182.0 181.4 180.8 180.1 179.3 178.5 177.6 176.5 175.5 174.9 174.5 174.1 173.7 173.3 173.0 172.9 173.0 173.0 173.2 173.3 173.5 173.8 173.9 174.0 174.1 174.2 174.4 174.7 175.0 175.1 175.0 175.0 175.0 174.9 174.9 174.9 174.8 174.8 174.8 174.7 174.7 174.7 174.6 174.6 174.6 174.5 174.6 174.6 174.5 174.5 174.5 174.5 174.4 174.2 173.6 172.8 171.7 169.5 166.0 161.7 157.9 156.0 155.2 156.2 157.3 158.5 160.4 163.2 166.6 169.3 172.1 175.0 179.0 181.8 183.6 186.1 188.0 189.3 190.6 191.3 191.7 191.9 191.9 191.9 191.9 191.9 191.9 191.0 190.0 188.9 188.5 187.5 185.7 185.1 184.4 183.5 182.8 182.4 182.3 182.5 182.6 182.8 183.0 183.1 183.3 183.4 183.4 183.5 183.8 184.1 184.4 184.7 185.0 185.4 185.4 185.3 185.1 185.0 184.8 184.7 184.7 184.7 184.8 184.8 184.9 185.0 185.1 185.2 185.2 185.5 185.8 186.1 186.7 187.4 188.3 188.6 188.7 188.6 188.4 188.3 188.2 187.8 187.1 185.4 182.5 178.8 173.3 170.3 168.1 166.8 166.4 166.2 166.0 165.7 165.2 164.4 164.1 163.9 163.4 162.9 162.3 161.7 161.1 160.5 159.9 159.3 158.9 158.6 158.3 158.1 157.8 157.5 157.1 156.6 156.2 156.0 155.9 155.5 154.9 154.3 154.1 154.2 154.1 153.7 153.4 154.4 155.2 155.8 155.2 155.1 155.3 155.4 155.3 154.9 154.8 154.6 154.2 153.8 153.4 153.0 152.6 152.3 152.0 151.8 151.6 151.5 152.0 152.3 152.5 152.8 153.2 153.5 153.6 154.0 154.8 155.2 155.5 155.9 156.7 157.3 157.6 157.9 158.3 158.7 158.7 158.5 158.1 158.0 157.8 157.6 157.5 157.2 156.6 156.0 155.1 154.0 153.3 152.3 150.6 148.7 147.3 146.5 145.5 144.9 145.0 145.4 145.8 146.0 146.8 147.8 148.6 149.9 151.5 153.5 154.7 155.9 157.7 158.7 159.7 160.6 161.0 161.2 161.0 160.8 160.3 158.8 157.7 156.6 155.4 153.6 151.9 150.3 149.1 147.8 145.9 144.9 144.1 144.1 144.3 144.6 145.4 146.6 147.8 148.9 149.7 150.3 151.0 151.9 152.7 153.0 153.9 155.1 155.9 157.3 159.0 159.9 160.5 160.6 160.3 159.9 159.5 158.8 158.0 157.0 155.3 154.2 153.4 151.8 150.3 149.0 148.6 147.9 147.2 147.2 147.4 147.5 147.7 148.2 149.1 149.6 
150.6 151.9 154.1 155.6 156.6 158.3 159.5 160.3 161.1 161.8 162.4 162.6 162.2 161.4 160.4 159.0 157.4 156.1 154.9 153.9 151.6 149.8 149.0 147.7 147.0 146.9 147.3 147.8 148.4 149.7 151.0 152.2 154.8 156.7 157.6 159.0 160.0 160.7 160.4 160.1 159.6 158.9 157.8 156.1 155.2 154.8 154.8 154.9 155.0 155.0 155.0 155.0 155.1 155.2 155.2 154.3 153.7 151.9 149.9 147.1 144.1 140.7 137.0 133.3 129.9 126.6 123.2 119.8 117.0 114.3 112.2 110.4 109.1 108.3 107.9 107.9 108.0 108.3 108.6 109.1 109.6 110.3 111.0 111.8 112.8 113.8 115.0 116.1 117.4 118.8 120.3 121.8 123.3 124.9 126.6 128.3 130.0 131.7 133.5 135.3 137.0 138.7 140.4 142.1 143.6 145.2 146.6 148.0 149.3 150.5 151.5 152.5 153.3 154.0 154.6 154.9 155.1 155.3 157.7 158.2 158.7 158.9 159.1 159.5 159.8 159.9 159.9 159.6 159.2 158.6 158.1 157.7 157.5 156.8 156.0 155.0 153.5 150.6 146.8 142.4 136.3 129.4 126.4 125.0 125.8 128.2 132.7 140.5 144.8 149.4 154.6 158.0 159.8 159.2 156.7 155.2 154.8 155.2 155.4 155.1 154.8 155.1 156.4 157.5 158.1 158.0 157.8 157.6 157.5 157.3 157.0 156.6 155.8 155.1 154.9 154.3 153.8 153.5 152.2 151.3 151.3 152.3 153.6 154.9 157.0 158.7 159.4 159.1 158.4 157.2 154.5 151.3 148.1 145.5 143.8 144.3 145.6 147.6 151.4 155.0 158.2 160.2 162.1 164.0 166.3 168.3 170.6 174.2 176.0 177.5 179.4 181.6 183.8 186.0 187.9 189.8 191.7 193.7 195.6 197.6 200.3 203.3 202.3 203.2 204.3 206.3 208.4 210.6 212.2 213.4 213.5 213.0 212.1 210.9 209.6 208.4 207.8 207.3 207.6 208.1 209.0 210.0 211.6 213.3 215.4 217.6 219.8 222.1 224.5 226.6 228.6 230.3 231.8 232.9 233.6 234.0 233.0 233.1 232.8 232.5 232.3 232.3 232.4 232.5 232.7 232.9 232.6 232.3 231.8 231.0 229.8 228.4 226.7 224.9 222.7 220.5 218.1 215.5 212.9 210.3 207.7 205.1 202.4 199.9 197.5 195.2 193.1 191.1 189.3 187.7 186.4 185.2 184.4 183.6 183.3 183.0 183.1 183.2 183.5 183.9 184.5 185.0 185.7 186.4 187.1 187.8 188.6 189.2 189.7 190.2 190.5 190.8 190.9 190.9 190.7 190.3 189.9 189.2 188.5 187.7 186.9 186.0 185.1 184.3 183.6 182.9 182.3 181.9 181.6 181.4 181.3 181.5 181.7 182.1 182.6 183.2 183.9 184.6 185.5 186.3 187.2 188.1 188.9 189.7 190.5 191.1 191.7 192.1 192.5 192.6 192.7 192.5 192.2 191.6 190.9 190.1 189.2 188.1 187.0 185.8 184.6 183.5 182.5 181.5 180.6 179.9 179.2 178.9 178.7 190.0 188.2 186.4 184.5 182.5 180.6 178.8 177.1 175.6 174.4 173.5 173.0 173.9 173.7 173.3 173.0 172.7 172.8 172.9 173.0 173.2 173.3 173.4 173.5 173.6 173.9 174.3 174.7 174.9 175.0 175.3 175.6 176.1 176.4 176.6 176.5 176.2 176.0 175.9 175.4 174.3 172.4 169.7 165.7 162.5 160.7 160.8 161.5 162.4 163.4 164.9 166.1 166.4 166.5 166.5 166.5 166.6 166.9 167.3 167.6 167.7 167.5 167.8 168.2 168.6 168.8 168.8 168.6 169.1 169.4 169.2 168.0 166.6 164.9 161.4 157.8 155.5 154.7 154.4 154.2 153.8 153.4 152.9 152.4 151.9 151.5 151.1 150.8 150.6 150.5 150.5 150.7 150.8 150.9 150.9 151.0 151.3 151.8 152.2 152.6 152.8 153.3 154.1 154.9 155.5 155.9 156.0 156.2 156.4 156.5 156.5 156.4 156.3 156.2 156.0 155.8 155.5 155.2 155.0 154.7 154.4 154.2 154.0 153.9 153.7 152.6 151.0 148.7 147.5 147.1 146.5 146.0 145.6 144.4 143.1 141.6 140.8 139.8 138.8 137.3 135.9 134.7 133.8 133.7 134.2 134.2 133.9 133.3 132.8 132.8 133.4 133.6 133.7 133.8 134.1 134.3 134.3 135.0 135.7 136.3 136.3 136.4 136.8 137.1 137.5 137.9 138.0 138.2 138.7 138.8 138.9 139.0 139.1 139.0 138.7 138.3 137.9 137.5 137.5 137.6 137.7 137.8 137.9 137.9 138.1 138.4 138.6 139.3 140.2 141.1 141.4 140.7 138.9 135.6 132.2 129.3 127.9 127.4 129.4 131.0 132.5 133.8 135.1 136.3 137.0 137.8 138.8 140.5 141.6 142.8 144.8 146.6 148.4 149.9 151.4 153.3 156.0 157.6 158.5 158.5 158.2 157.9 158.1 
157.9 157.4 156.4 155.6 154.9 154.9 154.6 154.2 154.2 154.1 154.0 153.8 153.6 153.6 153.8 154.0 154.1 154.1 154.1 154.0 154.0 154.0 154.0 154.0 154.0 154.0 154.0 154.1 154.3 154.6 154.8 154.9 155.0 155.3 155.8 156.5 156.8 156.8 156.6 156.4 156.2 156.0 155.5 154.7 151.8 147.6 142.7 140.8 140.0 140.0 139.8 139.6 139.6 139.4 139.1 138.4 138.0 137.5 136.9 136.2 135.2 133.9 132.1 129.8 126.9 124.1 121.5 119.0 117.2 115.7 114.6 114.2 114.2 115.0 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 116.7 119.5 119.3 119.1 118.5 117.8 117.0 116.0 114.9 113.7 112.3 110.9 109.3 107.8 106.2 104.6 103.0 101.4 99.8 98.2 96.8 95.3 94.0 92.7 91.5 90.5 89.5 88.6 87.9 87.3 86.8 86.5 86.2 86.1 86.1 86.2 86.5 86.8 87.2 87.8 88.4 89.1 89.9 90.8 91.6 92.6 93.6 94.6 95.6 96.6 97.6 98.5 99.4 100.2 100.9 101.6 102.1 102.5 102.8 103.0 103.0 103.0 103.2 103.3 103.5 103.7 103.8 103.9 103.6 103.6 103.6 103.6 103.6 103.7 103.7 103.7 103.7 103.7 103.7 103.7 103.8 103.8 103.8 103.8 103.8 103.8 103.8 103.9 103.9 103.9 103.9 103.9 103.9 103.9 103.9 103.9 103.9 103.9 103.9 103.3 103.2 103.3 103.2 103.0 102.8 102.6 102.3 102.0 101.6 100.7 99.4 98.1 96.9 95.8 95.0 94.3 93.7 93.2 92.8 92.6 92.9 93.2 93.2 93.8 94.6 95.3 95.5 95.6 95.7 95.6 95.3 95.0 94.9 94.7 94.3 94.0 93.6 92.6 91.8 91.0 90.3 90.5 91.0 91.5 92.5 93.5 94.0 94.5 95.0 95.7 96.5 97.4 98.7 99.4 99.7 99.4 98.4 97.1 95.9 93.9 91.8 90.8 90.1 89.6 89.9 90.7 92.0 93.1 93.9 94.5 95.4 96.3 97.1 98.0 98.9 99.8 99.8 99.8 99.9 99.8 100.3 101.1 103.5 105.2 106.4 106.7 107.0 107.3 106.8 106.1 105.4 105.3 105.2 105.0 104.8 104.7 104.4 104.3 104.3 104.4 104.4 104.4 104.3 104.2 104.2 104.1 104.1 103.9 103.3 102.6 102.2 102.1 102.3 102.7 103.2 103.6 104.0 104.4 105.3 105.9 106.3 106.4 106.6 106.9 107.1 107.3 107.4 107.2 107.0 106.7 106.5 106.3 106.1 105.8 105.5 105.3 105.3 105.5 105.7 105.9 106.2 106.7 107.0 107.4 107.7 107.2 105.6 102.7 102.2 102.5 103.2 103.4 104.1 107.2 111.2 115.4 118.3 119.1 119.4 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 119.1 118.5 118.0 117.4 116.6 116.2 116.0 116.1 116.3 116.4 116.6 116.7 116.9 117.4 118.0 118.5 119.1 119.5 120.0 120.6 120.9 121.0 120.8 120.2 119.5 117.4 115.0 112.2 111.4 111.4 112.0 112.9 113.4 113.7 114.3 114.8 115.1 115.8 116.3 116.6 117.1 117.6 118.2 118.6 119.2 119.7 119.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.5 116.6 116.9 117.2 117.8 118.2 118.9 119.6 120.2 120.9 121.4 121.8 121.7 121.3 120.9 120.3 119.6 118.9 118.2 117.2 116.4 115.6 114.7 113.9 113.2 112.6 112.0 111.6 111.2 111.1 111.0 111.1 111.3 111.6 112.1 112.7 113.3 114.1 114.9 115.7 116.7 117.5 118.3 119.3 120.1 120.9 121.4 121.9 122.3 122.5 122.5 122.5 122.2 121.8 121.3 120.7 120.0 119.1 118.2 117.2 116.4 115.3 114.5 113.6 112.8 112.2 111.6 111.1 110.8 110.6 110.5 110.6 110.8 111.1 111.7 112.2 113.0 113.7 114.8 115.7 116.6 117.7 118.6 119.6 120.4 121.3 121.9 122.5 122.9 123.1 123.2 123.1 122.8 122.4 121.8 121.1 120.4 119.4 118.4 117.4 116.3 115.4 114.3 113.4 112.5 111.7 111.1 110.6 110.3 110.6 111.5 112.6 113.8 115.1 116.1 122.9 122.8 122.6 122.3 121.9 121.4 120.9 120.5 119.9 119.5 119.5 119.5 119.5 119.5", - "input_type": "phoneme", - "offset": 42.001 + "f0_timestep": "0.005" }, { + "offset": 53.567, "text": "SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 SP 啊 
啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 SP 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 SP", "ph_seq": "SP y in y ang y En d a q ian h u zh ong r ir y ve b ie SP y ou d ong t ian SP p i ch en w o y van er b u t ing w en m u b u j ian SP w u w ai sh ir j ie h ua w ei y vn y En SP b ai b o l iu zh ao SP c ang sh an j i d ian SP y i y E y i r u x ve SP", - "note_seq": "rest A#2 A#2 B2 B2 A#2 A#2 B2 B2 C#3 C#3 D#3 D#3 C#3 C#3 D#3 D#3 F#3 F#3 F3 F3 rest G#3 G#3 B3 B3 A#3 A#3 rest A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 D#3 A#3 A#3 C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 G#3 G#3 rest F#3 F#3 G#3 G#3 F#3 F#3 G#3 G#3 A#3 A#3 C#4 C#4 G#3 G#3 A#3 A#3 rest A#3 A#3 G#3 G#3 F#3 F#3 D#3 D#3 rest A#3 A#3 G#3 G#3 F#3 F#3 F3 F3 rest A#3 A#3 G#3 G#3 A#3 A#3 C#4 C#4 A#3 A#3 rest", - "note_dur_seq": "0.288 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.3609999 0.3609999 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.3610001 0.3610001 0.3609998 0.3609998 0.5430002 0.5430002 0.1799998 0.1810002 0.1810002 0.5419998 0.5419998 0.362 0.362 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.1799998 0.1799998 0.1810002 0.1810002 0.3610001 0.1809998 0.1809998 0.362 0.362 0.5420003 0.5420003 0.1810002 0.1810002 0.3609991 0.3609991 0.3610001 0.3610001 0.1810007 0.3619995 0.3619995 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.5420008 0.5420008 0.3619995 0.3619995 0.1809998 0.1809998 0.3610001 0.3610001 0.1809998 0.3610001 0.3610001 0.1810007 0.1810007 0.3610001 0.3610001 0.3619995 0.3619995 0.1800003 0.3619995 0.3619995 0.1810007 0.1810007 0.3610001 0.3610001 0.3619995 0.3619995 0.1800003 0.3619995 0.3619995 0.5419998 0.5419998 0.5419998 0.5419998 0.3610001 0.3610001 0.3620014 0.3620014 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.212698 0.075302 0.301758 0.060242 0.306782 0.054218 0.331879 0.030121 0.22094 0.14006 0.261097 0.100903 0.270638 0.090363 0.256577 0.105423 0.245037 0.115963 0.285698 0.075302 0.543 0.134819 0.045181 0.150879 0.030121 0.445614 0.096386 0.362 0.300758 0.060242 0.111721 0.069279 0.135819 0.045181 0.106205 0.073795 0.135819 0.351963 0.054218 0.110218 0.070782 0.297242 0.064758 0.453148 0.088852 0.135819 0.045181 0.226966 0.134033 0.361 0.135819 0.045181 0.291214 0.070786 0.216423 0.144577 0.103821 0.077179 0.255577 0.105423 0.438088 0.103912 0.312302 0.049698 0.135818 0.045181 0.361 0.135818 0.045181 0.321846 0.039154 0.135819 0.045181 0.275154 0.085846 0.362 0.061028 0.118973 0.243866 0.118134 0.141847 0.039154 0.300758 0.060242 0.362 0.119759 0.060242 0.307785 0.054214 0.481758 0.060242 0.401939 0.14006 0.22094 0.14006 0.362001 0.072", - "f0_timestep": "0.005", + "ph_dur": "0.2127 0.0753 0.3018 0.0602 0.3068 0.0542 0.3319 0.0301 0.2209 0.1401 0.2611 0.1009 0.2706 0.0904 0.2566 0.1054 0.245 0.116 0.2857 0.0753 0.543 0.1348 0.0452 0.1509 0.0301 0.4456 0.0964 0.362 0.3008 0.0602 0.1117 0.0693 0.1358 0.0452 0.1062 0.0738 0.1358 0.352 0.0542 0.1102 0.0708 0.2972 0.0648 0.4531 0.0889 0.1358 0.0452 0.227 0.134 0.361 0.1358 0.0452 0.2912 0.0708 0.2164 0.1446 0.1038 0.0772 0.2556 0.1054 0.4381 0.1039 0.3123 0.0497 0.1358 0.0452 0.361 0.1358 0.0452 0.3218 0.0392 0.1358 0.0452 0.2752 0.0858 0.362 0.061 0.119 0.2439 0.1181 0.1418 0.0392 0.3008 0.0602 0.362 0.1198 0.0602 0.3078 0.0542 0.4818 0.0602 0.4019 0.1401 0.2209 0.1401 0.362 0.072", + "ph_num": "2 2 2 2 2 2 2 2 2 2 1 2 2 2 1 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 1 
2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest A#2 B2 A#2 B2 C#3 D#3 C#3 D#3 F#3 F3 rest G#3 B3 A#3 rest A#3 G#3 G#3 F#3 D#3 A#3 C#4 A#3 G#3 F#3 G#3 rest F#3 G#3 F#3 G#3 A#3 C#4 G#3 A#3 rest A#3 G#3 F#3 D#3 rest A#3 G#3 F#3 F3 rest A#3 G#3 A#3 C#4 A#3 rest", + "note_dur": "0.288 0.362 0.361 0.362 0.361 0.362 0.361 0.362 0.361 0.361 0.543 0.18 0.181 0.542 0.362 0.361 0.181 0.181 0.18 0.181 0.361 0.181 0.362 0.542 0.181 0.361 0.361 0.181 0.362 0.361 0.181 0.361 0.542 0.362 0.181 0.361 0.181 0.361 0.181 0.361 0.362 0.18 0.362 0.181 0.361 0.362 0.18 0.362 0.542 0.542 0.361 0.362 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "116.6 116.6 116.6 116.6 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 118.1 118.0 117.7 117.2 116.6 115.9 115.0 114.0 112.9 111.6 110.3 108.8 107.4 105.9 104.3 102.8 101.2 99.7 98.2 96.7 95.3 93.9 92.6 91.4 90.3 89.2 88.3 87.5 86.8 86.1 85.6 85.2 85.0 84.9 84.9 85.0 85.6 86.4 87.5 88.9 90.5 92.3 94.4 96.6 99.0 101.4 103.8 106.1 108.3 110.4 112.2 113.7 115.0 115.9 116.3 116.3 116.3 116.3 116.3 116.4 116.4 116.4 116.5 116.5 116.5 116.6 116.6 116.6 116.7 116.7 116.7 116.8 116.8 116.9 116.9 116.9 116.9 117.0 117.0 117.0 117.0 117.0 117.1 117.2 117.4 117.8 118.1 118.4 118.6 118.8 120.0 118.9 117.6 116.4 115.5 114.9 113.8 112.6 111.3 110.5 110.1 110.1 110.8 112.0 113.4 116.5 119.0 121.1 122.6 123.9 125.2 125.2 124.8 124.1 122.4 121.5 121.2 120.6 120.3 120.2 121.0 121.8 122.6 123.3 123.7 123.7 123.6 123.3 123.0 122.4 121.9 121.6 121.0 120.5 120.2 120.2 120.3 120.6 121.0 121.6 122.3 122.9 123.5 123.9 124.7 125.2 125.3 125.3 125.4 125.5 125.6 125.7 125.8 126.0 126.4 127.0 127.3 127.5 127.6 127.7 127.6 127.4 127.1 126.5 125.7 124.9 123.6 121.7 118.2 114.8 112.6 111.0 110.0 110.5 111.3 112.3 113.9 114.9 115.7 116.5 117.8 119.3 120.7 120.6 120.0 118.7 117.7 116.7 116.0 115.8 115.9 116.6 116.9 117.2 117.6 117.8 117.7 117.5 117.2 117.0 115.6 115.6 115.6 115.7 115.7 115.7 115.7 115.7 115.7 115.7 115.7 115.8 115.8 115.8 115.9 115.9 115.9 115.9 116.0 116.0 116.0 116.1 116.1 116.1 116.1 116.1 116.2 116.2 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 115.2 115.2 115.1 115.1 115.1 114.8 114.4 114.0 113.2 112.2 110.6 110.6 111.0 112.3 113.0 113.9 114.9 118.5 121.5 123.6 124.7 125.3 125.0 124.7 124.6 124.7 124.7 124.7 124.8 124.6 124.5 124.2 124.0 123.8 123.5 123.3 123.0 122.7 122.4 122.1 122.0 121.9 121.8 122.0 122.3 122.7 122.9 123.3 123.8 124.2 124.8 125.3 125.6 126.6 127.4 127.8 127.8 127.5 126.6 126.1 125.6 124.5 123.2 121.9 121.7 122.1 123.0 125.2 126.2 127.0 128.2 128.8 129.2 129.9 130.5 131.1 131.6 132.2 132.9 134.2 135.1 135.9 136.5 137.2 138.0 138.8 139.7 140.7 141.9 142.7 143.2 143.4 142.7 141.4 139.8 139.2 139.1 138.9 138.6 138.4 138.5 138.5 138.3 137.7 136.6 135.3 135.3 135.8 136.7 137.3 137.1 136.3 136.3 136.8 137.6 137.6 137.2 136.6 135.9 135.8 136.3 137.2 137.9 138.4 138.9 139.8 140.9 141.2 141.6 142.2 142.3 142.3 142.2 142.2 142.1 142.0 142.0 142.0 141.9 141.9 141.7 141.5 140.8 140.2 139.8 139.5 139.3 139.3 138.8 137.5 136.2 134.4 132.5 130.5 128.5 126.7 125.1 123.7 122.9 122.3 122.2 122.5 123.0 123.8 125.0 126.2 127.8 129.5 131.2 133.0 134.6 136.2 137.6 138.6 139.5 139.9 140.0 139.9 139.6 139.2 138.8 138.4 138.0 137.7 137.5 137.7 138.0 138.6 139.4 140.6 141.9 143.4 144.9 146.5 148.0 149.5 150.6 151.7 152.3 152.8 154.7 154.9 155.0 155.1 155.5 155.8 156.0 156.1 156.3 156.5 156.7 157.0 157.1 155.2 151.4 146.5 142.3 139.4 137.6 
136.8 136.5 136.5 136.5 136.8 137.2 137.2 137.2 137.3 137.3 137.5 137.9 139.4 140.0 139.9 138.4 137.2 136.2 135.8 135.4 135.0 136.5 137.1 136.9 137.1 137.3 137.5 137.6 137.5 137.2 136.8 136.4 136.1 136.0 136.1 136.3 138.0 139.1 139.3 139.4 139.5 139.7 140.9 141.3 140.5 138.9 138.1 138.5 138.9 139.2 139.3 139.5 139.7 139.9 140.0 140.2 140.8 141.0 141.5 142.4 142.7 143.0 143.3 143.1 142.8 142.4 142.2 142.2 142.7 143.2 143.1 142.0 140.9 139.9 139.2 136.3 132.8 129.4 126.7 125.4 125.9 127.3 129.6 133.0 135.9 138.5 141.0 144.6 148.7 152.9 154.5 155.1 154.5 154.3 154.1 153.5 153.5 153.4 153.1 153.2 153.4 153.4 153.4 153.3 153.3 153.5 153.6 153.9 154.1 154.3 154.5 154.5 154.4 154.3 154.4 154.7 155.0 155.3 155.5 155.5 155.6 155.7 155.8 155.9 156.0 156.0 155.9 155.6 155.5 155.3 155.1 154.8 154.7 154.9 155.1 155.3 155.4 155.3 155.2 155.1 154.9 154.8 154.7 154.6 154.6 154.8 153.3 152.0 150.8 150.5 150.6 151.0 151.2 151.4 151.7 152.2 153.1 154.1 157.0 160.5 164.4 166.6 168.3 169.4 170.3 170.7 170.3 169.3 168.3 167.3 167.2 166.9 166.3 165.1 164.2 163.8 163.5 163.3 163.2 163.9 164.5 165.0 166.4 168.1 170.0 170.8 172.1 174.5 177.2 179.4 180.5 182.4 184.2 185.5 186.2 186.5 186.4 186.3 186.1 186.0 185.9 185.8 185.8 185.3 184.9 184.7 184.5 184.1 183.6 183.0 182.0 180.2 178.6 176.6 172.8 170.9 169.9 170.5 171.3 172.4 174.9 175.9 176.3 176.2 176.0 175.9 175.8 175.5 175.3 175.9 176.4 176.8 177.3 177.2 177.0 176.8 176.8 176.9 177.2 177.4 177.4 177.4 177.1 176.8 176.5 176.2 175.9 175.3 174.8 174.4 173.7 172.7 171.5 170.6 170.0 169.5 168.9 168.5 168.3 168.6 168.9 169.1 169.7 174.6 174.8 175.4 176.3 177.4 178.9 180.4 181.7 182.5 182.8 182.9 182.8 182.3 181.9 181.1 180.1 179.2 177.9 176.8 175.4 174.2 172.7 171.6 170.5 169.4 168.3 167.6 166.8 166.4 166.1 166.1 166.2 166.5 167.0 167.8 168.6 169.8 170.9 172.1 173.6 175.1 176.5 177.8 179.2 180.4 181.6 182.4 183.2 183.7 184.1 184.1 184.0 183.6 183.0 182.2 181.1 179.8 178.7 177.1 175.7 174.0 172.7 171.1 169.8 168.6 167.4 166.5 165.7 165.3 164.9 164.9 165.1 165.4 166.0 167.1 169.9 172.0 173.8 173.2 170.5 170.0 170.2 170.5 170.8 184.1 183.2 180.9 178.3 174.2 170.0 165.1 160.0 154.7 149.4 144.2 139.3 134.6 130.9 127.4 124.3 121.5 119.6 118.1 117.4 117.2 117.8 118.9 120.4 122.7 125.3 128.7 132.4 136.8 141.5 146.7 152.2 158.0 164.1 170.3 176.4 182.5 188.2 193.5 197.9 202.1 205.8 208.5 210.6 211.5 207.5 207.5 207.5 207.5 206.7 204.8 202.2 199.8 199.5 200.7 203.1 206.8 210.3 211.3 213.6 216.6 220.3 248.1 248.8 249.2 249.8 250.3 251.2 252.3 253.6 255.6 255.9 255.8 255.4 254.7 253.6 252.6 250.8 249.3 247.9 247.3 246.5 246.1 245.6 245.2 244.8 244.5 244.5 244.5 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.1 246.6 247.1 246.4 248.7 250.6 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 245.8 247.7 246.4 244.7 241.9 236.4 228.4 220.4 218.6 221.1 226.7 233.4 238.4 241.8 243.8 244.1 243.4 240.4 237.6 234.8 231.8 229.3 227.2 226.2 225.6 225.3 225.0 225.3 226.1 227.2 229.0 231.3 232.8 233.1 233.6 234.6 236.0 238.0 240.0 242.6 243.3 243.8 244.1 244.2 244.0 243.4 242.7 241.5 240.4 238.8 237.2 235.7 233.8 232.1 230.2 228.6 226.9 225.5 224.2 223.2 222.4 221.8 221.5 221.4 221.7 222.2 222.9 224.0 225.2 226.8 228.3 230.3 232.0 234.1 236.1 237.8 239.8 241.3 243.0 244.0 245.1 245.8 246.1 246.2 245.9 245.2 244.3 243.0 241.6 239.8 238.1 236.1 234.2 231.9 230.0 227.9 227.2 
228.3 230.7 222.4 222.9 223.5 224.1 225.0 225.8 226.6 227.6 228.4 229.3 230.2 230.9 231.6 232.3 232.7 233.0 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.0 232.9 232.9 232.9 232.8 232.7 232.6 232.5 232.4 232.4 232.4 232.3 232.2 232.1 232.1 232.6 233.3 234.2 234.6 235.3 236.5 236.6 236.5 235.8 235.4 234.7 233.8 229.6 224.0 216.6 210.9 207.0 205.5 206.6 208.3 211.0 212.4 213.2 213.2 213.5 213.3 212.4 210.3 208.6 207.8 207.2 206.7 206.2 205.5 204.7 204.0 204.2 205.6 208.4 209.9 210.6 210.5 209.9 209.2 208.6 208.3 207.9 207.5 207.2 207.0 206.8 206.8 206.7 206.9 207.0 207.1 207.1 207.2 207.4 207.5 208.1 208.8 209.6 209.9 209.9 209.6 209.5 209.3 209.2 209.0 208.7 208.0 207.5 207.2 206.8 206.3 206.0 206.1 206.3 206.7 207.0 207.4 207.7 208.0 208.0 207.8 207.6 207.3 207.0 206.7 206.2 205.5 204.5 202.8 200.6 195.1 191.1 188.0 184.7 182.2 180.3 180.5 181.0 181.8 183.6 186.3 189.6 189.3 188.8 188.2 187.3 186.5 185.9 185.2 184.9 184.9 185.3 185.7 186.3 186.5 186.5 186.3 186.4 186.5 186.7 186.7 186.5 186.1 185.2 184.3 183.4 181.6 179.5 176.9 175.2 173.4 171.3 168.1 165.1 162.3 157.8 153.7 150.3 147.7 145.9 145.1 144.6 144.5 144.8 145.5 146.9 149.0 150.2 151.4 153.2 154.0 154.6 155.5 156.0 156.2 156.0 155.8 155.5 155.2 154.9 154.7 154.6 154.2 153.8 153.3 152.9 152.6 152.6 152.7 152.8 153.0 153.2 153.4 153.5 154.1 154.7 154.9 155.2 155.4 155.7 155.7 155.7 155.7 155.7 155.8 156.0 156.3 156.6 156.9 157.2 157.6 158.4 159.8 161.4 162.4 162.8 162.7 161.9 160.5 158.8 157.6 155.5 153.8 156.3 161.1 168.2 177.7 185.9 193.0 200.2 208.3 216.4 221.3 224.4 226.2 225.6 226.7 228.8 229.9 230.5 230.5 230.9 231.9 233.2 233.9 234.6 235.4 233.1 226.6 217.7 207.0 204.5 207.3 217.8 225.3 230.7 232.0 233.6 235.2 236.7 239.0 242.7 246.3 250.3 254.8 259.9 266.2 274.5 277.1 277.8 275.9 276.0 276.1 276.1 276.9 277.3 277.2 276.7 276.2 275.9 276.3 276.5 276.5 276.1 275.7 275.3 275.4 275.5 275.6 275.6 275.6 275.6 275.6 275.5 275.3 275.3 275.3 275.4 274.9 274.6 274.9 275.5 276.0 276.3 276.5 276.6 276.8 277.0 277.2 277.3 277.8 278.3 278.0 277.8 277.6 277.3 276.9 276.5 275.2 273.4 271.3 267.9 264.9 260.9 252.7 248.3 243.7 234.7 227.6 221.4 216.6 213.5 211.3 210.8 210.9 211.3 212.3 215.4 219.5 221.2 223.6 226.5 229.7 230.8 231.0 232.5 233.9 235.0 234.9 234.4 233.7 232.8 231.8 230.8 229.4 227.7 225.9 224.4 223.5 222.8 222.8 223.0 223.2 223.4 223.6 223.8 225.4 226.5 227.4 228.7 230.7 233.1 235.3 236.7 237.6 238.6 239.0 238.8 238.4 238.2 238.0 237.7 236.8 235.7 234.5 233.3 232.1 231.2 230.3 229.3 229.2 229.2 229.6 230.0 230.4 230.8 231.1 231.3 231.6 231.9 232.0 232.1 232.7 233.2 233.6 234.0 234.5 235.0 235.2 235.4 235.4 235.5 235.6 235.6 235.7 235.8 235.9 236.0 236.0 235.9 236.2 236.4 236.6 236.5 236.2 235.7 235.3 234.8 234.2 233.1 231.8 230.3 227.0 224.1 222.5 220.4 218.0 215.3 212.0 208.8 206.8 204.6 202.5 201.1 200.0 199.2 199.2 199.1 199.8 203.2 204.2 204.7 205.6 206.3 206.7 206.3 206.3 206.6 207.4 208.2 209.0 209.6 210.0 210.1 209.6 209.1 208.5 207.7 205.0 200.8 195.5 187.9 180.0 176.4 173.4 171.0 169.3 167.5 165.7 164.5 163.8 163.8 165.2 166.6 168.0 171.5 173.5 174.5 176.8 177.7 177.7 178.0 178.5 179.2 180.3 181.1 181.7 181.8 182.1 182.5 183.2 183.7 184.0 184.0 184.1 184.1 184.2 184.2 184.1 184.2 184.3 184.4 184.5 184.6 184.6 184.7 184.8 184.8 184.9 185.1 185.3 185.5 185.8 186.3 186.2 186.1 185.9 185.8 185.7 185.6 185.4 185.1 
184.8 182.7 180.0 176.3 172.6 167.3 159.8 155.2 153.2 154.1 157.7 161.7 165.6 167.8 170.0 173.0 177.0 180.7 183.6 185.6 188.1 191.7 194.7 198.6 204.1 208.4 211.3 212.5 213.2 213.9 214.5 214.9 214.8 213.2 211.8 210.6 209.9 207.9 205.5 202.9 201.7 200.7 199.1 198.4 198.2 198.7 199.4 200.1 201.2 203.1 205.5 208.6 210.1 211.2 212.5 213.2 213.3 213.1 212.9 212.5 211.5 209.7 207.7 206.5 204.1 201.0 198.6 196.9 195.6 194.2 193.6 193.8 194.4 196.0 198.1 199.8 202.4 205.7 208.8 210.9 212.4 214.5 216.1 217.5 217.3 217.0 216.7 216.5 215.5 214.0 211.7 208.7 205.4 200.9 198.4 197.0 196.5 196.3 196.3 196.7 197.0 197.5 198.0 198.7 199.5 200.2 200.8 201.5 201.9 202.1 202.0 201.3 200.3 198.8 197.1 195.1 192.7 191.0 189.4 187.9 187.0 186.1 185.4 185.0 184.5 184.0 183.8 183.6 183.4 183.2 183.0 182.9 182.8 182.7 182.6 182.0 181.6 182.0 182.3 182.9 183.9 184.8 185.6 185.9 186.3 186.6 186.5 186.4 186.2 186.1 186.0 185.9 185.6 185.4 185.4 185.8 186.1 186.3 186.2 186.0 185.8 185.7 185.4 185.2 185.0 184.8 184.6 184.4 184.3 184.3 184.3 184.5 184.6 184.8 185.1 185.3 185.6 185.7 185.9 186.0 185.9 185.6 185.5 185.1 184.5 184.2 183.1 181.6 180.3 177.8 174.7 171.6 170.2 170.0 170.2 170.4 171.1 172.8 174.8 176.9 179.4 183.3 188.4 193.9 199.2 204.3 207.4 209.7 211.4 212.2 212.7 213.0 213.4 213.3 212.8 212.2 210.9 209.0 206.0 203.7 202.0 201.2 200.3 199.3 199.2 199.5 200.0 200.8 201.8 203.1 204.2 205.1 206.0 207.4 208.6 209.6 210.3 210.9 211.4 211.4 211.2 210.8 210.4 210.1 209.8 208.7 207.6 206.7 205.1 202.9 199.5 194.6 189.8 185.1 182.3 181.1 181.3 182.1 182.8 183.5 183.8 184.0 184.0 184.5 185.2 185.9 186.7 187.4 188.0 188.3 188.4 188.2 188.2 188.6 189.4 190.0 190.9 192.3 192.7 192.6 191.1 189.8 188.4 187.2 186.6 186.1 185.8 185.6 185.4 184.9 183.9 182.6 180.4 176.1 171.6 167.5 167.6 169.6 174.8 177.8 180.3 182.5 184.3 185.7 186.7 187.7 188.8 190.2 191.6 193.0 194.3 195.6 197.1 198.8 200.7 202.5 204.2 205.6 206.6 207.1 207.4 207.6 207.7 207.3 206.6 206.8 206.8 206.7 206.6 206.6 206.7 206.5 206.4 206.3 206.5 206.7 206.8 207.0 207.1 207.3 207.5 207.5 207.5 207.9 208.2 208.5 208.5 208.4 208.3 208.1 207.9 207.8 207.5 207.4 207.3 206.7 205.8 204.6 201.3 197.8 193.6 192.1 191.8 193.3 194.3 195.6 197.0 197.9 198.7 199.5 201.5 203.1 204.2 205.2 206.2 206.9 207.6 208.5 209.6 210.5 211.3 211.9 212.7 213.6 214.4 216.2 219.8 226.1 233.2 238.4 240.0 240.7 241.0 241.0 240.2 239.2 238.1 237.6 237.1 236.2 234.1 232.1 230.8 230.2 229.6 228.3 227.5 227.0 227.3 227.6 228.1 229.2 230.3 231.3 232.0 233.4 234.9 236.6 237.4 238.0 238.9 238.9 238.6 238.2 238.0 237.7 236.5 235.0 233.5 232.4 231.3 230.2 229.4 228.5 227.7 227.6 227.8 228.3 228.7 229.1 229.3 230.0 231.1 232.4 233.5 234.1 234.5 235.0 235.6 236.1 236.2 236.1 236.1 235.9 235.7 235.5 235.3 235.2 235.2 235.1 234.8 234.6 234.2 233.9 233.6 234.0 233.6 232.5 231.0 228.9 226.6 223.8 221.0 218.1 215.3 212.8 210.4 208.9 207.2 206.1 205.5 205.5 206.2 207.8 210.1 213.2 216.8 221.0 225.7 230.5 235.6 240.5 245.1 249.4 252.9 255.8 257.7 258.7 258.6 257.3 255.1 252.1 248.7 245.0 241.4 238.3 235.8 234.2 233.4 233.9 236.1 239.8 244.6 250.4 256.6 262.8 268.4 272.7 275.6 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 278.6 276.3 272.3 265.3 256.4 246.7 236.7 227.4 219.5 213.6 209.9 208.0 208.0 208.1 208.1 208.1 208.3 208.3 208.4 208.4 208.5 208.5 208.5 208.5 205.4 203.6 202.4 201.9 202.0 202.5 203.7 204.8 203.7 203.8 204.9 206.5 206.9 206.7 207.6 208.1 208.1 208.6 209.5 210.6 210.4 210.5 210.9 210.6 210.0 209.3 208.8 208.0 206.9 204.9 202.2 
199.2 196.1 193.3 190.7 192.0 195.5 200.6 205.9 211.2 216.6 221.4 226.0 230.0 233.7 236.6 238.4 240.1 241.7 243.0 243.3 243.1 241.9 240.7 239.4 237.7 234.3 231.1 228.3 226.3 224.2 221.8 220.6 220.2 220.9 221.5 222.4 223.8 225.4 227.6 230.6 233.2 235.5 237.4 239.9 241.7 242.3 242.1 241.6 241.0 238.3 235.8 234.4 230.7 226.9 223.7 220.9 218.6 216.6 216.3 216.5 217.0 218.3 220.1 223.2 225.1 227.3 231.9 234.6 237.1 240.0 241.4 242.2 242.7 242.3 241.7 241.1 239.5 236.9 231.7 228.9 226.4 223.2 221.8 220.9 220.4 220.5 220.9 221.3 222.0 222.9 223.7 224.8 225.9 226.8 227.9 229.0 229.9 230.8 231.6 232.2 232.6 233.0 233.0 232.9 232.6 232.1 231.4 230.6 229.8 229.0 228.2 227.1 226.2 225.3 224.3 223.6 222.9 222.3 221.9 221.7 221.3 220.6 219.7 218.3 217.4 217.0 216.3 215.6 215.1 213.3 212.4 212.0 211.4 211.0 210.7 211.1 211.6 212.3 212.7 213.8 216.0 217.0 218.9 222.2 224.8 227.0 228.5 229.6 230.7 231.7 231.5 231.1 230.5 230.2 230.1 230.4 231.1 231.6 231.9 232.5 233.4 234.4 235.3 236.1 236.4 236.9 237.0 236.4 235.4 234.2 232.8 230.5 226.9 221.9 211.3 201.8 195.6 195.8 197.4 199.1 202.0 204.1 204.3 204.9 205.4 205.3 205.9 206.9 208.7 209.6 209.7 208.1 206.3 204.9 205.5 206.0 206.8 209.2 210.0 210.4 210.7 210.7 210.6 210.8 210.9 211.0 210.8 210.1 209.3 208.3 207.3 206.0 205.1 204.1 202.9 201.4 199.8 199.0 197.5 196.6 195.5 194.9 194.2 193.5 192.9 192.5 191.9 191.3 191.0 190.6 190.1 189.8 189.2 188.6 188.6 188.2 187.6 187.4 187.0 186.7 186.3 185.9 185.5 185.2 185.2 185.0 185.0 184.7 184.7 184.7 184.7 184.7 184.7 184.7 183.8 182.0 180.9 179.0 177.6 176.1 174.5 173.0 169.1 166.3 164.2 162.6 161.3 160.8 160.0 159.2 158.8 158.0 158.3 158.3 157.8 157.6 157.3 156.9 156.6 156.6 156.7 156.7 156.7 156.7 156.7 156.7 156.7 156.7 156.7 156.7 156.7 156.6 156.6 156.6 156.6 156.6 156.6 156.4 156.3 156.3 156.3 156.3 156.3 154.0 154.0 153.8 153.8 153.6 153.5 153.5 153.4 153.4 153.4 153.3 153.5 153.5 154.8 154.7 154.4 154.0 153.7 153.3 152.4 151.6 150.7 150.0 149.7 149.5 148.9 148.4 147.9 147.7 147.9 148.3 148.6 149.7 150.9 151.6 152.6 153.8 155.4 157.2 158.7 159.2 160.1 160.9 160.6 160.3 159.6 158.1 156.6 154.9 151.8 149.3 147.3 145.5 144.0 143.0 143.3 143.7 144.0 144.8 146.1 147.7 149.0 149.8 150.3 151.4 152.3 153.0 154.0 155.4 157.2 158.9 159.5 159.4 159.4 159.3 159.3 159.4 159.5 159.7 160.3 161.2 162.4 164.5 167.5 171.5 176.7 182.8 190.4 197.4 204.4 211.3 215.2 218.5 220.9 221.4 221.5 221.0 220.1 218.9 217.5 216.2 215.0 213.7 212.8 212.0 211.4 212.9 215.2 218.8 218.9 218.2 217.6 215.9 214.0 212.4 210.6 209.2 208.3 208.5 209.1 209.8 210.3 211.4 213.4 216.6 219.7 222.2 223.4 224.8 226.7 228.7 230.1 230.6 231.5 232.6 233.8 234.2 234.5 234.9 235.2 235.3 235.0 234.6 234.3 234.2 234.0 233.8 233.5 233.3 233.0 232.7 230.7 227.9 224.0 217.1 210.4 206.6 206.6 207.7 209.1 210.3 211.1 211.1 211.1 211.2 211.3 211.8 212.5 213.0 213.0 213.0 214.0 215.2 216.3 216.8 216.4 215.6 215.1 214.5 213.8 213.1 212.8 212.8 212.2 211.7 211.2 210.6 210.1 209.7 209.4 209.1 208.9 208.5 208.6 209.1 209.2 208.8 208.0 207.2 205.2 202.6 197.9 191.0 183.0 174.1 168.8 165.6 166.7 169.4 174.9 180.7 185.2 187.6 190.3 193.3 196.6 194.6 192.4 189.9 188.0 186.5 185.3 184.4 183.6 183.2 183.1 182.9 182.8 182.8 183.0 183.4 183.9 184.4 184.8 184.9 185.0 185.3 185.6 185.8 186.0 185.6 185.3 185.2 185.1 185.0 184.9 184.8 184.7 184.6 184.5 184.5 184.5 184.5 184.5 184.7 184.7 184.8 184.9 184.9 184.9 185.0 185.2 185.3 185.5 185.7 185.8 185.5 184.9 183.9 181.6 177.9 173.4 167.3 162.9 159.4 158.2 158.5 159.2 160.1 161.5 162.8 163.0 163.8 164.8 166.0 166.8 167.3 167.7 169.1 
171.2 172.8 173.7 174.2 175.4 176.5 177.5 177.8 177.6 177.2 176.9 176.6 176.3 175.9 174.6 172.9 171.1 170.1 169.6 168.5 167.6 166.9 167.1 167.4 167.8 167.9 168.8 170.0 171.7 172.9 173.7 175.8 177.3 178.3 178.1 178.0 177.8 177.5 177.2 176.6 174.9 173.6 172.7 170.7 169.1 168.4 167.9 167.8 168.2 168.5 169.3 170.8 172.2 174.2 176.9 178.0 179.0 180.0 180.0 179.8 179.6 179.4 178.9 177.9 176.8 175.1 172.5 170.3 168.9 168.9 169.1 169.3 169.4 169.8 170.2 170.6 171.2 171.9 172.7 173.9 175.3 176.8 179.2 182.1 185.4 190.1 195.3 200.7 206.6 212.0 216.3 219.7 222.1 223.3 223.8 223.8 223.2 222.3 221.3 220.0 218.9 217.8 216.7 215.8 215.0 214.5 215.0 216.1 218.2 217.9 217.0 215.9 215.3 214.9 214.4 213.5 212.5 211.7 211.0 210.4 209.8 209.3 209.1 209.4 210.0 210.7 211.7 213.3 215.2 216.5 219.1 222.4 224.2 226.2 228.2 228.9 230.3 231.9 232.8 233.6 234.2 234.6 234.8 234.8 234.5 234.0 233.6 233.4 233.4 233.4 233.4 233.5 233.5 233.7 233.8 233.8 233.8 233.7 233.6 233.8 233.9 234.0 233.5 232.9 232.3 232.5 232.7 233.1 234.1 235.3 236.9 237.0 237.2 237.6 238.1 238.1 237.4 236.7 235.8 234.8 233.2 231.4 229.1 227.7 225.8 223.3 220.2 217.5 215.3 214.4 213.2 211.4 209.9 208.6 207.8 206.8 206.1 205.7 205.3 205.0 205.0 205.1 205.2 205.4 205.5 205.7 206.2 206.3 206.4 206.5 206.7 206.9 207.1 207.4 207.6 207.7 207.8 207.7 207.7 207.6 207.5 207.3 207.2 207.0 207.0 206.9 206.8 206.8 206.8 206.8 206.9 206.9 207.0 207.0 207.1 207.2 207.2 207.2 207.3 207.3 207.4 207.4 207.4 207.5 207.6 208.1 208.5 208.9 209.4 209.6 209.6 209.5 209.4 209.3 209.2 209.2 209.1 209.1 209.1 209.1 209.0 208.8 208.6 208.6 208.5 208.2 207.7 207.4 207.1 206.5 205.9 205.3 204.1 203.1 202.3 202.2 202.0 202.0 202.8 203.7 204.6 205.8 208.1 211.2 216.0 220.6 225.1 228.9 232.3 235.5 236.9 237.8 238.5 239.1 239.6 239.9 239.7 238.9 237.4 235.7 234.3 233.2 231.9 230.8 230.1 229.8 229.7 230.0 230.3 230.6 230.9 231.1 231.4 231.7 232.7 233.8 234.8 234.8 234.8 234.6 234.4 234.2 234.1 234.0 233.9 233.6 233.2 232.7 232.2 232.1 232.2 232.3 232.4 232.4 232.5 232.6 232.7 232.8 232.9 232.9 232.9 233.3 233.7 233.7 233.7 233.6 233.4 233.3 233.2 233.1 233.1 233.3 233.6 233.8 233.9 234.0 233.8 233.6 233.3 233.4 233.7 234.1 234.2 234.1 233.9 233.9 233.9 233.8 233.6 233.5 233.4 233.3 233.2 233.2 233.3 233.5 233.4 233.4 233.3 232.7 231.9 230.9 230.1 229.2 228.1 226.0 224.6 224.3 225.8 228.8 233.3 239.6 247.8 257.4 265.0 270.9 275.6 278.0 279.9 281.5 283.1 283.8 284.0 284.0 284.4 285.0 284.3 282.9 281.1 278.6 276.8 275.3 274.8 274.0 273.1 273.3 273.6 274.0 274.4 274.8 275.1 275.6 276.5 278.1 279.5 280.4 280.7 281.0 281.0 280.4 280.1 278.9 276.2 273.1 267.4 258.3 250.1 245.4 245.7 252.4 258.9 264.8 267.0 268.0 267.0 266.1 264.2 260.7 258.1 256.3 255.3 253.4 251.2 248.8 246.1 243.5 241.3 239.3 237.6 236.3 234.5 232.6 230.9 229.4 228.0 226.7 225.7 224.8 224.1 226.1 229.0 232.4 234.4 236.2 238.4 239.5 240.1 239.6 238.7 237.7 236.8 235.5 233.9 231.3 229.1 227.1 225.9 224.2 222.4 220.5 219.7 219.3 219.9 220.4 220.9 221.5 223.2 225.5 227.6 229.0 230.2 232.5 234.2 235.4 236.8 237.8 238.2 237.8 237.4 237.0 236.7 235.6 233.5 230.0 226.4 223.0 221.6 220.0 218.2 218.4 218.9 219.5 221.4 223.2 225.0 227.3 229.8 232.7 234.7 236.4 237.8 237.7 237.5 237.2 236.8 236.0 235.1 232.0 227.4 221.8 220.7 220.5 220.9 221.5 222.2 223.1 224.0 225.1 226.2 226.2 226.2 226.2 226.2 226.2", - "input_type": "phoneme", - "offset": 53.567 + "f0_timestep": "0.005" }, { + "offset": 70.555, "text": "AP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", "ph_seq": "AP c ong t ou q v x 
ve ch u k uang d ang n ian l iu c an p ian SP zh ir l ong h u f ei sh en j ian j in r ir zh ong zh ong g uo y En z uo r ir f u sh eng y i y van d a j iang m ang m ang y i w an y E d ang ch uan q v b u j ian SP", - "note_seq": "rest A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 D#3 D#3 A#3 A#3 C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 G#3 G#3 rest F#3 F#3 F#4 F#4 F4 F4 C#4 C#4 C#4 C#4 D#4 D#4 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 D#3 D#3 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 C#3 C#3 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 G#3 G#3 A#3 A#3 C#4 C#4 C#4 C#4 C#4 C#4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.288 0.181 0.181 0.181 0.181 0.181 0.181 0.1800001 0.1800001 0.362 0.362 0.1799999 0.1799999 0.362 0.362 0.5420001 0.5420001 0.181 0.181 0.3610001 0.3610001 0.362 0.362 0.1799998 0.3620002 0.3620002 0.5420001 0.5420001 0.3610001 0.3610001 0.362 0.362 0.1809998 0.1809998 0.723 0.723 0.1799998 0.1799998 0.1810002 0.1810002 0.1809998 0.1809998 0.1800003 0.1800003 0.362 0.362 0.3610001 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.1799998 0.1799998 0.1809998 0.1809998 0.3620005 0.3620005 0.3610001 0.3610001 0.1809998 0.1809998 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.3610001 0.3610001 0.1809998 0.1809998 0.3620005 0.3620005 0.5419998 0.5419998 0.5419998 0.5419998 0.5419998 0.5419998 0.3620005 0.3620005 2.891 2.891 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.167517 0.120483 0.139008 0.041992 0.102738 0.078262 0.079956 0.101044 0.096061 0.083939 0.272622 0.089378 0.149879 0.030121 0.322846 0.039154 0.466698 0.075302 0.097008 0.083992 0.309792 0.051208 0.362 0.104698 0.075302 0.288209 0.073792 0.421517 0.120483 0.22094 0.14006 0.217423 0.144577 0.120758 0.060242 0.623604 0.099396 0.134819 0.045181 0.141846 0.039154 0.119476 0.061523 0.149879 0.030121 0.303261 0.058739 0.272148 0.088852 0.126785 0.054214 0.110215 0.070786 0.110725 0.069275 0.135818 0.045181 0.316819 0.045181 0.326363 0.034637 0.135818 0.045181 0.135818 0.045181 0.149879 0.030121 0.150879 0.030121 0.306786 0.054214 0.131302 0.049698 0.277665 0.084335 0.376335 0.165665 0.346214 0.195786 0.492302 0.049698 0.215913 0.146088 2.891 0.072", - "f0_timestep": "0.005", + "ph_dur": "0.1675 0.1205 0.139 0.042 0.1027 0.0783 0.08 0.101 0.0961 0.0839 0.2726 0.0894 0.1499 0.0301 0.3228 0.0392 0.4667 0.0753 0.097 0.084 0.3098 0.0512 0.362 0.1047 0.0753 0.2882 0.0738 0.4215 0.1205 0.2209 0.1401 0.2174 0.1446 0.1208 0.0602 0.6236 0.0994 0.1348 0.0452 0.1418 0.0392 0.1195 0.0615 0.1499 0.0301 0.3033 0.0587 0.2721 0.0889 0.1268 0.0542 0.1102 0.0708 0.1107 0.0693 0.1358 0.0452 0.3168 0.0452 0.3264 0.0346 0.1358 0.0452 0.1358 0.0452 0.1499 0.0301 0.1509 0.0301 0.3068 0.0542 0.1313 0.0497 0.2777 0.0843 0.3763 0.1657 0.3462 0.1958 0.4923 0.0497 0.2159 0.1461 2.891 0.072", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 G#3 G#3 F#3 D#3 A#3 C#4 A#3 G#3 F#3 G#3 rest F#3 F#4 F4 C#4 C#4 D#4 A#3 G#3 G#3 F#3 F#3 D#3 A#3 G#3 G#3 F#3 F#3 C#3 A#3 G#3 G#3 F#3 F#3 G#3 A#3 C#4 C#4 C#4 D#4 D#4 rest", + "note_dur": "0.288 0.181 0.181 0.181 0.18 0.362 0.18 0.362 0.542 0.181 0.361 0.362 0.18 0.362 0.542 0.361 0.362 0.181 0.723 0.18 0.181 0.181 0.18 0.362 0.361 0.181 0.181 0.18 0.181 0.362 0.361 0.181 0.181 0.18 0.181 0.361 0.181 0.362 0.542 0.542 0.542 0.362 2.891 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "232.7 232.7 232.7 233.0 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 232.6 231.9 231.1 229.9 228.6 227.2 225.6 224.0 222.5 220.9 219.3 218.0 216.6 215.5 214.6 214.1 213.8 214.1 215.1 217.1 220.3 222.6 224.3 225.1 226.1 226.9 227.3 227.7 228.2 228.8 229.7 230.6 231.0 231.5 232.1 233.4 234.4 235.1 235.3 234.8 232.7 225.5 217.8 210.1 202.4 198.5 195.9 194.8 194.7 195.7 199.3 203.3 207.1 209.2 209.6 209.0 208.5 208.0 207.5 207.0 206.8 206.9 207.4 208.0 208.5 208.8 208.6 208.1 207.5 206.1 203.6 198.6 192.6 186.0 179.8 177.6 178.1 181.0 185.3 190.7 194.2 197.8 201.4 203.0 206.0 210.1 215.2 218.6 220.6 221.3 220.6 219.1 216.0 212.9 209.8 208.5 207.7 207.2 206.8 206.3 205.5 204.5 203.7 202.9 198.1 192.2 184.8 180.0 178.3 181.1 190.7 198.8 203.7 204.0 203.7 202.6 201.2 200.0 199.1 199.1 198.8 198.3 196.4 194.6 192.9 191.1 189.9 189.3 188.7 188.0 187.4 186.9 186.6 186.2 185.3 183.7 181.3 176.1 170.7 166.2 164.9 164.8 165.6 167.6 169.2 169.9 169.7 169.0 167.8 166.8 165.7 163.9 162.2 160.5 159.1 158.1 157.1 156.1 155.2 154.2 153.3 152.1 150.8 149.4 148.6 148.2 148.5 148.9 149.2 148.9 149.1 149.4 149.8 150.0 150.0 150.0 150.3 150.7 150.9 151.0 151.0 151.4 151.8 152.3 152.6 152.9 153.1 153.8 154.4 154.7 154.7 154.7 154.8 154.8 154.9 154.9 155.0 155.1 155.1 155.1 155.2 155.5 156.1 156.8 157.5 157.7 158.0 158.2 158.1 158.0 157.8 157.7 157.7 157.7 156.6 155.4 154.1 153.6 154.2 155.6 158.4 161.6 165.0 168.4 172.5 177.4 180.7 185.5 192.5 197.8 203.4 209.6 214.9 220.8 227.2 227.8 228.8 230.3 228.9 228.7 230.4 230.1 229.8 229.7 230.7 231.3 231.2 232.0 232.7 233.0 233.0 232.8 232.6 232.5 232.7 233.1 233.2 233.2 233.2 232.1 229.3 224.8 217.1 209.3 202.5 202.3 208.9 224.0 242.8 259.1 268.9 273.7 276.3 277.4 278.6 279.6 279.9 279.9 280.0 280.5 280.2 279.6 278.7 278.0 277.5 277.6 277.0 276.2 275.4 275.3 275.4 275.2 275.1 275.2 275.4 275.5 275.6 275.9 276.3 276.6 276.7 276.7 276.7 276.8 276.9 277.1 277.3 277.4 277.5 277.6 277.7 277.7 278.1 278.4 278.4 278.2 277.8 277.2 276.7 276.3 275.4 272.0 269.4 267.1 260.8 255.8 251.7 241.6 235.3 231.3 225.8 220.3 215.0 212.6 210.0 207.2 205.8 205.0 204.7 204.6 206.9 210.9 214.1 215.2 215.0 216.8 217.6 217.5 217.5 217.8 218.5 218.8 218.8 218.5 218.1 217.6 217.0 216.4 215.9 215.5 215.4 215.3 215.3 215.4 215.7 216.3 217.8 219.6 221.7 223.0 224.1 224.6 226.5 228.5 230.8 231.7 232.4 233.4 234.2 234.8 235.1 235.3 235.4 235.2 234.6 234.1 233.7 233.5 233.3 233.0 232.8 232.7 232.7 232.8 232.8 232.9 233.0 233.1 233.2 233.3 233.4 233.4 233.4 233.3 233.0 232.8 232.6 232.4 232.5 232.6 233.0 233.4 233.7 234.1 234.3 234.5 234.7 234.9 235.1 235.2 235.0 234.8 234.6 234.4 234.1 234.1 233.4 232.4 231.1 229.6 228.0 226.0 223.6 221.0 217.5 214.2 210.7 206.2 203.0 200.9 199.6 199.0 198.8 198.7 198.7 198.8 199.4 200.5 201.9 203.8 206.1 208.6 209.1 209.3 209.3 209.5 209.9 210.4 210.1 210.1 210.3 210.6 209.8 208.3 204.8 199.4 192.7 188.2 185.8 184.8 186.2 187.1 187.8 189.1 189.8 189.5 189.3 188.9 188.4 188.0 187.5 186.7 186.4 186.2 186.0 185.2 184.6 184.1 183.8 183.3 182.6 182.2 181.8 181.6 181.2 181.0 181.2 181.5 181.6 181.5 181.7 181.8 181.9 182.5 183.1 183.3 183.6 183.9 184.1 184.5 184.8 184.9 185.0 185.2 185.3 185.3 185.3 185.3 185.4 185.5 185.5 185.6 185.7 185.8 185.9 186.0 186.1 186.2 186.3 186.5 186.6 186.6 186.7 187.0 187.2 187.0 186.7 186.2 185.1 
183.0 180.0 173.9 171.3 170.3 172.4 176.8 181.5 184.0 186.8 189.6 191.4 194.1 196.9 198.4 201.4 205.2 209.2 212.3 214.7 216.3 217.5 218.0 217.3 216.3 215.3 214.4 213.6 212.7 211.8 210.2 208.1 206.3 205.4 204.9 204.1 203.1 202.0 201.8 202.0 202.4 203.1 203.6 204.0 206.3 207.8 208.9 210.5 211.4 211.9 212.6 212.8 212.6 212.2 211.9 211.6 209.5 207.8 206.5 203.3 200.9 199.4 197.6 195.9 194.6 194.6 194.9 195.7 197.4 199.2 201.3 204.4 207.0 208.6 211.1 213.4 215.3 216.1 216.7 217.1 216.3 215.6 215.0 213.4 211.8 209.9 206.8 203.4 199.9 198.3 197.6 197.8 198.1 198.5 199.0 199.6 200.3 200.9 201.7 202.4 203.0 203.5 203.9 204.1 203.7 203.1 202.1 200.4 198.5 196.4 194.4 192.3 190.4 188.8 187.5 186.3 185.5 184.9 184.3 183.7 183.2 182.8 182.5 182.2 181.9 181.7 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.3 184.3 184.3 184.3 184.3 184.4 184.4 184.4 184.4 184.5 184.5 184.5 184.5 184.6 184.6 184.6 184.6 184.7 184.7 184.7 184.8 184.8 184.8 184.9 184.9 184.9 184.9 185.0 185.0 185.0 185.0 185.0 185.1 185.1 185.1 185.1 185.1 185.2 185.2 185.2 185.2 185.2 185.2 185.2 185.2 185.2 184.9 183.8 181.9 179.3 176.3 173.1 169.8 166.7 164.1 162.0 160.5 159.8 159.7 160.7 162.6 165.8 170.0 175.2 181.6 189.1 197.6 207.1 217.5 228.7 240.8 253.2 266.2 279.1 291.7 304.0 315.3 325.4 334.0 340.8 345.5 348.2 348.6 348.2 347.6 346.6 345.6 344.6 344.0 343.4 343.4 343.4 343.4 343.6 343.6 344.0 344.1 344.4 344.6 344.9 345.2 345.3 345.7 345.8 346.0 346.2 346.4 346.4 346.4 347.0 349.8 354.4 359.9 365.2 369.0 370.4 370.4 370.4 370.4 370.4 370.4 370.4 370.2 370.2 370.2 370.2 370.0 370.0 370.0 370.0 369.8 369.8 369.8 369.6 369.6 369.6 369.6 369.5 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 369.4 373.1 383.0 397.9 415.5 432.2 445.4 450.8 447.3 433.9 413.1 390.8 370.9 357.6 352.1 351.8 351.7 351.7 351.5 351.3 351.1 350.8 350.6 350.3 350.0 349.8 349.5 349.2 349.0 348.9 348.8 348.6 348.6 345.8 346.1 346.3 346.6 346.6 346.7 347.0 347.6 348.4 349.3 350.0 350.5 350.8 352.0 353.1 353.6 354.0 354.1 353.6 352.9 352.6 351.2 348.2 343.8 338.2 331.7 325.4 318.8 311.2 304.0 297.1 291.0 285.7 281.5 278.4 276.5 275.9 275.8 275.7 275.6 275.5 275.4 275.3 275.1 275.1 275.0 258.4 227.1 202.4 198.4 201.2 206.8 215.3 225.9 237.7 249.8 260.9 269.9 275.1 282.0 283.0 283.2 282.8 281.7 280.3 279.0 278.3 277.2 276.2 275.9 275.5 275.0 274.8 274.6 274.7 275.0 275.2 275.4 275.7 276.2 276.9 277.7 278.3 278.8 279.0 279.4 279.9 280.1 279.9 279.3 278.8 278.0 277.0 273.7 269.6 265.0 255.7 248.1 241.6 241.9 243.3 245.4 249.5 253.4 257.1 257.9 259.1 261.0 263.1 265.1 267.1 267.7 268.4 269.3 270.6 271.8 273.1 273.5 274.7 276.9 280.6 283.9 286.5 286.0 285.1 283.5 282.9 282.3 281.8 280.8 279.8 279.1 278.7 278.4 278.3 278.0 277.7 277.5 277.3 276.7 275.6 273.3 269.3 263.0 254.8 246.8 239.8 235.0 231.7 229.6 230.6 233.1 236.8 242.1 248.7 259.4 270.5 280.9 286.6 288.3 288.8 288.8 289.5 290.3 291.1 291.8 292.3 292.5 292.6 292.6 292.5 292.3 292.1 292.1 292.1 292.2 292.3 292.2 291.9 291.7 291.6 291.5 291.5 291.8 292.2 292.4 292.6 292.6 292.5 292.3 291.9 291.7 291.5 291.3 291.0 290.9 290.7 290.0 289.9 290.2 290.8 291.2 291.5 292.7 294.8 297.6 299.4 301.9 304.9 310.4 313.7 315.5 319.8 322.7 324.7 326.9 327.5 327.2 326.6 326.1 325.5 321.6 319.1 317.5 312.9 309.0 305.6 304.0 302.4 300.9 299.8 299.5 300.4 300.9 302.2 304.5 305.9 307.9 311.1 312.5 314.8 318.6 320.5 322.5 324.5 324.4 324.1 323.6 321.6 319.6 317.7 314.4 311.9 310.8 309.7 309.1 309.3 309.7 310.2 310.5 
310.9 311.5 312.5 314.4 316.2 317.6 318.6 319.4 320.1 320.1 319.9 319.6 319.0 318.4 318.1 317.7 316.7 314.8 312.4 309.9 307.5 303.6 297.4 284.0 268.8 255.0 246.1 246.1 249.4 257.7 270.4 283.6 292.2 293.4 291.9 287.1 281.5 275.0 265.2 257.4 250.4 243.9 237.6 232.5 231.5 232.6 234.6 234.6 235.3 236.3 235.8 236.0 236.3 235.6 235.1 234.8 234.8 234.5 234.0 233.6 233.4 233.2 233.1 232.7 232.1 231.3 230.4 229.6 228.5 226.8 224.8 222.6 221.1 219.9 217.9 216.0 214.1 212.4 210.8 209.2 208.6 208.1 207.5 207.4 207.3 207.1 206.6 206.9 207.5 207.7 207.9 208.0 208.1 208.4 209.1 210.1 210.4 209.2 209.1 209.0 208.8 208.7 208.6 208.5 207.3 204.0 199.1 193.2 187.0 181.5 177.1 174.3 173.2 174.0 176.0 179.0 182.8 187.0 192.3 197.5 202.3 206.2 209.0 210.4 208.7 209.0 208.9 208.1 207.8 207.6 207.4 207.1 206.8 206.5 206.2 205.9 205.6 203.9 201.9 200.0 197.5 193.6 187.8 184.6 182.3 181.3 182.0 183.5 186.2 186.9 187.0 186.4 185.8 185.2 184.8 185.0 185.3 185.5 185.6 185.8 186.5 186.5 186.3 185.9 185.6 185.4 185.1 184.9 184.7 184.5 184.4 184.2 184.0 183.6 183.0 182.7 181.1 178.6 175.5 170.1 163.5 157.8 154.6 153.7 156.1 164.1 175.2 181.0 185.3 187.9 188.0 187.8 187.5 187.1 186.7 186.4 186.2 186.1 186.1 185.9 185.8 185.6 185.5 185.4 185.4 185.1 184.8 184.6 184.4 184.3 184.1 184.0 183.9 183.7 183.6 183.3 182.9 182.9 183.0 183.2 183.3 183.4 183.5 184.2 184.8 185.4 185.5 185.6 185.7 185.8 185.8 185.6 185.4 185.1 184.8 184.4 183.7 182.6 180.0 177.7 175.7 171.7 168.6 167.0 162.9 158.4 154.2 151.6 149.1 146.3 144.7 143.8 143.5 143.0 143.0 144.0 144.4 145.1 146.7 148.9 150.3 150.3 151.5 152.4 152.4 153.6 154.7 155.4 155.8 156.2 156.7 156.5 156.1 155.8 155.5 155.3 155.1 154.9 154.7 154.5 154.4 154.4 154.2 154.1 154.0 154.0 154.0 153.9 153.8 153.7 153.7 153.7 153.7 153.7 154.0 154.2 154.5 154.7 154.9 155.2 155.7 156.4 157.1 157.3 158.0 158.8 158.9 158.9 158.8 158.9 159.0 158.9 158.5 157.9 156.8 154.6 151.6 148.0 146.8 146.2 145.9 146.2 145.7 144.7 147.9 151.5 155.6 166.9 176.4 184.6 197.8 208.1 216.3 220.3 222.7 224.1 224.8 225.4 225.9 227.8 229.1 230.0 230.1 230.5 231.3 232.1 232.9 233.6 233.9 234.0 233.9 233.7 233.5 233.2 232.6 231.8 230.5 228.8 226.9 224.8 222.8 221.1 219.6 218.5 217.7 217.3 216.2 214.9 213.5 212.1 211.0 210.2 209.5 208.9 208.4 208.3 208.5 208.8 208.7 208.4 207.9 207.2 205.7 202.9 196.4 190.6 187.0 187.1 188.9 192.0 194.6 196.6 197.7 199.3 200.8 201.8 202.9 204.5 207.8 210.9 213.5 215.0 215.8 216.2 215.7 214.0 212.1 209.9 208.9 208.3 208.0 207.7 207.5 206.2 205.9 205.4 204.3 203.6 202.5 201.6 200.8 198.8 197.7 196.1 194.8 193.7 192.2 190.9 189.8 189.6 189.6 189.4 189.1 188.8 188.1 188.0 188.1 188.2 188.7 189.5 190.1 189.4 187.8 185.6 184.0 182.8 183.1 183.3 183.2 184.4 185.0 185.1 185.6 185.8 185.9 185.9 185.9 186.1 185.8 185.5 185.0 184.8 184.7 184.7 184.8 184.8 184.9 185.0 185.2 185.3 185.5 185.6 185.6 185.7 185.8 186.1 187.0 187.9 188.8 188.6 188.4 188.1 187.7 187.4 187.1 187.0 186.8 186.5 186.4 186.1 185.5 185.5 185.4 185.2 185.1 184.9 184.7 184.4 184.2 184.2 184.1 184.0 183.9 183.9 183.8 183.7 183.7 183.6 183.6 183.8 184.0 184.1 184.3 184.5 185.0 185.2 185.4 185.7 185.7 185.8 186.4 187.0 187.5 187.4 187.1 186.9 186.6 186.1 185.4 183.8 182.5 181.0 178.3 176.1 173.5 168.7 164.1 159.6 155.4 152.8 150.0 145.0 140.9 137.8 136.1 134.6 133.9 135.0 134.9 134.2 133.6 133.3 133.1 132.5 132.1 132.1 133.2 134.5 135.7 136.1 136.6 137.2 137.9 138.2 138.6 139.2 139.4 139.4 139.2 139.1 138.9 138.7 138.6 138.5 138.3 138.2 138.2 138.0 137.9 137.9 137.4 137.0 136.5 136.4 136.3 136.4 136.5 136.6 136.7 136.8 137.0 137.3 
138.1 138.9 139.8 140.3 141.2 142.6 143.7 144.5 144.9 145.0 145.2 145.5 145.6 145.4 144.7 145.0 144.9 144.2 143.3 142.5 141.7 144.5 149.6 158.5 164.9 172.2 180.6 191.3 201.5 209.6 214.4 217.8 220.4 223.4 226.2 228.4 230.6 231.9 232.1 232.5 233.3 234.6 235.3 235.7 235.7 233.8 231.3 228.6 221.8 213.0 202.8 198.8 196.8 197.0 197.3 197.6 197.7 197.6 198.2 201.1 204.7 207.9 209.4 211.3 213.0 213.8 213.9 213.4 211.8 210.8 210.0 209.5 209.1 208.8 209.1 209.0 208.6 208.4 208.3 208.1 208.0 207.7 207.5 207.2 206.9 206.6 206.3 206.2 206.1 206.2 206.3 206.5 206.7 207.0 207.2 207.4 207.7 208.0 208.2 208.3 208.4 210.4 212.0 213.5 213.0 212.1 210.9 210.7 210.4 210.2 209.8 209.4 209.0 208.6 208.4 208.1 207.8 207.2 206.6 206.1 205.2 204.1 202.9 201.0 198.7 196.5 194.8 193.6 191.8 189.9 188.1 187.0 185.6 184.0 183.4 182.8 182.3 183.4 184.4 185.2 184.8 184.5 184.5 184.4 184.7 185.4 185.6 185.7 185.7 185.9 186.1 186.3 186.5 186.7 186.8 186.9 186.8 186.6 186.3 185.9 185.5 184.4 183.5 183.1 182.7 182.2 181.5 181.6 181.8 182.2 182.5 182.9 183.4 184.0 184.6 185.1 185.2 185.1 184.8 184.6 184.4 184.2 184.0 183.8 183.8 183.8 183.7 183.7 183.7 183.6 183.6 183.6 183.5 183.5 183.5 183.4 183.4 183.3 183.3 183.3 183.3 183.3 183.5 183.6 183.9 184.1 184.4 185.0 185.8 186.2 186.3 186.4 186.6 186.8 186.9 186.9 186.9 186.8 186.7 186.6 186.4 186.4 186.4 186.1 185.9 185.7 185.1 184.3 183.5 182.9 182.1 181.3 181.0 180.6 180.3 180.5 181.1 181.9 182.8 184.5 186.8 189.5 192.0 194.3 198.2 201.0 203.1 204.4 205.3 206.0 206.5 207.1 207.7 207.6 207.5 207.4 207.1 207.0 207.1 206.9 206.7 206.5 206.6 206.7 206.7 206.8 206.8 206.9 206.9 206.5 205.9 204.1 202.6 201.4 202.4 203.7 205.3 209.0 213.1 217.0 221.8 225.9 228.5 230.1 231.2 232.2 233.1 233.9 234.4 235.2 235.7 235.7 235.5 235.3 235.0 234.3 233.4 232.5 231.6 230.9 230.6 230.7 230.8 231.0 231.3 231.5 231.7 231.9 232.3 232.8 233.0 233.1 233.1 233.0 232.9 232.9 232.9 232.8 232.7 232.6 232.5 232.5 232.5 232.4 232.4 232.4 232.4 232.4 232.4 232.4 232.5 232.6 232.5 231.7 228.5 223.8 218.9 215.6 214.4 216.8 219.5 222.0 223.3 225.7 229.0 232.7 227.6 215.5 200.3 188.0 179.4 176.5 177.6 180.1 184.1 189.3 195.6 202.5 209.7 216.7 223.1 228.4 232.1 233.8 234.0 233.9 233.8 233.6 233.3 233.0 232.8 232.7 232.7 233.3 235.2 238.9 243.8 249.5 255.9 262.4 268.5 273.7 277.6 279.8 277.1 277.2 277.3 277.4 277.6 277.8 278.0 278.2 278.5 278.5 278.4 278.3 278.1 277.8 277.5 277.2 276.9 276.6 276.3 275.9 275.6 275.5 275.4 275.3 275.4 275.8 276.3 276.8 277.4 277.9 278.1 278.5 279.3 280.2 280.8 280.9 280.7 280.5 280.3 280.0 279.7 279.6 279.4 278.8 276.4 274.6 272.2 266.4 258.6 249.6 237.2 232.5 230.4 232.0 233.4 234.7 235.1 236.1 237.1 238.2 238.7 239.4 240.7 242.6 244.7 246.7 248.4 249.6 250.0 250.9 252.3 254.0 255.8 257.5 258.7 259.8 261.1 263.6 269.2 276.1 279.4 282.4 284.6 284.5 283.8 282.7 281.7 281.1 280.5 279.2 278.3 277.8 277.4 276.8 276.1 275.4 275.2 275.3 275.8 276.2 276.4 276.7 277.2 277.7 277.8 277.8 277.8 277.7 277.6 277.5 277.5 277.4 277.3 277.2 277.1 277.0 277.0 277.0 276.9 276.9 276.9 277.0 277.3 277.5 277.7 277.6 277.5 277.3 277.2 277.1 277.0 276.9 276.9 276.9 277.0 277.2 277.5 277.7 278.0 278.3 278.7 278.9 278.8 278.5 278.2 277.9 277.6 277.4 277.2 276.9 275.6 273.0 269.6 264.7 258.1 250.5 245.1 243.3 244.3 247.3 252.5 258.7 263.7 266.5 269.2 271.2 272.5 273.4 274.0 274.1 274.4 274.7 274.9 275.0 275.2 275.9 277.0 278.4 280.6 281.5 282.2 283.0 283.7 284.5 285.6 287.0 288.4 288.9 288.5 287.4 284.6 282.7 281.6 281.0 280.3 279.7 279.3 278.3 277.1 276.7 275.9 275.0 274.2 273.7 273.5 273.5 273.6 273.7 
273.8 273.9 274.0 274.1 274.2 274.3 274.3 274.7 275.3 275.4 275.5 275.7 276.0 276.2 276.4 276.6 276.8 276.9 277.3 277.7 278.0 278.2 278.4 278.6 278.7 278.6 278.3 277.8 277.4 276.9 276.7 276.8 276.9 277.1 277.3 277.5 277.6 277.7 277.7 277.7 277.7 277.7 277.6 277.5 277.5 277.5 277.5 277.3 277.3 277.3 277.3 277.2 277.2 277.2 277.1 277.0 277.0 277.1 277.1 277.0 277.1 277.2 277.2 277.3 277.1 276.6 274.9 271.6 266.4 258.2 252.5 251.7 254.6 258.2 261.3 267.0 271.8 274.3 275.2 276.3 278.2 282.0 286.8 292.6 296.8 301.5 310.3 312.0 312.2 313.3 313.9 314.5 315.3 315.7 315.9 315.2 314.3 313.5 313.5 313.4 313.1 312.2 311.5 310.9 310.5 310.2 309.9 309.2 308.7 308.3 308.3 308.3 308.5 308.8 309.2 309.6 309.9 310.2 310.4 310.5 310.6 310.7 311.0 311.4 311.7 311.8 311.8 311.6 311.3 310.1 308.9 306.5 303.3 298.8 293.5 287.5 280.8 273.8 266.4 258.6 250.9 243.3 235.8 228.6 221.7 215.2 209.3 204.0 199.2 195.0 191.4 188.5 186.3 184.8 183.9 183.7 184.5 186.5 189.7 194.2 199.8 206.5 214.3 222.6 231.3 240.2 249.0 257.4 264.6 270.5 274.8 277.3 278.3 278.3 278.4 278.5 278.5 278.5 278.6 278.6 278.6 278.7 278.8 278.8 278.9 278.9 279.1 279.1 279.1 279.2 279.3 279.3 279.1 278.7 277.9 276.9 275.7 274.6 273.7 273.0 272.6 272.7 273.1 273.9 275.0 276.5 278.5 280.6 283.0 285.5 288.3 291.2 294.2 297.1 299.9 302.6 305.1 307.4 309.3 310.9 312.2 313.0 313.5 313.5 313.4 313.3 313.3 313.3 313.2 313.1 313.0 312.9 312.9 312.8 312.6 312.5 312.4 312.4 312.2 312.1 312.0 311.9 311.8 311.8 311.7 311.7 311.5 311.5 311.5 311.5 312.4 312.3 312.2 312.1 311.9 311.9 312.0 312.2 312.6 312.8 312.9 312.9 312.9 313.0 313.1 313.2 313.3 313.3 313.4 313.5 313.7 313.8 313.8 313.8 314.0 314.0 314.0 314.1 314.2 314.2 314.2 314.1 313.8 313.5 313.2 312.9 312.7 312.5 312.4 312.4 312.5 312.7 312.9 313.0 313.3 313.4 313.5 313.5 313.4 313.2 313.1 313.0 312.9 312.7 312.6 312.5 312.4 312.4 312.6 313.2 312.9 312.3 311.5 311.2 311.1 311.1 311.2 311.4 311.5 311.6 311.7 311.7 311.8 311.9 312.0 312.1 312.2 312.2 312.3 312.4 312.4 312.4 312.3 312.2 312.2 312.2 312.1 311.9 311.8 311.7 311.7 311.6 311.5 311.5 311.5 311.3 311.0 310.7 310.5 310.3 310.3 310.5 310.9 311.2 311.3 311.8 312.4 312.5 312.5 312.3 312.1 311.9 311.6 311.5 311.5 311.4 311.2 310.9 310.5 310.2 310.1 310.1 310.2 310.3 310.4 310.5 310.7 310.8 310.9 311.0 311.1 311.0 310.9 310.8 310.5 310.3 310.2 310.2 310.2 310.2 310.4 310.4 310.4 310.4 310.5 310.6 310.7 310.8 310.8 310.9 311.0 311.1 311.1 311.2 311.3 311.3 311.4 311.5 311.5 311.5 311.7 311.7 311.7 311.7 311.1 311.3 311.8 312.4 313.2 314.0 314.9 315.9 316.9 317.7 318.3 318.8 319.0 319.0 318.6 317.8 316.7 315.3 313.7 311.8 309.7 307.2 305.1 302.4 300.0 297.9 295.7 293.8 292.6 291.6 291.0 291.0 291.6 293.3 295.0 297.6 300.5 303.1 306.7 310.2 313.3 316.9 320.4 323.4 326.0 328.7 331.0 332.5 333.6 334.0 334.0 333.4 332.1 330.4 328.1 325.3 322.7 319.1 315.5 312.4 308.6 305.1 302.1 298.9 296.0 294.0 291.9 290.5 289.6 289.3 289.3 289.8 290.8 292.5 294.7 296.9 299.7 303.1 306.1 309.9 313.7 316.9 320.5 324.0 326.8 329.4 331.7 333.3 334.5 335.0 334.8 334.1 332.7 331.2 329.1 326.1 322.8 319.7 316.0 312.2 308.8 305.1 301.6 298.8 295.8 293.3 291.5 290.0 288.8 288.5 288.5 289.1 290.3 292.0 294.2 296.6 299.6 303.0 306.1 309.5 313.4 317.3 320.6 324.2 327.5 330.0 332.4 334.2 335.2 335.8 335.8 335.0 333.7 331.7 329.4 326.5 323.0 319.9 315.9 312.0 308.6 304.7 301.1 298.1 295.5 292.8 290.8 289.3 288.2 287.8 287.8 288.5 289.6 291.1 293.5 296.2 299.0 302.4 306.3 309.7 313.7 317.7 321.2 324.8 328.3 330.8 333.2 335.0 336.2 336.7 336.7 335.8 334.6 332.6 330.0 327.2 323.8 319.9 
316.4 312.3 308.3 304.7 301.1 297.6 294.9 292.1 290.0 288.6 287.5 287.0 287.1 287.8 289.1 290.6 292.6 295.5 298.8 301.9 305.8 309.9 313.5 317.7 321.7 324.9 328.6 331.5 333.8 335.8 336.9 337.5 337.4 336.6 335.2 333.1 330.4 327.5 324.0 319.9 316.4 312.7 308.5 304.4 301.1 297.4 294.2 291.8 289.5 287.8 286.8 286.3 286.3 287.0 288.2 290.1 292.3 295.2 298.6 301.7 305.9 310.1 313.8 318.1 322.3 325.7 328.7 331.9 334.6 336.4 337.8 338.5 338.3 337.5 336.0 334.0 331.2 327.9 324.7 320.6 316.4 312.6 308.2 304.0 300.5 296.7 293.5 291.1 288.7 287.0 286.0 285.5 285.6 286.3 287.5 289.3 291.8 294.3 297.9 301.7 305.3 309.6 314.0 317.8 322.3 326.4 329.4 332.8 335.6 337.3 338.7 339.3 339.1 338.3 336.6 334.6 332.1 328.8 324.9 321.4 316.8 312.4 308.4 304.1 300.0 296.7 293.2 290.3 288.3 287.1 287.6 288.6 290.3 292.5 294.7 297.5 300.4 302.8 305.4 307.7 309.3 310.6 311.6 311.8 311.7 304.7 303.5 303.1 303.4 303.8 304.3 304.8 305.4 306.1 306.7 306.7 306.7 306.7 306.7 306.7", - "input_type": "phoneme", - "offset": 70.555 + "f0_timestep": "0.005" }, { + "offset": 96.629, "text": "AP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", - "ph_seq": "AP d u d ao n an h ua d i j i p ian d ao ch u ch eng ch u sh an q ian d i y i g uan d ang sh ir l i l ian SP j i d e y van x ing n a t ian d ou l i q ian x i y v l iang s an d ian b ei ei sh ang f u m ing j ian SP", - "note_seq": "rest D#3 D#3 F3 F3 F#3 F#3 F3 F3 F#3 F#3 F3 F3 F#3 F#3 F#3 F#3 G#3 G#3 F#3 F#3 F#3 F#3 G#3 G#3 F#3 F#3 F3 F3 F3 F3 F3 F3 F#3 F#3 F3 F3 C#3 C#3 D#3 D#3 rest A#2 A#2 C#3 C#3 D#3 D#3 C#3 C#3 D#3 D#3 F3 F3 F#3 F#3 F#3 F#3 F#3 F#3 G#3 G#3 G#3 G#3 G#3 G#3 G#3 G#3 B3 B3 A#3 A#3 B3 A#3 A#3 A#3 A#3 G#3 G#3 A#3 A#3 rest", - "note_dur_seq": "0.6 0.181 0.181 0.18 0.18 0.3619999 0.3619999 0.3610001 0.3610001 0.362 0.362 0.1800001 0.1800001 0.362 0.362 0.181 0.181 0.1799998 0.1799998 0.3620002 0.3620002 0.1799998 0.1799998 0.181 0.181 0.181 0.181 0.181 0.181 0.1800003 0.1800003 0.1809998 0.1809998 0.1810002 0.1810002 0.3609996 0.3609996 0.1810002 0.1810002 0.9039998 0.9039998 0.3610001 0.1810002 0.1810002 0.1799998 0.1799998 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.1810002 0.1810002 0.3610001 0.3610001 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.1809998 0.1809998 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.1810007 0.1810007 0.1809998 0.1800003 0.1800003 0.3619995 0.3619995 0.3610001 0.3610001 0.7229996 0.7229996 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.554819 0.045181 0.150879 0.030121 0.134819 0.045181 0.291214 0.070786 0.306786 0.054214 0.316819 0.045181 0.119758 0.060242 0.331879 0.030121 0.085167 0.095833 0.097351 0.082649 0.250993 0.111008 0.10691 0.07309 0.111725 0.069275 0.150879 0.030121 0.129791 0.051208 0.134819 0.045181 0.150879 0.030121 0.111725 0.069275 0.315818 0.045181 0.090638 0.090363 0.904 0.244606 0.116394 0.131302 0.049698 0.104698 0.075302 0.22194 0.14006 0.261604 0.099396 0.267121 0.094879 0.150879 0.030121 0.315819 0.045181 0.110214 0.070786 0.110725 0.069275 0.135818 0.045181 0.11412 0.066879 0.110214 0.070786 0.151382 0.028618 0.150879 0.030121 0.181001 0.060516 0.120483 0.119759 0.060242 0.282181 0.079819 0.22094 0.14006 0.723 0.4", - "f0_timestep": "0.005", + "ph_seq": "AP d u d ao n an h ua d i j i p ian d ao ch u ch eng ch u sh an q ian d i y i g uan d ang sh ir l i l ian SP j i d e y van x ing n a t 
ian d ou l i q ian x i y v l iang s an d ian b ei sh ang f u m ing j ian SP", + "ph_dur": "0.5548 0.0452 0.1509 0.0301 0.1348 0.0452 0.2912 0.0708 0.3068 0.0542 0.3168 0.0452 0.1198 0.0602 0.3319 0.0301 0.0852 0.0958 0.0974 0.0826 0.251 0.111 0.1069 0.0731 0.1117 0.0693 0.1509 0.0301 0.1298 0.0512 0.1348 0.0452 0.1509 0.0301 0.1117 0.0693 0.3158 0.0452 0.0906 0.0904 0.904 0.2446 0.1164 0.1313 0.0497 0.1047 0.0753 0.2219 0.1401 0.2616 0.0994 0.2671 0.0949 0.1509 0.0301 0.3158 0.0452 0.1102 0.0708 0.1107 0.0693 0.1358 0.0452 0.1141 0.0669 0.1102 0.0708 0.1514 0.0286 0.1509 0.0301 0.2415 0.1205 0.1198 0.0602 0.2822 0.0798 0.2209 0.1401 0.723 0.4", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest D#3 F3 F#3 F3 F#3 F3 F#3 F#3 G#3 F#3 F#3 G#3 F#3 F3 F3 F3 F#3 F3 C#3 D#3 rest A#2 C#3 D#3 C#3 D#3 F3 F#3 F#3 F#3 G#3 G#3 G#3 G#3 B3 A#3 B3 A#3 A#3 G#3 A#3 rest", + "note_dur": "0.6 0.181 0.18 0.362 0.361 0.362 0.18 0.362 0.181 0.18 0.362 0.18 0.181 0.181 0.181 0.18 0.181 0.181 0.361 0.181 0.904 0.361 0.181 0.18 0.362 0.361 0.362 0.181 0.361 0.181 0.18 0.181 0.181 0.181 0.18 0.181 0.181 0.181 0.18 0.362 0.361 0.723 0.4", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0", "f0_seq": "133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.4 133.3 133.0 132.6 132.0 131.2 130.3 129.2 127.9 126.6 125.2 123.6 122.0 120.3 118.5 116.7 114.9 113.1 111.3 109.5 107.7 106.0 104.3 102.7 101.0 99.6 98.2 96.8 95.6 94.5 93.5 92.6 91.8 91.2 90.6 90.1 89.7 89.5 89.3 89.4 89.6 90.2 91.1 92.3 93.8 95.6 97.6 99.9 102.6 105.4 108.5 111.8 115.2 118.9 122.5 126.2 129.9 133.5 137.1 140.4 143.5 146.2 148.6 150.6 152.1 153.2 153.8 156.5 157.1 157.5 158.0 158.6 159.3 160.0 160.5 160.1 159.6 158.9 157.9 155.4 150.8 145.0 140.3 136.8 137.8 140.8 145.4 151.9 159.1 166.5 172.6 176.1 176.4 175.9 175.7 176.0 175.7 175.2 174.6 174.3 174.2 174.3 174.4 174.4 174.4 174.3 174.2 174.2 174.0 173.8 173.8 173.5 173.1 172.9 172.0 171.1 171.0 170.8 170.6 170.7 171.2 171.7 172.3 173.2 174.4 174.8 174.8 174.8 174.7 174.6 174.6 174.5 174.4 174.3 174.2 174.1 174.0 173.9 173.7 173.6 173.5 173.5 173.4 173.3 173.3 173.2 173.2 173.5 174.2 175.2 176.3 177.6 178.9 179.9 180.5 182.3 182.8 183.2 183.7 184.3 184.8 185.9 186.4 186.7 187.0 187.2 187.4 187.2 187.0 186.8 186.5 185.9 185.2 182.7 179.1 174.8 171.0 168.6 167.1 165.5 164.8 164.5 165.1 165.7 166.2 166.2 166.1 165.7 165.3 164.9 164.5 163.9 163.6 163.5 163.7 163.8 163.6 163.6 164.2 165.5 168.3 170.4 171.4 172.8 173.7 173.8 174.1 174.2 174.0 173.8 173.5 173.3 173.1 172.9 172.7 172.6 172.5 172.4 172.6 172.8 172.9 172.9 172.9 173.0 173.1 173.1 173.1 173.2 173.2 173.3 173.3 173.3 173.4 173.5 173.6 173.9 174.4 174.9 175.3 175.5 175.7 176.0 176.1 176.2 176.1 175.9 175.8 175.7 175.6 175.4 174.9 174.5 174.1 173.8 171.4 168.4 164.8 159.8 155.3 153.5 152.3 151.6 151.1 150.1 149.2 150.5 152.7 156.3 163.7 170.1 175.5 179.6 183.1 185.7 185.8 185.9 185.9 185.3 185.0 184.7 184.2 183.9 183.6 183.2 182.4 181.5 181.1 180.6 180.1 180.4 180.5 180.7 181.0 181.9 183.0 183.4 183.9 184.4 185.0 185.3 185.3 185.2 185.2 185.1 185.0 184.8 184.7 184.5 184.3 184.1 
184.0 183.9 183.7 183.7 183.6 183.4 183.5 183.6 183.9 183.9 183.6 182.7 181.9 180.1 176.7 170.0 163.2 156.2 154.4 153.7 154.7 155.8 157.7 160.6 163.7 166.1 167.3 169.1 171.7 175.5 178.5 180.6 181.7 181.3 180.3 178.8 177.5 176.6 176.3 176.1 175.9 175.8 175.6 175.4 175.5 175.5 175.1 174.2 172.7 170.3 165.3 158.6 152.2 148.4 146.9 146.4 147.3 149.4 151.4 152.4 153.1 154.2 157.7 162.6 167.5 170.7 176.0 181.5 185.4 188.0 189.8 189.7 189.0 188.2 187.6 186.4 185.2 184.9 184.1 183.2 183.0 182.6 182.3 182.0 181.8 181.8 182.0 182.2 182.4 182.4 182.6 182.8 182.8 183.2 183.8 184.0 184.2 184.5 184.5 184.6 184.7 184.8 185.0 185.2 185.4 185.5 185.6 185.7 185.9 186.3 186.3 186.2 186.2 186.1 186.0 186.0 185.9 185.8 185.7 185.7 185.7 185.6 185.3 184.9 184.4 183.8 183.4 183.0 182.7 182.3 181.5 179.1 175.9 171.5 168.8 166.0 163.2 162.8 163.9 167.5 175.9 182.8 187.3 188.1 188.1 186.8 186.8 186.7 186.6 186.4 186.1 186.0 185.7 185.5 185.3 183.5 183.6 183.8 183.9 184.1 184.4 184.7 184.9 185.0 185.2 185.2 184.6 183.1 181.0 178.4 175.6 173.0 170.9 169.7 169.1 169.5 170.5 172.1 174.2 176.8 179.9 183.4 187.1 190.9 194.6 198.1 201.3 203.9 205.9 207.2 207.9 207.9 208.0 208.0 208.0 208.1 208.1 208.2 208.3 208.4 208.4 208.5 208.5 208.5 208.3 207.7 206.3 204.2 201.5 198.7 195.7 192.6 190.0 187.6 185.9 184.7 184.2 187.0 187.8 188.2 188.4 188.2 187.9 187.7 187.3 186.9 186.4 186.0 185.6 185.0 184.6 184.6 184.6 184.6 184.6 184.6 184.6 184.6 184.6 184.6 184.6 184.8 185.0 185.1 185.3 185.4 185.6 185.9 186.2 186.4 186.6 186.8 186.9 186.8 186.6 186.3 186.1 185.6 184.6 183.8 182.6 180.6 176.6 172.7 168.9 165.5 162.9 161.5 162.3 163.6 165.4 166.6 167.8 169.0 169.8 170.7 172.3 173.8 175.2 176.4 178.6 180.3 181.0 182.7 184.6 186.3 187.2 188.1 189.0 188.5 187.9 187.4 187.3 187.2 187.0 186.7 186.5 186.2 186.1 185.9 185.7 183.9 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 182.6 179.4 176.1 173.7 173.5 174.1 175.3 176.8 179.0 181.7 184.7 187.9 191.2 194.5 197.7 200.5 202.9 204.8 206.1 206.6 206.9 207.6 208.1 207.9 207.4 206.8 206.0 203.4 199.6 195.8 194.4 195.4 199.7 203.0 205.6 205.5 204.8 203.6 204.0 203.8 203.0 201.6 199.1 196.0 190.7 187.4 185.2 184.5 184.1 183.9 183.5 183.4 183.5 183.4 183.3 183.4 183.5 183.9 185.0 185.2 185.4 185.6 186.1 186.5 186.8 186.8 186.9 187.0 186.4 185.6 184.7 182.4 179.5 175.8 173.5 172.1 172.2 174.3 177.2 181.4 182.3 182.4 181.1 179.6 178.1 177.0 176.2 175.6 175.4 174.9 174.4 174.0 173.6 173.4 173.4 173.6 173.8 174.0 174.2 174.4 174.7 175.2 175.7 176.0 176.4 176.9 177.5 178.0 178.4 178.3 178.2 178.2 178.0 177.8 177.7 177.6 177.2 176.7 176.0 175.8 175.8 175.7 175.7 175.6 175.5 175.4 175.3 175.2 175.2 175.2 175.2 175.1 175.0 174.8 174.6 174.5 174.3 174.1 173.9 173.6 171.9 169.3 166.8 162.8 158.3 155.6 154.4 154.7 156.5 161.5 168.0 171.6 174.6 176.5 176.0 175.6 175.3 175.0 175.0 175.0 175.2 175.3 175.5 175.6 175.6 175.5 175.4 175.2 175.1 175.1 175.0 174.8 174.8 174.6 174.3 173.9 173.7 173.6 173.6 173.1 172.3 168.6 165.2 162.5 166.4 171.9 180.1 183.7 186.2 187.0 187.0 186.7 186.3 185.3 184.5 184.1 184.5 185.0 185.4 185.3 185.1 184.7 181.9 178.5 174.1 166.5 161.0 158.3 160.7 165.1 170.5 176.2 180.1 181.2 182.4 183.6 184.8 187.0 189.0 190.3 193.6 196.5 197.8 198.2 197.4 195.1 192.9 190.9 189.2 186.7 184.8 184.3 183.7 182.9 182.0 181.5 180.9 180.1 179.1 178.2 177.8 177.3 176.7 176.0 175.5 175.1 175.1 175.1 175.0 174.9 174.8 174.7 174.6 174.6 174.5 174.4 174.4 174.3 174.3 174.4 174.4 174.5 174.6 174.7 174.9 174.9 174.8 174.9 174.9 174.8 174.7 174.5 174.3 174.0 173.8 173.6 
173.3 172.7 171.7 169.9 168.2 166.5 163.8 161.7 159.9 155.5 152.1 149.4 147.0 145.4 144.4 144.1 143.5 142.8 141.8 140.7 139.6 138.8 138.5 138.5 138.0 138.1 138.7 138.3 138.2 138.4 138.3 139.0 141.0 141.3 141.5 141.6 141.7 141.6 141.2 140.2 139.5 139.2 139.0 138.1 136.4 135.1 134.3 134.5 134.8 134.9 134.9 135.6 136.3 137.2 138.0 138.8 139.6 141.3 142.8 143.7 144.7 145.6 146.2 148.0 149.6 150.4 151.9 153.0 153.7 153.8 153.8 153.7 153.6 153.5 153.5 153.0 152.1 151.2 150.4 149.7 149.0 148.0 147.1 146.9 146.7 146.4 146.2 146.2 146.3 146.8 147.2 147.7 148.8 149.3 149.9 151.4 152.4 153.3 154.3 155.7 156.9 157.4 158.3 159.2 159.8 160.1 160.2 159.9 159.7 159.4 159.3 158.4 157.2 156.5 155.0 153.1 151.2 150.0 149.2 148.3 147.9 147.8 148.0 148.3 148.8 149.6 150.8 152.5 154.2 155.6 156.8 158.1 159.3 160.3 161.0 161.7 162.4 162.2 162.0 161.8 160.8 159.9 159.0 157.8 156.0 153.9 151.4 149.9 148.9 147.0 145.3 143.6 142.7 142.4 142.5 143.0 143.8 145.1 146.4 147.3 147.9 149.4 151.3 153.9 155.9 157.5 158.5 159.6 161.2 163.5 164.2 164.6 164.4 164.2 163.8 163.2 162.3 160.9 158.8 157.0 155.5 154.7 153.2 151.3 149.0 147.7 146.6 145.4 144.9 144.9 145.4 145.9 146.5 147.3 149.4 151.7 153.7 155.2 156.7 158.8 160.7 162.0 162.6 163.5 164.0 163.7 163.2 162.5 161.4 159.6 157.8 156.5 154.3 151.9 150.3 148.5 146.9 145.7 145.4 145.3 145.5 146.0 146.7 148.0 150.0 152.1 154.4 156.1 157.5 158.7 159.2 159.5 159.3 159.0 158.6 157.5 156.4 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.6 155.5 155.4 155.3 155.1 154.8 154.5 153.9 153.3 152.2 150.8 149.2 146.5 143.5 140.4 136.3 132.6 129.2 126.0 123.5 121.5 120.0 119.0 118.2 117.8 117.4 117.1 116.9 116.8 116.7 120.7 120.3 119.4 118.1 116.2 114.0 111.5 108.7 105.8 102.9 100.0 97.2 94.7 92.4 90.4 88.7 87.3 86.3 85.8 85.6 85.8 86.3 87.2 88.4 89.9 91.6 93.7 95.9 98.3 100.9 103.4 106.0 108.4 110.7 112.7 114.5 115.9 116.9 117.3 119.6 120.1 120.8 121.1 121.2 121.3 121.4 121.6 121.9 121.2 119.6 115.7 114.3 114.3 117.2 120.0 122.8 125.8 129.5 133.2 135.8 136.9 137.7 138.2 138.4 138.6 139.2 139.7 140.1 140.5 140.8 140.9 140.1 139.4 139.1 139.0 138.8 138.7 138.7 138.2 137.6 137.6 137.4 137.2 137.2 137.1 136.8 135.9 135.4 135.3 136.1 137.4 139.4 143.8 148.7 153.4 156.0 157.2 157.6 157.4 157.2 157.1 156.4 156.0 155.9 155.9 155.8 155.6 156.1 156.9 157.9 158.1 158.0 157.7 157.5 157.1 156.6 157.3 156.8 155.7 154.4 153.7 153.9 154.1 154.4 154.8 154.9 155.1 155.7 155.9 157.0 159.2 160.4 161.0 160.8 159.8 158.5 156.8 154.1 151.4 148.7 147.8 146.7 145.1 143.0 142.1 143.2 143.6 143.6 143.4 143.0 142.7 142.3 142.5 143.3 144.5 144.8 144.7 144.7 144.4 144.3 144.6 145.1 145.4 145.3 143.8 141.7 138.9 136.9 136.8 139.6 140.5 140.7 139.4 138.4 137.4 136.5 136.1 135.8 135.7 135.7 135.6 135.6 135.7 135.9 136.2 136.3 136.6 137.1 137.5 137.8 138.4 138.5 138.5 138.3 138.2 138.1 137.9 137.8 137.7 137.5 137.6 137.9 138.5 138.8 139.1 139.4 139.8 140.2 140.3 140.5 140.7 140.7 140.7 140.6 140.5 140.4 140.4 140.3 140.1 139.8 139.7 140.1 140.9 141.3 140.7 139.6 137.1 136.4 137.1 137.9 139.1 140.4 141.6 142.7 143.7 145.0 146.6 148.5 150.7 153.1 155.5 158.2 159.5 159.9 159.6 159.1 158.1 158.8 159.4 159.9 159.9 159.9 159.8 159.0 158.3 157.8 157.3 156.6 155.7 154.8 154.5 155.1 155.8 155.9 155.0 154.3 153.9 153.9 154.0 154.1 154.2 154.3 154.3 154.4 154.5 154.5 154.6 155.0 155.2 155.4 155.5 155.8 156.1 156.5 156.8 157.1 156.9 156.6 156.3 156.2 155.2 152.9 149.0 146.1 145.2 145.8 147.0 149.8 151.1 152.1 153.4 154.4 155.5 157.1 158.8 160.5 162.4 163.3 164.1 
165.2 167.7 170.4 172.1 172.8 173.3 173.9 173.2 172.2 171.2 171.0 171.1 171.7 172.3 172.8 173.4 173.9 174.3 174.6 174.8 174.9 175.0 175.0 174.9 175.1 174.9 174.6 174.2 173.2 171.7 169.0 165.4 161.7 160.0 160.2 162.3 167.3 173.0 179.3 183.7 186.2 187.3 187.8 188.3 188.9 189.4 189.7 190.1 189.7 189.2 188.7 186.7 185.5 184.8 183.0 181.2 179.4 178.8 177.7 176.3 176.4 176.6 176.9 178.2 179.4 180.3 181.8 183.5 185.5 186.1 186.9 188.0 188.4 188.7 189.0 188.2 187.5 186.9 186.0 184.8 183.4 182.7 181.7 180.5 180.1 179.6 179.1 179.1 179.4 179.9 180.3 181.0 182.1 182.9 183.6 184.2 184.8 185.1 184.8 184.5 184.1 183.6 182.8 182.2 182.0 182.1 182.4 182.7 182.9 183.2 183.6 184.8 185.9 185.9 185.7 185.6 185.5 185.3 185.2 185.2 185.4 185.6 185.8 185.6 185.3 184.4 181.7 177.5 169.5 164.6 161.3 161.6 163.8 166.8 171.7 174.5 176.7 178.5 181.1 184.0 185.5 188.4 191.8 193.4 195.8 198.4 199.6 199.2 196.9 191.8 188.1 185.8 185.1 184.6 184.5 184.6 184.8 185.0 185.2 185.2 185.2 185.0 184.8 184.6 182.2 179.2 175.8 169.6 165.5 162.9 163.3 164.7 166.7 171.5 176.2 180.7 185.3 190.2 195.3 198.4 202.5 207.4 211.0 212.7 213.0 211.9 211.3 211.4 210.3 209.0 207.5 207.3 206.9 206.2 206.3 206.2 205.7 205.6 205.5 205.4 205.2 205.3 205.5 205.7 206.0 206.3 207.2 207.9 208.3 208.7 209.1 209.4 210.4 211.3 211.6 211.5 211.3 211.1 210.8 210.5 210.2 210.1 209.9 209.5 208.8 208.3 208.2 208.0 207.8 207.6 207.5 207.4 207.4 207.5 207.6 207.6 207.7 207.8 207.9 208.0 208.0 208.0 207.8 207.6 207.2 206.9 206.7 206.5 206.5 206.6 206.8 207.0 207.2 207.4 207.7 208.1 208.4 208.6 208.6 208.1 208.0 207.8 207.5 207.4 207.3 207.1 206.9 206.6 206.3 206.0 205.7 205.4 205.1 204.6 203.0 198.2 191.7 186.3 184.8 187.0 194.2 201.1 206.7 207.8 208.6 209.1 209.4 210.4 211.6 215.3 217.5 218.7 217.6 215.4 212.5 210.5 208.9 207.5 207.2 206.9 206.7 206.6 206.6 206.6 206.6 206.5 206.5 206.5 206.6 206.7 207.0 207.4 207.9 208.5 209.1 209.8 210.1 210.5 210.9 211.6 211.1 208.9 204.0 202.0 204.1 213.7 223.4 232.8 237.3 240.7 242.4 243.6 244.8 246.2 246.9 247.3 247.4 247.1 246.9 246.7 246.9 247.0 247.0 246.7 246.6 246.8 246.6 246.4 246.3 246.1 245.7 245.2 244.4 243.7 243.1 241.1 237.9 233.3 226.2 220.5 217.8 218.2 220.7 228.6 233.8 237.9 240.0 240.0 239.3 238.2 237.3 236.4 235.2 234.8 234.5 234.1 233.8 233.4 233.1 232.9 232.6 232.4 232.3 232.1 232.1 232.1 232.2 232.3 232.0 231.4 231.1 231.3 231.9 232.6 233.5 234.4 235.2 236.0 237.3 240.4 243.0 245.2 247.2 248.6 249.8 250.9 251.6 251.8 251.7 251.6 251.2 249.5 247.1 244.2 237.6 233.0 229.7 227.6 227.0 227.5 228.5 230.4 232.9 234.0 234.6 235.0 235.3 235.8 236.6 237.4 238.5 239.9 241.5 243.4 245.5 246.4 247.1 247.7 245.3 242.8 239.9 238.5 237.2 235.9 235.4 234.8 234.2 234.0 234.0 234.2 233.7 233.3 232.7 232.4 231.4 229.3 225.2 219.2 210.8 206.0 202.3 200.1 198.7 198.5 199.8 202.9 207.7 214.4 217.7 221.2 226.2 231.9 236.1 237.8 241.0 244.0 246.0 246.6 246.7 246.5 246.4 246.2 246.1 245.8 245.0 243.8 242.3 240.7 239.2 238.5 237.9 237.1 235.8 234.5 233.3 233.0 232.9 232.6 232.4 232.3 232.2 232.2 232.3 232.5 232.6 232.7 232.9 232.9 233.0 233.5 233.8 233.9 233.8 233.6 233.4 233.2 233.0 232.8 232.6 232.0 230.9 229.4 227.2 224.1 219.1 215.6 213.1 210.5 207.4 204.6 203.6 202.0 199.9 197.8 196.2 195.2 194.9 194.6 194.4 193.9 193.8 194.1 195.4 196.8 198.3 201.9 203.7 204.2 206.0 206.5 206.1 206.9 207.6 208.4 208.6 208.8 208.9 209.2 209.1 208.7 208.2 207.7 207.2 206.9 206.3 205.4 204.9 204.1 203.0 202.5 202.0 201.5 201.9 202.2 202.6 202.9 203.7 205.0 206.2 207.1 207.5 208.2 208.9 209.6 210.3 210.7 210.8 211.0 211.2 211.4 211.2 210.6 
209.7 206.8 202.9 197.9 188.1 180.1 176.0 175.1 175.8 177.9 182.3 186.8 190.5 192.6 194.6 197.3 200.8 203.9 206.0 208.4 211.0 213.7 216.2 219.1 223.3 227.2 233.2 230.8 226.7 221.4 216.1 211.9 209.2 207.9 207.9 207.9 207.9 207.9 207.9 207.8 207.8 207.8 207.8 207.7 207.7 207.7 207.7 207.6 207.5 207.5 207.4 207.4 207.4 207.4 207.4 207.3 207.3 207.3 207.3 207.3 207.4 207.9 208.7 209.9 211.6 213.4 215.5 217.8 220.0 222.0 224.1 226.2 228.0 229.4 230.5 231.1 232.1 234.8 235.8 237.6 239.9 241.9 243.3 244.4 243.9 243.3 242.6 242.2 240.8 238.8 236.9 233.6 229.3 226.8 224.4 222.2 221.1 220.1 219.0 219.5 220.0 220.5 222.4 224.6 227.2 229.6 232.1 234.7 236.9 239.2 241.6 243.2 244.0 243.7 243.2 242.6 242.1 239.6 237.6 236.3 233.3 230.6 229.1 227.1 225.4 224.5 223.5 223.0 223.4 224.0 224.6 225.0 227.6 230.4 232.9 235.1 237.1 238.5 240.5 242.2 243.1 244.3 245.0 244.7 244.4 243.8 242.3 240.3 238.3 237.2 234.6 231.9 230.5 228.9 227.4 226.9 227.3 227.8 228.2 229.2 230.4 231.7 233.7 235.6 236.6 236.5 236.1 235.7 233.9 231.7 229.7 229.0 229.1 229.3 229.5 229.8 230.0 230.4 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7 230.7", - "input_type": "phoneme", - "offset": 96.629 + "f0_timestep": "0.005" }, { + "offset": 108.362, "text": "AP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", "ph_seq": "AP x iu sh uo r en j ian x ing d ao n an l u y ou b u p ing b ian t ian l u t ian z u sh ang q ing t ian SP y i n ian x iao y ao zh ir j ian SP r uo w ei q ian ch eng y van b u sh ir zh en sh ao n ian SP sh eng s i0 j ian sh ui q ian p o j vn zh en q ian y ou j i r en b ing j ian SP", - "note_seq": "rest F#3 F#3 G#3 G#3 A#3 A#3 G#3 G#3 A#3 A#3 G#3 G#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 D#4 D#4 A#3 A#3 G#3 G#3 G#3 G#3 A#3 A#3 G#3 G#3 rest D#3 D#3 F3 F3 F#3 F#3 F3 F3 F#3 F#3 A#3 A#3 rest C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 F#3 F#3 F3 F3 F#3 F#3 F3 F3 F#3 F#3 D#3 D#3 rest D#3 D#3 D#3 D#3 A#3 A#3 F#3 F#3 F3 F3 D#3 D#3 C#3 C#3 D#3 D#3 A#2 A#2 G#2 G#2 F#2 F#2 G#2 G#2 A#2 A#2 A#2 A#2 rest", - "note_dur_seq": "0.433 0.181 0.181 0.181 0.181 0.361 0.361 0.362 0.362 0.3609999 0.3609999 0.181 0.181 0.3610001 0.3610001 0.181 0.181 0.181 0.181 0.1800001 0.1800001 0.181 0.181 0.181 0.181 0.1800001 0.1800001 0.181 0.181 0.3619998 0.3619998 0.1800003 0.1800003 0.362 0.362 0.3610001 0.3610001 0.9039998 0.9039998 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.362 0.362 0.1810002 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.3619995 0.3619995 0.1800003 0.1800003 0.3620005 0.3620005 0.1799994 0.1799994 0.3620005 0.3620005 0.5419998 0.5419998 0.9040003 0.9040003 0.1799994 0.3620005 0.3620005 0.3610001 0.3610001 0.3619995 0.3619995 0.3610001 0.3610001 0.3620005 0.3620005 0.3610001 0.3610001 0.3619995 0.3619995 0.3610001 0.3610001 0.3610001 0.3610001 0.3620005 0.3620005 0.3610001 0.3610001 0.3619995 0.3619995 0.3610001 0.3610001 0.7229996 0.7229996 0.108", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.29387 0.13913 0.111725 0.069275 0.135819 0.045181 0.266121 0.094879 0.22194 0.14006 0.321846 0.039154 0.11026 0.07074 0.321846 0.039154 0.135819 0.045181 0.150879 0.030121 0.149879 0.030121 0.150879 0.030121 0.150879 0.030121 0.134819 0.045181 0.100777 0.080223 0.286698 0.075302 0.08847 0.09153 0.202935 0.159065 0.25106 0.10994 0.904 0.330879 0.030121 0.120758 0.060242 0.111725 0.069275 0.303444 0.057556 0.120758 0.060242 0.255577 0.105423 0.362 0.105698 0.075302 0.330879 0.030121 0.094597 0.086403 0.229973 0.131027 0.291214 0.070786 0.149879 0.030121 0.217423 0.144577 0.110724 0.069275 0.22194 0.14006 0.436577 0.105423 0.904 0.059516 0.120483 0.22194 0.14006 0.276665 0.084335 0.223442 0.138557 0.22094 0.14006 0.288453 0.073547 0.255577 0.105423 0.282181 0.079819 0.22094 0.14006 0.302261 0.058739 0.22194 0.14006 0.287209 0.073792 0.327362 0.034637 0.275154 0.085846 0.723 0.108", - "f0_timestep": "0.005", + "ph_dur": "0.2939 0.1391 0.1117 0.0693 0.1358 0.0452 0.2661 0.0949 0.2219 0.1401 0.3218 0.0392 0.1103 0.0707 0.3218 0.0392 0.1358 0.0452 0.1509 0.0301 0.1499 0.0301 0.1509 0.0301 0.1509 0.0301 0.1348 0.0452 0.1008 0.0802 0.2867 0.0753 0.0885 0.0915 0.2029 0.1591 0.2511 0.1099 0.904 0.3309 0.0301 0.1208 0.0602 0.1117 0.0693 0.3034 0.0576 0.1208 0.0602 0.2556 0.1054 0.362 0.1057 0.0753 0.3309 0.0301 0.0946 0.0864 0.23 0.131 0.2912 0.0708 0.1499 0.0301 0.2174 0.1446 0.1107 0.0693 0.2219 0.1401 0.4366 0.1054 0.904 0.0595 0.1205 0.2219 0.1401 0.2767 0.0843 0.2234 0.1386 0.2209 0.1401 0.2885 0.0735 0.2556 0.1054 0.2822 0.0798 0.2209 0.1401 0.3023 0.0587 0.2219 0.1401 0.2872 0.0738 0.3274 0.0346 0.2752 0.0858 0.723 0.108", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest F#3 G#3 A#3 G#3 A#3 G#3 A#3 A#3 A#3 A#3 A#3 A#3 A#3 D#4 A#3 G#3 G#3 A#3 G#3 rest D#3 F3 F#3 F3 F#3 A#3 rest C#4 A#3 G#3 F#3 F#3 F3 F#3 F3 F#3 D#3 rest D#3 D#3 A#3 F#3 F3 D#3 C#3 D#3 A#2 G#2 F#2 G#2 A#2 A#2 rest", + "note_dur": "0.433 0.181 0.181 0.361 0.362 0.361 0.181 0.361 0.181 0.181 0.18 0.181 0.181 0.18 0.181 0.362 0.18 0.362 0.361 0.904 0.361 0.181 0.181 0.361 0.181 0.361 0.362 0.181 0.361 0.181 0.361 0.362 0.18 0.362 0.18 0.362 0.542 0.904 0.18 0.362 0.361 0.362 0.361 0.362 0.361 0.362 0.361 0.361 0.362 0.361 0.362 0.361 0.723 0.108", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 232.2 230.7 229.2 227.8 226.3 224.9 223.4 222.0 220.6 219.2 217.8 216.4 215.0 213.6 212.2 210.9 209.5 208.2 206.9 205.5 204.2 202.9 201.6 200.3 199.0 197.8 196.5 195.2 194.0 192.8 191.5 190.3 189.1 187.9 186.7 185.5 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.0 185.1 185.2 185.4 185.6 185.9 186.2 186.4 186.7 187.1 187.4 187.7 188.0 188.3 188.5 188.6 188.8 188.8 188.6 187.9 187.3 187.1 186.9 186.3 185.4 184.8 184.4 184.1 183.8 182.9 181.0 179.0 175.9 171.3 166.9 162.8 160.0 160.5 162.9 169.2 171.9 173.9 177.6 180.9 183.9 185.6 188.8 192.8 198.1 201.6 204.6 207.5 210.2 212.3 212.1 211.8 211.0 208.5 207.5 207.0 207.0 207.0 207.0 206.7 206.4 206.3 206.5 206.7 207.0 207.2 207.4 207.6 207.7 207.9 208.2 208.4 208.1 207.6 207.3 206.8 205.4 201.5 198.6 196.3 195.3 195.0 195.2 197.5 200.9 205.3 207.9 209.3 209.8 210.2 210.5 210.9 211.5 212.0 
212.5 212.5 212.0 211.2 211.2 211.0 210.8 210.5 210.4 210.4 211.0 211.5 211.9 213.4 215.2 217.7 218.8 220.1 221.5 224.1 226.9 230.0 231.3 232.4 233.5 235.2 236.6 237.6 237.7 237.6 237.2 236.5 235.8 235.0 234.5 233.9 233.3 232.5 231.8 231.4 230.6 229.4 228.0 225.4 222.0 218.1 213.8 210.5 208.9 208.0 207.3 206.8 205.9 205.1 204.5 203.4 202.1 200.3 198.7 197.1 195.5 194.9 195.3 198.6 201.8 204.8 207.0 208.1 208.9 208.9 209.3 209.7 209.3 208.8 208.3 207.7 207.2 206.7 206.3 206.0 205.7 205.3 205.0 204.7 204.8 204.9 204.9 205.0 205.2 205.5 206.0 206.7 207.4 207.7 208.0 208.2 208.5 208.7 208.9 209.4 209.9 210.5 210.8 210.4 209.1 206.0 200.7 194.6 190.7 188.7 187.9 189.9 191.3 192.5 195.1 196.6 197.5 198.2 198.8 199.3 200.9 201.9 202.4 202.9 203.4 204.0 204.7 205.5 206.3 207.0 207.6 208.1 208.6 208.8 209.0 207.8 208.1 210.3 216.7 221.4 223.3 222.7 221.9 220.9 218.7 217.2 216.3 215.4 214.2 212.6 212.3 211.8 210.9 211.1 211.4 211.9 212.7 214.2 216.7 217.5 218.8 220.7 223.6 226.5 229.0 230.5 231.8 233.0 234.3 235.3 235.7 235.8 235.7 235.6 235.4 234.8 233.7 233.1 232.6 232.1 231.9 231.9 232.0 232.2 232.3 232.3 232.4 232.6 233.1 233.4 233.5 232.9 232.4 231.7 230.4 229.2 227.6 224.0 220.3 215.2 205.1 200.1 196.4 193.2 192.4 193.7 200.6 205.4 209.3 211.7 211.3 210.2 209.2 208.7 208.6 208.3 208.2 208.3 208.1 208.0 207.9 208.2 208.4 208.6 208.5 208.3 208.1 208.0 208.0 207.8 206.5 205.0 203.6 203.0 202.2 201.3 201.5 201.9 202.3 203.4 204.9 206.6 208.6 211.4 214.6 221.4 226.6 230.7 233.4 235.6 237.6 239.4 240.5 241.0 240.7 240.7 240.9 239.2 237.0 234.6 233.5 232.4 231.3 229.4 227.9 227.0 226.3 225.8 225.7 226.2 226.7 227.5 228.0 228.9 230.5 231.4 232.4 233.6 234.5 235.4 236.6 236.4 236.3 236.1 235.7 235.4 235.2 234.5 233.9 233.6 233.2 232.7 232.1 231.8 231.5 231.4 231.2 231.1 231.2 231.4 231.5 231.7 231.8 231.8 231.6 231.4 231.1 230.7 230.5 230.2 229.8 229.3 228.8 228.8 229.5 230.5 232.3 234.1 235.6 236.2 236.0 235.6 235.2 234.7 234.3 234.2 234.0 233.7 233.5 233.3 233.1 233.0 232.7 232.6 233.0 233.0 232.9 232.7 232.6 232.4 232.3 231.9 231.2 230.7 230.3 230.1 230.3 230.6 231.1 231.6 232.8 234.2 234.8 235.3 235.6 235.3 235.0 234.6 234.1 233.8 233.6 233.3 233.1 232.8 232.8 232.9 232.9 233.1 233.3 233.4 233.5 233.6 233.6 234.5 233.5 231.3 224.7 215.4 204.7 196.7 192.3 190.2 190.5 191.6 193.2 197.0 202.4 210.3 216.2 223.3 232.0 234.2 235.3 234.7 234.2 233.8 233.6 233.5 233.4 233.4 233.2 233.0 232.8 232.6 232.5 232.3 232.2 231.5 229.7 224.7 217.5 207.3 200.7 196.8 195.6 195.9 196.6 197.2 199.3 202.0 204.9 210.9 216.3 219.3 226.4 232.6 235.9 237.2 237.3 236.4 235.4 234.1 232.4 232.0 232.1 232.4 232.7 233.0 233.4 233.6 233.6 233.5 233.4 233.3 233.1 232.9 232.8 232.8 232.5 232.2 231.8 231.5 231.0 230.2 226.4 222.3 219.3 218.2 218.1 220.5 226.0 231.7 235.0 236.5 236.7 236.4 236.2 235.8 235.3 235.0 234.8 234.5 234.3 234.0 233.8 233.8 233.7 233.6 233.4 233.2 233.0 232.9 232.9 232.4 232.1 231.7 230.9 229.8 228.5 221.1 214.2 207.5 204.2 204.3 206.7 214.3 221.6 228.7 232.5 234.4 235.1 235.0 234.3 233.2 232.4 232.2 232.4 232.8 233.1 233.5 234.2 234.6 234.7 234.7 234.7 234.8 235.2 235.4 235.4 235.9 236.5 237.3 238.3 238.7 238.3 239.4 240.9 243.1 246.3 249.9 254.0 261.7 270.6 281.4 289.1 295.7 300.3 303.2 305.5 307.4 309.8 311.4 311.5 311.3 311.5 312.4 311.8 311.1 310.5 307.9 303.3 296.8 285.9 277.0 272.3 272.2 275.3 281.3 287.7 293.1 296.7 295.1 291.0 282.3 274.3 266.8 260.1 254.0 247.3 237.9 229.7 222.7 218.9 217.3 216.7 217.2 216.4 215.5 215.4 215.4 215.3 214.7 214.8 215.3 216.9 218.7 220.5 222.2 223.7 224.8 224.9 225.8 
227.3 228.8 229.8 230.4 231.0 231.9 233.1 234.1 234.7 235.1 235.5 235.5 235.3 234.7 234.1 233.7 233.4 232.8 232.0 231.9 232.0 232.1 232.3 232.4 232.4 232.9 233.4 234.0 234.4 234.8 235.2 234.7 234.0 233.2 232.4 229.3 224.6 219.3 214.1 209.1 206.4 205.2 205.1 205.1 205.9 207.7 209.1 210.8 213.0 213.2 213.4 213.7 213.0 212.2 211.3 210.7 210.2 209.6 210.2 210.1 209.1 206.5 202.3 195.8 191.4 188.5 187.6 188.9 190.4 192.4 194.7 196.3 196.6 197.7 198.9 200.3 201.6 202.8 203.6 204.0 204.7 206.1 207.1 207.9 208.6 210.2 212.0 214.0 214.4 214.2 213.7 213.3 212.5 211.3 210.9 210.6 210.2 209.8 209.5 209.3 208.7 208.2 208.1 208.0 207.7 207.4 207.1 206.9 206.7 206.7 206.7 206.9 207.0 207.1 207.3 207.3 207.5 208.3 208.8 209.2 209.6 209.9 210.0 209.8 209.5 209.1 208.7 208.3 207.9 207.0 204.7 201.2 195.8 192.0 189.9 190.5 191.6 193.3 195.3 196.0 197.2 199.1 200.1 201.4 202.8 204.1 204.8 206.0 206.9 207.9 208.6 209.3 209.9 210.7 210.7 210.7 210.1 209.0 208.3 207.8 206.9 206.2 205.7 205.3 204.9 205.1 205.3 206.6 207.3 208.0 209.2 210.7 212.0 212.6 213.9 215.0 216.1 217.5 218.7 221.4 223.8 225.7 227.4 228.8 230.3 231.5 232.3 232.3 232.3 232.3 232.3 232.4 234.6 234.8 234.8 234.8 234.7 234.6 234.2 234.2 234.0 233.8 233.4 233.1 232.7 231.9 231.0 229.7 228.2 226.5 224.4 221.9 220.4 218.9 217.5 215.9 214.7 213.7 213.0 211.5 210.7 216.6 212.8 209.1 207.0 204.4 201.7 198.8 196.4 194.2 192.4 192.3 192.6 193.2 196.0 199.2 201.3 202.3 202.9 203.2 204.8 206.7 208.6 209.5 210.1 210.5 210.4 210.0 209.6 209.3 208.9 208.3 207.6 206.9 206.2 205.5 204.7 203.6 202.8 202.5 202.6 202.9 203.2 203.3 203.8 204.6 205.1 205.7 206.1 207.0 207.8 208.6 210.3 211.3 211.8 212.6 212.9 212.8 212.4 211.9 211.3 210.1 209.0 208.1 205.6 203.5 201.6 201.0 200.0 198.7 197.8 197.3 196.9 197.5 198.0 198.5 199.7 201.0 202.8 204.1 205.6 207.3 208.8 210.2 211.4 213.1 214.0 213.9 213.6 213.2 212.9 212.7 212.1 210.8 209.7 208.5 207.2 205.5 204.2 203.6 202.6 201.9 201.6 201.8 202.1 202.5 202.8 203.2 204.0 204.9 205.9 207.0 208.2 209.2 209.7 210.2 210.9 211.6 212.4 213.1 213.5 213.3 212.9 212.3 211.3 210.1 208.5 207.6 206.7 205.1 204.2 203.3 202.0 201.6 201.3 201.0 200.7 200.4 200.3 200.7 201.2 201.5 202.3 203.3 204.5 205.1 205.8 207.1 208.1 208.9 209.7 210.7 211.8 212.6 213.5 214.3 214.8 215.2 215.4 215.2 214.9 214.6 214.0 212.9 211.5 210.2 208.5 206.6 205.6 204.4 203.2 202.6 202.5 202.7 203.3 203.8 204.2 205.9 207.6 209.3 210.3 211.3 212.4 214.6 215.4 215.3 215.0 214.5 214.0 211.0 208.0 205.0 203.2 202.5 202.7 202.9 203.2 203.6 203.9 204.3 204.8 205.2 205.6 206.1 206.4 206.7 207.1 207.3 207.5 207.7 207.7 207.7 207.7 207.5 207.3 207.2 206.9 206.5 206.0 205.2 204.1 202.6 200.4 197.9 194.7 190.2 185.4 180.7 175.5 170.9 167.2 164.0 161.5 159.8 158.5 157.6 154.7 154.1 152.5 150.1 147.0 143.4 139.4 135.2 130.9 126.7 122.7 119.2 116.3 113.6 111.3 109.7 108.6 108.2 108.5 109.3 110.6 112.4 114.7 117.4 120.6 124.0 127.7 131.5 135.4 139.3 142.9 146.3 149.2 151.6 153.4 154.5 153.1 153.2 153.5 153.7 154.3 155.1 155.4 155.7 156.1 156.6 156.9 157.3 157.7 158.4 159.3 160.3 159.9 158.7 157.5 156.7 156.5 157.3 159.0 161.1 162.5 163.7 164.8 168.5 171.1 173.1 173.8 174.5 175.0 174.7 174.5 174.2 174.6 174.8 174.8 175.0 175.0 174.7 173.6 172.2 170.4 167.2 163.2 158.9 158.1 158.9 160.6 161.6 163.3 166.1 169.1 172.2 175.2 176.4 178.3 181.2 183.0 185.4 189.0 191.2 192.8 193.5 192.1 191.1 190.5 189.8 189.2 188.6 187.8 187.2 186.7 186.0 185.6 185.5 185.1 184.7 184.4 184.3 184.2 184.1 184.0 184.0 184.1 184.2 184.3 184.4 184.5 184.6 184.8 184.8 184.7 184.6 184.5 184.4 184.4 184.2 
184.1 184.1 184.2 184.3 184.3 184.5 184.6 184.8 184.8 184.8 184.9 184.8 184.8 184.7 184.5 184.3 184.2 184.0 183.6 182.7 182.0 181.2 179.7 178.3 176.6 174.0 172.6 171.2 169.1 167.7 166.7 166.0 165.8 166.0 167.2 169.4 172.0 173.2 174.1 174.8 175.0 175.5 175.9 176.0 176.4 176.9 176.6 176.0 175.3 174.9 172.6 169.3 163.9 159.5 155.7 154.4 154.9 156.4 159.0 161.1 162.8 163.8 165.4 167.2 168.3 169.8 171.5 174.0 175.6 176.6 178.5 179.8 180.7 181.5 182.3 183.0 183.8 184.5 184.9 185.1 185.3 185.5 185.6 185.7 185.9 185.9 185.8 185.5 185.3 185.2 185.2 185.2 185.1 185.0 184.9 184.8 184.7 184.5 184.4 184.1 184.1 184.0 183.9 183.9 183.8 183.8 183.9 184.0 184.1 184.3 184.4 184.5 184.6 184.7 184.9 185.4 185.9 186.4 186.8 186.9 186.6 185.8 184.4 182.0 176.9 170.7 163.6 162.3 163.6 169.8 173.5 176.7 180.7 182.8 184.8 188.3 191.2 193.9 196.3 200.3 204.9 210.3 214.2 218.5 226.3 231.3 235.4 238.4 239.3 239.6 239.4 239.5 239.7 240.3 239.0 236.5 233.5 231.2 229.6 228.4 226.3 223.9 222.9 222.3 222.0 222.6 223.3 224.2 226.3 227.9 229.7 233.1 235.4 237.1 238.8 239.8 240.2 239.8 239.3 238.8 237.6 235.7 233.4 230.0 227.7 226.2 223.3 221.2 219.6 218.4 217.9 217.9 219.0 221.1 223.7 225.6 228.2 231.3 233.5 235.6 237.6 240.1 242.1 243.5 243.4 242.9 242.1 241.7 240.6 238.5 220.3 220.1 219.7 219.0 218.1 217.0 215.6 214.0 214.2 213.9 213.2 212.1 210.7 209.1 207.2 205.2 203.3 200.8 198.3 195.7 193.0 190.3 187.6 185.0 182.5 180.1 177.8 175.7 173.8 172.1 170.7 169.4 168.4 167.7 167.2 167.1 167.5 169.0 171.4 174.9 179.3 184.7 190.9 197.6 204.8 212.1 219.4 226.3 232.5 237.8 242.0 245.0 246.4 246.5 246.5 246.5 246.5 246.5 246.5 246.5 246.5 247.2 247.6 248.3 249.5 250.9 252.6 254.9 257.4 260.1 262.9 265.9 268.8 271.4 273.8 276.0 277.8 279.1 279.9 279.3 278.8 278.3 277.8 277.5 277.3 277.1 276.9 276.7 276.4 276.3 276.3 276.4 276.8 276.8 276.5 276.0 275.4 275.0 273.4 271.2 268.7 264.2 260.9 258.5 252.6 247.5 243.1 240.8 238.7 236.7 235.9 235.6 235.7 236.8 236.6 235.5 235.2 234.7 233.9 232.8 232.3 232.7 233.7 234.5 235.0 234.8 234.6 234.4 232.7 230.8 228.5 218.2 210.7 206.8 208.0 209.0 209.7 210.2 210.8 211.9 212.0 212.0 211.7 211.2 210.9 210.7 210.8 210.7 210.2 210.5 210.7 210.4 208.8 206.9 205.1 204.6 204.6 205.2 205.9 206.4 206.7 206.6 206.4 206.2 205.9 205.7 205.5 205.4 205.3 205.2 205.1 205.1 205.1 205.1 205.0 205.0 205.0 205.0 205.1 205.4 205.8 206.3 206.6 206.8 207.0 207.5 208.0 208.8 209.3 209.8 210.8 211.3 211.7 211.8 211.6 210.9 209.6 207.1 203.3 197.2 192.6 189.6 188.1 187.1 186.5 185.8 185.3 184.9 184.7 184.4 184.1 183.7 183.2 182.7 182.2 181.7 181.1 180.6 180.3 179.9 179.2 178.5 177.9 177.5 177.1 176.7 180.4 182.3 183.2 183.3 183.7 184.3 184.5 184.5 184.3 184.3 184.3 184.3 184.1 184.0 183.8 183.8 183.7 183.7 183.7 183.7 183.7 183.8 183.9 184.1 184.1 184.2 184.3 184.3 184.4 184.6 184.7 184.8 184.8 185.2 185.6 186.1 186.1 186.2 186.2 186.1 186.1 186.1 186.0 186.0 186.0 185.9 185.8 185.8 185.7 185.7 185.7 185.6 185.6 185.6 185.4 185.0 184.5 183.9 183.5 183.1 183.1 183.4 183.8 184.5 185.0 185.3 185.5 185.9 186.7 187.2 187.7 188.0 188.1 188.0 187.5 187.0 186.7 186.6 186.5 186.4 186.3 186.2 186.0 185.9 185.7 185.6 185.5 185.2 184.8 184.3 183.9 183.6 183.0 182.3 181.4 179.7 178.0 175.8 170.9 164.0 156.7 152.6 151.8 153.9 158.8 165.4 172.5 175.9 176.9 176.3 175.6 174.9 174.3 174.0 174.1 174.3 174.5 175.2 176.1 176.7 176.9 176.8 176.6 176.4 176.2 176.0 175.7 175.3 175.0 174.8 174.5 174.3 173.8 173.2 173.1 173.2 173.3 173.5 173.7 173.9 174.3 174.7 175.0 175.6 176.2 176.9 177.3 177.5 177.4 177.1 176.6 175.8 174.4 172.9 171.0 167.8 
164.5 161.0 160.5 160.9 162.5 164.8 166.9 168.5 170.2 171.6 172.7 173.9 175.5 177.4 179.7 181.7 183.2 184.4 185.9 187.7 189.4 191.1 192.7 192.6 192.0 191.4 190.9 189.8 187.9 186.9 186.3 185.9 185.9 185.9 185.9 185.9 185.7 184.9 183.9 182.0 177.2 170.6 163.7 156.4 154.1 153.5 154.2 155.9 157.6 158.4 159.6 160.9 162.1 162.9 164.1 167.3 170.4 173.4 175.3 176.1 176.5 176.4 176.5 176.6 176.3 176.1 175.9 175.9 175.8 175.7 175.5 175.2 174.8 174.4 174.0 173.7 173.4 173.4 173.5 173.7 173.9 173.9 174.0 174.1 174.2 174.3 174.3 174.3 174.7 175.0 175.4 175.8 176.0 176.0 176.3 176.5 176.5 176.0 175.5 174.9 171.5 168.0 164.4 156.0 150.8 147.4 146.5 146.7 147.6 150.2 152.4 154.1 155.4 157.0 159.3 160.6 161.8 162.9 165.1 166.8 168.1 169.4 170.6 171.7 173.4 175.3 177.6 178.3 179.2 180.5 181.6 182.7 183.7 186.7 189.0 190.3 190.7 190.9 191.1 190.6 190.1 189.8 189.5 189.0 188.3 187.5 186.8 186.2 185.5 184.9 184.7 184.2 183.6 183.2 182.8 182.6 182.6 182.4 182.2 182.1 182.2 182.3 182.4 182.6 182.8 183.0 183.2 183.4 183.9 184.6 185.2 185.7 185.9 186.0 186.1 186.3 186.4 186.3 186.1 185.9 185.6 185.4 185.2 185.0 184.8 184.6 184.0 183.6 183.4 183.2 182.9 182.7 182.7 182.8 182.9 183.0 183.2 183.3 183.4 183.9 184.7 185.5 186.1 186.5 186.8 187.0 187.1 187.1 187.0 186.8 186.5 186.2 186.0 185.9 185.7 185.5 183.5 181.9 180.7 176.4 173.4 171.1 166.8 161.7 156.2 152.1 149.1 146.7 142.5 139.8 138.0 137.1 136.9 137.0 137.2 138.2 140.3 141.2 143.5 147.9 148.2 149.1 151.0 152.1 152.8 152.9 154.4 155.4 155.8 156.2 156.5 156.6 156.1 155.5 155.0 154.7 154.3 153.7 153.0 152.4 152.1 152.0 152.0 152.1 152.1 152.2 152.3 152.6 153.0 153.6 153.9 154.3 154.6 155.0 155.3 155.5 155.7 156.0 156.2 156.2 156.1 156.0 155.8 155.7 155.6 155.3 155.1 154.7 154.4 154.0 153.6 153.3 153.2 153.3 153.5 153.6 153.8 154.0 154.1 154.6 155.3 156.1 156.8 157.1 157.4 158.2 158.6 158.9 158.8 158.7 158.5 158.4 158.1 157.6 157.1 155.9 154.4 153.3 152.3 151.5 150.0 148.8 147.9 147.0 146.4 146.2 146.4 146.7 147.0 147.2 147.8 148.8 149.5 150.4 151.2 151.8 152.5 153.1 153.7 153.9 153.9 153.7 153.5 153.4 153.3 153.1 152.9 153.1 153.3 153.4 153.6 153.8 154.0 154.3 154.5 154.7 154.9 155.1 155.3 155.2 155.0 154.8 154.7 154.4 153.8 152.8 152.0 151.4 150.3 149.4 148.8 148.6 148.6 148.9 149.1 149.3 149.4 150.0 150.8 151.7 153.5 155.1 156.6 157.4 158.3 159.4 160.3 160.8 160.6 160.3 159.9 159.7 159.0 158.0 156.8 154.8 153.1 152.0 150.1 148.7 148.2 147.9 147.9 148.3 148.7 149.3 150.0 151.7 153.3 154.5 155.7 156.9 158.2 159.2 160.0 160.3 159.9 159.5 159.1 158.1 157.1 156.7 156.6 156.6 156.6 156.5 156.4 156.3 156.2 156.2 156.0 156.0 155.9 155.8 155.7 155.7 155.7 155.6 155.6 155.6 155.5 155.4 155.2 155.1 154.8 154.5 154.2 153.9 153.6 153.3 153.0 152.7 152.5 152.2 152.1 151.9 151.7 151.3 149.6 147.2 144.2 142.7 142.2 142.4 142.6 142.7 143.6 144.6 145.7 146.4 147.3 148.4 149.2 149.9 150.6 151.2 151.7 152.1 153.3 154.0 154.4 154.8 155.8 157.0 157.6 158.1 158.4 158.5 158.9 159.6 160.6 161.2 161.0 160.6 159.8 158.6 157.6 156.2 154.2 152.9 151.3 149.1 147.5 146.3 145.6 145.9 146.3 146.8 147.2 147.6 148.0 148.6 149.1 149.6 151.5 153.0 153.4 153.7 153.9 154.0 154.6 155.2 155.5 156.0 156.4 156.5 156.9 157.3 157.6 158.0 158.1 157.6 157.2 156.6 155.7 155.2 154.9 154.6 154.3 154.0 153.9 153.6 153.3 152.9 152.7 152.6 152.6 152.5 152.3 152.3 152.2 152.1 152.1 152.4 152.8 153.0 153.6 154.2 155.2 155.7 156.1 156.3 156.5 156.6 156.6 156.4 156.3 156.1 156.0 155.9 155.8 155.6 155.4 155.1 155.0 154.8 154.1 153.2 152.0 150.6 148.5 145.7 142.3 140.5 139.7 143.2 147.0 151.0 153.2 155.0 156.5 159.5 
161.9 163.8 165.4 167.6 170.2 175.4 180.4 185.4 194.0 201.4 208.0 211.2 213.3 214.6 214.0 214.6 216.0 215.1 214.1 212.8 213.3 213.2 212.3 212.3 212.2 211.6 211.2 211.3 212.1 213.2 214.3 215.5 218.1 221.3 225.3 228.0 230.1 231.2 233.0 235.1 237.6 238.4 238.9 238.8 238.5 238.2 237.9 236.4 234.4 232.2 230.4 226.9 221.0 217.6 215.5 214.5 213.9 213.4 212.9 212.3 211.6 210.5 209.0 207.6 206.4 205.7 204.9 203.8 202.5 201.1 199.5 198.1 196.7 195.1 193.6 192.2 190.8 189.5 188.2 186.5 184.8 183.6 184.1 184.1 184.3 186.4 186.3 185.7 185.7 185.5 185.2 184.6 183.9 183.1 182.4 181.7 181.1 180.6 180.0 179.6 179.3 179.3 179.5 180.2 181.0 181.7 182.1 183.2 184.7 185.4 186.1 186.7 187.2 187.7 188.2 188.8 188.8 188.4 188.1 187.6 186.8 184.6 182.5 180.5 176.2 173.3 171.3 172.2 172.9 173.6 174.1 174.5 174.9 175.8 176.3 176.5 176.7 176.8 176.8 176.8 176.8 176.8 176.7 176.7 176.9 177.4 177.8 178.3 178.5 178.5 178.4 178.4 178.4 178.3 179.0 179.0 178.0 176.6 175.6 175.1 174.7 174.2 173.8 173.5 173.2 172.9 172.7 172.3 171.6 170.9 170.5 170.5 170.6 170.7 170.9 171.1 171.5 172.2 172.7 173.2 173.9 174.5 174.9 175.1 175.4 175.7 175.8 175.7 175.5 175.4 175.1 174.9 174.8 174.6 174.5 174.7 175.0 175.3 175.6 176.0 176.5 176.5 176.3 176.0 175.7 174.6 173.0 170.0 164.9 159.8 155.8 153.8 153.0 155.4 157.6 159.6 160.3 159.8 158.9 158.0 156.0 153.6 152.8 151.8 150.7 149.8 147.9 145.5 143.2 141.7 140.9 140.5 140.1 139.8 139.5 139.3 139.1 139.1 139.5 140.2 140.8 141.1 141.3 142.3 143.7 145.5 147.1 148.0 148.5 149.9 151.0 151.9 152.4 153.1 154.0 154.7 155.1 155.3 155.5 155.5 155.3 155.1 154.9 154.8 154.7 154.8 154.9 155.1 155.3 155.4 156.0 156.6 157.3 157.7 157.7 157.3 155.6 152.4 147.0 141.2 137.8 137.7 138.8 140.4 142.9 145.7 147.5 147.9 148.4 149.2 150.5 151.6 152.1 151.8 151.3 150.6 149.8 148.3 146.2 143.3 140.4 138.4 137.4 137.0 136.7 136.3 135.9 135.6 135.5 135.3 135.2 135.1 135.0 135.1 135.5 136.0 136.3 136.5 136.7 137.0 137.5 138.2 138.9 139.4 138.0 136.4 135.4 136.1 137.2 138.0 138.8 139.7 140.3 140.9 141.4 141.2 141.0 140.7 140.6 140.2 139.8 139.4 139.7 140.3 141.7 142.1 141.7 140.7 140.5 140.6 140.4 139.7 138.6 137.6 136.6 135.6 133.7 132.4 131.7 131.7 131.8 132.1 133.7 135.5 137.8 141.5 144.5 146.8 150.1 154.0 158.4 159.6 159.2 157.8 157.5 156.8 154.8 154.8 154.8 154.8 154.8 154.9 154.9 155.0 155.0 155.0 155.1 155.1 155.2 155.2 155.2 155.3 155.3 155.3 155.2 154.9 154.2 153.1 151.6 149.8 147.7 145.5 142.9 140.3 137.6 135.0 132.3 129.7 127.3 125.0 123.0 121.3 119.7 118.4 117.4 116.8 116.4 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.3 109.0 107.3 105.8 104.8 104.0 103.5 103.3 103.5 104.0 104.7 105.6 106.7 108.0 109.4 110.8 112.1 113.4 114.4 115.3 115.8 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.0 116.1 116.1 116.1 116.1 116.1 116.1 116.1 116.1 116.1 116.2 116.2 116.2 116.2 116.2 116.2 116.3 116.3 116.3 116.3 116.3 116.3 116.3 116.0 115.8 115.2 114.2 113.0 111.5 109.8 107.9 105.8 103.8 101.7 99.7 97.8 96.0 94.4 93.0 91.9 91.0 90.4 90.2 90.2 90.5 91.1 91.9 92.9 93.9 95.1 96.5 97.9 99.3 100.5 101.7 102.6 103.2 103.6 103.3 103.3 103.3 103.3 103.3 103.3 103.3 103.3 103.3 103.3 103.4 103.4 103.4 103.4 103.4 103.5 103.5 103.5 103.5 103.5 103.5 103.5 103.5 103.5 103.5 103.6 103.6 103.6 103.6 103.6 103.6 103.6 103.6 103.6 103.6 103.6 103.6 103.6 105.3 104.3 102.7 101.0 99.7 99.9 100.5 101.3 102.1 102.7 103.3 104.0 104.9 106.0 107.4 108.2 108.7 108.5 107.4 106.3 105.4 104.0 102.5 101.4 100.7 100.0 98.9 98.2 97.5 96.9 96.1 95.2 
94.8 94.1 93.3 93.0 92.5 91.9 91.3 90.8 90.5 90.5 90.6 90.7 91.0 91.4 91.9 92.6 93.2 93.7 94.1 94.6 95.2 95.6 95.7 95.8 95.6 95.4 95.3 95.1 94.4 93.5 93.1 92.6 92.2 92.1 92.2 92.4 93.2 93.9 94.5 94.9 95.3 95.6 96.2 96.8 97.3 97.6 97.8 97.7 97.5 97.1 96.3 94.5 93.2 92.3 92.6 93.2 94.6 95.6 96.8 98.0 101.4 104.5 107.1 108.0 108.1 106.9 105.8 105.0 104.3 104.3 104.3 104.1 103.8 103.8 103.6 103.6 103.6 103.5 103.5 103.3 103.2 103.1 103.1 103.1 103.1 103.1 103.1 103.1 103.0 103.0 103.0 103.0 103.0 103.1 103.3 103.5 103.5 103.6 103.6 103.8 103.9 104.1 104.2 104.5 104.5 104.5 104.5 104.5 104.4 104.3 104.1 103.9 103.8 103.8 103.6 103.6 103.6 103.6 103.5 103.5 103.5 103.4 103.3 105.8 104.6 102.6 100.1 98.0 99.1 101.0 103.5 106.3 110.4 114.7 116.5 117.2 117.2 117.4 117.1 116.6 116.7 116.8 117.1 117.5 117.7 117.9 118.1 118.2 118.3 118.4 118.4 118.3 118.1 117.9 117.8 117.6 117.4 117.1 116.1 115.1 114.2 114.0 114.1 114.3 114.7 115.0 115.3 116.2 116.9 117.4 117.6 118.1 118.6 118.9 119.4 120.2 120.3 120.6 121.1 121.3 121.4 121.2 120.8 120.3 119.9 117.4 114.5 111.1 108.8 107.5 107.6 108.4 109.7 111.7 112.7 113.2 113.1 113.0 113.0 113.4 113.8 114.3 115.1 115.7 116.5 117.4 119.5 121.0 121.3 120.7 119.9 119.1 118.6 118.4 118.2 117.9 117.7 117.6 117.5 117.5 117.4 117.3 117.2 117.2 116.9 116.7 116.6 116.7 117.0 117.1 117.2 117.4 117.5 117.5 117.4 117.1 116.9 116.6 116.2 115.8 115.5 115.1 115.0 115.1 115.1 116.5 116.7 117.1 117.5 118.1 118.8 119.5 120.2 120.9 121.6 122.0 121.8 121.6 121.1 120.6 120.0 119.2 118.4 117.6 116.7 115.8 115.1 114.2 113.4 112.8 112.2 111.7 111.3 111.1 111.0 111.0 111.1 111.4 111.9 112.3 113.0 113.7 114.4 115.3 116.3 117.1 118.1 119.0 119.7 120.5 121.3 121.8 122.2 122.5 122.6 122.6 122.4 122.1 121.7 121.1 120.3 119.6 118.7 117.7 116.9 115.8 114.9 114.1 113.2 112.4 111.8 111.2 110.8 110.6 110.4 110.4 110.6 110.9 111.4 111.9 112.5 113.4 114.3 115.1 116.2 117.2 118.2 119.2 120.2 120.9 121.7 122.3 122.8 123.1 123.3 123.3 123.0 122.6 122.2 121.5 120.7 119.9 118.9 117.8 116.8 115.9 114.8 114.3 114.3 114.7 115.3 124.9 124.6 124.1 123.9 123.6 123.2 122.7 122.1 121.6 120.9 120.7 120.7 120.7 120.7 120.7 120.7 120.7 120.7 120.7 120.7 120.7 120.7", - "input_type": "phoneme", - "offset": 108.362 + "f0_timestep": "0.005" }, { + "offset": 125.856, "text": "SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 AP 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 SP 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 SP", "ph_seq": "SP k an ch un f eng l ie h uo j ing h ong y i j ian zh an SP q i s i0 y E AP c i0 sh en d u j ie d ian g uang zh ir j ian g an k ui t ian SP y i sh eng ch ang g e y E sh ir zh en y En SP t ian d i b u r en SP y ao x ie b u m ie SP w o m ing b u g ai j ve SP", - "note_seq": "rest A#2 A#2 B2 B2 A#2 A#2 B2 B2 C#3 C#3 D#3 D#3 C#3 C#3 D#3 D#3 F#3 F#3 F3 F3 rest G#3 G#3 B3 B3 A#3 A#3 rest A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 D#3 D#3 A#3 A#3 C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 G#3 G#3 rest F#3 F#3 G#3 G#3 F#3 F#3 G#3 G#3 A#3 A#3 C#4 C#4 G#3 G#3 A#3 A#3 rest A#3 A#3 G#3 G#3 F#3 F#3 D#3 D#3 rest A#3 A#3 G#3 G#3 F#3 F#3 F3 F3 rest A#3 A#3 G#3 G#3 A#3 A#3 B3 B3 A#3 A#3 rest", - "note_dur_seq": "0.289 0.361 0.361 0.3610001 0.3610001 0.362 0.362 0.3609999 0.3609999 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.3610001 0.3610001 0.362 0.362 0.5420001 0.5420001 0.1799998 0.1810002 0.1810002 0.5419998 0.5419998 0.362 0.362 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.1799998 0.1799998 0.1810002 0.1810002 0.362 0.362 0.1799998 0.1799998 0.362 0.362 0.5420003 0.5420003 0.1810002 0.1810002 0.3609991 0.3609991 0.3620005 0.3620005 0.1800003 
0.3619995 0.3619995 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.5430002 0.5430002 0.1800003 0.1800003 0.3619995 0.3619995 0.3610001 0.3610001 0.1809998 0.3610001 0.3610001 0.1810007 0.1810007 0.3610001 0.3610001 0.3619995 0.3619995 0.1809998 0.3610001 0.3610001 0.1810007 0.1810007 0.3610001 0.3610001 0.3619995 0.3619995 0.1800003 0.3619995 0.3619995 0.5419998 0.5419998 0.5419998 0.5419998 0.3620014 0.3620014 0.3610001 0.3610001 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.200148 0.088852 0.22094 0.14006 0.22094 0.14006 0.297242 0.064758 0.255577 0.105423 0.22194 0.14006 0.276665 0.084335 0.292725 0.069275 0.257088 0.103912 0.241517 0.120483 0.542 0.064613 0.115387 0.103272 0.077728 0.477242 0.064758 0.362 0.255577 0.105423 0.110214 0.070786 0.129792 0.051208 0.097389 0.082611 0.150879 0.030121 0.321335 0.040665 0.119758 0.060242 0.273148 0.088852 0.496819 0.045181 0.135819 0.045181 0.260093 0.100906 0.362 0.121269 0.058731 0.221939 0.14006 0.266426 0.094574 0.146362 0.034637 0.302269 0.058731 0.357759 0.185242 0.109215 0.070786 0.297241 0.064758 0.361 0.122269 0.058731 0.33239 0.02861 0.131303 0.049698 0.302269 0.058731 0.362 0.048477 0.132523 0.22094 0.14006 0.15088 0.030121 0.275154 0.085846 0.362 0.121269 0.058731 0.25206 0.10994 0.496819 0.045181 0.462181 0.079819 0.243029 0.118973 0.361 0.072", - "f0_timestep": "0.005", + "ph_dur": "0.2001 0.0889 0.2209 0.1401 0.2209 0.1401 0.2972 0.0648 0.2556 0.1054 0.2219 0.1401 0.2767 0.0843 0.2927 0.0693 0.2571 0.1039 0.2415 0.1205 0.542 0.0646 0.1154 0.1033 0.0777 0.4772 0.0648 0.362 0.2556 0.1054 0.1102 0.0708 0.1298 0.0512 0.0974 0.0826 0.1509 0.0301 0.3213 0.0407 0.1198 0.0602 0.2731 0.0889 0.4968 0.0452 0.1358 0.0452 0.2601 0.1009 0.362 0.1213 0.0587 0.2219 0.1401 0.2664 0.0946 0.1464 0.0346 0.3023 0.0587 0.3578 0.1852 0.1092 0.0708 0.2972 0.0648 0.361 0.1223 0.0587 0.3324 0.0286 0.1313 0.0497 0.3023 0.0587 0.362 0.0485 0.1325 0.2209 0.1401 0.1509 0.0301 0.2752 0.0858 0.362 0.1213 0.0587 0.2521 0.1099 0.4968 0.0452 0.4622 0.0798 0.243 0.119 0.361 0.072", + "ph_num": "2 2 2 2 2 2 2 2 2 2 1 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest A#2 B2 A#2 B2 C#3 D#3 C#3 D#3 F#3 F3 rest G#3 B3 A#3 rest A#3 G#3 G#3 F#3 D#3 A#3 C#4 A#3 G#3 F#3 G#3 rest F#3 G#3 F#3 G#3 A#3 C#4 G#3 A#3 rest A#3 G#3 F#3 D#3 rest A#3 G#3 F#3 F3 rest A#3 G#3 A#3 B3 A#3 rest", + "note_dur": "0.289 0.361 0.361 0.362 0.361 0.362 0.361 0.362 0.361 0.362 0.542 0.18 0.181 0.542 0.362 0.361 0.181 0.181 0.18 0.181 0.362 0.18 0.362 0.542 0.181 0.361 0.362 0.18 0.362 0.361 0.181 0.361 0.543 0.18 0.362 0.361 0.181 0.361 0.181 0.361 0.362 0.181 0.361 0.181 0.361 0.362 0.18 0.362 0.542 0.542 0.362 0.361 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "116.8 116.8 116.8 116.6 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.5 116.0 116.0 115.8 115.2 114.3 113.1 111.6 109.9 107.9 105.9 103.8 101.6 99.5 97.4 95.4 93.6 92.0 90.6 89.4 88.4 87.7 87.3 87.2 87.3 87.7 88.4 89.3 90.6 92.1 93.9 95.8 97.9 100.1 102.4 104.7 106.9 109.0 111.0 112.7 114.1 115.2 115.9 116.3 116.3 116.3 116.2 116.2 116.1 116.1 116.0 116.0 115.9 
115.8 115.7 115.7 115.7 115.6 115.6 115.7 115.8 116.2 116.6 117.1 117.7 118.2 118.7 119.1 119.4 119.5 118.5 117.4 116.5 115.5 114.1 112.4 112.4 112.9 113.7 114.4 115.3 116.3 116.8 117.3 118.0 118.5 118.9 119.4 120.6 121.5 122.2 122.6 123.0 123.3 123.4 123.1 122.5 122.7 123.2 123.8 124.5 124.9 124.8 123.3 122.4 122.3 122.4 122.4 122.5 122.4 122.2 121.6 121.2 120.6 119.8 119.7 119.7 119.9 120.1 120.2 120.4 120.6 121.0 121.6 122.1 122.6 123.2 123.5 123.9 124.2 124.5 125.0 125.5 125.7 125.8 125.7 125.7 125.6 125.5 125.4 125.4 125.3 125.3 125.1 124.6 123.8 122.7 121.1 120.0 118.6 115.8 114.0 113.1 114.0 115.3 116.5 117.4 118.1 118.9 120.1 120.7 121.2 121.9 122.8 123.8 124.5 125.3 126.3 128.1 129.2 130.0 130.2 129.4 127.9 124.6 121.8 119.8 119.4 119.5 119.8 119.2 118.6 118.0 117.6 117.2 116.6 116.0 115.5 115.1 114.7 114.3 114.1 114.1 114.2 114.5 114.8 115.2 115.7 116.0 116.3 116.5 116.9 117.2 117.6 118.4 118.8 119.1 119.2 119.3 119.5 119.7 120.0 120.3 120.1 119.9 119.8 119.5 119.3 119.3 119.3 119.2 119.1 119.1 119.1 119.1 118.9 118.7 118.4 118.3 118.2 118.1 117.1 116.3 115.9 116.4 116.9 117.4 117.3 117.3 117.5 118.0 118.6 119.3 120.2 121.1 122.0 122.3 122.6 122.9 123.3 123.6 123.8 124.1 124.3 124.5 124.7 124.9 125.0 125.1 125.1 125.1 125.0 125.0 124.9 124.9 124.8 124.6 124.5 124.0 123.0 122.6 122.3 122.1 121.9 121.7 121.7 121.9 122.2 122.7 123.4 124.2 125.3 125.8 126.1 126.5 126.6 126.7 126.7 126.5 126.3 126.2 126.1 126.0 125.7 124.7 123.6 123.1 122.5 121.9 121.2 120.9 120.9 120.9 121.0 121.1 121.3 121.4 121.6 121.9 122.5 123.2 123.9 124.7 125.9 127.8 131.6 136.1 138.6 140.4 141.5 141.3 140.4 139.5 139.7 140.0 140.3 140.5 140.6 140.7 140.5 139.9 139.0 138.8 138.5 138.2 137.8 137.6 137.6 137.4 137.0 136.4 136.4 137.3 138.8 139.2 139.3 139.1 139.0 138.9 138.8 138.7 138.6 138.4 138.4 138.3 138.0 137.9 137.8 137.7 137.6 137.5 137.4 137.3 137.1 136.7 138.0 137.6 136.9 135.9 134.4 132.6 130.7 128.5 126.2 123.9 121.8 119.7 117.8 116.3 115.0 114.1 113.5 113.2 113.4 114.1 115.2 116.7 118.6 121.0 123.5 126.1 128.8 131.4 133.7 135.7 137.2 138.2 138.7 138.8 138.7 138.5 138.4 138.3 138.1 138.1 138.0 138.0 138.3 139.2 140.8 142.9 145.1 147.5 149.5 151.0 151.8 153.8 154.4 155.7 156.6 157.3 158.1 158.5 158.7 159.1 159.4 159.5 158.9 159.2 160.3 162.3 162.7 162.1 161.1 160.4 159.9 159.3 157.9 156.2 155.3 155.2 155.4 155.3 155.0 154.7 154.4 154.2 153.9 153.4 152.9 152.5 151.9 151.6 151.4 150.3 149.5 148.9 148.5 148.0 147.4 146.5 145.1 143.3 141.4 140.4 140.0 140.2 140.2 140.1 140.5 140.6 140.6 140.6 140.6 140.7 140.8 141.0 141.3 141.2 140.9 140.5 140.2 139.9 139.6 140.6 141.5 142.2 141.6 140.8 139.8 139.0 138.5 138.1 138.0 137.9 137.6 137.4 137.2 137.0 137.3 137.6 138.0 137.4 136.4 134.9 134.3 134.6 135.6 136.4 137.0 137.6 138.0 138.3 138.4 138.9 139.7 140.5 141.2 141.7 141.9 142.3 142.9 143.7 145.8 148.1 150.0 150.5 150.6 150.5 151.0 151.8 153.2 154.8 156.2 156.8 157.3 157.6 157.2 156.6 155.9 155.6 155.6 155.6 155.1 154.8 154.6 153.8 153.7 153.8 153.8 153.8 153.9 154.3 154.4 154.5 154.7 154.8 155.0 155.1 155.2 155.3 155.3 155.3 155.4 155.5 155.5 155.6 155.6 155.6 155.7 155.7 155.8 155.8 155.9 155.9 155.9 156.1 156.3 156.6 156.6 156.2 155.7 154.4 152.3 149.5 144.6 141.4 139.2 138.6 139.2 140.6 141.7 142.7 143.8 143.8 143.8 143.9 144.4 145.3 146.7 148.2 150.3 153.3 156.0 159.9 166.1 168.3 169.8 170.2 169.1 168.4 168.4 168.3 167.8 166.7 165.6 164.6 163.9 163.7 163.5 163.2 163.7 164.2 164.7 165.3 166.4 168.2 170.6 172.9 174.5 175.7 177.2 179.3 180.3 181.1 182.2 183.7 184.9 185.8 186.1 186.1 186.0 185.8 185.6 185.5 
185.4 185.2 184.9 184.7 184.6 184.4 184.3 184.1 184.0 183.8 183.7 183.5 183.0 182.2 180.5 176.8 172.5 167.3 161.7 156.3 152.0 151.2 151.4 151.8 153.0 154.7 157.5 160.3 162.9 165.1 166.4 167.6 170.9 173.1 174.3 175.0 175.7 176.6 177.7 178.8 179.6 179.7 180.0 180.3 180.3 179.9 179.3 178.6 178.2 177.9 177.4 176.5 175.4 174.5 173.7 173.0 172.7 171.7 170.2 169.5 168.9 168.3 167.0 166.0 165.3 164.9 164.9 165.1 165.4 166.1 167.0 167.8 169.3 171.2 172.9 174.3 175.5 178.0 179.5 180.3 181.4 182.2 182.8 182.4 182.0 181.6 179.9 178.4 177.3 175.1 173.2 171.7 169.7 167.3 164.1 162.6 161.4 160.5 160.7 160.9 161.1 162.8 164.5 166.4 168.0 169.7 171.5 173.7 175.4 176.4 177.6 178.8 180.1 180.8 181.2 181.4 180.9 180.4 180.1 178.2 176.4 175.2 172.6 170.4 169.4 168.1 166.8 165.5 165.0 164.6 164.3 164.1 164.0 163.8 163.7 163.5 162.8 162.1 161.8 162.5 163.2 163.9 165.2 165.7 166.0 166.4 167.0 167.7 168.5 169.4 170.4 171.7 173.0 174.5 176.2 178.5 181.2 184.1 187.4 190.9 194.0 196.9 199.5 201.2 202.4 203.2 203.6 203.7 203.6 203.3 202.9 202.5 202.1 201.7 201.3 201.0 200.8 200.8 201.7 204.5 208.5 211.5 212.6 212.3 211.0 209.3 207.3 201.5 194.8 187.8 181.1 175.8 171.4 173.8 176.2 178.5 182.9 185.6 187.1 191.0 193.9 196.1 198.4 201.2 204.3 206.0 208.4 211.6 214.5 217.2 219.6 222.3 222.5 223.1 223.9 224.8 225.6 226.2 226.4 226.3 226.1 225.8 225.4 224.9 224.4 223.7 223.1 222.4 221.8 221.3 220.6 220.0 219.5 219.1 218.7 218.5 218.4 218.3 218.5 218.9 219.7 220.6 221.8 223.1 224.6 226.4 228.3 230.3 232.3 234.3 236.5 238.4 240.4 242.1 243.6 245.1 246.3 247.3 248.0 248.5 250.7 250.7 250.3 249.9 249.7 248.9 248.0 247.3 246.9 246.5 246.5 246.6 246.7 246.9 247.0 247.1 247.2 247.2 247.2 247.3 247.4 247.5 247.7 247.9 248.1 248.1 248.1 248.1 248.0 247.9 247.8 247.7 247.6 247.5 247.3 247.2 247.0 246.8 245.9 244.5 242.7 239.7 236.2 233.6 231.2 229.1 226.4 223.8 221.3 219.9 219.1 218.7 219.3 219.6 219.7 221.5 223.3 225.1 227.0 230.2 234.3 235.7 237.1 238.7 239.2 239.2 238.9 238.6 238.3 238.0 237.4 236.8 236.2 235.4 234.7 233.9 232.6 231.8 231.5 231.2 231.0 230.7 230.5 230.3 229.9 229.5 229.2 229.1 228.9 228.7 228.3 228.4 228.5 228.8 229.0 229.3 230.0 230.7 231.2 231.7 232.7 233.7 234.1 234.6 234.9 234.7 234.4 234.0 233.7 233.1 232.3 231.3 230.4 229.2 227.4 225.5 223.9 223.2 222.2 221.3 221.5 221.9 222.4 222.9 224.6 226.9 229.9 231.9 234.0 237.5 239.2 240.0 239.5 238.0 236.6 236.5 236.4 236.3 236.1 235.9 235.6 235.3 235.0 234.8 234.5 234.3 234.0 233.7 233.5 233.3 233.3 233.2 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 235.7 234.2 231.6 228.1 223.8 218.9 213.7 208.4 203.2 198.3 193.7 190.0 186.9 184.6 183.2 182.5 182.9 184.1 186.3 189.3 193.1 197.5 202.4 207.6 212.8 217.9 222.7 226.6 229.6 231.5 232.6 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 232.7 226.7 227.8 229.8 231.2 231.1 228.7 225.4 220.9 217.2 214.0 212.0 210.2 208.3 205.4 204.0 203.1 202.9 202.9 202.9 202.7 203.2 204.1 205.7 207.0 208.1 208.6 209.0 209.3 209.0 208.8 208.6 208.3 207.1 205.1 202.8 200.8 199.5 200.4 202.2 204.5 207.3 208.8 209.4 209.2 208.9 208.5 208.2 207.8 207.5 207.4 207.6 207.8 208.0 208.3 208.8 208.9 208.3 207.3 204.2 199.2 193.1 191.0 192.1 195.3 200.0 202.5 203.6 202.3 199.7 196.3 193.8 191.7 190.0 188.5 187.3 186.3 185.5 184.8 184.0 184.7 185.1 185.2 184.8 184.5 184.5 184.3 184.4 184.9 185.2 185.4 185.7 185.7 185.8 186.1 
186.3 186.4 186.3 186.2 185.9 185.3 184.6 183.2 180.8 176.7 172.6 168.5 165.4 162.6 160.1 158.2 156.8 155.9 155.6 155.0 153.8 152.1 150.7 150.0 149.6 149.4 149.0 148.7 148.7 149.1 149.9 150.7 151.4 151.8 152.2 152.6 152.9 153.1 153.2 153.5 153.8 154.1 154.5 154.8 155.0 155.1 155.1 155.1 155.1 155.1 155.3 155.4 155.5 155.6 155.5 155.4 155.2 155.2 155.2 155.3 155.6 155.8 156.3 156.6 156.8 156.9 157.1 157.4 157.6 157.7 157.7 157.8 158.0 158.3 158.6 158.9 159.2 159.7 159.7 159.5 159.4 157.9 155.7 153.9 153.1 153.6 156.3 161.2 167.8 178.4 189.0 199.5 206.6 211.4 214.7 217.9 220.8 223.3 226.2 227.5 227.6 228.5 229.2 229.9 230.2 230.9 231.9 232.6 233.2 233.6 233.8 234.0 234.3 234.6 234.1 229.4 218.0 201.9 184.4 169.0 159.3 154.8 156.0 159.1 164.0 170.6 178.7 188.0 198.0 208.0 217.3 238.0 240.2 241.7 243.4 245.4 247.4 251.7 254.9 258.2 262.8 266.7 270.4 273.2 275.9 276.5 278.5 278.9 279.8 280.2 280.2 280.7 280.7 280.7 273.6 275.6 276.5 278.6 279.0 278.9 278.5 278.1 277.6 277.4 276.7 276.1 276.2 276.3 276.5 276.8 276.9 276.9 276.9 276.7 276.5 276.0 275.3 273.7 269.0 256.3 242.8 231.4 234.2 241.8 272.7 268.7 265.4 260.7 258.6 256.4 254.4 251.3 247.9 243.3 238.7 233.6 220.3 217.5 215.0 213.9 213.0 212.2 211.3 210.6 209.8 209.2 209.0 209.0 208.6 208.6 208.6 208.5 208.4 208.3 208.3 208.3 208.3 208.3 208.3 208.3 208.3 208.3 208.3 208.2 208.1 207.9 207.9 207.9 215.4 217.4 219.2 220.3 223.5 226.1 227.9 229.1 230.3 231.3 231.8 232.5 232.9 233.4 233.8 234.2 234.2 234.2 234.2 234.2 234.2 232.9 232.7 232.5 232.3 232.2 232.0 231.7 230.9 230.4 230.4 230.7 231.1 231.4 232.4 233.5 234.5 235.0 235.5 236.2 237.2 238.0 238.4 238.5 238.4 238.3 238.1 237.9 237.9 237.8 237.5 236.9 236.2 235.6 235.2 235.0 234.9 234.6 234.4 234.1 233.7 233.4 233.1 232.8 232.3 231.8 231.3 230.3 228.5 224.4 216.7 208.8 203.0 198.0 193.7 191.0 191.1 192.4 194.9 200.7 206.7 209.6 210.6 210.8 210.2 209.5 208.8 208.3 208.1 208.1 208.3 208.5 208.6 208.4 208.5 208.6 208.5 208.1 207.5 206.9 206.3 205.1 201.6 197.9 193.2 185.3 178.1 172.0 168.3 165.4 163.0 161.1 161.1 162.4 168.1 172.7 176.4 181.8 184.6 185.5 185.9 186.0 186.0 185.6 185.4 185.3 185.3 185.2 184.9 184.9 184.9 185.0 184.9 184.8 184.7 184.7 184.7 184.7 184.6 184.6 184.6 184.6 184.7 184.7 184.7 184.8 184.7 184.9 185.3 186.2 186.3 186.5 186.7 186.8 186.8 186.6 186.5 186.4 186.3 185.7 184.9 183.8 183.0 181.8 179.9 176.8 172.8 168.1 165.3 164.1 164.5 165.7 168.0 172.0 175.2 177.5 178.7 179.7 181.0 182.7 185.1 187.3 188.7 190.1 191.6 193.3 194.8 196.5 199.0 202.1 205.5 208.9 210.7 211.9 211.9 211.1 210.3 210.3 210.2 210.2 210.1 209.7 209.1 208.1 206.6 205.2 204.7 203.6 202.5 202.0 202.2 202.7 203.4 204.0 204.5 205.6 206.6 207.8 209.9 211.4 212.1 212.1 211.9 211.6 211.4 210.9 209.9 208.0 206.7 205.4 203.1 201.0 199.1 197.8 196.4 194.8 195.2 195.5 195.8 197.6 199.0 200.2 204.1 206.9 209.1 211.3 213.3 215.2 216.3 216.5 216.2 215.9 215.5 215.1 214.2 212.4 210.1 207.5 205.6 204.1 202.5 201.2 200.5 200.7 200.9 201.2 201.5 201.9 202.4 202.7 203.0 203.4 203.4 203.2 202.8 201.9 200.7 199.1 197.1 195.1 192.7 190.9 189.1 187.6 186.1 184.9 183.8 182.9 182.0 181.2 180.4 179.7 179.1 178.5 178.1 177.7 177.5 177.3 177.0 176.3 176.2 177.0 177.9 178.6 178.9 179.0 179.1 179.2 179.3 179.5 179.7 179.9 180.1 180.2 180.3 180.5 181.1 181.7 182.5 183.7 184.1 184.3 184.8 185.1 185.2 185.1 185.1 185.0 184.8 184.7 184.5 184.3 184.2 184.1 184.1 182.6 179.9 175.6 170.0 164.7 163.4 163.4 164.1 165.0 168.1 172.1 174.0 175.9 177.7 178.6 180.2 182.1 184.1 185.9 187.4 188.0 189.2 191.0 193.2 195.4 197.7 199.6 201.2 202.7 
204.2 205.8 207.5 209.9 212.4 215.1 216.2 215.9 214.9 211.9 210.6 210.4 209.4 209.1 209.2 208.9 208.8 208.9 207.9 206.9 206.0 205.6 205.3 204.9 204.5 204.4 204.6 204.9 205.2 205.5 205.7 206.0 206.3 206.4 206.7 207.2 207.2 207.4 207.7 207.8 207.9 208.0 208.2 208.5 208.9 209.2 209.2 209.0 208.5 208.0 207.8 206.7 205.2 203.6 202.1 200.8 199.7 197.1 194.7 193.4 192.7 192.2 191.8 191.4 190.9 190.2 188.9 187.6 186.5 185.6 184.6 183.2 182.2 181.2 180.0 178.7 177.4 176.2 175.8 176.1 178.8 181.2 183.2 183.4 183.7 183.9 183.9 184.0 184.1 183.9 184.0 184.3 184.8 185.2 185.6 185.8 185.9 185.8 185.7 185.6 185.5 185.6 185.1 183.8 180.6 176.8 172.7 169.3 167.9 168.8 172.9 179.4 187.5 195.1 201.3 206.0 207.6 208.3 208.2 208.2 207.8 207.2 207.3 207.5 207.7 207.6 207.4 207.1 207.1 207.0 206.8 206.6 206.5 206.5 206.5 206.4 206.3 206.3 206.3 206.2 206.4 206.5 206.7 206.9 207.0 207.1 207.4 207.7 208.0 208.1 208.2 208.5 208.6 208.8 208.9 209.3 209.5 209.5 209.2 208.9 208.6 208.4 207.9 207.2 206.6 205.5 203.8 202.9 201.1 198.0 195.7 193.5 190.8 189.1 188.0 187.6 188.9 190.6 192.0 194.7 198.2 202.5 206.5 211.9 219.8 226.4 231.7 235.2 238.1 240.4 241.8 242.2 242.2 242.2 241.9 241.4 240.6 238.9 236.8 233.8 232.3 231.1 229.9 228.7 227.8 228.2 228.7 229.2 229.5 230.1 230.9 232.2 233.3 234.5 236.4 237.2 237.7 238.5 238.5 238.1 237.4 237.0 236.5 235.3 233.6 231.8 230.6 229.7 229.0 228.6 228.6 228.8 229.0 229.5 230.3 230.9 231.4 231.7 232.4 232.9 233.4 234.5 235.4 235.9 235.5 235.2 235.0 234.2 233.4 232.5 232.3 232.0 231.7 231.5 231.4 231.5 231.9 232.1 232.1 232.8 233.0 232.8 232.2 231.7 231.1 226.8 221.8 216.2 214.6 214.9 216.5 220.5 223.4 224.6 225.4 227.3 231.2 233.5 235.2 236.2 237.1 238.6 241.3 244.6 248.3 252.6 255.8 258.7 261.2 265.4 269.1 272.1 275.9 279.5 283.0 285.1 286.0 285.8 286.2 285.3 282.4 280.7 279.9 280.3 279.6 278.7 278.2 277.2 275.7 273.5 268.4 260.9 250.9 236.9 226.3 221.5 223.9 230.1 239.5 245.3 249.0 247.9 243.9 239.5 235.9 232.8 229.7 226.0 222.6 219.0 214.1 209.9 206.1 203.4 202.7 202.9 204.5 204.0 203.4 204.3 205.1 205.7 205.2 205.6 206.1 206.6 206.7 206.6 206.9 207.5 208.1 208.6 208.8 208.9 208.9 209.0 209.0 208.4 208.0 207.8 207.4 206.9 206.5 206.2 206.0 206.1 206.4 207.0 207.8 208.4 208.7 209.0 208.9 208.9 208.9 208.7 208.6 208.6 208.5 208.4 208.1 207.4 206.9 206.7 205.8 204.6 203.4 202.6 201.8 200.9 200.5 200.7 201.2 202.1 203.3 204.7 206.9 209.7 213.2 217.8 221.8 224.9 228.7 232.2 235.0 238.0 240.3 241.4 242.1 242.8 243.5 242.8 241.6 239.9 238.4 236.9 235.2 232.0 229.0 226.5 224.8 223.3 221.9 221.9 222.3 222.9 224.8 226.5 227.5 230.6 233.2 234.4 236.8 238.8 239.7 241.0 241.7 241.4 241.0 240.0 238.2 235.3 232.9 231.8 229.3 226.1 222.1 219.9 218.3 217.2 217.2 217.6 218.2 219.7 221.5 223.3 226.6 230.5 234.9 237.2 239.0 241.4 242.4 242.9 242.7 242.4 242.0 241.1 238.7 235.8 232.0 228.4 225.1 222.3 220.5 219.5 219.0 219.1 219.5 220.0 220.8 221.7 222.7 223.8 225.0 226.1 227.3 228.5 229.5 230.5 231.5 232.1 232.6 232.9 232.9 232.5 231.7 230.1 228.2 226.2 223.6 221.0 218.5 215.5 212.9 210.4 207.8 205.7 203.8 202.3 201.1 200.3 200.6 201.9 204.0 206.3 207.3 207.4 207.2 207.4 207.9 208.3 208.9 209.5 210.8 211.8 212.3 213.0 213.7 214.2 215.2 216.1 217.2 218.6 220.4 222.9 223.6 224.5 225.8 227.5 229.3 231.3 232.3 233.1 233.9 234.5 234.8 234.7 234.5 234.4 234.3 234.1 234.0 234.0 233.6 233.1 232.6 232.2 231.9 231.9 231.7 231.5 231.3 231.1 230.8 230.6 230.2 229.8 229.4 229.1 228.7 228.2 227.2 225.3 220.0 216.5 212.7 205.8 202.2 200.0 200.2 204.0 208.4 211.4 211.8 211.3 209.7 208.9 208.4 207.9 207.5 207.2 
206.7 206.8 207.2 207.8 208.3 208.7 208.6 207.9 206.8 205.4 204.4 202.5 196.9 189.5 182.5 181.5 182.3 184.2 184.8 185.4 186.3 188.4 189.3 189.5 190.6 191.1 191.1 190.7 189.7 188.4 187.0 185.7 184.6 184.6 184.9 185.3 185.5 185.4 185.1 185.2 185.4 185.7 186.1 186.3 186.4 186.5 186.6 186.5 186.2 185.9 185.5 185.2 185.0 184.9 184.5 184.3 184.4 184.4 184.4 184.5 184.5 184.6 184.7 184.7 184.7 184.8 185.1 185.3 185.6 185.9 186.1 186.2 186.0 185.9 185.6 185.4 185.2 184.8 183.7 182.6 181.6 180.7 179.1 176.1 174.0 170.7 165.7 160.7 156.6 153.4 148.4 144.5 142.8 142.5 143.0 143.9 145.0 147.1 150.9 152.4 153.6 155.2 155.4 155.3 155.5 155.7 155.7 155.4 155.5 155.8 156.5 156.4 156.0 155.3 154.6 154.0 153.4 153.4 153.5 153.7 153.6 153.4 153.1 153.1 153.1 153.4 153.7 154.0 154.0 154.4 155.1 155.7 156.1 156.4 156.7 156.5 155.9 155.5 154.9 154.4 154.1 153.3 152.0 150.4 149.6 149.5 149.8 150.6 151.9 153.5 154.7 155.7 157.1 158.5 159.8 160.7 161.2 161.4 161.8 161.0 160.8 160.1 159.1 157.8 156.2 154.3 152.2 149.9 147.4 144.8 142.2 139.6 137.0 134.5 132.1 129.8 127.7 125.8 124.1 122.6 121.4 120.5 119.8 119.4 119.2 119.4 119.8 120.5 121.6 122.9 124.5 126.3 128.5 131.0 133.6 136.6 139.8 143.2 146.8 150.6 154.6 158.6 162.2 166.3 170.7 175.0 179.2 183.4 187.3 191.1 194.6 197.9 200.8 203.4 205.4 207.1 208.3 209.0 209.2 209.1 208.9 208.7 208.4 208.2 208.0 207.9 208.1 208.7 209.6 210.9 212.8 214.9 217.3 219.7 222.3 224.9 227.2 229.2 230.9 232.2 233.0 235.4 235.2 235.1 234.8 234.6 234.5 233.9 233.4 232.9 232.6 231.9 231.1 230.4 228.5 225.2 219.0 211.1 203.6 201.5 201.6 203.4 205.9 210.0 214.4 215.6 216.7 217.7 218.7 219.2 219.6 220.8 222.4 224.0 225.0 225.2 225.2 225.9 225.8 225.3 223.6 221.8 220.0 216.0 213.1 210.8 209.8 209.2 208.9 208.2 208.0 208.1 208.3 208.8 209.6 209.1 208.3 207.3 205.8 200.5 193.1 184.7 180.7 179.3 183.3 188.8 196.8 195.7 194.6 193.3 192.5 191.5 190.2 189.9 189.3 188.1 186.6 185.3 184.5 184.5 184.5 184.3 183.9 183.4 182.9 182.4 182.0 181.8 181.5 181.3 181.5 181.6 181.7 181.8 181.9 182.1 182.1 182.5 182.9 183.3 183.6 183.9 184.3 184.4 184.6 184.9 185.3 185.7 186.0 186.2 186.4 186.6 186.7 186.7 186.5 186.3 186.1 185.9 185.6 185.4 185.2 185.1 184.7 183.7 182.9 182.0 181.1 180.1 178.7 175.9 173.6 171.6 170.6 169.3 168.2 167.7 166.9 166.0 165.1 164.5 164.0 163.5 163.5 163.9 164.0 165.0 166.4 167.1 168.9 171.4 173.1 174.5 175.7 176.7 177.7 178.4 178.4 178.6 178.7 178.4 178.0 177.6 177.4 176.9 176.2 175.4 174.9 174.6 173.4 172.1 170.7 169.2 168.2 167.5 166.4 165.8 165.6 165.7 165.9 166.1 166.7 167.5 168.4 169.7 170.8 171.7 174.0 175.4 176.3 177.2 178.1 178.8 179.2 179.2 178.5 178.1 176.9 174.5 173.3 171.5 169.2 166.8 165.0 164.1 163.2 162.6 162.2 162.6 163.1 163.7 165.0 166.1 166.9 168.1 169.6 171.4 172.3 172.9 173.0 172.6 171.5 169.7 171.4 170.5 169.0 167.0 164.4 161.4 157.9 154.2 150.3 146.3 142.2 138.2 134.3 130.5 127.1 123.9 121.0 118.5 116.4 114.7 113.3 112.4 111.9 111.9 112.4 113.3 114.8 116.9 119.4 122.4 125.9 130.0 134.6 139.6 145.1 151.0 157.2 163.8 170.6 177.6 184.1 190.4 197.4 204.0 210.3 216.0 221.2 225.7 229.3 231.9 233.4 234.0 234.0 234.0 234.0 233.9 233.9 233.8 233.6 233.5 233.4 233.3 233.2 233.1 233.0 232.9 232.8 232.8 232.7 232.7 233.5 233.7 233.7 233.6 233.3 233.2 233.1 233.0 232.8 232.5 232.4 232.1 231.9 231.8 231.6 231.3 231.1 230.1 228.4 227.9 227.4 226.7 226.1 225.3 224.1 222.6 220.5 217.7 215.9 212.9 208.3 204.8 202.0 199.9 198.0 197.0 197.1 197.1 198.5 202.2 203.2 203.8 203.6 203.9 204.4 205.2 206.1 206.7 207.1 207.4 207.7 207.9 208.0 207.9 207.7 207.3 206.8 206.3 205.7 205.2 
204.9 204.7 204.6 204.8 205.0 205.3 205.6 205.7 206.1 206.9 207.2 207.5 208.1 208.4 208.5 208.5 208.4 208.4 208.3 208.2 208.1 208.0 208.0 207.9 207.8 207.8 207.7 207.5 207.2 206.9 206.9 206.9 206.8 206.7 206.6 206.5 206.4 206.3 206.2 206.2 206.4 206.8 207.3 207.6 207.8 207.9 208.0 208.2 208.3 208.4 208.5 208.6 208.6 208.5 208.3 208.1 208.0 208.0 208.0 207.8 207.6 207.4 207.1 207.0 206.8 206.7 205.3 203.8 202.4 201.8 200.4 198.5 195.3 189.6 182.2 175.8 170.4 165.7 165.8 166.4 167.4 168.5 170.6 173.3 177.0 182.3 188.9 192.5 197.4 204.2 206.5 209.2 212.6 213.7 214.7 215.6 215.7 215.7 215.7 215.5 215.3 215.3 215.0 214.3 212.9 212.4 212.0 211.6 211.3 211.3 211.5 212.4 213.5 214.4 215.3 216.8 219.4 222.7 225.9 228.4 229.8 231.0 232.6 233.8 234.7 235.3 236.0 236.5 236.5 236.0 235.3 234.6 233.9 233.4 233.1 232.9 232.7 232.4 232.1 232.0 232.1 232.2 232.3 232.4 232.5 232.6 232.7 232.7 232.7 232.9 233.1 233.4 233.7 234.0 234.2 234.5 234.7 234.8 234.8 234.8 234.7 234.5 234.4 234.3 234.2 234.3 234.6 235.3 235.3 234.8 234.6 234.3 234.0 233.8 233.4 232.3 230.0 223.3 213.5 202.7 197.1 195.8 198.4 200.2 201.4 202.1 204.4 207.9 216.5 222.1 225.5 231.0 235.3 238.8 244.9 248.7 251.0 251.0 250.8 250.5 250.3 250.4 250.5 249.8 249.1 248.5 248.6 248.3 247.8 247.2 246.7 246.4 246.5 246.5 246.4 246.2 246.0 245.9 245.8 245.5 245.2 245.1 245.1 245.1 245.2 245.3 245.5 245.9 246.2 246.5 246.7 247.0 247.7 247.7 247.6 247.4 247.2 247.0 247.0 244.8 240.5 234.0 223.9 215.2 209.5 207.5 206.9 207.1 207.2 207.1 206.8 206.2 205.7 205.3 205.2 205.2 205.3 205.3 205.2 205.2 204.9 204.7 205.0 205.5 206.0 206.0 208.1 212.3 223.1 229.8 234.9 237.0 238.9 240.5 241.2 240.7 239.9 238.9 238.0 236.9 234.0 232.5 230.9 227.6 224.7 222.2 220.8 219.6 218.8 219.2 219.7 220.5 222.0 223.8 226.2 229.7 233.2 236.4 238.1 239.9 241.3 241.3 241.0 240.3 239.0 237.7 236.0 233.3 229.7 226.2 224.5 222.6 220.5 218.2 216.8 216.1 217.0 217.8 218.5 220.5 223.9 228.3 230.6 233.3 236.3 238.5 240.7 243.0 244.6 245.5 245.9 245.0 242.9 240.1 237.9 234.0 228.9 225.7 221.1 214.7 213.9 213.6 214.2 215.2 216.3 217.7 219.1 220.7 222.4 222.4 222.4 222.4 222.4 222.4", - "input_type": "phoneme", - "offset": 125.856 + "f0_timestep": "0.005" }, { + "offset": 142.844, "text": "SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", "ph_seq": "SP t ai sh ang w ang q ing h e ch ou b ai f a zh ang s an q ian SP m ing g e h e x v sh ui j ie zh ir z ai w u x ing zh ir w ai t ian x in w o x in zh ir j ian b u c i0 j i sh eng j i sh ir z uo y i j ie x iao y ao x ian SP", - "note_seq": "rest A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 D#3 D#3 A#3 A#3 C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 G#3 G#3 rest F#3 F#3 F#4 F#4 F4 F4 C#4 C#4 C#4 C#4 D#4 D#4 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 D#3 D#3 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 C#3 C#3 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 G#3 G#3 A#3 A#3 C#4 C#4 C#4 C#4 C#4 C#4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.289 0.18 0.18 0.181 0.181 0.181 0.181 0.1800001 0.1800001 0.362 0.362 0.181 0.181 0.3609999 0.3609999 0.5420001 0.5420001 0.181 0.181 0.3610001 0.3610001 0.362 0.362 0.181 0.3610001 0.3610001 0.5420001 0.5420001 0.362 0.362 0.3610001 0.3610001 0.1809998 0.1809998 0.723 0.723 0.1799998 0.1799998 0.1810002 0.1810002 0.1809998 0.1809998 0.1810002 0.1810002 0.3610001 0.3610001 0.3610001 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.1810002 0.1810002 0.1799994 0.1799994 0.3620005 0.3620005 0.3610001 0.3610001 0.1809998 0.1809998 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.3620005 
0.3620005 0.1799994 0.1799994 0.3620005 0.3620005 0.5419998 0.5419998 0.5419998 0.5419998 0.5419998 0.5419998 0.3620005 0.3620005 2.891 2.891 0.072", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.180037 0.108963 0.110725 0.069275 0.135819 0.045181 0.120758 0.060242 0.119758 0.060242 0.237 0.125 0.135819 0.045181 0.285698 0.075302 0.451912 0.090088 0.110214 0.070786 0.225456 0.135544 0.362 0.105698 0.075302 0.305275 0.055725 0.417 0.125 0.22194 0.14006 0.22094 0.14006 0.111725 0.069275 0.622094 0.100906 0.110725 0.069275 0.120759 0.060242 0.111725 0.069275 0.120759 0.060242 0.276665 0.084335 0.290138 0.070862 0.125275 0.055725 0.131302 0.049698 0.110215 0.070786 0.151389 0.02861 0.277665 0.084335 0.305275 0.055725 0.111725 0.069275 0.110214 0.070786 0.110725 0.069275 0.135818 0.045181 0.241517 0.120483 0.140845 0.039154 0.282182 0.079819 0.453148 0.088852 0.377846 0.164154 0.481758 0.060242 0.22194 0.14006 2.891 0.072", - "f0_timestep": "0.005", + "ph_dur": "0.18 0.109 0.1107 0.0693 0.1358 0.0452 0.1208 0.0602 0.1198 0.0602 0.237 0.125 0.1358 0.0452 0.2857 0.0753 0.4519 0.0901 0.1102 0.0708 0.2255 0.1355 0.362 0.1057 0.0753 0.3053 0.0557 0.417 0.125 0.2219 0.1401 0.2209 0.1401 0.1117 0.0693 0.6221 0.1009 0.1107 0.0693 0.1208 0.0602 0.1117 0.0693 0.1208 0.0602 0.2767 0.0843 0.2901 0.0709 0.1253 0.0557 0.1313 0.0497 0.1102 0.0708 0.1514 0.0286 0.2777 0.0843 0.3053 0.0557 0.1117 0.0693 0.1102 0.0708 0.1107 0.0693 0.1358 0.0452 0.2415 0.1205 0.1408 0.0392 0.2822 0.0798 0.4531 0.0889 0.3778 0.1642 0.4818 0.0602 0.2219 0.1401 2.891 0.072", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 G#3 G#3 F#3 D#3 A#3 C#4 A#3 G#3 F#3 G#3 rest F#3 F#4 F4 C#4 C#4 D#4 A#3 G#3 G#3 F#3 F#3 D#3 A#3 G#3 G#3 F#3 F#3 C#3 A#3 G#3 G#3 F#3 F#3 G#3 A#3 C#4 C#4 C#4 D#4 D#4 rest", + "note_dur": "0.289 0.18 0.181 0.181 0.18 0.362 0.181 0.361 0.542 0.181 0.361 0.362 0.181 0.361 0.542 0.362 0.361 0.181 0.723 0.18 0.181 0.181 0.181 0.361 0.361 0.181 0.181 0.181 0.18 0.362 0.361 0.181 0.181 0.18 0.181 0.362 0.18 0.362 0.542 0.542 0.542 0.362 2.891 0.072", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "232.5 232.5 232.5 232.9 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 232.9 232.6 232.2 231.8 231.2 230.7 229.9 229.2 228.5 227.9 227.3 226.7 226.1 225.6 225.2 224.9 225.0 226.3 230.0 233.9 236.2 236.7 236.7 236.1 235.6 235.1 234.7 233.2 231.5 229.7 228.3 226.3 221.5 215.7 209.2 200.8 197.1 195.0 194.5 194.6 195.2 196.9 198.8 200.5 201.3 202.1 203.1 204.4 206.8 209.6 210.3 212.3 214.6 214.8 214.3 213.3 212.5 211.2 209.8 208.9 208.4 208.4 208.6 208.4 208.0 207.9 207.9 208.0 208.1 208.1 208.1 208.0 207.6 207.2 207.2 207.2 207.2 207.3 207.5 207.5 207.6 207.7 207.7 208.0 208.1 208.1 208.0 207.7 207.4 207.3 207.1 207.1 207.2 207.4 207.5 207.6 207.7 207.7 207.7 207.8 207.9 208.0 208.0 207.9 207.4 207.0 206.5 205.6 204.1 201.6 195.6 190.2 185.6 183.1 181.9 182.6 184.1 185.2 185.5 187.1 189.1 191.2 191.7 191.7 191.7 189.9 187.5 185.2 183.9 183.3 183.6 184.0 184.1 183.9 184.3 184.9 185.7 186.1 186.4 186.8 187.1 187.2 186.3 184.9 183.2 180.7 177.3 173.2 166.9 
162.6 159.3 158.0 157.4 156.9 156.3 155.8 155.4 155.6 155.2 154.7 154.1 153.5 153.0 152.7 153.2 153.7 154.0 153.7 153.1 152.9 152.7 152.5 152.2 152.0 152.0 152.1 152.3 152.6 152.7 152.7 152.6 152.8 153.0 153.3 153.4 153.5 153.5 153.4 153.3 153.1 153.0 152.9 152.8 153.1 153.4 153.9 154.4 154.9 155.2 155.3 155.5 155.7 155.8 156.1 156.6 157.2 157.6 157.9 159.0 160.1 161.2 161.1 160.5 159.5 155.9 152.2 148.3 147.9 148.7 151.3 153.3 156.5 161.2 167.0 173.4 180.7 185.4 190.6 196.5 202.9 209.2 215.1 221.1 225.9 228.9 231.0 232.7 233.8 232.3 231.5 231.5 230.9 230.5 230.6 231.1 231.3 230.9 231.1 231.6 232.3 232.6 233.0 233.4 232.9 230.0 223.8 232.9 228.4 221.4 213.0 204.6 197.8 193.5 191.5 192.3 194.1 197.0 200.7 205.3 210.3 215.5 220.6 225.0 228.7 231.3 232.5 232.7 232.6 232.5 232.4 232.3 232.1 231.9 231.8 231.6 231.5 231.4 231.2 231.2 231.7 232.9 235.2 238.5 242.8 247.8 253.0 258.3 263.5 268.2 272.0 274.7 276.2 279.7 280.2 280.7 281.2 281.7 281.9 282.0 281.7 281.3 280.9 280.0 277.7 274.1 268.2 260.9 252.7 249.8 251.7 256.8 262.0 267.7 273.8 276.8 277.2 275.9 273.5 269.1 263.3 257.5 253.9 251.8 247.6 243.9 240.5 237.9 234.5 235.1 234.0 232.0 229.1 225.6 222.0 218.4 215.0 212.2 210.1 209.0 208.5 208.5 208.5 208.5 208.4 208.4 208.4 208.3 208.2 208.1 208.0 208.0 207.9 207.8 207.7 207.6 207.5 207.5 207.4 207.4 207.3 207.3 207.3 207.4 207.7 208.6 209.9 211.6 213.7 216.0 218.5 221.0 223.6 225.9 228.0 229.7 231.1 231.7 231.6 233.0 234.9 236.1 237.2 237.8 237.8 237.5 237.0 236.7 236.4 236.1 235.0 233.7 232.3 231.5 230.8 229.4 228.9 228.8 228.5 228.3 228.2 228.6 229.1 229.7 229.9 230.7 231.6 232.9 234.4 235.7 236.4 236.9 237.1 237.6 238.0 238.2 237.7 237.2 236.0 232.1 225.6 218.7 215.8 216.9 220.4 223.5 225.0 225.4 225.1 224.3 222.8 220.4 218.4 216.5 214.9 213.4 212.1 210.6 208.9 207.3 207.2 208.7 211.4 212.0 212.4 212.8 212.1 211.6 211.0 210.2 209.5 208.9 208.2 207.6 207.2 206.4 204.7 201.7 195.4 188.6 181.1 179.7 181.2 187.5 198.9 209.3 217.5 218.1 218.0 217.0 215.9 214.7 213.5 211.6 209.2 205.9 201.4 195.6 188.0 184.3 181.5 180.0 179.3 178.9 178.6 178.7 179.2 179.8 179.9 180.3 181.2 181.9 182.3 182.5 183.2 183.9 184.5 184.6 184.8 185.0 185.0 184.9 184.8 184.5 184.3 184.2 184.0 184.0 184.2 184.6 185.0 185.6 186.0 186.3 186.7 186.8 186.9 186.5 186.1 185.7 185.5 183.5 181.0 178.1 177.6 177.9 179.2 181.7 184.3 185.8 187.1 188.5 190.4 191.9 193.2 194.2 195.7 197.3 198.1 199.1 200.2 201.0 202.1 203.3 204.9 206.5 208.0 209.3 210.8 212.1 212.5 212.5 212.4 212.6 212.7 212.9 212.8 212.9 213.2 212.1 211.1 210.3 207.3 205.5 204.4 202.6 200.5 198.3 197.1 196.6 196.6 197.2 197.7 198.0 200.0 201.7 203.1 206.7 209.0 210.4 212.2 213.6 214.6 214.8 214.8 214.5 213.6 212.0 209.1 208.0 205.8 202.2 198.8 196.1 194.5 193.2 192.6 193.1 193.4 194.6 197.1 199.3 201.5 203.7 207.3 210.1 211.6 213.7 215.4 216.1 215.9 215.6 215.2 214.9 213.9 211.9 208.3 205.2 203.4 200.8 199.0 198.6 198.4 198.5 198.7 199.1 199.5 199.9 200.5 201.1 201.6 202.1 202.5 202.7 192.2 178.4 159.8 141.3 126.6 119.8 117.5 116.3 116.1 115.5 114.7 113.7 112.5 111.3 110.0 108.8 107.9 107.1 106.5 106.2 106.8 108.8 112.4 117.7 124.9 133.2 142.4 152.2 161.8 170.6 177.2 181.5 183.6 183.8 184.0 184.3 184.6 184.8 185.0 185.1 185.2 185.3 185.4 185.5 185.6 185.7 185.9 185.9 186.0 186.1 186.0 185.8 185.5 185.3 185.1 184.9 184.4 184.0 183.8 183.4 183.3 183.2 183.3 183.5 183.6 183.8 184.1 184.5 185.6 186.6 187.4 188.6 190.0 191.7 192.6 193.4 194.3 194.4 194.6 195.0 193.9 192.9 192.1 188.9 187.0 186.9 191.6 198.3 208.3 216.1 226.4 240.6 251.1 262.9 276.7 290.5 303.4 314.3 
323.1 329.6 333.3 339.4 344.9 348.7 349.4 348.9 347.7 347.3 347.7 348.8 347.0 345.5 345.8 344.0 342.2 341.1 340.7 340.2 339.1 338.7 338.8 339.7 340.4 341.3 343.4 345.9 348.3 349.6 352.2 354.9 356.8 360.2 363.7 365.9 368.3 370.9 374.1 375.6 376.6 377.6 377.2 376.5 375.4 374.6 373.7 371.9 370.5 369.3 368.1 367.4 366.9 366.4 366.1 366.0 366.3 366.7 367.2 367.2 367.9 368.8 369.2 369.9 370.7 371.3 371.6 371.9 372.8 372.9 372.5 372.2 371.1 369.0 365.0 359.9 354.1 352.6 352.8 354.1 356.0 357.2 357.8 359.1 359.6 359.5 358.3 357.8 357.8 357.8 357.9 358.0 357.9 357.7 357.6 357.9 358.1 358.2 358.7 359.2 359.5 359.2 359.4 360.3 361.1 361.6 361.8 362.3 362.9 363.8 365.3 366.2 366.2 364.5 361.9 358.0 356.5 354.8 353.1 351.4 349.9 348.6 347.4 346.3 345.6 344.8 344.7 345.5 346.1 346.8 347.6 348.4 349.2 350.2 351.5 352.8 354.1 355.0 355.7 356.3 356.4 355.3 352.6 348.2 342.2 334.4 325.5 320.0 319.9 324.8 330.8 335.7 337.7 339.2 341.9 343.2 344.1 344.7 344.3 343.6 342.5 341.5 340.2 336.7 332.9 329.2 326.1 324.3 322.7 320.2 317.7 315.0 311.8 308.7 305.7 302.8 298.9 294.8 291.9 289.1 286.4 284.2 283.3 283.2 282.5 281.4 280.1 279.2 277.7 276.0 274.8 273.6 272.4 271.3 270.5 270.1 270.0 269.8 269.7 269.6 269.8 270.2 271.6 272.6 273.4 273.5 274.0 274.8 275.8 276.7 277.5 278.0 278.5 279.1 279.2 279.2 279.1 278.9 278.7 278.5 278.2 278.1 278.0 277.9 277.1 275.3 272.4 268.2 261.9 254.4 248.5 244.8 245.2 247.0 251.3 253.7 257.2 262.1 264.6 266.7 268.1 269.0 269.9 270.7 272.8 275.1 277.8 278.7 280.2 282.4 284.0 284.5 283.7 283.3 283.0 282.7 282.5 282.1 281.6 280.7 279.8 279.1 278.7 278.1 278.9 277.8 275.4 271.9 267.3 262.0 256.2 250.2 244.1 238.2 232.7 227.7 223.4 219.9 217.2 215.7 215.0 215.3 216.3 218.1 220.5 223.6 227.3 231.6 236.3 241.3 246.4 251.7 257.0 261.8 266.4 270.4 273.7 276.2 277.7 278.2 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.3 278.5 279.2 280.2 281.9 283.9 286.3 288.8 291.4 293.9 296.2 298.2 299.8 300.8 301.2 300.1 302.0 304.5 305.8 308.3 311.7 314.1 317.3 321.0 322.6 324.0 325.1 325.7 325.7 324.9 324.4 323.5 321.7 320.8 318.8 315.3 313.7 311.3 307.7 306.2 305.0 304.2 304.4 304.7 305.1 305.3 305.6 306.1 308.4 310.6 313.0 314.3 315.9 318.3 321.4 323.8 324.7 324.8 324.4 323.6 323.1 321.7 318.8 315.2 312.4 311.0 308.1 305.6 304.4 304.7 305.3 305.8 306.9 308.4 310.5 312.3 313.8 314.7 316.0 317.2 317.7 318.1 318.3 317.8 316.9 316.0 315.3 314.3 312.2 305.7 299.3 293.2 287.4 289.0 292.0 293.7 294.4 294.4 292.4 289.5 285.4 277.5 271.0 265.1 259.1 253.3 247.9 243.0 240.1 239.1 239.8 239.8 239.0 237.5 236.2 235.3 235.1 234.5 233.8 233.4 233.8 234.6 234.9 234.7 234.0 232.3 227.5 220.8 208.7 200.4 194.4 195.9 199.8 205.3 207.9 209.5 210.4 213.1 215.0 216.2 216.3 216.8 217.6 217.2 217.2 217.6 217.0 215.6 213.9 211.4 209.0 206.6 206.0 205.7 206.0 205.8 206.0 206.6 206.7 206.7 206.7 207.4 208.2 209.0 209.2 209.3 209.3 209.2 209.0 208.9 208.7 208.6 208.5 208.0 207.2 206.0 205.3 205.2 205.7 206.0 206.8 208.3 208.4 208.2 208.1 208.3 208.5 208.7 209.0 209.2 209.2 208.9 208.4 208.0 206.5 203.7 199.2 194.1 189.6 186.4 186.2 186.7 187.4 189.1 190.8 191.6 191.5 191.1 190.6 190.2 189.7 189.1 189.3 189.5 189.3 189.3 189.4 190.1 189.8 189.0 187.3 186.2 185.5 185.3 185.1 185.0 185.3 185.3 185.2 185.4 185.5 185.5 185.4 185.1 184.7 184.4 183.8 182.9 181.1 177.7 173.5 170.3 167.1 164.6 165.1 166.5 168.7 172.5 177.8 183.7 188.0 191.9 195.5 195.2 193.6 191.1 189.7 188.8 188.1 187.7 187.0 186.1 185.1 184.5 184.1 183.8 183.3 182.9 
182.6 182.4 182.1 182.1 182.2 182.3 182.5 182.6 182.8 182.9 183.0 183.0 183.2 183.4 183.8 184.1 184.4 184.7 184.7 184.8 184.9 185.0 185.1 185.2 185.3 185.5 185.6 185.7 185.7 185.7 185.7 185.0 184.5 183.5 182.1 180.2 178.0 175.5 172.8 170.0 167.1 164.2 161.4 158.8 156.4 154.2 152.4 150.8 149.7 148.9 148.5 148.5 149.0 149.8 150.9 152.1 153.2 156.0 156.9 157.8 159.0 160.2 161.1 162.2 162.4 162.1 161.5 160.8 160.0 159.0 157.9 156.9 155.7 154.6 153.5 152.4 151.4 150.5 149.7 149.1 148.6 148.3 148.2 148.2 148.4 148.8 149.3 150.0 150.8 151.8 152.8 154.0 155.3 156.5 157.7 159.0 160.0 161.1 162.0 162.6 163.3 163.7 163.9 163.8 163.5 163.1 162.4 161.7 160.7 159.7 158.4 157.1 155.9 154.5 153.2 152.0 150.8 149.7 149.0 148.2 147.6 147.3 147.1 147.2 147.3 148.2 150.8 152.9 154.8 215.5 218.4 218.7 219.3 220.5 220.0 219.6 219.1 220.2 221.6 223.3 225.8 228.7 232.5 234.1 235.7 237.4 237.3 236.4 234.3 228.9 220.7 208.6 205.1 203.6 205.0 207.1 210.8 216.9 220.4 223.1 225.3 226.8 228.6 231.4 232.1 230.8 227.6 222.4 217.6 214.3 211.6 209.9 209.3 208.7 208.1 207.7 207.4 207.1 207.0 206.9 206.8 206.7 206.6 206.6 206.5 206.5 206.5 206.7 206.9 207.1 207.3 207.4 207.5 208.1 208.2 208.1 208.2 208.3 208.4 208.6 208.7 208.7 208.7 208.7 208.6 208.3 208.1 207.9 207.8 207.7 207.5 207.3 207.1 207.0 206.8 206.0 204.2 200.2 196.3 193.2 193.4 195.1 197.2 197.6 197.9 198.3 198.8 199.1 199.2 198.9 198.6 198.3 197.7 197.2 196.8 198.0 198.6 198.7 196.9 194.2 190.7 188.9 187.8 187.1 186.7 186.3 186.0 185.7 185.3 185.0 185.0 185.1 185.3 185.6 185.9 186.1 186.0 185.6 184.7 183.3 181.5 178.8 174.7 169.5 162.5 159.3 156.7 154.8 155.7 157.4 160.3 166.7 172.0 175.2 180.6 184.7 187.0 187.7 188.2 188.2 188.4 188.6 188.7 188.5 188.3 187.8 187.3 186.8 186.6 186.4 186.1 185.8 185.5 185.3 185.2 185.1 184.9 184.8 184.7 184.5 184.3 184.2 184.1 184.1 184.0 184.1 184.1 184.3 184.4 184.4 184.6 184.7 184.8 184.8 184.9 185.6 186.3 187.0 187.5 187.4 187.2 186.8 186.4 185.7 184.1 179.4 173.3 164.2 160.2 158.3 160.0 164.3 168.9 171.4 172.5 171.8 168.4 165.6 163.2 160.8 158.5 156.4 154.1 151.8 149.2 146.0 142.5 139.0 136.5 134.8 133.6 138.8 139.4 140.4 141.5 142.5 143.7 144.6 145.0 144.8 144.4 143.7 143.1 142.3 141.3 140.4 139.4 138.3 137.3 136.3 135.3 134.5 133.7 132.9 132.5 132.1 131.8 131.8 131.9 132.2 132.6 133.1 133.8 134.6 135.5 136.5 137.7 138.8 139.9 141.1 142.1 143.1 144.1 144.8 145.5 145.9 146.2 146.2 146.2 145.9 145.3 144.6 143.9 142.9 141.7 140.7 139.4 138.4 137.1 136.0 134.8 133.7 132.9 132.1 131.5 132.9 135.0 137.2 185.2 193.2 199.3 204.5 209.2 214.8 220.3 225.1 228.5 231.3 233.5 235.5 237.1 237.9 238.7 238.4 234.1 226.1 217.8 210.9 207.9 206.3 206.5 208.1 209.6 209.6 209.7 209.8 209.4 209.9 210.7 212.3 212.6 212.3 211.5 210.7 209.9 208.9 208.3 207.9 208.2 208.7 209.3 209.6 209.4 209.1 209.1 209.1 209.1 208.9 208.9 208.9 208.7 208.3 207.8 207.5 206.6 205.0 201.8 195.1 186.5 179.2 174.9 173.5 176.3 180.0 184.3 188.2 190.7 192.1 197.9 203.0 207.7 210.6 212.2 213.0 212.0 211.1 210.3 210.0 209.7 209.3 208.9 208.4 207.8 205.6 201.5 196.3 189.6 183.4 178.0 178.5 180.6 185.3 192.0 199.0 206.6 209.5 211.4 211.5 209.4 207.3 205.3 202.9 200.8 199.1 197.6 196.2 194.6 193.2 191.6 189.8 186.9 184.5 182.6 182.2 182.5 183.0 183.5 184.3 185.3 185.8 186.2 186.7 186.6 186.2 185.8 185.4 184.9 184.0 182.2 178.9 173.9 166.5 159.7 154.6 151.2 149.2 148.7 150.8 153.7 157.0 160.6 165.0 172.6 177.6 181.7 184.1 184.1 183.6 183.1 182.9 182.8 182.5 182.2 182.0 181.8 181.8 181.7 181.9 182.0 182.2 182.4 182.6 182.7 182.7 183.1 183.7 184.3 185.1 185.9 186.3 186.6 186.8 
186.8 186.7 186.6 186.3 186.0 185.7 185.6 185.4 185.2 184.8 184.8 185.1 185.4 185.6 185.6 185.2 183.9 182.0 178.8 175.7 172.9 171.6 171.0 170.8 172.1 173.9 176.0 178.6 180.3 181.3 183.7 186.0 188.1 189.4 191.0 193.0 195.6 197.5 198.8 200.7 202.9 205.4 206.9 208.1 209.0 209.7 210.5 211.2 210.5 209.8 209.0 209.3 209.4 209.1 208.9 208.6 208.3 206.8 204.8 202.3 196.8 189.6 180.0 175.4 171.6 169.1 167.0 166.1 166.6 168.1 171.2 176.6 181.0 186.0 192.2 197.4 202.8 208.8 216.2 223.2 229.1 232.6 234.1 233.3 233.1 233.1 233.1 232.9 232.9 233.2 232.9 232.5 231.9 231.7 231.5 231.3 230.9 230.7 230.7 230.8 231.0 231.2 231.4 231.6 231.7 231.9 232.0 232.1 232.4 232.8 233.3 233.6 233.8 233.8 233.6 233.5 233.3 233.2 233.1 233.0 232.9 232.8 232.8 232.7 232.5 232.3 232.3 232.3 232.3 232.3 232.3 232.2 232.1 232.0 232.2 232.3 232.4 232.1 231.9 231.9 232.0 232.5 233.2 234.3 235.3 236.3 237.8 239.3 240.9 243.8 247.7 252.4 259.4 265.1 270.1 273.2 275.4 276.9 278.0 278.9 279.8 280.7 280.1 278.6 275.6 273.0 271.0 269.8 268.1 265.4 264.8 264.6 265.3 265.9 266.4 267.0 267.6 268.8 270.7 272.8 274.6 276.1 276.8 278.0 280.1 281.6 282.7 283.2 284.0 284.5 284.5 284.2 283.8 283.4 281.9 280.4 279.3 278.5 277.3 275.5 274.5 274.0 273.9 273.8 273.6 273.4 273.3 273.2 273.3 273.5 273.8 274.2 274.7 275.1 275.4 275.5 275.8 276.8 277.8 278.7 278.8 278.6 278.4 278.1 277.8 277.6 277.5 275.0 275.0 275.0 275.0 274.9 274.8 274.8 274.6 274.5 274.5 274.4 274.3 274.3 274.2 274.2 274.2 273.4 271.4 268.1 263.8 258.8 253.2 247.9 242.9 238.6 235.2 232.9 232.2 232.1 232.5 233.2 234.1 235.4 237.0 238.8 240.7 242.9 245.2 247.4 249.9 252.6 255.3 257.9 260.6 263.1 265.4 267.6 269.6 271.3 272.7 273.8 274.5 274.9 276.9 276.9 276.9 277.0 277.1 277.2 277.3 277.4 277.5 277.6 277.7 277.7 277.9 278.2 278.6 278.4 278.2 277.8 277.6 277.5 277.5 277.4 277.3 277.2 277.1 277.0 276.9 276.8 276.7 276.5 276.5 276.8 277.5 278.1 278.5 278.6 278.4 278.2 278.0 277.9 277.9 278.1 278.5 278.7 278.6 278.5 278.4 278.3 276.7 276.7 276.7 276.5 276.5 276.4 276.4 276.3 276.2 276.1 276.1 276.0 275.9 275.7 275.6 275.5 275.4 275.3 275.3 275.2 275.1 275.1 275.0 275.0 275.0 274.4 273.0 270.5 267.3 263.3 258.8 254.0 249.4 244.9 240.7 237.2 234.4 232.5 231.5 231.3 231.8 232.5 234.0 235.7 237.9 240.4 243.2 246.2 249.6 253.0 256.4 259.8 263.2 266.4 269.2 271.7 273.8 275.5 276.7 277.3 276.9 277.1 277.3 277.5 277.7 277.8 277.8 277.8 278.2 278.5 278.8 278.8 278.7 278.6 278.5 278.3 278.1 277.9 277.8 277.7 277.4 277.2 277.0 276.9 276.9 276.9 276.9 276.9 276.9 277.0 277.0 277.0 277.1 277.2 277.2 277.3 277.4 277.5 277.6 277.7 277.7 277.8 277.9 278.0 278.0 278.0 278.1 278.1 278.1 278.1 278.3 278.7 279.2 279.3 279.2 279.1 279.0 278.9 278.7 278.6 278.5 278.3 278.3 278.3 278.3 278.3 278.0 277.7 277.2 276.7 276.2 275.6 275.0 274.4 273.8 273.2 272.7 272.3 271.9 271.8 271.7 271.6 272.3 273.9 276.3 279.5 283.6 288.1 293.2 298.3 303.3 308.1 312.4 315.8 318.3 319.6 320.1 319.5 318.3 316.6 314.6 312.6 311.1 309.9 308.8 308.4 307.4 306.9 306.6 306.5 306.8 307.3 308.0 308.6 309.2 309.6 310.5 311.9 313.2 313.9 314.3 314.7 314.8 314.5 314.0 313.6 312.9 310.6 310.6 310.6 310.6 310.7 310.8 310.9 310.9 311.1 311.2 311.4 311.5 311.5 311.7 311.8 314.0 310.5 304.7 296.9 287.5 276.6 265.3 254.0 243.1 233.1 225.0 217.2 210.5 205.3 201.6 199.5 199.6 200.9 203.5 207.9 213.4 220.1 227.5 235.5 244.0 252.2 259.8 266.5 271.9 275.6 277.5 277.5 277.5 277.5 277.5 277.4 277.3 277.3 277.2 277.2 277.0 276.9 276.9 276.8 276.6 276.5 276.4 276.4 276.3 276.1 276.1 276.0 275.9 275.9 275.9 275.9 275.9 275.8 275.4 274.5 273.3 272.0 
271.0 270.2 270.1 270.5 272.0 274.2 276.9 280.2 284.0 288.0 292.0 296.1 299.9 303.3 306.0 308.1 309.3 309.7 309.7 309.7 309.6 309.5 309.5 309.5 309.5 309.4 309.3 309.3 309.3 309.2 309.2 309.2 309.0 309.0 309.0 308.9 308.8 308.8 308.8 308.8 308.8 308.8 308.6 308.6 308.6 310.4 310.3 310.2 310.2 310.3 310.4 310.5 310.7 310.9 311.1 311.2 311.3 311.3 311.3 311.3 311.4 311.5 311.5 311.6 311.7 311.8 312.0 312.1 312.2 312.3 312.4 312.4 312.4 312.3 312.2 312.1 311.9 311.7 311.6 311.4 311.3 311.3 311.3 311.3 311.3 311.3 311.1 311.1 311.1 310.9 310.9 310.9 310.8 310.8 310.7 310.6 310.6 310.6 310.6 310.5 310.4 310.4 310.4 310.5 310.6 310.8 311.0 311.3 311.4 311.5 311.5 311.5 311.5 311.5 311.5 311.4 311.3 311.3 311.3 311.3 311.3 311.3 311.1 311.1 311.1 311.0 310.9 310.9 310.9 310.9 310.8 310.8 310.8 310.7 310.6 310.6 310.6 310.6 310.6 310.6 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.1 309.7 310.0 310.2 310.5 310.7 311.0 311.3 311.4 311.5 311.5 311.5 311.6 311.7 311.7 311.7 311.8 311.8 311.9 312.0 312.2 312.2 312.2 312.3 312.4 312.4 312.5 312.6 312.6 312.7 312.7 312.7 312.7 312.7 312.7 312.6 312.5 312.4 312.3 312.1 312.0 311.9 311.8 311.7 311.7 311.6 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.6 311.7 311.7 311.7 311.7 311.8 311.8 311.9 312.0 312.0 312.1 312.2 312.2 312.3 312.4 312.5 312.6 312.6 312.6 312.6 312.7 312.7 312.8 312.9 312.9 312.9 312.9 312.9 312.9 312.9 312.9 312.9 312.8 312.6 312.6 312.6 312.5 312.3 311.1 311.1 311.5 311.9 312.4 313.1 313.7 314.4 315.1 315.7 316.1 316.6 316.7 316.7 316.4 316.0 315.4 314.4 313.3 311.8 310.2 308.8 306.9 305.1 303.5 301.8 300.2 299.0 297.9 297.1 297.1 297.3 298.1 299.0 300.3 301.7 303.5 305.3 307.5 309.9 311.8 314.2 316.6 318.4 320.5 322.3 323.6 324.8 325.7 326.0 326.0 325.7 325.1 324.0 322.5 321.0 318.9 316.7 314.7 312.2 310.1 307.7 305.8 303.6 301.6 300.2 298.6 297.6 296.9 296.6 296.4 296.7 297.4 298.5 299.7 301.3 303.3 305.1 307.5 309.9 312.0 314.6 316.9 319.0 321.1 322.6 324.2 325.3 326.1 326.6 326.6 326.3 325.5 324.5 323.0 321.2 319.3 317.1 314.7 312.6 309.9 307.4 305.4 303.1 301.2 299.7 298.1 297.1 296.4 296.0 295.9 296.2 296.9 297.9 299.3 300.9 302.8 305.1 307.2 309.7 312.2 314.4 316.9 319.3 321.2 323.1 324.7 325.8 326.7 327.2 327.2 326.9 326.0 324.9 323.3 321.7 319.5 317.5 315.0 312.4 310.1 307.6 305.1 303.1 301.0 299.1 297.8 296.6 295.9 295.4 295.4 295.7 296.4 297.6 299.0 300.5 302.6 304.9 307.2 309.7 312.0 314.7 316.9 319.4 321.7 323.4 325.1 326.4 327.4 327.7 327.7 327.5 326.6 325.3 324.0 322.0 319.7 317.7 315.0 312.2 309.9 307.2 304.7 302.6 300.4 298.9 297.2 296.2 295.3 294.9 294.9 295.2 296.0 296.9 298.5 300.4 302.1 304.5 307.2 309.5 312.2 314.9 317.3 319.9 322.3 324.0 325.7 327.2 327.9 328.4 328.4 327.9 327.4 326.0 324.3 322.7 320.2 317.7 315.3 312.6 309.7 307.4 304.7 302.3 300.4 298.4 296.7 295.7 294.8 294.3 294.3 294.8 295.5 296.6 298.2 299.7 301.9 304.0 306.8 309.5 312.0 314.9 317.7 319.9 322.4 324.5 326.0 327.5 328.5 328.9 329.1 328.5 327.7 326.4 324.7 322.8 320.4 317.8 315.3 312.5 310.0 307.0 304.7 302.1 299.8 298.1 296.4 295.0 294.3 293.8 293.8 294.2 294.9 296.2 297.6 299.6 301.9 304.0 306.8 309.7 312.2 315.2 317.8 319.7 321.1 321.8 322.3 322.1 321.4 320.4 319.3 317.9 316.4 315.1 313.7 312.4 311.7 312.5 310.5 309.7 309.7 309.8 309.9 310.1 310.2 310.3 310.4 310.4 310.4 310.4 310.4 310.4", - "input_type": "phoneme", - "offset": 142.844 + "f0_timestep": "0.005" }, { + "offset": 180.123, "text": "AP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 SP 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 SP", "ph_seq": "AP p i ch en w o y van er b u t ing w en m u b u j 
ian SP w u w ai sh ir j ie h ua w ei y vn y En SP b ai b o l iu zh ao SP c ang sh an j i d ian SP y i y E y i r u x ve SP", - "note_seq": "rest A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 D#3 A#3 A#3 C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 G#3 G#3 rest F#3 F#3 G#3 G#3 F#3 F#3 G#3 G#3 A#3 A#3 C#4 C#4 G#3 G#3 A#3 A#3 rest A#3 A#3 G#3 G#3 F#3 F#3 D#3 D#3 rest A#3 A#3 G#3 G#3 F#3 F#3 F3 F3 rest A#3 A#3 G#3 G#3 A#3 A#3 B3 B3 A#3 A#3 rest", - "note_dur_seq": "0.6 0.181 0.181 0.18 0.18 0.1809999 0.1809999 0.181 0.181 0.3610001 0.181 0.181 0.3610001 0.3610001 0.543 0.543 0.1799998 0.1799998 0.3620002 0.3620002 0.3609998 0.3609998 0.181 0.3610003 0.3610003 0.362 0.362 0.1809998 0.1809998 0.3610001 0.3610001 0.5419998 0.5419998 0.1810002 0.1810002 0.3610001 0.3610001 0.362 0.362 0.1799998 0.362 0.362 0.1810002 0.1810002 0.3609996 0.3609996 0.3620005 0.3620005 0.1799994 0.3620005 0.3620005 0.1800003 0.1800003 0.3619995 0.3619995 0.3610001 0.3610001 0.1809998 0.3610001 0.3610001 0.5430002 0.5430002 0.5419998 0.5419998 0.3610001 0.3610001 0.3620005 0.3620005 0.4", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.473154 0.126846 0.110214 0.070786 0.121269 0.058731 0.105698 0.075302 0.137329 0.333885 0.070786 0.111725 0.069275 0.302269 0.058731 0.448121 0.094879 0.136329 0.043671 0.22194 0.14006 0.361 0.137329 0.043671 0.291725 0.069275 0.217423 0.144577 0.108078 0.072922 0.261604 0.099396 0.457664 0.084335 0.125275 0.055725 0.290215 0.070786 0.362 0.121269 0.058731 0.321335 0.040665 0.135819 0.045181 0.276664 0.084335 0.362 0.059516 0.120483 0.271638 0.090363 0.139336 0.040665 0.301758 0.060242 0.361 0.120758 0.060242 0.305275 0.055725 0.476731 0.066269 0.466698 0.075302 0.22094 0.14006 0.362 0.4", - "f0_timestep": "0.005", + "ph_dur": "0.4732 0.1268 0.1102 0.0708 0.1213 0.0587 0.1057 0.0753 0.1373 0.3339 0.0708 0.1117 0.0693 0.3023 0.0587 0.4481 0.0949 0.1363 0.0437 0.2219 0.1401 0.361 0.1373 0.0437 0.2917 0.0693 0.2174 0.1446 0.1081 0.0729 0.2616 0.0994 0.4577 0.0843 0.1253 0.0557 0.2902 0.0708 0.362 0.1213 0.0587 0.3213 0.0407 0.1358 0.0452 0.2767 0.0843 0.362 0.0595 0.1205 0.2716 0.0904 0.1393 0.0407 0.3018 0.0602 0.361 0.1208 0.0602 0.3053 0.0557 0.4767 0.0663 0.4667 0.0753 0.2209 0.1401 0.362 0.4", + "ph_num": "2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 1 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 1 1", + "note_seq": "rest A#3 G#3 G#3 F#3 D#3 A#3 C#4 A#3 G#3 F#3 G#3 rest F#3 G#3 F#3 G#3 A#3 C#4 G#3 A#3 rest A#3 G#3 F#3 D#3 rest A#3 G#3 F#3 F3 rest A#3 G#3 A#3 B3 A#3 rest", + "note_dur": "0.6 0.181 0.18 0.181 0.181 0.361 0.181 0.361 0.543 0.18 0.362 0.361 0.181 0.361 0.362 0.181 0.361 0.542 0.181 0.361 0.362 0.18 0.362 0.181 0.361 0.362 0.18 0.362 0.18 0.362 0.361 0.181 0.361 0.543 0.542 0.361 0.362 0.4", + "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.9 207.8 207.5 206.4 204.9 202.9 200.4 197.5 194.3 190.7 186.9 182.9 179.0 175.0 171.0 167.2 163.5 160.0 156.8 153.8 151.0 149.1 
147.2 145.5 144.2 143.2 142.7 142.7 142.9 143.6 144.7 146.1 147.9 150.1 152.6 155.6 158.9 162.5 166.3 170.4 174.8 179.4 184.1 188.9 193.8 198.7 203.5 208.2 212.6 216.8 220.6 224.1 227.1 229.4 231.3 232.5 233.2 233.4 233.4 233.4 233.2 233.1 233.0 232.9 232.8 232.5 232.4 232.3 232.2 232.1 232.0 232.0 232.0 206.2 205.5 204.7 204.0 204.2 205.6 208.4 209.9 210.6 210.5 209.9 209.2 208.6 208.2 207.9 207.5 207.2 207.0 206.8 206.7 206.8 206.9 207.0 207.1 207.1 207.2 207.4 207.5 208.1 208.8 209.6 209.9 209.9 209.6 209.5 209.3 209.2 209.0 208.7 207.8 207.5 207.2 206.7 206.3 206.0 206.1 206.3 206.7 207.0 207.4 207.7 208.0 208.0 207.8 207.6 207.3 207.0 206.7 206.2 205.5 204.5 202.6 199.6 194.8 190.8 187.4 184.5 182.0 180.3 180.5 181.1 182.0 183.7 186.3 189.6 189.3 188.8 188.2 187.3 186.6 186.0 185.2 184.9 184.9 185.3 185.7 186.3 186.5 186.5 186.3 186.4 186.5 186.7 186.7 186.5 186.1 185.1 184.2 183.4 181.6 179.5 176.9 175.2 173.4 171.3 168.1 165.1 162.3 157.8 153.7 150.3 147.7 145.9 145.0 144.6 144.5 144.9 145.6 147.0 149.5 150.2 151.5 153.6 154.0 154.6 155.5 156.0 156.2 156.0 155.8 155.5 155.2 154.9 154.7 154.6 154.2 153.8 153.3 152.9 152.6 152.6 152.7 152.9 153.0 153.2 153.4 153.5 154.1 154.7 154.9 155.2 155.4 155.7 155.7 155.7 155.7 155.7 155.8 156.0 156.3 156.6 156.9 157.2 157.6 158.5 159.9 161.4 162.5 162.8 162.7 161.8 160.3 158.8 157.6 155.5 153.8 156.3 161.1 168.2 177.7 185.9 193.0 200.2 208.3 216.4 221.3 224.5 226.1 225.6 226.9 229.0 230.0 230.5 230.6 230.9 232.0 233.3 233.9 234.6 235.4 233.1 226.6 217.7 207.0 204.5 207.3 217.8 225.3 230.7 232.0 233.6 235.2 236.9 239.5 242.7 246.8 250.8 254.8 260.6 267.2 274.5 277.4 277.6 275.9 276.0 276.1 276.1 276.9 277.3 277.2 276.7 276.2 275.9 276.3 276.5 276.5 276.1 275.7 275.3 275.4 275.5 275.6 275.6 275.6 275.6 275.6 275.5 275.3 275.3 275.3 275.4 274.9 274.6 274.9 275.5 276.0 276.3 276.5 276.6 276.8 277.0 277.2 277.3 277.9 278.2 278.0 277.8 277.5 276.4 275.5 273.5 270.4 266.4 261.6 256.3 250.6 244.4 238.1 231.7 225.5 219.6 214.1 209.0 204.5 200.7 197.4 194.8 193.0 191.9 191.5 192.8 195.8 199.5 203.2 206.0 206.7 206.7 206.7 206.7 206.8 206.9 206.9 206.9 207.0 207.1 207.2 207.2 207.3 207.3 207.3 207.5 208.0 208.8 210.0 211.4 213.1 215.1 217.3 219.5 221.7 224.0 226.0 227.9 229.5 230.8 231.9 232.5 237.6 238.6 239.0 238.8 238.4 238.2 238.0 237.7 236.8 235.7 234.3 233.1 232.1 231.1 230.2 229.3 229.2 229.3 229.6 230.1 230.5 230.8 231.1 231.3 231.6 231.9 232.0 232.1 232.7 233.2 233.6 234.0 234.5 235.0 235.2 235.4 235.4 235.5 235.6 235.7 235.7 235.8 235.9 236.0 236.0 235.9 236.2 236.4 236.6 236.5 236.2 235.7 235.3 234.8 234.2 233.1 231.8 230.3 227.0 224.1 222.5 220.1 217.6 215.2 211.6 208.5 206.8 204.4 202.3 201.1 200.0 199.2 199.2 199.1 199.8 203.2 204.2 204.7 205.7 206.3 206.7 206.3 206.3 206.6 207.4 208.2 209.0 209.7 210.0 210.1 209.6 209.0 208.4 207.6 204.6 200.8 195.5 187.9 180.0 176.4 173.4 171.0 169.3 167.5 165.7 164.5 163.8 163.8 165.2 166.7 168.6 171.7 173.6 174.9 177.0 177.8 177.7 178.0 178.6 179.4 180.4 181.1 181.7 181.8 182.1 182.5 183.2 183.7 184.0 184.0 184.1 184.1 184.2 184.2 184.1 184.2 184.3 184.4 184.5 184.6 184.6 184.7 184.8 184.8 184.9 185.1 185.3 185.5 185.8 186.3 186.2 186.1 185.9 185.8 185.7 185.6 185.4 185.1 184.8 182.7 179.8 175.6 172.3 166.8 158.4 154.9 153.3 154.3 158.0 162.0 166.4 167.9 170.0 173.0 177.0 180.7 183.6 185.7 188.2 191.7 194.7 198.6 204.1 208.4 211.3 212.5 213.3 214.0 214.5 214.9 214.5 213.2 211.6 210.5 209.9 207.9 205.5 202.9 201.7 200.7 199.1 198.4 198.2 198.7 199.4 200.1 201.2 203.1 205.5 208.6 210.1 211.2 212.7 213.2 
213.3 213.1 212.9 212.5 211.3 209.5 207.7 206.5 204.1 201.0 198.6 196.9 195.6 194.2 193.6 193.8 194.4 196.0 198.1 199.8 202.7 206.3 209.0 211.1 212.8 214.6 216.3 217.4 217.3 217.0 216.7 216.5 215.5 214.0 211.7 208.7 205.4 200.9 198.4 197.0 196.5 196.3 196.3 196.7 197.0 197.5 198.1 198.8 199.5 200.2 193.4 192.2 190.2 187.4 184.0 179.9 175.6 171.0 166.4 162.0 157.6 153.7 150.2 147.3 144.8 143.0 141.8 141.4 141.8 143.0 145.0 147.8 151.3 155.4 159.8 164.3 168.7 172.7 176.1 178.4 179.7 182.0 182.4 182.9 183.8 184.8 185.6 185.9 186.3 186.6 186.5 186.4 186.2 186.1 186.0 185.9 185.7 185.5 185.5 185.8 186.1 186.3 186.2 186.0 185.8 185.7 185.4 185.2 185.0 184.8 184.6 184.4 184.3 184.3 184.4 184.6 184.8 185.1 185.2 185.3 185.5 185.6 185.8 186.0 186.0 185.8 185.6 185.2 184.7 184.1 182.9 181.6 180.3 177.8 174.6 171.5 170.2 170.0 170.2 170.4 171.1 172.7 174.7 176.8 179.3 183.8 189.4 194.1 199.5 204.9 207.5 209.8 211.5 212.1 212.7 213.2 213.6 213.4 212.8 212.2 210.9 209.0 206.0 203.7 202.0 201.2 200.3 199.2 199.1 199.4 200.0 200.9 202.0 203.1 204.3 205.2 206.0 207.6 208.6 209.2 210.3 211.0 211.4 211.2 210.9 210.7 210.5 210.1 209.3 208.7 207.8 206.2 204.9 202.8 199.5 194.6 189.5 184.1 182.2 181.2 181.5 182.2 182.9 183.7 183.9 184.1 184.1 184.7 185.3 186.0 186.8 187.5 188.1 188.4 188.5 188.3 188.2 188.5 189.6 189.9 190.3 191.2 192.2 192.5 191.8 189.9 188.2 187.1 186.5 186.1 186.0 185.7 185.3 184.4 183.6 182.5 180.4 176.2 171.7 167.6 167.7 169.7 174.9 177.9 180.4 182.7 184.4 185.9 186.9 187.9 189.0 190.5 191.9 193.3 194.7 195.9 197.2 198.8 200.7 202.7 204.4 205.7 206.8 207.3 207.6 207.8 207.9 207.4 206.7 207.0 207.0 206.8 206.6 206.6 206.7 206.4 206.3 206.2 206.4 206.6 206.7 207.0 207.1 207.3 207.5 207.7 207.7 208.0 208.3 208.5 208.5 208.4 208.3 208.1 207.9 207.8 207.5 207.4 207.3 206.6 205.7 204.6 200.9 197.2 193.5 191.8 191.7 192.9 194.7 196.2 197.1 198.0 198.9 199.8 201.7 203.3 204.3 205.4 206.3 206.9 207.6 208.5 209.7 210.4 211.1 211.9 212.7 213.6 214.5 215.7 219.2 226.1 232.4 237.1 238.9 239.8 240.2 240.1 239.5 239.0 239.0 237.8 236.7 236.3 234.3 232.3 230.6 229.7 229.1 228.4 227.2 226.4 226.7 227.1 227.5 227.9 229.1 230.6 232.0 232.9 233.9 235.7 236.8 237.9 239.6 239.8 239.7 239.3 238.9 238.6 238.2 236.7 234.9 233.2 231.7 230.5 229.4 228.5 227.7 227.3 227.5 227.8 228.3 228.6 229.1 230.3 231.3 232.2 233.1 233.9 234.6 235.1 235.8 236.5 236.5 236.3 236.0 235.8 235.6 235.5 235.3 235.1 235.0 234.9 234.6 234.2 233.9 233.7 233.6 233.6 233.5 233.4 233.3 233.2 233.2 232.4 231.7 230.9 230.4 229.2 227.6 226.2 223.5 220.0 218.6 217.3 216.1 216.6 218.0 219.9 224.1 230.2 237.7 243.8 250.7 258.8 268.5 276.7 282.5 284.4 285.9 287.0 286.6 285.6 283.5 281.4 279.7 278.6 279.0 278.9 278.0 277.5 277.4 277.8 277.8 277.7 277.5 277.1 276.5 275.6 274.0 272.3 270.7 268.6 265.9 262.5 259.1 255.4 251.0 244.1 237.5 232.2 227.8 223.1 217.9 212.3 207.6 204.2 201.6 199.5 197.9 197.6 197.6 197.7 197.4 197.4 198.9 200.2 201.3 201.3 201.9 202.8 204.2 204.6 204.8 205.0 205.3 205.7 205.9 206.0 206.1 206.4 206.6 206.9 207.2 207.6 208.0 208.4 208.7 208.9 209.1 209.5 209.9 209.9 209.7 209.6 209.5 209.5 209.4 209.3 209.2 209.1 208.8 208.7 208.6 208.5 208.2 207.8 207.5 206.8 206.0 205.2 203.5 201.2 199.2 198.1 197.6 198.5 199.4 200.3 203.6 206.2 208.5 213.7 217.9 221.3 226.2 230.1 233.4 237.5 239.7 240.6 241.5 242.2 242.7 243.4 242.8 241.3 239.8 238.3 236.7 233.4 229.8 226.1 224.6 222.9 220.9 221.0 221.3 221.9 223.8 225.4 226.6 229.8 232.4 233.9 236.5 238.5 239.6 241.1 242.1 242.4 241.8 240.7 239.0 236.2 233.9 232.8 229.3 225.8 222.9 220.5 
218.8 217.7 217.9 218.4 218.8 220.3 222.5 225.2 227.3 229.9 233.8 236.7 239.2 241.4 242.5 243.0 242.6 242.0 241.3 240.7 239.1 236.5 231.4 228.6 226.2 222.8 221.3 220.3 219.7 219.9 220.3 220.9 221.6 222.4 223.4 224.4 225.5 226.7 227.8 228.7 229.7 230.7 231.6 232.2 232.6 233.0 233.0 232.9 232.6 232.1 231.4 230.6 229.8 228.9 228.0 227.1 226.1 225.1 224.3 223.5 222.8 222.3 221.9 221.6 221.3 220.6 219.7 218.3 217.4 217.0 216.3 215.6 215.1 213.3 212.4 212.0 211.4 211.0 210.7 211.1 211.6 212.3 212.8 214.1 216.0 217.1 219.3 222.2 225.1 227.2 228.5 229.6 230.7 231.7 231.5 231.1 230.5 230.2 230.1 230.4 231.1 231.6 231.9 232.5 233.4 234.6 235.4 236.1 236.5 236.9 236.9 236.3 235.3 234.1 232.5 230.3 226.9 221.9 211.3 201.8 195.6 195.8 197.4 199.1 202.0 204.1 204.3 204.9 205.4 205.3 205.9 207.1 208.8 209.6 209.5 208.1 206.1 205.0 205.5 206.0 206.8 209.2 210.0 210.4 210.7 210.7 210.6 210.8 210.9 211.0 210.8 210.1 209.3 208.3 207.8 207.4 206.9 206.5 206.0 205.4 204.9 204.3 203.4 202.4 201.4 200.5 199.3 197.6 195.1 192.1 188.6 188.4 188.2 187.8 187.4 186.9 186.4 185.9 185.5 185.0 184.6 184.3 184.2 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.1 184.0 183.4 181.7 179.4 176.4 173.1 169.5 165.9 162.5 159.7 157.3 155.6 154.6 154.3 154.3 154.4 154.4 154.4 154.5 154.6 154.7 154.7 154.7 154.8 154.8 154.8 154.8 154.8 154.8 154.8 154.9 154.9 155.0 155.0 155.0 155.1 155.1 155.2 155.2 155.2 155.3 155.3 155.3 167.7 164.7 161.0 156.2 152.5 149.7 148.2 148.0 148.6 149.7 151.0 151.9 152.4 152.9 153.5 154.2 154.3 154.4 154.8 154.6 154.4 154.0 153.7 153.3 152.4 151.6 150.7 150.0 149.7 149.5 148.9 148.4 147.9 147.7 147.9 148.3 148.6 149.7 150.9 151.7 152.7 153.8 155.7 157.3 158.7 159.3 160.2 160.9 160.6 160.3 159.6 158.1 156.6 154.9 151.8 149.3 147.3 145.5 144.0 143.0 143.8 144.0 144.8 145.8 146.9 148.7 150.6 152.5 155.2 157.8 160.4 163.7 166.8 169.7 172.7 175.8 179.2 182.0 184.8 187.7 189.8 191.8 193.6 194.7 195.5 196.0 195.7 194.7 193.0 190.4 187.3 184.0 180.1 176.1 172.5 168.8 165.3 162.6 160.3 158.4 157.2 156.6 156.7 157.1 157.9 159.2 160.9 162.8 165.2 167.9 170.7 173.9 177.4 180.7 184.3 188.1 191.4 194.9 198.4 201.1 203.8 206.3 208.0 209.2 210.2 210.6 210.8 210.7 209.9 208.8 208.0 208.2 209.2 210.7 212.8 215.2 218.1 221.1 224.0 226.6 228.9 230.7 231.7 234.9 235.2 235.3 235.0 234.6 234.3 234.2 234.0 233.8 233.5 233.3 233.0 232.7 230.7 227.9 224.0 217.1 210.4 206.6 206.6 207.7 209.4 210.4 211.1 211.1 211.1 211.2 211.3 211.8 212.5 213.0 213.0 213.0 214.0 215.1 216.2 216.8 216.4 215.6 215.1 214.5 213.8 213.1 212.8 212.7 212.2 211.6 211.1 210.5 210.0 209.6 209.3 209.1 208.8 208.5 208.6 209.1 209.2 208.8 208.0 207.2 205.2 202.6 197.9 191.0 183.0 174.1 168.8 165.6 166.8 170.1 174.9 181.5 185.5 187.6 190.7 193.7 196.6 194.3 192.1 190.0 188.1 186.5 185.3 184.4 183.6 183.2 183.1 182.9 182.8 182.8 183.0 183.4 183.9 184.4 184.9 184.9 185.0 185.3 185.6 185.8 186.1 185.6 185.3 185.2 185.1 185.0 184.9 184.8 184.7 184.6 184.5 184.5 184.5 184.5 184.5 184.7 184.7 184.8 184.9 184.9 184.9 185.0 185.2 185.4 185.5 185.7 185.7 185.5 184.9 183.9 181.6 177.9 173.4 167.3 162.9 159.4 158.2 158.5 159.2 160.1 161.5 162.8 163.0 163.8 164.9 166.2 166.8 167.3 167.8 169.3 171.2 173.1 173.8 174.2 175.4 176.5 177.5 177.8 177.6 177.2 176.9 176.6 176.3 175.9 174.6 172.9 171.1 170.1 169.4 168.5 167.6 166.9 167.1 167.4 167.8 167.9 168.9 170.3 171.8 172.9 173.7 175.8 177.3 178.3 178.1 178.0 177.8 177.5 177.2 176.6 174.9 173.6 172.7 170.4 169.0 168.4 167.8 167.8 168.2 168.6 169.5 170.8 172.5 174.5 176.9 178.0 
179.0 180.0 180.0 179.8 179.6 179.4 178.9 177.9 176.8 175.1 172.5 170.3 168.9 168.9 169.1 169.3 169.4 169.8 170.2 170.7 171.3 172.0 172.9 174.0 175.3 176.8 179.2 182.1 185.4 190.1 195.3 200.7 206.6 212.0 216.2 219.7 222.1 223.3 223.7 223.6 223.1 222.1 221.0 219.8 218.5 217.3 216.4 215.5 214.7 214.2 214.8 215.9 217.9 217.7 216.8 215.8 215.2 214.7 213.8 213.2 212.7 211.7 211.0 210.4 209.6 209.2 208.9 209.3 209.8 210.5 211.7 213.4 215.1 216.3 219.0 222.3 224.0 226.1 228.0 228.8 230.2 231.8 232.7 233.5 234.1 234.5 234.8 234.8 234.4 234.0 233.6 233.4 233.4 233.4 233.5 233.5 233.5 233.7 233.8 233.8 233.8 233.7 233.6 233.8 233.9 234.0 233.5 232.9 232.3 232.6 232.9 233.4 235.0 235.8 236.1 236.9 237.4 237.7 238.3 238.2 237.6 236.5 235.7 235.0 233.5 231.7 229.6 228.1 226.2 223.6 220.5 217.7 215.5 214.5 213.2 211.4 209.8 208.5 207.5 206.6 205.9 205.5 205.2 205.0 205.0 205.1 205.2 205.4 205.4 205.6 206.1 206.2 206.3 206.4 206.7 206.9 207.1 207.4 207.6 207.6 207.7 207.7 207.7 207.6 207.5 207.3 207.2 207.0 206.8 206.7 206.7 206.7 206.7 206.7 206.8 206.8 206.8 206.9 207.0 207.1 207.2 207.2 207.3 207.3 207.4 207.4 207.4 207.5 207.6 208.1 208.6 209.0 209.5 209.6 209.6 209.5 209.3 209.2 209.1 209.0 209.0 209.0 209.0 209.0 208.9 208.7 208.5 208.5 208.3 208.0 207.6 207.2 206.8 206.3 205.8 205.3 204.1 203.1 202.3 202.1 202.0 202.0 202.9 204.0 205.3 206.9 208.8 210.9 215.8 220.5 224.9 228.6 232.0 235.2 236.6 237.6 238.4 239.0 239.6 240.2 239.9 238.8 237.2 235.5 234.3 233.5 232.1 231.2 230.4 230.1 229.8 229.7 230.1 230.4 230.8 231.0 231.2 231.6 232.5 233.5 234.4 234.4 234.4 234.3 234.2 234.1 234.0 233.9 233.9 233.8 233.7 233.4 232.7 232.3 232.1 232.0 232.1 232.2 232.3 232.4 232.6 232.7 232.7 232.8 233.1 233.4 233.5 233.4 233.3 233.2 233.1 232.9 232.8 232.7 232.8 232.9 233.3 233.4 233.5 233.6 233.7 233.8 233.9 234.0 234.2 234.2 234.2 234.1 234.0 233.9 233.8 233.8 233.7 233.6 233.4 233.3 233.2 233.3 233.2 233.0 232.9 232.7 232.5 232.4 231.8 230.9 230.2 229.1 227.8 226.6 225.6 224.9 225.7 227.0 228.6 231.0 235.7 241.5 245.2 248.0 249.9 250.4 250.8 251.2 251.5 251.9 252.3 252.4 252.4 252.4 251.9 251.0 250.0 248.6 247.5 246.5 246.2 245.7 245.1 245.1 245.3 245.5 245.7 245.9 246.1 246.2 246.6 247.2 247.9 248.2 248.2 248.0 247.8 247.5 247.2 246.6 245.4 242.9 238.5 231.2 224.1 218.8 215.8 216.5 217.9 220.3 221.1 222.0 222.7 222.9 223.1 223.3 223.6 223.9 224.4 224.8 225.2 225.5 225.9 226.1 226.1 225.9 225.9 226.1 226.6 227.1 227.5 227.5 227.6 228.2 229.0 230.1 231.4 233.4 235.8 238.4 239.9 241.1 242.2 242.6 242.3 241.5 240.5 239.6 238.9 237.3 235.2 232.5 230.3 228.5 227.4 225.9 224.2 222.8 222.7 222.9 223.0 223.3 223.8 225.2 227.2 229.4 231.9 233.4 234.4 235.1 236.1 237.1 237.7 238.2 238.3 238.0 237.5 236.9 235.7 233.0 229.9 228.5 225.9 223.0 221.4 220.2 219.6 219.9 220.7 222.1 224.5 226.8 229.1 231.1 233.5 236.0 237.3 238.3 238.9 238.5 238.2 237.8 237.5 236.3 234.3 231.1 226.3 220.5 219.3 219.1 219.6 220.3 221.2 222.0 223.2 224.3 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4 225.4", - "input_type": "phoneme", - "offset": 180.123 + "f0_timestep": "0.005" }, { + "offset": 192.001, "text": "SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 
啊 啊 啊 啊 啊 啊 啊 啊 啊 啊 SP", "ph_seq": "SP t ai sh ang w ang q ing h e ch ou b ai f a zh ang s an q ian SP m ing g e h e x v sh ui j ie zh ir z ai w u x ing zh ir w ai t ian x in w o x in zh ir j ian b u c i0 j i sh eng j i sh ir z uo y i j ie x iao y ao x ian SP", - "note_seq": "rest A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 D#3 D#3 A#3 A#3 C#4 C#4 A#3 A#3 G#3 G#3 F#3 F#3 G#3 G#3 rest F#3 F#3 F#4 F#4 F4 F4 C#4 C#4 C#4 C#4 D#4 D#4 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 D#3 D#3 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 C#3 C#3 A#3 A#3 G#3 G#3 G#3 G#3 F#3 F#3 F#3 F#3 G#3 G#3 A#3 A#3 C#4 C#4 C#4 C#4 C#4 C#4 D#4 D#4 D#4 D#4 rest", - "note_dur_seq": "0.288 0.181 0.181 0.181 0.181 0.18 0.18 0.1810001 0.1810001 0.3609999 0.3609999 0.181 0.181 0.362 0.362 0.5420001 0.5420001 0.181 0.181 0.3610001 0.3610001 0.3609998 0.3609998 0.181 0.3620002 0.3620002 0.5420001 0.5420001 0.3610001 0.3610001 0.362 0.362 0.1799998 0.1799998 0.723 0.723 0.1809998 0.1809998 0.1810002 0.1810002 0.1799998 0.1799998 0.1810002 0.1810002 0.362 0.362 0.3610001 0.3610001 0.1809998 0.1809998 0.1810002 0.1810002 0.1799998 0.1799998 0.1809998 0.1809998 0.3610001 0.3610001 0.3620005 0.3620005 0.1809998 0.1809998 0.1800003 0.1800003 0.1809998 0.1809998 0.1809998 0.1809998 0.3610001 0.3610001 0.1809998 0.1809998 0.3610001 0.3610001 0.5430002 0.5430002 0.5419998 0.5419998 0.5419998 0.5419998 0.3610001 0.3610001 2.892 2.892 0.5", - "is_slur_seq": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", - "ph_dur": "0.227758 0.060242 0.111725 0.069275 0.135819 0.045181 0.119758 0.060242 0.125275 0.055725 0.240517 0.120483 0.135819 0.045181 0.271637 0.090363 0.42759 0.11441 0.094254 0.086746 0.246544 0.114456 0.361 0.105698 0.075302 0.301759 0.060242 0.423027 0.118973 0.22094 0.14006 0.217423 0.144577 0.110725 0.069275 0.623604 0.099396 0.135818 0.045181 0.125275 0.055725 0.110725 0.069275 0.135819 0.045181 0.276154 0.085846 0.276436 0.084564 0.125275 0.055725 0.135819 0.045181 0.109214 0.070786 0.150879 0.030121 0.276665 0.084335 0.312303 0.049698 0.111725 0.069275 0.109215 0.070786 0.111725 0.069275 0.125275 0.055725 0.226967 0.134033 0.140335 0.040665 0.275154 0.085846 0.442094 0.100906 0.382362 0.159637 0.481758 0.060242 0.216423 0.144577 2.892 0.5", - "f0_timestep": "0.005", + "ph_dur": "0.2278 0.0602 0.1117 0.0693 0.1358 0.0452 0.1198 0.0602 0.1253 0.0557 0.2405 0.1205 0.1358 0.0452 0.2716 0.0904 0.4276 0.1144 0.0943 0.0867 0.2465 0.1145 0.361 0.1057 0.0753 0.3018 0.0602 0.423 0.119 0.2209 0.1401 0.2174 0.1446 0.1107 0.0693 0.6236 0.0994 0.1358 0.0452 0.1253 0.0557 0.1107 0.0693 0.1358 0.0452 0.2762 0.0858 0.2764 0.0846 0.1253 0.0557 0.1358 0.0452 0.1092 0.0708 0.1509 0.0301 0.2767 0.0843 0.3123 0.0497 0.1117 0.0693 0.1092 0.0708 0.1117 0.0693 0.1253 0.0557 0.227 0.134 0.1403 0.0407 0.2752 0.0858 0.4421 0.1009 0.3824 0.1596 0.4818 0.0602 0.2164 0.1446 2.892 0.5", + "ph_num": "2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 1", + "note_seq": "rest A#3 G#3 G#3 F#3 D#3 A#3 C#4 A#3 G#3 F#3 G#3 rest F#3 F#4 F4 C#4 C#4 D#4 A#3 G#3 G#3 F#3 F#3 D#3 A#3 G#3 G#3 F#3 F#3 C#3 A#3 G#3 G#3 F#3 F#3 G#3 A#3 C#4 C#4 C#4 D#4 D#4 rest", + "note_dur": "0.288 0.181 0.181 0.18 0.181 0.361 0.181 0.362 0.542 0.181 0.361 0.361 0.181 0.362 0.542 0.361 0.362 0.18 0.723 0.181 0.181 0.18 0.181 0.362 0.361 0.181 0.181 0.18 0.181 0.361 0.362 0.181 0.18 0.181 0.181 0.361 0.181 0.361 0.543 0.542 0.542 0.361 2.892 0.5", + 
"note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "232.7 232.7 232.7 233.0 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 233.1 232.9 232.6 232.1 231.7 231.2 230.5 229.9 229.2 228.5 227.9 227.3 226.7 226.1 225.6 225.2 224.9 225.2 226.3 230.5 234.3 236.2 236.8 236.6 236.0 235.5 235.1 234.7 233.0 231.2 229.7 228.1 225.5 221.3 215.7 209.2 200.8 197.1 195.0 194.5 194.6 195.3 197.2 199.0 200.5 201.4 202.2 203.1 204.6 207.2 209.6 210.4 212.6 214.6 214.8 214.2 213.3 212.3 211.0 209.8 208.9 208.4 208.4 208.6 208.4 208.0 207.9 207.9 208.0 208.1 208.1 208.1 208.0 207.6 207.2 207.2 207.2 207.2 207.4 207.5 207.5 207.6 207.7 207.7 208.0 208.1 208.1 208.0 207.7 207.4 207.2 207.1 207.1 207.2 207.4 207.5 207.6 207.7 207.7 207.7 207.8 207.9 208.0 208.0 207.9 207.4 206.9 206.5 205.4 203.7 201.6 195.6 190.2 185.6 183.1 181.9 182.8 184.2 185.2 185.5 187.3 189.3 191.6 191.7 191.7 191.7 189.7 187.4 184.8 183.9 183.4 183.6 184.0 184.1 183.9 184.3 184.9 185.7 186.1 186.4 186.8 187.1 187.1 186.3 184.8 182.8 180.6 176.9 172.3 166.8 162.2 159.1 158.0 157.3 156.8 156.3 155.7 155.5 155.6 155.2 154.7 154.1 153.5 153.0 152.7 153.2 153.7 154.0 153.6 153.1 152.9 152.7 152.5 152.2 152.0 152.0 152.1 152.4 152.6 152.7 152.7 152.7 152.8 153.0 153.3 153.4 153.5 153.5 153.4 153.3 153.1 153.0 152.9 152.8 153.1 153.5 154.0 154.5 154.9 155.2 155.3 155.5 155.7 155.8 156.2 156.7 157.3 157.7 158.1 159.0 160.1 161.2 161.1 160.5 159.6 155.5 151.7 148.3 147.9 149.0 151.3 153.6 157.0 161.2 167.9 174.3 180.7 186.1 191.3 196.5 203.9 210.0 215.1 222.0 226.3 228.9 231.0 232.7 233.8 232.3 231.5 231.5 230.8 230.5 230.8 231.2 231.3 230.8 231.1 231.6 232.4 232.7 233.0 233.5 232.9 229.6 222.6 219.3 217.9 219.3 221.0 221.9 221.4 220.3 219.4 218.7 221.7 228.5 240.1 251.3 259.5 262.9 263.3 262.6 261.4 261.3 261.4 261.2 260.0 259.0 258.8 259.2 259.6 259.9 260.1 260.3 260.5 260.5 260.6 261.6 262.5 263.5 264.7 266.2 267.6 268.4 269.6 271.1 273.3 274.5 275.5 276.6 278.2 279.7 280.3 280.7 281.2 281.7 281.9 282.0 281.7 281.3 280.9 280.0 277.7 274.1 268.2 260.2 252.2 249.7 252.1 257.7 262.2 268.2 274.4 276.9 277.1 275.5 273.4 268.6 262.3 257.2 253.8 251.1 247.4 243.9 240.5 237.9 234.5 230.5 225.7 222.2 219.5 217.2 215.8 215.1 214.9 214.6 214.1 213.1 212.4 211.9 212.0 212.2 212.6 213.2 213.7 214.2 214.8 215.1 215.2 215.3 215.6 216.2 216.9 217.5 217.7 217.6 217.4 217.3 217.2 217.0 216.8 216.8 216.7 216.7 216.9 217.1 217.3 217.5 218.0 219.1 221.0 222.8 224.3 225.6 227.5 230.4 231.7 233.0 234.9 236.3 237.3 237.9 237.7 237.4 237.0 236.6 236.4 236.1 234.9 233.5 232.3 231.5 230.6 229.3 228.9 228.7 228.5 228.3 228.2 228.6 229.1 229.7 229.9 230.7 231.6 233.2 234.5 235.7 236.6 236.9 237.1 237.7 238.0 238.2 237.7 237.2 235.8 231.4 224.8 218.6 215.4 217.2 220.4 223.5 225.0 225.4 225.1 224.3 222.8 220.4 218.2 216.2 214.8 213.3 211.8 210.5 208.8 207.3 207.2 209.0 211.5 212.0 212.4 212.5 212.1 211.5 210.9 210.1 209.5 208.9 208.2 207.6 207.2 206.2 204.3 201.7 194.5 187.7 181.1 179.4 182.0 187.5 200.6 210.3 217.5 218.2 217.9 217.0 215.7 214.5 213.4 211.4 208.8 205.9 201.4 195.6 188.0 184.3 181.4 179.6 179.2 178.9 178.5 178.7 179.2 179.9 179.9 180.4 181.4 181.9 182.3 182.6 183.2 183.9 184.6 184.6 184.8 185.0 185.0 184.9 184.8 184.5 184.3 184.2 184.0 184.0 184.2 184.6 185.1 185.6 186.1 186.4 186.7 186.8 186.8 186.5 186.0 185.7 185.5 183.2 
180.6 178.1 177.6 177.9 179.2 181.7 184.3 185.8 187.1 188.6 190.7 192.1 193.2 194.4 195.9 197.3 198.2 199.2 200.2 201.2 202.2 203.4 205.1 206.6 208.1 209.5 210.9 212.1 212.5 212.5 212.4 212.5 212.7 212.9 212.8 213.0 213.0 212.0 211.1 209.8 207.1 205.4 204.1 202.5 200.3 198.1 197.0 196.6 196.7 197.2 197.7 198.4 200.1 201.7 203.1 206.7 209.0 210.4 212.4 213.8 214.6 214.9 214.8 214.5 213.5 211.6 209.1 207.8 205.4 202.2 198.3 195.9 194.5 193.0 192.7 193.1 193.5 194.9 197.1 199.3 201.5 203.7 207.3 210.2 211.9 213.9 215.4 216.2 215.9 215.5 215.1 214.9 213.8 211.5 208.0 205.1 203.0 200.6 199.0 198.5 198.4 198.5 198.7 199.1 197.8 197.1 195.5 193.0 189.9 186.2 182.2 178.0 173.5 169.1 164.9 161.0 157.4 154.3 151.7 149.7 148.4 147.7 147.6 148.1 149.1 150.7 152.8 155.1 157.5 160.6 163.9 167.1 170.4 173.5 176.3 178.8 180.5 181.7 182.4 183.6 183.4 183.2 183.3 183.4 183.7 183.8 184.0 184.4 184.6 184.8 185.1 185.2 185.2 185.3 185.4 185.5 185.6 185.7 185.9 185.9 186.0 186.1 186.0 185.8 185.5 185.3 185.1 184.8 184.3 184.0 183.8 183.4 183.2 183.2 184.2 184.6 185.7 187.4 189.6 192.4 195.7 199.5 203.7 208.2 212.9 217.8 222.6 227.5 231.9 236.1 239.8 242.9 245.3 247.1 247.8 247.5 246.5 244.4 241.8 238.5 235.8 233.4 231.6 230.8 231.6 233.9 238.5 245.4 254.1 263.6 274.7 286.9 297.7 308.4 318.2 324.4 328.5 329.2 333.1 332.8 332.0 331.0 329.6 328.3 327.2 326.4 326.1 326.3 326.8 327.6 328.8 330.4 332.3 334.5 336.9 339.6 342.4 345.2 348.2 351.3 354.2 357.0 359.8 362.4 364.7 366.5 368.1 369.3 370.1 370.4 376.8 377.6 377.2 376.5 375.4 374.6 373.7 371.9 370.5 369.3 367.9 367.3 366.9 366.3 366.1 366.0 366.3 366.8 367.2 367.2 368.0 368.8 369.3 370.0 370.8 371.4 371.6 371.9 372.8 372.9 372.5 372.2 371.1 369.0 365.0 359.4 353.8 352.6 352.9 354.4 356.1 357.2 358.0 359.2 359.6 359.3 358.2 357.8 357.8 357.8 357.9 358.0 357.9 357.7 357.6 357.9 358.1 358.2 358.8 359.2 359.5 359.2 359.5 360.3 361.2 361.6 361.8 362.4 363.1 363.8 365.5 366.2 366.2 364.2 361.4 358.0 356.2 354.6 353.1 351.4 349.9 348.6 347.4 346.3 345.4 344.8 344.8 345.6 346.1 346.8 347.8 348.4 349.3 350.4 351.6 352.8 354.3 355.1 355.8 356.4 356.4 355.3 352.6 348.2 342.2 334.4 325.5 320.0 319.9 325.5 331.5 335.9 337.9 339.6 342.0 343.3 344.2 344.7 344.2 343.4 342.4 341.4 339.6 336.6 332.4 328.7 326.0 324.3 322.7 320.2 317.7 315.0 311.8 308.7 305.6 302.2 298.5 294.7 291.4 288.8 286.3 283.8 283.3 283.2 282.4 281.2 280.0 279.0 277.5 276.0 274.6 273.4 272.4 271.3 270.5 270.1 270.0 269.8 269.7 269.6 269.9 270.5 271.6 272.6 273.4 273.5 274.1 275.0 275.9 276.8 277.6 278.0 278.6 279.1 279.2 279.2 279.1 278.9 278.7 278.5 278.2 278.1 278.0 277.9 276.9 275.3 272.0 267.4 261.9 253.3 248.0 244.8 245.2 247.6 251.3 254.1 257.8 262.1 265.0 266.9 268.1 269.2 270.0 270.7 272.8 275.1 277.8 278.7 280.3 282.8 284.1 284.4 283.5 283.3 283.0 282.7 282.5 282.1 281.5 280.6 279.8 278.9 278.6 278.0 276.9 275.9 274.5 272.6 268.6 262.1 252.9 242.9 234.9 230.2 230.5 229.8 227.8 224.9 221.4 217.8 214.4 211.7 209.9 209.2 210.8 215.0 221.5 229.8 239.5 250.0 259.8 267.9 273.2 276.3 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.7 276.6 276.1 275.6 274.9 274.0 273.1 272.2 271.4 270.8 270.3 270.1 270.3 271.0 272.3 274.0 276.2 278.8 281.6 284.7 287.8 290.9 293.8 296.5 298.9 300.8 302.1 302.9 305.8 308.3 311.7 314.1 317.3 321.0 322.8 324.2 325.1 325.8 325.6 324.9 324.4 323.2 321.7 320.7 318.4 315.3 313.5 310.9 307.7 306.0 304.9 304.2 304.4 304.7 305.1 305.3 305.6 306.1 308.4 310.8 313.5 314.4 316.0 318.8 321.7 323.8 324.9 324.8 
324.3 323.4 323.1 321.5 318.2 314.9 312.3 310.8 307.9 305.6 304.4 304.7 305.3 305.8 306.9 308.4 310.5 312.5 314.0 314.8 316.2 317.3 317.8 318.2 318.2 317.8 316.8 315.9 315.2 314.2 311.2 305.5 298.6 292.3 287.3 289.0 292.0 293.7 294.4 294.4 292.4 289.5 285.1 276.2 270.4 264.9 258.0 252.8 247.8 242.1 240.0 239.1 239.9 239.7 239.0 237.2 236.1 235.3 235.0 234.4 233.8 233.4 233.8 234.6 234.9 234.7 234.0 232.3 226.9 218.6 208.1 199.9 194.7 196.0 200.2 205.7 208.0 209.6 210.9 213.3 215.0 216.1 216.3 216.9 217.5 217.1 217.2 217.6 217.0 215.6 213.7 211.0 208.7 206.6 205.9 205.8 206.0 205.8 206.1 206.6 206.7 206.7 206.7 207.5 208.3 209.0 209.2 209.3 209.3 209.2 209.0 208.9 208.7 208.6 208.5 208.0 207.1 205.7 205.3 205.2 205.7 206.0 206.9 208.6 208.4 208.2 208.1 208.3 208.5 208.7 209.0 209.2 209.2 208.8 208.4 208.0 206.5 203.7 199.2 194.1 189.6 186.4 186.2 186.8 187.5 189.3 190.9 191.6 191.4 191.0 190.6 190.2 189.6 189.1 189.3 189.5 189.3 189.3 189.5 190.1 189.8 189.0 187.3 186.2 185.5 185.3 185.1 185.0 185.3 185.3 185.2 185.4 185.5 185.5 185.3 185.1 184.7 184.3 183.8 182.9 180.8 177.2 173.4 169.8 166.7 164.6 165.1 166.5 168.7 172.5 177.8 183.7 188.0 192.2 195.5 195.2 193.4 190.8 189.7 188.7 188.1 187.7 186.9 185.9 185.0 184.4 184.1 183.7 183.3 182.8 182.6 182.4 182.1 182.1 182.2 182.3 182.5 182.6 182.8 182.9 183.0 183.0 183.2 183.5 183.8 184.1 183.0 182.6 181.6 180.1 178.2 175.9 173.3 170.5 167.7 165.3 163.0 160.8 159.0 157.6 156.7 156.2 156.2 156.2 156.2 156.2 156.2 156.2 156.2 156.2 156.2 156.1 156.1 156.1 156.1 156.1 156.0 156.0 156.0 155.9 155.9 155.9 155.8 155.8 155.7 155.7 155.7 155.7 155.7 155.6 155.6 155.6 155.5 155.5 155.4 155.4 155.4 155.3 155.3 155.2 155.2 155.2 155.1 155.1 155.1 155.1 155.0 155.0 155.0 155.0 154.9 154.9 154.9 154.8 154.8 154.8 154.8 154.8 154.8 154.8 154.8 154.8 155.1 155.4 155.6 155.7 155.8 156.0 156.2 156.6 156.9 157.4 158.1 158.7 159.1 159.3 159.2 158.9 158.8 158.6 157.3 154.1 150.3 148.5 147.1 146.6 148.0 152.0 157.6 162.0 166.7 171.6 176.5 182.6 189.0 193.5 198.9 205.0 211.2 215.5 218.4 218.7 219.5 220.5 220.0 219.5 219.1 220.4 221.8 223.3 226.1 229.2 232.5 234.3 235.9 237.4 237.3 236.2 234.3 228.2 219.2 208.6 205.1 203.6 205.0 207.1 211.1 218.1 220.6 223.2 225.7 226.8 228.8 232.0 232.1 230.7 227.0 221.9 217.4 213.5 211.4 209.9 209.2 208.6 208.1 207.7 207.4 207.1 207.0 206.9 206.8 206.7 206.6 206.5 206.5 206.5 206.5 206.7 206.9 207.1 207.3 207.4 207.6 208.1 208.2 208.2 208.2 208.3 208.4 208.6 208.7 208.7 208.7 208.7 208.6 208.3 208.1 207.9 207.8 207.7 207.5 207.2 207.1 207.0 206.7 205.9 204.1 199.5 195.9 193.2 193.5 195.3 197.2 197.6 197.9 198.3 198.8 199.1 199.2 198.9 198.6 198.3 197.8 197.2 197.0 198.1 198.6 198.4 196.8 193.9 190.4 188.9 187.8 187.1 186.7 186.3 185.9 185.6 185.3 185.0 185.0 185.1 185.3 185.6 185.9 186.1 186.0 185.5 184.7 183.2 181.1 178.8 174.1 168.6 162.5 158.9 156.4 154.8 155.8 157.7 160.3 167.7 172.4 175.2 181.3 185.0 187.0 187.7 188.2 188.2 188.4 188.6 188.7 188.5 188.2 187.7 187.2 186.8 186.5 186.4 186.1 185.7 185.5 185.3 185.2 185.1 184.9 184.8 184.7 184.5 184.3 184.2 184.1 184.1 184.0 184.0 182.7 181.9 180.5 178.6 176.3 173.5 170.4 167.1 163.7 160.2 156.8 153.5 150.4 147.6 145.2 143.0 141.3 140.0 139.2 138.8 138.8 138.8 138.7 138.7 138.7 138.6 138.6 138.6 138.5 138.4 138.4 138.4 138.4 138.4 138.4 138.4 138.4 138.4 138.4 138.4 138.4 138.4 136.2 134.7 133.6 132.6 131.8 131.2 130.8 130.2 129.9 130.4 130.9 131.3 131.2 131.4 131.8 132.4 132.7 133.0 133.4 134.1 134.9 135.2 136.0 136.8 137.1 137.3 137.3 137.4 137.6 137.8 138.0 138.2 138.3 138.6 
139.1 139.7 140.4 140.8 140.9 140.8 140.7 140.8 140.8 140.6 140.4 139.9 139.7 139.7 139.6 139.4 139.2 139.2 139.4 139.8 140.2 140.7 141.4 142.0 142.5 143.0 143.0 142.1 139.8 136.6 134.5 134.1 139.8 149.1 164.4 175.2 185.6 194.9 199.7 204.8 210.2 215.3 220.3 225.1 228.5 231.3 233.5 235.5 237.1 237.9 238.7 237.7 234.0 225.1 216.8 210.8 207.7 206.3 206.5 208.3 209.6 209.6 209.7 209.7 209.4 210.0 211.0 212.4 212.6 212.3 211.5 210.7 209.9 208.9 208.3 207.9 208.2 208.7 209.3 209.6 209.4 209.1 209.1 209.1 209.1 208.9 208.9 208.9 208.7 208.3 207.8 207.5 206.5 205.0 201.8 195.1 186.5 179.2 174.9 173.5 176.3 180.3 184.9 188.4 190.8 193.1 198.2 203.4 208.2 210.7 212.3 212.8 212.0 211.1 210.3 210.0 209.7 209.3 208.9 208.4 207.8 205.6 201.5 196.3 188.6 182.7 178.0 178.6 181.2 185.3 193.0 200.0 206.6 209.9 211.4 211.5 209.1 207.1 205.3 202.7 200.7 199.1 197.4 196.0 194.6 193.2 191.6 189.8 187.0 184.4 182.2 182.2 182.5 183.1 183.5 184.3 185.5 185.8 186.2 186.8 186.6 186.2 185.7 185.4 184.8 183.8 182.0 178.9 173.9 166.5 159.7 154.6 151.2 149.2 148.7 151.1 154.2 157.1 161.0 166.2 172.8 178.1 182.0 184.1 184.1 183.5 183.1 182.9 182.8 182.5 182.2 182.0 181.8 181.8 181.7 181.9 182.0 182.2 182.4 182.6 182.7 182.8 183.2 183.7 184.3 185.2 185.9 186.3 186.6 186.8 186.8 186.7 186.5 186.2 186.0 185.7 185.6 185.4 185.2 184.8 184.8 185.1 185.4 185.6 185.6 185.2 183.7 181.5 178.6 175.5 172.7 171.6 171.0 171.0 172.2 174.1 176.5 178.7 180.3 181.7 183.8 186.1 188.3 189.4 191.0 193.0 195.6 197.5 199.0 201.0 203.2 205.4 207.1 208.2 209.0 209.8 210.6 211.3 210.4 209.7 209.0 209.3 209.3 209.1 208.8 208.6 208.3 206.5 204.5 202.3 196.8 189.6 180.0 175.4 171.4 168.6 166.9 166.1 166.7 168.2 171.5 177.7 181.4 186.4 193.5 197.8 203.2 210.1 216.8 223.5 230.3 232.9 234.1 233.4 233.1 233.1 233.1 232.9 232.9 233.2 232.9 232.4 231.9 231.7 231.5 231.3 230.9 230.7 230.7 230.8 231.0 231.2 231.4 231.6 231.7 231.9 232.0 232.1 232.4 232.8 233.3 233.6 233.8 233.8 233.6 233.4 233.3 233.1 233.1 233.0 232.9 232.8 232.8 232.7 232.5 232.3 232.3 232.3 232.3 232.3 232.3 232.2 232.1 232.0 232.2 232.3 232.4 232.1 231.9 231.9 232.0 232.5 228.5 228.8 229.8 231.2 233.1 235.2 237.7 240.2 242.7 245.1 247.2 248.9 250.2 251.0 250.9 250.5 249.7 248.5 247.2 246.1 245.3 245.0 245.2 245.7 246.6 247.8 249.1 250.8 252.8 254.9 257.1 259.4 261.8 264.1 266.4 268.4 270.2 271.8 273.1 274.2 274.7 274.9 280.1 281.6 282.7 283.2 284.0 284.5 284.5 284.2 283.8 283.3 281.8 280.4 279.1 278.4 277.2 275.1 274.4 274.0 273.8 273.7 273.6 273.4 273.3 273.2 273.3 273.5 273.8 274.2 274.7 275.1 275.4 275.5 275.9 276.8 277.9 278.7 278.8 278.6 278.3 278.0 277.8 277.6 277.5 275.0 275.0 275.0 275.1 275.1 275.1 275.1 275.1 275.1 275.1 275.2 275.3 275.3 275.3 275.4 275.4 275.4 275.5 275.6 275.6 275.6 275.6 275.7 275.7 275.7 275.7 275.7 275.7 275.8 275.9 275.9 275.9 275.9 275.9 275.7 275.7 275.7 275.7 275.7 275.6 275.6 275.4 275.4 275.3 275.3 275.2 275.1 275.1 275.1 275.0 275.0 275.0 277.1 276.9 276.9 276.9 277.0 277.1 277.2 277.3 277.4 277.5 277.6 277.7 277.7 277.9 278.2 278.6 278.4 278.1 277.8 277.6 277.5 277.5 277.4 277.3 277.2 277.1 277.0 276.9 276.7 276.6 276.5 276.5 276.8 277.5 278.1 278.5 278.6 278.4 278.2 278.0 277.9 277.9 278.1 278.5 278.7 278.6 278.5 278.4 278.3 278.2 278.1 278.0 277.9 277.3 276.0 273.5 269.0 262.1 257.1 253.7 251.8 253.1 255.1 256.9 257.6 258.3 259.3 260.5 261.7 262.5 262.9 263.4 264.1 265.1 266.1 266.9 267.4 267.9 268.8 270.0 271.3 272.5 273.1 273.6 274.7 275.3 275.7 276.3 277.7 279.8 283.0 284.5 285.3 285.4 285.1 284.5 283.8 283.3 282.7 281.7 280.7 279.7 278.4 
277.8 277.4 276.8 276.5 276.6 276.8 276.9 277.1 277.3 277.5 277.7 277.8 277.8 277.9 278.2 278.5 278.8 278.8 278.7 278.6 278.5 278.3 278.1 277.9 277.8 277.6 277.4 277.2 277.0 276.9 276.9 276.9 276.9 276.9 276.9 277.0 277.0 277.0 277.1 277.2 277.2 277.3 277.4 277.5 277.6 277.7 277.7 277.8 277.9 278.0 278.0 278.0 278.1 278.1 278.1 278.1 278.3 278.7 279.3 279.3 279.2 279.1 279.0 278.9 278.6 278.6 278.5 278.3 278.3 278.3 278.3 276.7 275.2 272.6 267.4 259.4 250.1 239.8 229.5 219.7 210.8 203.5 198.1 194.6 192.9 193.1 194.3 196.4 199.2 202.9 207.5 212.7 218.5 224.9 231.7 238.7 245.8 252.7 259.4 265.5 270.9 274.9 278.4 280.9 282.4 282.4 281.5 280.2 278.3 276.6 275.3 274.5 274.6 276.3 279.8 284.8 290.2 295.5 299.7 302.4 309.2 309.6 310.7 311.9 313.4 313.9 314.3 314.7 314.8 314.5 314.0 313.6 312.9 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 308.6 307.6 304.5 299.3 292.5 284.3 275.2 265.7 256.5 247.8 239.9 233.2 228.2 224.8 222.9 222.8 224.6 228.2 233.8 240.7 248.4 256.2 263.8 270.4 274.8 276.9 277.5 277.5 277.5 277.5 277.5 277.5 277.5 277.5 277.3 277.3 277.3 277.3 277.2 277.2 277.2 277.0 277.0 277.0 277.0 276.9 276.9 276.7 276.7 276.7 276.6 276.5 276.5 276.4 276.4 275.0 274.7 273.8 272.7 271.5 270.6 270.1 270.4 271.4 273.3 276.1 279.7 283.7 288.0 292.5 296.8 300.9 304.1 306.5 308.0 308.6 308.6 308.7 308.8 308.8 308.8 308.8 308.9 309.0 309.1 309.2 309.3 309.3 309.5 309.6 309.7 309.8 309.9 310.1 310.1 310.2 310.3 310.4 310.4 310.4 310.6 310.6 310.6 310.6 310.6 310.5 310.3 310.2 310.2 310.3 310.4 310.5 310.7 311.0 311.1 311.2 311.3 311.3 311.3 311.3 311.4 311.5 311.5 311.6 311.7 311.9 312.0 312.1 312.2 312.3 312.4 312.4 312.4 312.3 312.2 312.1 311.9 311.7 311.5 311.4 311.3 311.3 311.3 311.3 311.3 311.2 311.1 311.1 311.1 310.9 310.9 310.9 310.8 310.8 310.7 310.6 310.6 310.6 310.6 310.5 310.4 310.4 310.4 310.5 310.6 310.8 311.0 311.3 311.4 311.5 311.5 311.5 311.5 311.5 311.5 311.4 311.3 311.3 311.3 311.3 311.3 311.3 311.1 311.1 311.1 311.0 310.9 310.9 310.9 310.8 310.8 310.8 310.8 310.7 310.6 310.6 310.6 310.6 310.6 310.5 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.1 309.7 310.0 310.2 310.5 310.7 311.0 311.3 311.5 311.5 311.5 311.5 311.6 311.7 311.7 311.8 311.8 311.8 311.9 312.0 312.2 312.2 312.2 312.3 312.4 312.4 312.5 312.6 312.6 312.7 312.7 312.7 312.7 312.7 312.7 312.6 312.5 312.4 312.3 312.1 312.0 311.9 311.8 311.7 311.7 311.6 311.5 311.5 311.5 311.5 311.5 311.5 311.5 311.6 311.7 311.7 311.7 311.7 311.8 311.8 311.9 312.0 312.0 312.1 312.2 312.2 312.3 312.4 312.5 312.6 312.6 312.6 312.6 312.7 312.7 312.8 312.9 312.9 312.9 311.1 311.1 311.5 312.0 312.4 313.0 313.7 314.4 315.0 315.7 316.0 316.4 316.6 316.4 316.2 315.7 315.1 314.2 313.1 311.7 310.2 308.6 306.8 305.4 303.6 301.9 300.7 299.3 298.3 297.6 297.1 297.4 298.1 299.1 300.5 301.9 303.8 305.8 307.7 310.1 312.2 314.6 316.6 318.8 320.8 322.3 323.8 324.9 325.7 326.0 326.0 325.7 324.8 323.6 322.5 320.5 318.6 316.7 314.3 311.8 309.9 307.4 305.1 303.3 301.3 299.9 298.6 297.6 296.8 296.6 296.6 296.8 297.6 298.5 299.9 301.6 303.3 305.4 307.7 309.9 312.4 314.9 316.9 319.2 321.2 322.8 324.3 325.5 326.2 326.6 326.6 326.2 325.5 324.3 322.7 321.2 319.0 316.7 314.6 312.2 309.5 307.4 305.1 302.8 301.2 299.4 297.9 297.1 296.3 296.0 296.0 296.5 297.2 298.1 299.6 301.1 303.1 305.1 307.6 310.1 312.2 314.7 317.3 319.3 321.4 323.4 324.7 325.9 326.8 327.2 327.2 326.6 325.8 324.7 323.0 321.4 319.2 316.7 314.6 312.0 309.7 307.2 305.1 302.8 300.7 299.1 297.7 296.6 295.9 295.5 295.5 295.9 296.6 297.8 299.1 301.0 
303.0 305.1 307.6 310.1 312.4 315.1 317.7 319.7 322.0 323.7 325.3 326.4 327.3 327.7 327.7 327.3 326.4 325.3 323.7 321.5 319.7 317.3 314.6 312.2 309.5 306.8 304.7 302.4 300.2 298.6 297.2 296.0 295.4 294.9 295.0 295.4 296.0 297.2 298.8 300.4 302.5 304.9 307.2 309.9 312.6 314.9 317.7 320.2 322.1 324.2 325.8 327.0 327.8 328.3 328.3 327.8 326.8 325.7 323.9 322.2 319.9 317.7 314.9 312.2 309.7 307.0 304.4 302.3 300.1 298.1 296.9 295.6 294.9 294.5 294.5 294.9 295.7 296.9 298.5 300.2 302.4 304.9 307.2 310.0 312.4 315.3 317.7 320.2 322.7 324.5 326.2 327.5 328.3 328.9 328.9 328.5 327.5 326.0 324.5 322.4 320.1 317.7 314.9 312.0 309.5 306.8 304.0 301.9 299.6 297.9 296.2 295.2 294.4 294.0 294.0 294.4 295.2 296.2 298.0 300.0 301.9 304.5 307.2 309.7 312.7 315.7 318.0 320.6 323.2 324.9 326.8 328.1 329.1 329.4 329.4 328.9 328.1 326.7 324.0 321.5 318.8 316.0 314.0 311.8 310.1 309.0 308.0 307.6 307.4 307.7 308.4 309.2 310.2 312.5 310.2 309.7 309.7 309.8 310.0 310.1 310.2 310.3 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4 310.4", - "input_type": "phoneme", - "offset": 192.001 + "f0_timestep": "0.005" } ] \ No newline at end of file From d79ce3fe8ac8a0832202ce8fe1e48cce17e64aa6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 26 May 2023 20:19:45 +0800 Subject: [PATCH 406/475] Move file --- inference/{vocoder => }/val_nsf_hifigan.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename inference/{vocoder => }/val_nsf_hifigan.py (100%) diff --git a/inference/vocoder/val_nsf_hifigan.py b/inference/val_nsf_hifigan.py similarity index 100% rename from inference/vocoder/val_nsf_hifigan.py rename to inference/val_nsf_hifigan.py From 2de7a21f797a8f043f99b6a76f6a7f881c01a48c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 28 May 2023 01:32:43 +0800 Subject: [PATCH 407/475] Refactor inference structure --- basics/base_svs_infer.py | 9 +- configs/variance.yaml | 1 + inference/ds_acoustic.py | 117 ++++++++++----- inference/ds_variance.py | 29 +++- modules/diffusion/ddpm.py | 7 +- scripts/infer.py | 298 +++++++++++++++++++------------------- utils/infer_utils.py | 7 + 7 files changed, 275 insertions(+), 193 deletions(-) diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 5c6d157b9..17e5a977c 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -26,15 +26,16 @@ def __init__(self, device=None): self.device = device self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] self.spk_map = {} + self.model: torch.nn.Module = None - def build_model(self, ckpt_steps=None): + def build_model(self, ckpt_steps=None) -> torch.nn.Module: raise NotImplementedError - def preprocess_input(self, param): + def preprocess_input(self, param: dict, idx=0) -> dict[str, torch.Tensor]: raise NotImplementedError - def run_model(self, sample): + def forward_model(self, sample: dict[str, torch.Tensor]): raise NotImplementedError - def infer_once(self, param): + def run_inference(self, params, **kwargs): raise NotImplementedError() diff --git a/configs/variance.yaml b/configs/variance.yaml index 
3c2e68399..9b8efeed1 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -40,6 +40,7 @@ predict_energy: false predict_breathiness: false dur_prediction_args: + arch: fs2 hidden_size: 512 dropout: 0.1 num_layers: 5 diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index c39e0e20f..22ad76415 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -1,3 +1,6 @@ +from collections import OrderedDict + +import tqdm import json import pathlib @@ -5,13 +8,13 @@ import torch from basics.base_svs_infer import BaseSVSInfer -from modules.fastspeech.tts_modules import LengthRegulator from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST +from modules.fastspeech.tts_modules import LengthRegulator from modules.toplevel import DiffSingerAcoustic from modules.vocoders.registry import VOCODERS from utils import load_ckpt from utils.hparams import hparams -from utils.infer_utils import resample_align_curve +from utils.infer_utils import cross_fade, resample_align_curve, save_wav from utils.phoneme_utils import build_phoneme_list from utils.text_encoder import TokenTextEncoder @@ -23,16 +26,10 @@ def __init__(self, device=None, load_model=True, load_vocoder=True, ckpt_steps=N self.variance_checklist = [] self.variances_to_embed = set() - self.variances_to_predict = set() - if hparams.get('predict_energy', False): - self.variances_to_predict.add('energy') - elif hparams.get('use_energy_embed', False): + if hparams.get('use_energy_embed', False): self.variances_to_embed.add('energy') - - if hparams.get('predict_breathiness', False): - self.variances_to_predict.add('breathiness') - elif hparams.get('use_breathiness_embed', False): + if hparams.get('use_breathiness_embed', False): self.variances_to_embed.add('breathiness') self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) @@ -63,12 +60,14 @@ def build_vocoder(self): vocoder.to_device(self.device) return vocoder - def preprocess_input(self, param): + def preprocess_input(self, param, idx=0): """ :param param: one segment in the .ds file + :param idx: index of the segment :return: batch of the model inputs """ batch = {} + summary = OrderedDict() txt_tokens = torch.LongTensor([self.ph_encoder.encode(param['ph_seq'])]).to(self.device) # => [B, T_txt] batch['tokens'] = txt_tokens @@ -79,7 +78,9 @@ def preprocess_input(self, param): batch['mel2ph'] = mel2ph length = mel2ph.size(1) # => T - print(f'Length: {txt_tokens.size(1)} token(s), {length} frame(s), {length * self.timestep:.2f} second(s)') + summary['tokens'] = txt_tokens.size(1) + summary['frames'] = length + summary['seconds'] = '%.2f' % (length * self.timestep) if hparams['use_spk_id']: spk_mix_map = param.get('spk_mix') # { spk_name: value } or { spk_name: "value value value ..." } @@ -94,14 +95,14 @@ def preprocess_input(self, param): assert name in self.spk_map, f'Speaker \'{name}\' not found.' 
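A minimal sketch of the two `spk_mix` shapes accepted above, using hypothetical speaker names (not from the repo); `preprocess_input` resamples each dynamic curve to the segment's frame length:

    # Static mix: one fixed weight per speaker for the whole segment.
    static_param = {'spk_mix': {'spk_a': 0.7, 'spk_b': 0.3}}
    # Dynamic mix: space-separated per-frame weight curves as strings.
    dynamic_param = {'spk_mix': {'spk_a': '0.0 0.5 1.0', 'spk_b': '1.0 0.5 0.0'}}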
if len(spk_mix_map) == 1: - print(f'Using speaker \'{list(spk_mix_map.keys())[0]}\'') + summary['spk'] = list(spk_mix_map.keys())[0] elif any([isinstance(val, str) for val in spk_mix_map.values()]): print_mix = '|'.join(spk_mix_map.keys()) - print(f'Using dynamic speaker mix \'{print_mix}\'') + summary['spk_mix'] = f'dynamic({print_mix})' dynamic = True else: print_mix = '|'.join([f'{n}:{"%.3f" % spk_mix_map[n]}' for n in spk_mix_map]) - print(f'Using static speaker mix \'{print_mix}\'') + summary['spk_mix'] = f'static({print_mix})' spk_mix_id_list = [] spk_mix_value_list = [] @@ -159,26 +160,24 @@ def preprocess_input(self, param): )).to(self.device)[None] for v_name in VARIANCE_CHECKLIST: - if v_name in self.variances_to_embed or (v_name in self.variances_to_predict and param.get(v_name)): + if v_name in self.variances_to_embed: batch[v_name] = torch.from_numpy(resample_align_curve( np.array(param[v_name].split(), np.float32), original_timestep=float(param[f'{v_name}_timestep']), target_timestep=self.timestep, align_length=length )).to(self.device)[None] - print(f'Using manual {v_name} curve') - elif v_name in self.variances_to_predict: - print(f'Using predicted {v_name} curve') + summary[v_name] = 'input' if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] gender = param.get('gender', 0.) if isinstance(gender, (int, float, bool)): # static gender value - print(f'Using static gender value: {gender:.3f}') + summary['gender'] = f'static({gender:.3f})' key_shift_value = gender * shift_max if gender >= 0 else gender * abs(shift_min) batch['key_shift'] = torch.FloatTensor([key_shift_value]).to(self.device)[:, None] # => [B=1, T=1] else: - print('Using dynamic gender curve') + summary['gender'] = 'dynamic' gender_seq = resample_align_curve( np.array(gender.split(), np.float32), original_timestep=float(param['gender_timestep']), @@ -194,10 +193,10 @@ def preprocess_input(self, param): if hparams.get('use_speed_embed', False): if param.get('velocity') is None: - print('Using default velocity value') + summary['velocity'] = 'default' batch['speed'] = torch.FloatTensor([1.]).to(self.device)[:, None] # => [B=1, T=1] else: - print('Using manual velocity curve') + summary['velocity'] = 'manual' speed_min, speed_max = hparams['augmentation_args']['random_time_stretching']['range'] speed_seq = resample_align_curve( np.array(param['velocity'].split(), np.float32), @@ -210,14 +209,16 @@ def preprocess_input(self, param): min=speed_min, max=speed_max ) + print(f'[{idx}]\t' + ', '.join(f'{k}: {v}' for k, v in summary.items())) + return batch @torch.no_grad() - def run_model(self, sample, return_mel=False): + def forward_model(self, sample): txt_tokens = sample['tokens'] variances = { v_name: sample.get(v_name) - for v_name in self.variances_to_embed | self.variances_to_predict + for v_name in self.variances_to_embed } if hparams['use_spk_id']: spk_mix_id = sample['spk_mix_id'] @@ -241,11 +242,61 @@ def run_vocoder(self, spec, **kwargs): y = self.vocoder.spec2wav_torch(spec, **kwargs) return y[None] - def infer_once(self, param, return_mel=False): - batch = self.preprocess_input(param) - mel = self.run_model(batch, return_mel=True) - if return_mel: - return mel.cpu(), batch['f0'].cpu() - else: - waveform = self.run_vocoder(mel, f0=batch['f0']) - return waveform.view(-1).cpu().numpy() + def run_inference( + self, params, + out_dir: pathlib.Path = None, + title: str = None, + num_runs: int = 1, + spk_mix: dict[str, float] = None, 
+ seed: int = -1, + save_mel: bool = False + ): + batches = [self.preprocess_input(param, idx=i) for i, param in enumerate(params)] + + out_dir.mkdir(parents=True, exist_ok=True) + suffix = '.wav' if not save_mel else '.mel.pt' + for i in range(num_runs): + if save_mel: + result = [] + else: + result = np.zeros(0) + current_length = 0 + + for param, batch in tqdm.tqdm( + zip(params, batches), desc='infer segments', total=len(params) + ): + if 'seed' in param: + torch.manual_seed(param["seed"] & 0xffff_ffff) + torch.cuda.manual_seed_all(param["seed"] & 0xffff_ffff) + elif seed >= 0: + torch.manual_seed(seed & 0xffff_ffff) + torch.cuda.manual_seed_all(seed & 0xffff_ffff) + + mel_pred = self.forward_model(batch) + if save_mel: + result.append({ + 'offset': param.get('offset', 0.), + 'mel': mel_pred.cpu(), + 'f0': batch['f0'].cpu() + }) + else: + waveform_pred = self.run_vocoder(mel_pred, f0=batch['f0'])[0].cpu().numpy() + silent_length = round(param.get('offset', 0) * hparams['audio_sample_rate']) - current_length + if silent_length >= 0: + result = np.append(result, np.zeros(silent_length)) + result = np.append(result, waveform_pred) + else: + result = cross_fade(result, waveform_pred, current_length + silent_length) + current_length = current_length + silent_length + waveform_pred.shape[0] + + if num_runs > 1: + filename = f'{title}-{str(i).zfill(3)}{suffix}' + else: + filename = title + suffix + save_path = out_dir / filename + if save_mel: + print(f'| save mel: {save_path}') + torch.save(result, save_path) + else: + print(f'| save audio: {save_path}') + save_wav(result, save_path, hparams['audio_sample_rate']) diff --git a/inference/ds_variance.py b/inference/ds_variance.py index a52e1ce21..5867ac808 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -1,3 +1,6 @@ +import pathlib +from collections import OrderedDict + import librosa import numpy as np import torch @@ -50,12 +53,14 @@ def build_model(self, ckpt_steps=None): return model @torch.no_grad() - def preprocess_input(self, param): + def preprocess_input(self, param, idx=0): """ :param param: one segment in the .ds file + :param idx: index of the segment :return: batch of the model inputs """ batch = {} + summary = OrderedDict() txt_tokens = torch.LongTensor([self.ph_encoder.encode(param['ph_seq'].split())]).to(self.device) # [B=1, T_ph] T_ph = txt_tokens.shape[1] batch['tokens'] = txt_tokens @@ -73,8 +78,11 @@ def preprocess_input(self, param): mel2note = self.lr(note_dur) # [B=1, T_s] T_s = mel2note.shape[1] - print(f'Length: {T_w} word(s), {note_seq.shape[1]} note(s), {T_ph} token(s), ' - f'{T_s} frame(s), {T_s * self.timestep:.2f} second(s)') + summary['words'] = T_w + summary['notes'] = note_seq.shape[1] + summary['tokens'] = T_ph + summary['frames'] = T_s + summary['seconds'] = '%.2f' % (T_s * self.timestep) if param.get('ph_dur'): # Get mel2ph if ph_dur is given @@ -153,10 +161,12 @@ def preprocess_input(self, param): librosa.hz_to_midi(interp_f0(f0)[0]).astype(np.float32) ).to(self.device)[None] + print(f'[{idx}]\t' + ', '.join(f'{k}: {v}' for k, v in summary.items())) + return batch @torch.no_grad() - def run_model(self, sample): + def forward_model(self, sample): txt_tokens = sample['tokens'] midi = sample['midi'] ph2word = sample['ph2word'] @@ -179,7 +189,7 @@ def run_model(self, sample): def infer_once(self, param): batch = self.preprocess_input(param) - dur_pred, pitch_pred, variance_pred = self.run_model(batch) + dur_pred, pitch_pred, variance_pred = self.forward_model(batch) if dur_pred is not 
None: dur_pred = dur_pred[0].cpu().numpy() if pitch_pred is not None: @@ -192,3 +202,12 @@ def infer_once(self, param): for k, v in variance_pred.items() } return dur_pred, f0_pred, variance_pred + + def run_inference( + self, params, + out_dir: pathlib.Path = None, + title: str = None, + num_runs: int = 1, + seed: int = -1 + ): + pass diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 428ab4c3c..453fbf2e4 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -239,7 +239,7 @@ def wrapped(x, t, **kwargs): dpm_solver = DPM_Solver(model_fn, noise_schedule) steps = t // hparams["pndm_speedup"] - self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer']) + self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer'], leave=False) x = dpm_solver.sample( x, steps=steps, @@ -253,7 +253,7 @@ def wrapped(x, t, **kwargs): iteration_interval = hparams['pndm_speedup'] for i in tqdm( reversed(range(0, t, iteration_interval)), desc='sample time step', - total=t // iteration_interval, disable=not hparams['infer'] + total=t // iteration_interval, disable=not hparams['infer'], leave=False ): x = self.p_sample_plms( x, torch.full((b,), i, device=device, dtype=torch.long), @@ -262,7 +262,8 @@ def wrapped(x, t, **kwargs): else: raise NotImplementedError(algorithm) else: - for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, disable=not hparams['infer']): + for i in tqdm(reversed(range(0, t)), desc='sample time step', total=t, + disable=not hparams['infer'], leave=False): x = self.p_sample(x, torch.full((b,), i, device=device, dtype=torch.long), cond) x = x.transpose(2, 3).squeeze(1) # [B, F, M, T] => [B, T, M] or [B, F, T, M] return x diff --git a/scripts/infer.py b/scripts/infer.py index b3196ca9c..e681f4797 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -1,159 +1,161 @@ -# coding=utf8 -import argparse import json import os import pathlib import sys +from pathlib import Path -root_dir = pathlib.Path(__file__).parent.parent.resolve() +import click + +root_dir = Path(__file__).parent.parent.resolve() os.environ['PYTHONPATH'] = str(root_dir) sys.path.insert(0, str(root_dir)) -import numpy as np -import torch - -from inference.ds_acoustic import DiffSingerAcousticInfer -from utils.hparams import set_hparams, hparams -from utils.infer_utils import merge_slurs, cross_fade, trans_key, parse_commandline_spk_mix, save_wav - -parser = argparse.ArgumentParser(description='Run DiffSinger inference') -parser.add_argument('proj', type=str, help='Path to the input file') -parser.add_argument('--exp', type=str, required=True, help='Selection of model') -parser.add_argument('--ckpt', type=int, required=False, help='Selection of checkpoint training steps') -parser.add_argument('--spk', type=str, required=False, help='Speaker name or mix of speakers') -parser.add_argument('--out', type=str, required=False, help='Path of the output folder') -parser.add_argument('--title', type=str, required=False, help='Title of output file') -parser.add_argument('--num', type=int, required=False, default=1, help='Number of runs') -parser.add_argument('--key', type=int, required=False, default=0, help='Key transition of pitch') -parser.add_argument('--gender', type=float, required=False, default=0, help='Formant shifting (gender control)') -parser.add_argument('--seed', type=int, required=False, help='Random seed of the inference') -parser.add_argument('--speedup', type=int, required=False, default=0, help='PNDM speed-up ratio') 
-parser.add_argument('--mel', action='store_true', required=False, default=False, - help='Save intermediate mel format instead of waveform') -args = parser.parse_args() - -proj = pathlib.Path(args.proj) -name = proj.stem if not args.title else args.title -exp = args.exp -if not (root_dir / 'checkpoints' / exp).exists(): - for ckpt in (root_dir / 'checkpoints').iterdir(): - if not ckpt.is_dir(): - continue - if ckpt.name.startswith(exp): - print(f'| match ckpt by prefix: {ckpt.name}') - exp = ckpt.name - break - else: - raise FileNotFoundError('There are no matching exp in \'checkpoints\' folder. ' - 'Please specify \'--exp\' as the folder name or prefix.') -else: - print(f'| found ckpt by name: {exp}') - -if args.out: - out = pathlib.Path(args.out) -else: - out = proj.parent - -sys.argv = [ - sys.argv[0], - '--exp_name', - exp, - '--infer' -] - -with open(proj, 'r', encoding='utf-8') as f: - params = json.load(f) -if not isinstance(params, list): - params = [params] - -if args.key != 0: - params = trans_key(params, args.key) - key_suffix = '%+dkey' % args.key - if not args.title: - name += key_suffix - print(f'| key transition: {args.key:+d}') - -if args.gender is not None: - assert -1 <= args.gender <= 1, 'Gender must be in [-1, 1].' - -set_hparams(print_hparams=False) -if args.speedup > 0: - hparams['pndm_speedup'] = args.speedup - -sample_rate = hparams['audio_sample_rate'] - -# Check for vocoder path -assert (root_dir / hparams['vocoder_ckpt']).exists(), \ - f'Vocoder ckpt \'{hparams["vocoder_ckpt"]}\' not found. ' \ - f'Please put it to the checkpoints directory to run inference.' - -infer_ins = None -if len(params) > 0: - infer_ins = DiffSingerAcousticInfer(load_vocoder=not args.mel, ckpt_steps=args.ckpt) -spk_mix = parse_commandline_spk_mix(args.spk) if hparams['use_spk_id'] and args.spk is not None else None - -for param in params: - if args.gender is not None and hparams.get('use_key_shift_embed'): - param['gender'] = args.gender - - if spk_mix is not None: - param['spk_mix'] = spk_mix - - merge_slurs(param) - - -def infer_once(path: pathlib.Path, save_mel=False): - if save_mel: - result = [] - else: - result = np.zeros(0) - current_length = 0 - - for i, param in enumerate(params): - if 'seed' in param: - print(f'| set seed: {param["seed"] & 0xffff_ffff}') - torch.manual_seed(param["seed"] & 0xffff_ffff) - torch.cuda.manual_seed_all(param["seed"] & 0xffff_ffff) - elif args.seed: - print(f'| set seed: {args.seed & 0xffff_ffff}') - torch.manual_seed(args.seed & 0xffff_ffff) - torch.cuda.manual_seed_all(args.seed & 0xffff_ffff) - else: - torch.manual_seed(torch.seed() & 0xffff_ffff) - torch.cuda.manual_seed_all(torch.seed() & 0xffff_ffff) - - if save_mel: - mel, f0 = infer_ins.infer_once(param, return_mel=True) - result.append({ - 'offset': param.get('offset', 0.), - 'mel': mel, - 'f0': f0 - }) +def find_exp(exp): + if not (root_dir / 'checkpoints' / exp).exists(): + for subdir in (root_dir / 'checkpoints').iterdir(): + if not subdir.is_dir(): + continue + if subdir.name.startswith(exp): + print(f'| match ckpt by prefix: {subdir.name}') + exp = subdir.name + break else: - seg_audio = infer_ins.infer_once(param) - silent_length = round(param.get('offset', 0) * sample_rate) - current_length - if silent_length >= 0: - result = np.append(result, np.zeros(silent_length)) - result = np.append(result, seg_audio) - else: - result = cross_fade(result, seg_audio, current_length + silent_length) - current_length = current_length + silent_length + seg_audio.shape[0] - sys.stdout.flush() - 
print('| finish segment: %d/%d (%.2f%%)' % (i + 1, len(params), (i + 1) / len(params) * 100)) - - if save_mel: - print(f'| save mel: {path}') - torch.save(result, path) + assert False, \ + f'There are no matching exp starting with \'{exp}\' in \'checkpoints\' folder. ' \ + 'Please specify \'--exp\' as the folder name or prefix.' else: - print(f'| save audio: {path}') - save_wav(result, path, sample_rate) - - -os.makedirs(out, exist_ok=True) -suffix = '.wav' if not args.mel else '.mel.pt' -if args.num == 1: - infer_once(out / (name + suffix), save_mel=args.mel) -else: - for i in range(1, args.num + 1): - infer_once(out / f'{name}-{str(i).zfill(3)}{suffix}', save_mel=args.mel) + print(f'| found ckpt by name: {exp}') + return exp + + +@click.group() +def main(): + pass + + +@main.command(help='Run DiffSinger acoustic model inference') +@click.argument('proj', type=str, metavar='DS_FILE') +@click.option('--exp', type=str, required=True, metavar='EXP', help='Selection of model') +@click.option('--ckpt', type=int, required=False, metavar='STEPS', help='Selection of checkpoint training steps') +@click.option('--spk', type=str, required=False, help='Speaker name or mix of speakers') +@click.option('--out', type=str, required=False, metavar='DIR', help='Path of the output folder') +@click.option('--title', type=str, required=False, help='Title of output file') +@click.option('--num', type=int, required=False, default=1, help='Number of runs') +@click.option('--key', type=int, required=False, default=0, help='Key transition of pitch') +@click.option('--gender', type=float, required=False, default=0, help='Formant shifting (gender control)') +@click.option('--seed', type=int, required=False, default=-1, help='Random seed of the inference') +@click.option('--speedup', type=int, required=False, default=0, help='Diffusion acceleration ratio') +@click.option('--mel', is_flag=True, help='Save intermediate mel format instead of waveform') +def acoustic( + proj: str, + exp: str, + ckpt: int, + spk: str, + out: str, + title: str, + num: int, + key: int, + gender: float, + seed: int, + speedup: int, + mel: bool +): + proj = pathlib.Path(proj).resolve() + name = proj.stem if not title else title + exp = find_exp(exp) + if out: + out = pathlib.Path(out) + else: + out = proj.parent + + if gender is not None: + assert -1 <= gender <= 1, 'Gender must be in [-1, 1].' + + with open(proj, 'r', encoding='utf-8') as f: + params = json.load(f) + + if not isinstance(params, list): + params = [params] + + if len(params) == 0: + print('The input file is empty.') + exit() + + from utils.infer_utils import trans_key, parse_commandline_spk_mix, merge_slurs + + if key != 0: + params = trans_key(params, key) + key_suffix = '%+dkey' % key + if not title: + name += key_suffix + print(f'| key transition: {key:+d}') + + sys.argv = [ + sys.argv[0], + '--exp_name', + exp, + '--infer' + ] + from utils.hparams import set_hparams, hparams + set_hparams() + + # Check for vocoder path + assert mel or (root_dir / hparams['vocoder_ckpt']).exists(), \ + f'Vocoder ckpt \'{hparams["vocoder_ckpt"]}\' not found. ' \ + f'Please put it to the checkpoints directory to run inference.' + + if speedup > 0: + assert hparams['K_step'] % speedup == 0, f'Acceleration ratio must be factor of K_step {hparams["K_step"]}.' 
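A minimal sketch of what this divisibility check guards, assuming the accelerated sampling loop shown in the ddpm.py hunk above (numbers are illustrative):

    K_step, speedup = 1000, 10
    assert K_step % speedup == 0, 'Acceleration ratio must be a factor of K_step.'
    # The sampler visits t = 990, 980, ..., 0, i.e. K_step // speedup iterations:
    steps = list(reversed(range(0, K_step, speedup)))
    assert len(steps) == K_step // speedup  # 100 iterations instead of 1000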
+ + spk_mix = parse_commandline_spk_mix(spk) if hparams['use_spk_id'] and spk is not None else None + for param in params: + if gender is not None and hparams.get('use_key_shift_embed'): + param['gender'] = gender + + if spk_mix is not None: + param['spk_mix'] = spk_mix + + merge_slurs(param) + + from inference.ds_acoustic import DiffSingerAcousticInfer + infer_ins = DiffSingerAcousticInfer(load_vocoder=not mel, ckpt_steps=ckpt) + print(f'| Model: {type(infer_ins.model)}') + + infer_ins.run_inference( + params, out_dir=out, title=name, num_runs=num, + spk_mix=spk_mix, seed=seed, save_mel=mel + ) + + +@main.command(help='Run DiffSinger variance model inference') +@click.argument('proj', type=str, metavar='DS_FILE') +@click.option('--exp', type=str, required=True, metavar='EXP', help='Selection of model') +@click.option('--ckpt', type=int, required=False, metavar='STEPS', help='Selection of checkpoint training steps') +@click.option('--predict', type=str, multiple=True, metavar='TAGS', help='Parameters to predict') +@click.option('--spk', type=str, required=False, help='Speaker name or mix of speakers') +@click.option('--out', type=str, required=False, metavar='DIR', help='Path of the output folder') +@click.option('--title', type=str, required=False, help='Title of output file') +@click.option('--overwrite', is_flag=True, help='Overwrite the input file') +@click.option('--num', type=int, required=False, default=1, help='Number of runs') +@click.option('--seed', type=int, required=False, help='Random seed of the inference') +@click.option('--speedup', type=int, required=False, default=0, help='Diffusion acceleration ratio') +def variance( + proj: str, + exp: str, + ckpt: int, + spk: str, + predict: tuple[str], + out: str, + title: str, + overwrite: bool, + num: int, + seed: int, + speedup: int +): + print(predict) + pass + + +if __name__ == '__main__': + main() diff --git a/utils/infer_utils.py b/utils/infer_utils.py index 0f029cb8f..90a2b90db 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -1,4 +1,5 @@ import re +import warnings import librosa import numpy as np @@ -10,6 +11,12 @@ def merge_slurs(param): if not param.get('is_slur_seq'): return + warnings.warn( + 'You are running inference from a DS file in old format.
Please re-export it in new format ' + 'or ask for a new version from the provider of this file.', + category=DeprecationWarning + ) + warnings.filterwarnings(action='default') ph_seq = param['ph_seq'].split() note_seq = param['note_seq'].split() note_dur_seq = param['note_dur_seq'].split() From 830ee8459eb51a92096108f75272bb3d05425ed1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 29 May 2023 22:58:45 +0800 Subject: [PATCH 408/475] Modification of energy and breathiness definitions (#91) * Change energy and breathiness definition to dB * Smoothen energy and breathiness * Smoothen energy and breathiness (acoustic model) * Update comments * Support different values of `dilation_cycle_length` * Add missing `torch.no_grad()` --- configs/acoustic.yaml | 2 + configs/variance.yaml | 15 ++++--- deployment/modules/toplevel.py | 19 +++++---- modules/diffusion/ddpm.py | 4 +- modules/diffusion/wavenet.py | 4 +- modules/fastspeech/param_adaptor.py | 21 +++++----- modules/toplevel.py | 18 ++++---- preprocessing/acoustic_binarizer.py | 29 +++++++++++-- preprocessing/variance_binarizer.py | 64 +++++++++++++++-------------- utils/binarizer_utils.py | 20 ++++++++- 10 files changed, 124 insertions(+), 72 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 43408906a..c674eb779 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -51,6 +51,8 @@ spec_max: [0] mel_vmin: -6. #-6. mel_vmax: 1.5 interp_uv: true +energy_smooth_width: 0.12 +breathiness_smooth_width: 0.12 use_spk_id: false f0_embed_type: continuous diff --git a/configs/variance.yaml b/configs/variance.yaml index 9b8efeed1..972a0cbae 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -56,25 +56,28 @@ pitch_prediction_args: pitd_norm_max: 8.0 pitd_clip_min: -12.0 pitd_clip_max: 12.0 - repeat_bins: 64 - residual_layers: 20 - residual_channels: 256 + repeat_bins: 24 + residual_layers: 10 + residual_channels: 192 + dilation_cycle_length: 4 # * -energy_db_min: -72.0 +energy_db_min: -96.0 energy_db_max: -12.0 -breathiness_db_min: -72.0 +energy_smooth_width: 0.12 +breathiness_db_min: -96.0 breathiness_db_max: -20.0 +breathiness_smooth_width: 0.12 variances_prediction_args: repeat_bins: 64 residual_layers: 20 residual_channels: 256 + dilation_cycle_length: 5 # * lambda_dur_loss: 1.0 lambda_pitch_loss: 1.0 lambda_var_loss: 1.0 -dilation_cycle_length: 5 # * schedule_type: 'linear' K_step: 1000 timesteps: 1000 diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index a6820da40..df8290e3a 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -24,13 +24,15 @@ def __init__(self, vocab_size, out_dims): ) self.diffusion = GaussianDiffusionONNX( out_dims=out_dims, + num_feats=1, timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], - denoiser_args=( - hparams['residual_layers'], - hparams['residual_channels'] - ), + denoiser_args={ + 'n_layers': hparams['residual_layers'], + 'n_chans': hparams['residual_channels'], + 'n_dilates': hparams['dilation_cycle_length'], + }, spec_min=hparams['spec_min'], spec_max=hparams['spec_max'] ) @@ -103,10 +105,11 @@ def __init__(self, vocab_size): timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], - denoiser_args=( - pitch_hparams['residual_layers'], - pitch_hparams['residual_channels'] - ) + denoiser_args={ + 'n_layers': pitch_hparams['residual_layers'], + 'n_chans': pitch_hparams['residual_channels'], + 'n_dilates': 
pitch_hparams['dilation_cycle_length'], + }, ) if self.predict_variances: del self.variance_predictor diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 453fbf2e4..64f81760c 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -12,7 +12,7 @@ from utils.hparams import hparams DIFF_DENOISERS = { - 'wavenet': lambda args: WaveNet(*args), + 'wavenet': WaveNet } @@ -66,7 +66,7 @@ def __init__(self, out_dims, num_feats=1, timesteps=1000, k_step=1000, denoiser_type=None, denoiser_args=None, betas=None, spec_min=None, spec_max=None): super().__init__() - self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type]((out_dims, num_feats, *denoiser_args)) + self.denoise_fn: nn.Module = DIFF_DENOISERS[denoiser_type](out_dims, num_feats, **denoiser_args) self.out_dims = out_dims self.num_feats = num_feats diff --git a/modules/diffusion/wavenet.py b/modules/diffusion/wavenet.py index de9d09eaa..0a1400d30 100644 --- a/modules/diffusion/wavenet.py +++ b/modules/diffusion/wavenet.py @@ -63,7 +63,7 @@ def forward(self, x, conditioner, diffusion_step): class WaveNet(nn.Module): - def __init__(self, in_dims, n_feats, n_layers, n_chans): + def __init__(self, in_dims, n_feats, *, n_layers=20, n_chans=256, n_dilates=4): super().__init__() self.in_dims = in_dims self.n_feats = n_feats @@ -78,7 +78,7 @@ def __init__(self, in_dims, n_feats, n_layers, n_chans): ResidualBlock( encoder_hidden=hparams['hidden_size'], residual_channels=n_chans, - dilation=2 ** (i % hparams['dilation_cycle_length']) + dilation=2 ** (i % n_dilates) ) for i in range(n_layers) ]) diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py index 9179ec691..cb87eb87a 100644 --- a/modules/fastspeech/param_adaptor.py +++ b/modules/fastspeech/param_adaptor.py @@ -24,17 +24,17 @@ def build_adaptor(self, cls=MultiVarianceDiffusion): if self.predict_energy: ranges.append(( - 10. ** (hparams['energy_db_min'] / 20.), - 10. ** (hparams['energy_db_max'] / 20.) + hparams['energy_db_min'], + hparams['energy_db_max'] )) - clamps.append((0., 1.)) + clamps.append((hparams['energy_db_min'], 0.)) if self.predict_breathiness: ranges.append(( - 10. ** (hparams['breathiness_db_min'] / 20.), - 10. ** (hparams['breathiness_db_max'] / 20.) 
+                hparams['breathiness_db_min'],
+                hparams['breathiness_db_max']
             ))
-            clamps.append((0., 1.))
+            clamps.append((hparams['breathiness_db_min'], 0.))
 
         variances_hparams = hparams['variances_prediction_args']
         return cls(
@@ -44,10 +44,11 @@ def build_adaptor(self, cls=MultiVarianceDiffusion):
             timesteps=hparams['timesteps'],
             k_step=hparams['K_step'],
             denoiser_type=hparams['diff_decoder_type'],
-            denoiser_args=(
-                variances_hparams['residual_layers'],
-                variances_hparams['residual_channels']
-            )
+            denoiser_args={
+                'n_layers': variances_hparams['residual_layers'],
+                'n_chans': variances_hparams['residual_channels'],
+                'n_dilates': variances_hparams['dilation_cycle_length'],
+            }
         )
 
     def collect_variance_inputs(self, **kwargs) -> list:
diff --git a/modules/toplevel.py b/modules/toplevel.py
index 9816fc776..2a518f554 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -34,10 +34,11 @@ def __init__(self, vocab_size, out_dims):
             timesteps=hparams['timesteps'],
             k_step=hparams['K_step'],
             denoiser_type=hparams['diff_decoder_type'],
-            denoiser_args=(
-                hparams['residual_layers'],
-                hparams['residual_channels']
-            ),
+            denoiser_args={
+                'n_layers': hparams['residual_layers'],
+                'n_chans': hparams['residual_channels'],
+                'n_dilates': hparams['dilation_cycle_length'],
+            },
             spec_min=hparams['spec_min'],
             spec_max=hparams['spec_max']
         )
@@ -96,10 +97,11 @@ def __init__(self, vocab_size):
             timesteps=hparams['timesteps'],
             k_step=hparams['K_step'],
             denoiser_type=hparams['diff_decoder_type'],
-            denoiser_args=(
-                pitch_hparams['residual_layers'],
-                pitch_hparams['residual_channels']
-            )
+            denoiser_args={
+                'n_layers': pitch_hparams['residual_layers'],
+                'n_chans': pitch_hparams['residual_channels'],
+                'n_dilates': pitch_hparams['dilation_cycle_length'],
+            }
         )
 
         if self.predict_variances:
diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py
index c5f9cc1d4..1f7536e69 100644
--- a/preprocessing/acoustic_binarizer.py
+++ b/preprocessing/acoustic_binarizer.py
@@ -20,6 +20,7 @@
 from modules.fastspeech.tts_modules import LengthRegulator
 from modules.vocoders.registry import VOCODERS
 from utils.binarizer_utils import (
+    SinusoidalSmoothingConv1d,
    get_mel2ph_torch,
     get_pitch_parselmouth,
     get_energy_librosa,
@@ -41,6 +42,9 @@
     'speed'
 ]
 
+energy_smooth: SinusoidalSmoothingConv1d = None
+breathiness_smooth: SinusoidalSmoothingConv1d = None
+
 
 class AcousticBinarizer(BaseBinarizer):
     def __init__(self):
@@ -128,6 +132,7 @@ def check_coverage(self):
                 f' (+) {sorted(unrecognizable_phones)}\n'
                 f' (-) {sorted(missing_phones)}')
 
+    @torch.no_grad()
     def process_item(self, item_name, meta_data, binarization_args):
         if hparams['vocoder'] in VOCODERS:
             wav, mel = VOCODERS[hparams['vocoder']].wav2spec(meta_data['wav_fn'])
@@ -162,13 +167,29 @@ def process_item(self, item_name, meta_data, binarization_args):
 
         if self.need_energy:
             # get ground truth energy
-            energy = get_energy_librosa(wav, length, hparams)
-            processed_input['energy'] = energy.astype(np.float32)
+            energy = get_energy_librosa(wav, length, hparams).astype(np.float32)
+
+            global energy_smooth
+            if energy_smooth is None:
+                energy_smooth = SinusoidalSmoothingConv1d(
+                    round(hparams['energy_smooth_width'] / self.timestep)
+                ).eval().to(self.device)
+            energy = energy_smooth(torch.from_numpy(energy).to(self.device)[None])[0]
+
+            processed_input['energy'] = energy.cpu().numpy()
 
         if self.need_breathiness:
             # get ground truth breathiness
-            breathiness = get_breathiness_pyworld(wav, gt_f0 * ~uv, length, hparams)
-            processed_input['breathiness'] = breathiness.astype(np.float32)
+            breathiness = get_breathiness_pyworld(wav, gt_f0 * ~uv, length, hparams).astype(np.float32)
+
+            global breathiness_smooth
+            if breathiness_smooth is None:
+                breathiness_smooth = SinusoidalSmoothingConv1d(
+                    round(hparams['breathiness_smooth_width'] / self.timestep)
+                ).eval().to(self.device)
+            breathiness = breathiness_smooth(torch.from_numpy(breathiness).to(self.device)[None])[0]
+
+            processed_input['breathiness'] = breathiness.cpu().numpy()
 
         if hparams.get('use_key_shift_embed', False):
             processed_input['key_shift'] = 0.
diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py
index f34e84372..431025900 100644
--- a/preprocessing/variance_binarizer.py
+++ b/preprocessing/variance_binarizer.py
@@ -5,13 +5,13 @@
 import librosa
 import numpy as np
 import torch
-import torch.nn as nn
 import torch.nn.functional as F
 from scipy import interpolate
 
 from basics.base_binarizer import BaseBinarizer
 from modules.fastspeech.tts_modules import LengthRegulator
 from utils.binarizer_utils import (
+    SinusoidalSmoothingConv1d,
     get_mel2ph_torch,
     get_pitch_parselmouth,
     get_energy_librosa,
@@ -29,31 +29,15 @@
     'mel2ph',  # mel2ph format representing number of frames within each phone, int64[T_s,]
     'base_pitch',  # interpolated and smoothed frame-level MIDI pitch, float32[T_s,]
     'pitch',  # actual pitch in semitones, float32[T_s,]
-    'energy',  # frame-level RMS, float32[T_s,]
-    'breathiness',  # frame-level RMS of aperiodic parts, float32[T_s,]
+    'energy',  # frame-level RMS (dB), float32[T_s,]
+    'breathiness',  # frame-level RMS of aperiodic parts (dB), float32[T_s,]
 ]
 
-# This operator is used as global variable due to a PyTorch shared memory bug on Windows.
+# These operators are used as global variables due to a PyTorch shared memory bug on Windows.
 # See https://github.com/pytorch/pytorch/issues/100358
-smooth: nn.Conv1d = None
-
-
-def build_smooth_op(kernel_size, device):
-    global smooth
-    smooth = nn.Conv1d(
-        in_channels=1,
-        out_channels=1,
-        kernel_size=kernel_size,
-        bias=False,
-        padding='same',
-        padding_mode='replicate'
-    ).eval().to(device)
-    smooth_kernel = torch.sin(torch.from_numpy(
-        np.linspace(0, 1, kernel_size).astype(np.float32) * np.pi
-    ).to(device))
-    smooth_kernel /= smooth_kernel.sum()
-    smooth.weight.data = smooth_kernel[None, None]
+midi_smooth: SinusoidalSmoothingConv1d = None
+energy_smooth: SinusoidalSmoothingConv1d = None
+breathiness_smooth: SinusoidalSmoothingConv1d = None
 
 
 class VarianceBinarizer(BaseBinarizer):
@@ -105,9 +89,6 @@ def check_coverage(self):
 
     @torch.no_grad()
     def process_item(self, item_name, meta_data, binarization_args):
-        if smooth is None:
-            build_smooth_op(round(hparams['midi_smooth_width'] / self.timestep), self.device)
-
         seconds = sum(meta_data['ph_dur'])
         length = round(seconds / self.timestep)
         T_ph = len(meta_data['ph_seq'])
@@ -170,7 +151,12 @@ def process_item(self, item_name, meta_data, binarization_args):
             frame_midi_pitch = torch.from_numpy(frame_midi_pitch).to(self.device)
 
         # Below: smoothen the pitch step curve as the base pitch curve
-        smoothed_midi_pitch = smooth(frame_midi_pitch[None])[0]
+        global midi_smooth
+        if midi_smooth is None:
+            midi_smooth = SinusoidalSmoothingConv1d(
+                round(hparams['midi_smooth_width'] / self.timestep)
+            ).eval().to(self.device)
+        smoothed_midi_pitch = midi_smooth(frame_midi_pitch[None])[0]
         processed_input['base_pitch'] = smoothed_midi_pitch.cpu().numpy()
 
         if hparams['predict_pitch'] or self.predict_variances:
@@ -178,13 +164,29 @@ def process_item(self, item_name, meta_data, binarization_args):
 
         # Below: extract energy
         if hparams['predict_energy']:
-            energy = get_energy_librosa(waveform, length, hparams)
-            processed_input['energy'] = energy.astype(np.float32)
+            energy = get_energy_librosa(waveform, length, hparams).astype(np.float32)
+
+            global energy_smooth
+            if energy_smooth is None:
+                energy_smooth = SinusoidalSmoothingConv1d(
+                    round(hparams['energy_smooth_width'] / self.timestep)
+                ).eval().to(self.device)
+            energy = energy_smooth(torch.from_numpy(energy).to(self.device)[None])[0]
+
+            processed_input['energy'] = energy.cpu().numpy()
 
         # Below: extract breathiness
         if hparams['predict_breathiness']:
-            breathiness = get_breathiness_pyworld(waveform, f0 * ~uv, length, hparams)
-            processed_input['breathiness'] = breathiness.astype(np.float32)
+            breathiness = get_breathiness_pyworld(waveform, f0 * ~uv, length, hparams).astype(np.float32)
+
+            global breathiness_smooth
+            if breathiness_smooth is None:
+                breathiness_smooth = SinusoidalSmoothingConv1d(
+                    round(hparams['breathiness_smooth_width'] / self.timestep)
+                ).eval().to(self.device)
+            breathiness = breathiness_smooth(torch.from_numpy(breathiness).to(self.device)[None])[0]
+
+            processed_input['breathiness'] = breathiness.cpu().numpy()
 
         return processed_input
diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py
index 61fdf53be..84187d076 100644
--- a/utils/binarizer_utils.py
+++ b/utils/binarizer_utils.py
@@ -77,7 +77,8 @@ def get_energy_librosa(wav_data, length, hparams):
 
     energy = librosa.feature.rms(y=wav_data, frame_length=win_size, hop_length=hop_size)[0]
     energy = pad_frames(energy, hop_size, wav_data.shape[0], length)
-    return energy
+    energy_db = librosa.amplitude_to_db(energy)
+    return energy_db
 
 
 def get_breathiness_pyworld(wav_data, f0, length, hparams):
@@ -111,3 +112,20 @@ def get_breathiness_pyworld(wav_data, f0, length, hparams):
     )  # synthesize the aperiodic part using the parameters
     breathiness = get_energy_librosa(y, length, hparams)
     return breathiness
+
+
+class SinusoidalSmoothingConv1d(torch.nn.Conv1d):
+    def __init__(self, kernel_size):
+        super().__init__(
+            in_channels=1,
+            out_channels=1,
+            kernel_size=kernel_size,
+            bias=False,
+            padding='same',
+            padding_mode='replicate'
+        )
+        smooth_kernel = torch.sin(torch.from_numpy(
+            np.linspace(0, 1, kernel_size).astype(np.float32) * np.pi
+        ))
+        smooth_kernel /= smooth_kernel.sum()
+        self.weight.data = smooth_kernel[None, None]

From 5d0c348e629b2012379054b407d855bb959fd905 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 29 May 2023 23:02:38 +0800
Subject: [PATCH 409/475] Fix --speedup not working

---
 scripts/infer.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/scripts/infer.py b/scripts/infer.py
index e681f4797..7e8429b3f 100644
--- a/scripts/infer.py
+++ b/scripts/infer.py
@@ -107,6 +107,7 @@ def acoustic(
 
     if speedup > 0:
         assert hparams['K_step'] % speedup == 0, f'Acceleration ratio must be factor of K_step {hparams["K_step"]}.'
+        hparams['pndm_speedup'] = speedup
 
     spk_mix = parse_commandline_spk_mix(spk) if hparams['use_spk_id'] and spk is not None else None
     for param in params:

From 30e26d4e7138c55cde16e1d86d12dd7dc5dcf4af Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Mon, 29 May 2023 23:14:58 +0800
Subject: [PATCH 410/475] Fix wrong config

---
 configs/variance.yaml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/configs/variance.yaml b/configs/variance.yaml
index 972a0cbae..5524e69a3 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -56,10 +56,10 @@ pitch_prediction_args:
   pitd_norm_max: 8.0
   pitd_clip_min: -12.0
   pitd_clip_max: 12.0
-  repeat_bins: 24
-  residual_layers: 10
-  residual_channels: 192
-  dilation_cycle_length: 4  # *
+  repeat_bins: 64
+  residual_layers: 20
+  residual_channels: 256
+  dilation_cycle_length: 5  # *
 
 energy_db_min: -96.0
 energy_db_max: -12.0
@@ -69,10 +69,10 @@ breathiness_db_max: -20.0
 breathiness_smooth_width: 0.12
 
 variances_prediction_args:
-  repeat_bins: 64
-  residual_layers: 20
-  residual_channels: 256
-  dilation_cycle_length: 5  # *
+  repeat_bins: 48
+  residual_layers: 10
+  residual_channels: 192
+  dilation_cycle_length: 4  # *
 
 lambda_dur_loss: 1.0
 lambda_pitch_loss: 1.0

From 43e2c406455694aa41e8fbe729fe9a788ee0ce36 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Wed, 31 May 2023 12:07:06 +0800
Subject: [PATCH 411/475] Fix TypeError: string indices must be integers

---
 basics/base_binarizer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py
index f7235b47a..212647eb2 100644
--- a/basics/base_binarizer.py
+++ b/basics/base_binarizer.py
@@ -151,12 +151,12 @@ def process(self):
             shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt')
         self.check_coverage()
 
-        # Process train set and valid set
+        # Process valid set and train set
         self.process_dataset('valid')
         self.process_dataset(
             'train',
             num_workers=int(self.binarization_args['num_workers']),
-            apply_augmentation=any(args['enabled'] for args in self.augmentation_args)
+            apply_augmentation=any(args['enabled'] for args in self.augmentation_args.values())
         )
 
     def check_coverage(self):

From 708ec58966b3ae59eedd6921acc878b8c763eec9 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 2 Jun 2023 00:15:26 +0800
Subject: [PATCH 412/475] Support variance model inference from CLI

---
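Note: a quick usage sketch of the new subcommand (the experiment name `my_variance_exp` and the input file `song.ds` below are hypothetical placeholders, not assets shipped with this patch):

    # auto-completion mode: predict whatever parameters are missing from the file
    python scripts/infer.py variance song.ds --exp my_variance_exp

    # explicit mode: re-predict phoneme durations and pitch even if present
    python scripts/infer.py variance song.ds --exp my_variance_exp --predict dur --predict pitch

The predicted curves are written into a new .ds file next to the input (suffixed with `_variance` unless --out or --title is given), which can then be passed to the acoustic subcommand.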
 inference/ds_acoustic.py               |   2 +-
 inference/ds_variance.py               | 144 +++++++++++++++++++++++--
 modules/fastspeech/variance_encoder.py |  10 +-
 modules/toplevel.py                    |   2 +-
 scripts/infer.py                       |  65 ++++++++++-
 5 files changed, 205 insertions(+), 18 deletions(-)

diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py
index 22ad76415..60eceaab2 100644
--- a/inference/ds_acoustic.py
+++ b/inference/ds_acoustic.py
@@ -167,7 +167,7 @@ def preprocess_input(self, param, idx=0):
                     target_timestep=self.timestep,
                     align_length=length
                 )).to(self.device)[None]
-                summary[v_name] = 'input'
+                summary[v_name] = 'manual'
 
         if hparams.get('use_key_shift_embed', False):
             shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range']
diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index 5867ac808..4f77a2b58 100644
--- a/inference/ds_variance.py
+++ b/inference/ds_variance.py
@@ -1,3 +1,7 @@
+import copy
+import json
+
+import tqdm
 import pathlib
 from collections import OrderedDict
 
@@ -13,6 +17,7 @@
     LengthRegulator, RhythmRegulator,
     mel2ph_to_dur
 )
+from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST
 from modules.toplevel import DiffSingerVariance
 from utils import load_ckpt
 from utils.hparams import hparams
@@ -23,10 +28,13 @@
 
 
 class DiffSingerVarianceInfer(BaseSVSInfer):
-    def __init__(self, device=None, ckpt_steps=None):
+    def __init__(
+            self, device=None, ckpt_steps=None,
+            predictions: set = None
+    ):
         super().__init__(device=device)
         self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list())
-        self.model = self.build_model(ckpt_steps=ckpt_steps)
+        self.model: DiffSingerVariance = self.build_model(ckpt_steps=ckpt_steps)
         self.lr = LengthRegulator()
         self.rr = RhythmRegulator()
         smooth_kernel_size = round(hparams['midi_smooth_width'] / self.timestep)
@@ -44,6 +52,12 @@ def __init__(self, device=None, ckpt_steps=None):
         smooth_kernel /= smooth_kernel.sum()
         self.smooth.weight.data = smooth_kernel[None, None]
 
+        self.auto_completion_mode = len(predictions) == 0
+        self.global_predict_dur = 'dur' in predictions and hparams['predict_dur']
+        self.global_predict_pitch = 'pitch' in predictions and hparams['predict_pitch']
+        self.variance_prediction_set = predictions.intersection(VARIANCE_CHECKLIST)
+        self.global_predict_variances = len(self.variance_prediction_set) > 0
+
     def build_model(self, ckpt_steps=None):
         model = DiffSingerVariance(
             vocab_size=len(self.ph_encoder)
@@ -53,10 +67,16 @@ def build_model(self, ckpt_steps=None):
         return model
 
     @torch.no_grad()
-    def preprocess_input(self, param, idx=0):
+    def preprocess_input(
+            self, param, idx=0,
+            load_dur: bool = False,
+            load_pitch: bool = False
+    ):
         """
         :param param: one segment in the .ds file
         :param idx: index of the segment
+        :param load_dur: whether ph_dur is loaded
+        :param load_pitch: whether pitch is loaded
         :return: batch of the model inputs
         """
         batch = {}
@@ -84,8 +104,8 @@ def preprocess_input(self, param, idx=0):
         summary['frames'] = T_s
         summary['seconds'] = '%.2f' % (T_s * self.timestep)
 
-        if param.get('ph_dur'):
-            # Get mel2ph if ph_dur is given
+        if load_dur:
+            # Get mel2ph if ph_dur is needed
             ph_dur_sec = torch.from_numpy(
                 np.array([param['ph_dur'].split()], np.float32)
             ).to(self.device)  # [B=1, T_ph]
@@ -150,7 +170,7 @@ def preprocess_input(self, param, idx=0):
             ph_midi = ph_midi.round().long()
         batch['midi'] = ph_midi
 
-        if param.get('f0_seq'):
+        if load_pitch:
            f0 = resample_align_curve(
                 np.array(param['f0_seq'].split(), np.float32),
                 original_timestep=float(param['f0_timestep']),
@@ -161,6 +181,29 @@ def preprocess_input(self, param, idx=0):
                 librosa.hz_to_midi(interp_f0(f0)[0]).astype(np.float32)
             ).to(self.device)[None]
 
+        if self.model.predict_dur:
+            if load_dur:
+                summary['ph_dur'] = 'manual'
+            elif self.auto_completion_mode or self.global_predict_dur:
+                summary['ph_dur'] = 'auto'
+            else:
+                summary['ph_dur'] = 'ignored'
+
+        if self.model.predict_pitch:
+            if load_pitch:
+                summary['pitch'] = 'manual'
+            elif self.auto_completion_mode or self.global_predict_pitch:
+                summary['pitch'] = 'auto'
+            else:
+                summary['pitch'] = 'ignored'
+
+        if self.model.predict_variances:
+            for v_name in self.model.variance_prediction_list:
+                if self.auto_completion_mode and param.get(v_name) is None or v_name in self.variance_prediction_set:
+                    summary[v_name] = 'auto'
+                else:
+                    summary[v_name] = 'ignored'
+
         print(f'[{idx}]\t' + ', '.join(f'{k}: {v}' for k, v in summary.items()))
 
         return batch
@@ -210,4 +253,91 @@ def run_inference(
             num_runs: int = 1,
             seed: int = -1
     ):
-        pass
+        batches = []
+        predictor_flags: list[tuple[bool, bool, bool]] = []
+
+        for i, param in enumerate(params):
+            param: dict
+            if self.auto_completion_mode:
+                flag = (
+                    self.model.fs2.predict_dur and param.get('ph_dur') is None,
+                    self.model.predict_pitch and param.get('f0_seq') is None,
+                    self.model.predict_variances and any(
+                        param.get(v_name) is None for v_name in self.model.variance_prediction_list
+                    )
+                )
+            else:
+                predict_variances = self.model.predict_variances and self.global_predict_variances
+                predict_pitch = self.model.predict_pitch and (
+                    self.global_predict_pitch or (param.get('f0_seq') is None and predict_variances)
+                )
+                predict_dur = self.model.predict_dur and (
+                    self.global_predict_dur or (param.get('ph_dur') is None and (predict_pitch or predict_variances))
+                )
+                flag = (predict_dur, predict_pitch, predict_variances)
+            predictor_flags.append(flag)
+            batches.append(self.preprocess_input(
+                param, idx=i,
+                load_dur=not flag[0] and (flag[1] or flag[2]),
+                load_pitch=not flag[1] and flag[2]
+            ))
+
+        out_dir.mkdir(parents=True, exist_ok=True)
+        for i in range(num_runs):
+
+            results = []
+            for param, flag, batch in tqdm.tqdm(
+                    zip(params, predictor_flags, batches), desc='infer segments', total=len(params)
+            ):
+                if 'seed' in param:
+                    torch.manual_seed(param["seed"] & 0xffff_ffff)
+                    torch.cuda.manual_seed_all(param["seed"] & 0xffff_ffff)
+                elif seed >= 0:
+                    torch.manual_seed(seed & 0xffff_ffff)
+                    torch.cuda.manual_seed_all(seed & 0xffff_ffff)
+                param_copy = copy.deepcopy(param)
+
+                flag_saved = (
+                    self.model.fs2.predict_dur,
+                    self.model.predict_pitch,
+                    self.model.predict_variances
+                )
+                (
+                    self.model.fs2.predict_dur,
+                    self.model.predict_pitch,
+                    self.model.predict_variances
+                ) = flag
+                dur_pred, pitch_pred, variance_pred = self.forward_model(batch)
+                (
+                    self.model.fs2.predict_dur,
+                    self.model.predict_pitch,
+                    self.model.predict_variances
+                ) = flag_saved
+
+                if dur_pred is not None and (self.auto_completion_mode or self.global_predict_dur):
+                    dur_pred = dur_pred[0].cpu().numpy()
+                    param_copy['ph_dur'] = ' '.join(str(round(dur, 6)) for dur in (dur_pred * self.timestep).tolist())
+                if pitch_pred is not None and (self.auto_completion_mode or self.global_predict_pitch):
+                    pitch_pred = pitch_pred[0].cpu().numpy()
+                    f0_pred = librosa.midi_to_hz(pitch_pred)
+                    param_copy['f0_seq'] = ' '.join([str(round(freq, 1)) for freq in f0_pred.tolist()])
+                    param_copy['f0_timestep'] = str(self.timestep)
+                variance_pred = {
+                    k: v[0].cpu().numpy()
+                    for k, v in variance_pred.items()
+                    if (self.auto_completion_mode and param.get(k) is None) or k in self.variance_prediction_set
+                }
+                for v_name, v_pred in variance_pred.items():
+                    param_copy[v_name] = ' '.join([str(round(v, 4)) for v in v_pred.tolist()])
+                    param_copy[f'{v_name}_timestep'] = str(self.timestep)
+
+                results.append(param_copy)
+
+            if num_runs > 1:
+                filename = f'{title}-{str(i).zfill(3)}.ds'
+            else:
+                filename = f'{title}.ds'
+            save_path = out_dir / filename
+            with open(save_path, 'w', encoding='utf8') as f:
+                print(f'| save params: {save_path}')
+                json.dump(results, f, ensure_ascii=False, indent=2)
diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py
index 699a399fc..80b967b8a 100644
--- a/modules/fastspeech/variance_encoder.py
+++ b/modules/fastspeech/variance_encoder.py
@@ -15,6 +15,7 @@ class FastSpeech2Variance(nn.Module):
     def __init__(self, vocab_size):
         super().__init__()
         self.predict_dur = hparams['predict_dur']
+        self.linguistic_mode = 'word' if hparams['predict_dur'] else 'phoneme'
 
         self.txt_embed = Embedding(vocab_size, hparams['hidden_size'], PAD_INDEX)
 
@@ -57,7 +58,7 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, spk_emb
         :param infer: whether inference
         :return: encoder_out, ph_dur_pred
         """
-        if self.predict_dur:
+        if self.linguistic_mode == 'word':
             b = txt_tokens.shape[0]
             onset = torch.diff(ph2word, dim=1, prepend=ph2word.new_zeros(b, 1)) > 0
             onset_embed = self.onset_embed(onset.long())  # [B, T_ph, H]
@@ -70,7 +71,11 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, spk_emb
                 word_dur_embed = self.word_dur_embed(word_dur.float()[:, :, None])
 
             encoder_out = self.encoder(txt_tokens, onset_embed + word_dur_embed)
+        else:
+            ph_dur_embed = self.ph_dur_embed(ph_dur.float()[:, :, None])
+            encoder_out = self.encoder(txt_tokens, ph_dur_embed)
 
+        if self.predict_dur:
             midi_embed = self.midi_embed(midi)  # => [B, T_ph, H]
             dur_cond = encoder_out + midi_embed
             if spk_embed is not None:
@@ -79,7 +84,4 @@ def forward(self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, spk_emb
 
             return encoder_out, ph_dur_pred
         else:
-            ph_dur_embed = self.ph_dur_embed(ph_dur.float()[:, :, None])
-            encoder_out = self.encoder(txt_tokens, ph_dur_embed)
-
             return encoder_out, None
diff --git a/modules/toplevel.py b/modules/toplevel.py
index 2a518f554..f83483f79 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -128,7 +128,7 @@ def forward(
         )
 
         if not self.predict_pitch and not self.predict_variances:
-            return dur_pred_out, None, None, ({} if infer else None)
+            return dur_pred_out, None, ({} if infer else None)
 
         if mel2ph is None and word_dur is not None:  # inference from file
             dur_pred_align = self.rr(dur_pred_out, ph2word, word_dur)
diff --git a/scripts/infer.py b/scripts/infer.py
index 7e8429b3f..d62f26244 100644
--- a/scripts/infer.py
+++ b/scripts/infer.py
@@ -137,9 +137,9 @@ def acoustic(
 @click.option('--spk', type=str, required=False, help='Speaker name or mix of speakers')
 @click.option('--out', type=str, required=False, metavar='DIR', help='Path of the output folder')
 @click.option('--title', type=str, required=False, help='Title of output file')
-@click.option('--overwrite', is_flag=True, help='Overwrite the input file')
 @click.option('--num', type=int, required=False, default=1, help='Number of runs')
-@click.option('--seed', type=int, required=False, help='Random seed of the inference')
+@click.option('--key', type=int, required=False, default=0, help='Key transition of pitch')
+@click.option('--seed', type=int, required=False, default=-1, help='Random seed of the inference')
@click.option('--speedup', type=int, required=False, default=0, help='Diffusion acceleration ratio') def variance( proj: str, @@ -149,13 +149,68 @@ def variance( predict: tuple[str], out: str, title: str, - overwrite: bool, num: int, + key: int, seed: int, speedup: int ): - print(predict) - pass + proj = pathlib.Path(proj).resolve() + name = proj.stem if not title else title + exp = find_exp(exp) + if out: + out = pathlib.Path(out) + else: + out = proj.parent + if (not out or out.resolve() == proj.parent.resolve()) and not title: + name += '_variance' + + with open(proj, 'r', encoding='utf-8') as f: + params = json.load(f) + + if not isinstance(params, list): + params = [params] + + if len(params) == 0: + print('The input file is empty.') + exit() + + from utils.infer_utils import trans_key, parse_commandline_spk_mix, merge_slurs + + if key != 0: + params = trans_key(params, key) + key_suffix = '%+dkey' % key + if not title: + name += key_suffix + print(f'| key transition: {key:+d}') + + sys.argv = [ + sys.argv[0], + '--exp_name', + exp, + '--infer' + ] + from utils.hparams import set_hparams, hparams + set_hparams() + + if speedup > 0: + assert hparams['K_step'] % speedup == 0, f'Acceleration ratio must be factor of K_step {hparams["K_step"]}.' + hparams['pndm_speedup'] = speedup + + # spk_mix = parse_commandline_spk_mix(spk) if hparams['use_spk_id'] and spk is not None else None + for param in params: + # if spk_mix is not None: + # param['spk_mix'] = spk_mix + + merge_slurs(param) + + from inference.ds_variance import DiffSingerVarianceInfer + infer_ins = DiffSingerVarianceInfer(ckpt_steps=ckpt, predictions=set(predict)) + print(f'| Model: {type(infer_ins.model)}') + + infer_ins.run_inference( + params, out_dir=out, title=name, + num_runs=num, seed=seed + ) if __name__ == '__main__': From 76f1379450034fac4bc6163fe58521712ae6ec65 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 2 Jun 2023 00:33:50 +0800 Subject: [PATCH 413/475] Update sample --- .../\344\270\215\350\260\223\344\276\240.ds" | 104 +++++++++--------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git "a/samples/\344\270\215\350\260\223\344\276\240.ds" "b/samples/\344\270\215\350\260\223\344\276\240.ds" index f8f4290c8..13acd18ea 100644 --- "a/samples/\344\270\215\350\260\223\344\276\240.ds" +++ "b/samples/\344\270\215\350\260\223\344\276\240.ds" @@ -10,9 +10,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "164.8 165.2 165.1 165.0 164.4 164.5 164.7 164.7 164.9 165.0 165.2 165.0 165.3 165.3 165.0 165.1 165.4 165.2 165.2 165.4 165.4 165.1 165.2 165.4 165.5 165.7 165.9 165.9 165.6 165.8 165.9 165.6 165.8 165.9 166.2 165.9 165.9 165.9 165.8 166.0 166.0 165.7 165.6 165.3 165.9 165.7 165.8 165.7 166.0 166.6 166.9 167.6 168.5 175.9 185.7 196.0 208.6 222.5 234.5 241.7 246.7 248.4 247.7 245.5 244.0 242.2 235.0 217.2 212.1 216.8 219.9 223.4 226.9 221.8 219.3 218.8 219.3 220.4 221.0 221.9 222.5 223.8 223.9 223.2 220.6 214.2 206.6 212.7 220.6 227.3 235.4 243.9 252.9 256.1 255.8 256.3 256.0 255.1 251.2 244.5 234.5 229.3 226.5 224.9 227.5 238.1 248.9 258.2 263.8 266.6 261.8 252.4 240.2 230.2 221.6 218.6 220.9 226.8 241.2 254.7 263.4 270.6 273.5 270.9 261.7 243.8 240.8 247.7 254.3 259.0 265.5 271.8 279.1 261.9 253.8 249.3 248.2 248.4 248.1 247.7 247.0 246.6 246.5 247.2 246.8 244.4 240.6 231.0 229.9 228.6 226.0 224.1 221.3 220.1 218.5 217.3 217.4 217.4 217.9 219.5 221.1 222.2 222.6 221.8 221.1 220.2 219.8 219.0 219.3 218.8 219.7 219.6 219.5 222.9 223.4 222.3 220.7 220.4 221.2 221.9 222.3 222.3 221.3 221.0 219.1 218.3 219.1 
224.7 234.1 242.0 248.1 252.4 253.4 252.8 248.1 244.0 242.6 242.7 242.3 243.2 244.7 244.9 246.0 246.0 244.2 238.1 228.6 216.9 208.2 198.7 191.6 188.0 189.1 191.0 193.6 195.0 197.2 198.1 197.2 197.1 196.2 195.3 193.8 193.7 193.6 194.5 195.3 196.4 196.6 196.3 194.2 192.4 193.6 195.9 198.1 198.8 198.9 198.5 198.6 197.6 195.8 194.1 191.7 187.7 185.1 183.1 181.5 179.6 178.0 176.3 174.4 173.0 171.1 169.4 167.9 166.0 164.3 161.7 162.1 165.0 167.5 168.5 169.6 169.2 167.8 163.0 159.2 156.4 155.5 156.7 160.3 165.1 170.1 174.4 178.6 180.4 179.7 177.1 171.2 165.0 159.5 154.5 153.4 153.4 154.6 157.9 164.3 170.5 176.4 180.1 181.1 177.5 173.1 165.3 157.5 151.0 147.8 146.6 147.8 149.3 149.4 149.5 149.6 149.5 149.9 150.8 151.3 152.2 152.9 153.6 154.4 155.4 156.0 157.1 157.6 158.0 158.4 158.8 158.7 158.5 158.5 158.2 158.6 158.8 158.7 158.4 158.5 158.8 158.8 158.7 159.2 158.7 159.4", "f0_timestep": "0.011609977324263039", - "energy": "0.0002 0.0 0.0001 0.0 0.0002 0.0001 0.0003 0.0004 0.0001 0.0 0.0004 0.0006 0.0004 0.0004 0.001 0.0004 0.0008 0.0006 0.0006 0.0006 0.001 0.0009 0.0013 0.0023 0.0014 0.0024 0.0028 0.0025 0.0031 0.0031 0.003 0.0026 0.003 0.0023 0.0025 0.0027 0.0021 0.0022 0.0021 0.0022 0.0017 0.0017 0.0014 0.0013 0.0011 0.0018 0.0018 0.0018 0.0026 0.0044 0.0244 0.0389 0.0547 0.067 0.0723 0.075 0.0728 0.071 0.0707 0.0697 0.0719 0.0749 0.0792 0.0814 0.0816 0.0755 0.0639 0.0503 0.0333 0.0193 0.0151 0.0259 0.0467 0.064 0.0744 0.0821 0.0828 0.0794 0.0824 0.0822 0.0825 0.0848 0.0824 0.0811 0.0743 0.063 0.0494 0.0336 0.0236 0.0215 0.0227 0.0222 0.0414 0.0625 0.0777 0.0921 0.098 0.0959 0.0914 0.0858 0.0785 0.071 0.0634 0.057 0.0525 0.0524 0.0539 0.0581 0.0621 0.0635 0.0664 0.067 0.0672 0.064 0.0582 0.0515 0.0432 0.0379 0.0353 0.036 0.0423 0.0504 0.0559 0.0532 0.0461 0.0345 0.0182 0.0102 0.0037 0.0012 0.0049 0.0102 0.0415 0.0589 0.0718 0.0832 0.0838 0.0842 0.0848 0.0825 0.085 0.086 0.0861 0.0879 0.0843 0.0776 0.0678 0.0506 0.0355 0.0197 0.0125 0.0127 0.0133 0.0151 0.0357 0.0538 0.0676 0.08 0.0834 0.0843 0.0864 0.0866 0.0866 0.0844 0.0815 0.0799 0.0783 0.0791 0.0783 0.0745 0.0721 0.0694 0.0672 0.0699 0.0767 0.082 0.0868 0.0888 0.0884 0.0875 0.0863 0.0838 0.0781 0.0723 0.0642 0.0585 0.0546 0.0528 0.0551 0.057 0.06 0.0626 0.0642 0.0645 0.063 0.0614 0.0588 0.0568 0.0574 0.0572 0.0574 0.058 0.059 0.0596 0.0606 0.0598 0.061 0.0606 0.06 0.0581 0.0574 0.0569 0.0567 0.0627 0.0685 0.0727 0.0792 0.0821 0.0823 0.0827 0.0797 0.0756 0.0732 0.0722 0.0731 0.0726 0.0737 0.0735 0.0718 0.0713 0.0684 0.0649 0.0634 0.0631 0.0639 0.0647 0.0651 0.0656 0.066 0.0658 0.0644 0.0585 0.0498 0.039 0.0276 0.0185 0.0156 0.0169 0.0165 0.016 0.0155 0.014 0.0133 0.0117 0.0089 0.0224 0.0382 0.051 0.0614 0.0678 0.0688 0.0703 0.0697 0.069 0.0672 0.0636 0.0602 0.0558 0.052 0.0494 0.0494 0.0474 0.048 0.0481 0.0466 0.0466 0.0451 0.0444 0.0425 0.0403 0.0371 0.0337 0.0312 0.0283 0.0261 0.0248 0.0231 0.0227 0.0215 0.0214 0.0208 0.0204 0.0195 0.0186 0.0183 0.0175 0.0154 0.0132 0.0104 0.0084 0.005 0.0027 0.0011 0.0008 0.0011 0.001 0.0006 0.0005 0.0008 0.0011 0.0014 0.0009 0.0009 0.0004 0.0 0.0005 0.0003 0.0004 0.0004 0.0004 0.0001 0.0004 0.0002 0.0 0.0 0.0001 0.0001 0.0003 0.0002 0.0 0.0 0.0003 0.0", + "energy": "-79.6824 -75.5457 -73.7248 -72.1148 -71.9961 -72.8376 -74.7103 -75.9698 -76.7203 -76.4745 -75.6223 -74.2676 -72.1918 -69.884 -67.4335 -65.1903 -62.8453 -61.0503 -59.4356 -57.6432 -56.0874 -55.0188 -53.5811 -53.1038 -52.4225 -51.987 -51.9398 -51.7419 -51.7107 -51.9007 -51.9156 -51.8013 -52.0085 -52.25 -52.6968 -53.0934 -53.8388 -54.5906 
-55.1659 -56.1946 -57.0114 -57.4786 -57.6957 -57.749 -57.7266 -56.0182 -53.6722 -49.4826 -44.9419 -40.0747 -34.8739 -30.3236 -27.1223 -25.4272 -24.3722 -23.8297 -23.5321 -23.5164 -23.3808 -23.5265 -23.2948 -23.1467 -23.1139 -23.137 -23.7721 -24.9402 -26.9339 -28.7846 -30.2604 -30.8519 -30.613 -29.2659 -27.3553 -25.1829 -23.1791 -21.9925 -21.3518 -21.4041 -21.2288 -21.3028 -21.4215 -21.7518 -22.0445 -22.8233 -23.9048 -25.5673 -27.565 -29.4093 -30.6029 -30.9044 -30.2212 -28.9487 -26.79 -24.4656 -22.5569 -21.4587 -20.9191 -20.9704 -21.4645 -22.3992 -23.3721 -24.3381 -25.1218 -25.8175 -26.1433 -26.308 -26.1604 -25.8319 -25.5952 -25.5701 -25.3677 -25.5527 -25.8105 -26.113 -26.6331 -26.8365 -27.4674 -27.5459 -27.6051 -27.4736 -27.4216 -27.2353 -27.3625 -28.7889 -31.2344 -35.157 -38.9815 -41.8418 -43.0922 -41.9621 -39.1062 -35.0453 -30.2094 -26.3503 -24.3371 -23.0809 -22.4358 -22.1148 -21.6846 -21.3192 -21.1727 -21.1673 -21.3058 -21.7713 -22.3558 -23.7801 -25.843 -28.6959 -31.7843 -34.4603 -36.0448 -36.0965 -34.56 -32.1565 -29.1585 -26.353 -24.3976 -22.7588 -22.2128 -21.6691 -21.6818 -21.7668 -21.7719 -21.6567 -21.744 -21.7398 -21.9423 -22.2149 -22.4737 -22.6968 -23.1086 -23.0061 -23.0995 -22.4648 -21.8437 -21.3584 -20.9571 -20.8997 -21.0521 -21.1932 -21.4662 -22.0496 -22.7553 -23.3845 -24.0592 -24.6477 -25.1572 -25.3289 -25.3306 -25.1148 -24.9358 -24.6971 -24.7572 -24.9267 -24.8475 -24.9611 -25.1813 -25.505 -25.6081 -25.6749 -25.4676 -25.1835 -25.1372 -24.9368 -25.0702 -25.1604 -25.4347 -26.0017 -26.3132 -26.4198 -26.4459 -25.8248 -25.0937 -24.4642 -23.6119 -23.0273 -22.3236 -22.0948 -21.9727 -22.0899 -21.9831 -22.1384 -22.2264 -22.2868 -22.0908 -22.1723 -22.3745 -22.3223 -22.5856 -22.6655 -23.034 -23.2401 -23.5962 -23.7777 -23.8667 -23.9294 -24.0995 -24.1918 -24.3505 -24.7286 -25.4822 -26.6169 -28.1982 -29.8836 -31.8413 -33.4153 -34.5885 -35.3296 -35.9949 -36.3722 -37.006 -37.4126 -37.1883 -36.15 -34.528 -32.358 -29.8275 -27.5703 -25.3584 -24.415 -23.9009 -23.7052 -23.9353 -24.342 -24.884 -25.4424 -25.9663 -26.3784 -26.8376 -26.7825 -26.752 -26.5744 -26.454 -26.1188 -26.414 -26.7328 -27.2216 -28.0398 -28.3471 -28.9833 -29.6437 -30.2257 -30.5487 -30.8972 -31.1438 -31.4722 -31.5148 -31.8873 -32.1716 -32.2745 -32.5526 -32.7089 -33.3149 -33.6512 -34.3917 -35.1516 -36.5003 -38.492 -41.1755 -44.6136 -48.6393 -52.6569 -56.2618 -59.695 -62.0333 -63.845 -64.9931 -65.5417 -65.7467 -65.9802 -66.2236 -66.1447 -66.5185 -66.6935 -66.8444 -67.0168 -67.15 -66.9787 -67.0617 -67.5821 -67.3931 -67.6991 -67.6392 -67.4776 -67.3161 -67.0905 -67.2987 -68.0815 -69.4036 -70.4832 -70.2787 -67.8975 -65.7169", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0 0.0001 0.0 0.0001 0.0001 0.0002 0.0001 0.0001 0.0001 0.0 0.0002 0.0002 0.0002 0.0002 0.0004 0.0004 0.0004 0.0008 0.001 0.0009 0.0011 0.0012 0.0016 0.0019 0.0021 0.0022 0.0027 0.0025 0.0027 0.0027 0.0025 0.0024 0.0024 0.0024 0.0022 0.0024 0.0022 0.0023 0.002 0.0015 0.0014 0.0013 0.0011 0.0012 0.001 0.0011 0.0011 0.0011 0.0019 0.003 0.0028 0.003 0.0027 0.0026 0.0028 0.0032 0.0037 0.0037 0.0039 0.003 0.0025 0.0023 0.0022 0.0023 0.0026 0.0029 0.0034 0.0068 0.0101 0.0131 0.0146 0.0145 0.0124 0.0101 0.006 0.0027 0.0021 0.0015 0.0015 0.0012 0.0012 0.001 0.0007 0.001 0.0014 0.0031 0.0081 0.0152 0.0194 0.0223 0.0245 0.0223 0.0191 0.0149 0.0092 0.0046 0.0034 0.003 0.0032 0.003 0.0029 0.0027 0.0025 0.0027 0.0023 0.0022 0.002 0.002 0.0018 0.0014 0.0012 0.0007 0.0009 0.0007 0.0006 0.0007 0.0004 0.0002 0.0003 0.0003 0.0002 0.0002 0.0 0.0004 0.0003 0.0006 0.0018 
0.0021 0.002 0.0016 0.0009 0.0013 0.002 0.0026 0.0026 0.0026 0.0023 0.0022 0.0021 0.0023 0.0019 0.0017 0.0015 0.0016 0.0017 0.0019 0.0023 0.0031 0.0043 0.0069 0.0109 0.0131 0.0142 0.0131 0.0107 0.0075 0.0038 0.0025 0.002 0.002 0.0019 0.0016 0.0014 0.0015 0.0012 0.0012 0.001 0.0011 0.001 0.001 0.0009 0.0009 0.0012 0.0013 0.0016 0.0018 0.0019 0.0019 0.0023 0.0017 0.0016 0.0018 0.0018 0.0018 0.0019 0.0017 0.002 0.0017 0.0016 0.0015 0.0016 0.0014 0.0013 0.0013 0.0011 0.0009 0.0008 0.0009 0.0005 0.0008 0.0007 0.0006 0.0005 0.0006 0.0005 0.0006 0.0003 0.0006 0.0004 0.0003 0.0002 0.0003 0.0005 0.0006 0.0006 0.0007 0.001 0.0009 0.0011 0.0013 0.0017 0.0015 0.0013 0.0014 0.0012 0.0012 0.0013 0.0015 0.0013 0.0013 0.001 0.0009 0.0008 0.0007 0.0007 0.0007 0.0008 0.0009 0.0007 0.0008 0.0009 0.0015 0.0038 0.0085 0.0124 0.0149 0.0171 0.0172 0.0174 0.0174 0.0165 0.0161 0.0144 0.0121 0.0099 0.007 0.0038 0.0028 0.002 0.0019 0.002 0.002 0.002 0.0021 0.0022 0.0023 0.0021 0.002 0.0021 0.002 0.0017 0.0019 0.0019 0.0019 0.0018 0.0017 0.0016 0.0013 0.0016 0.0014 0.0013 0.0011 0.0011 0.001 0.0008 0.0007 0.0004 0.0003 0.0004 0.0007 0.0006 0.0004 0.0006 0.0006 0.0005 0.0004 0.0005 0.0002 0.0007 0.0013 0.0015 0.0016 0.0015 0.001 0.0008 0.0006 0.0004 0.0002 0.0003 0.0002 0.0002 0.0004 0.0002 0.0001 0.0001 0.0002 0.0002 0.0001 0.0 0.0002 0.0 0.0 0.0 0.0 0.0001 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0", + "breathiness": "-79.8225 -77.0239 -74.8982 -73.6184 -73.4931 -74.5329 -76.178 -77.3928 -77.6 -77.1705 -75.8096 -73.7519 -71.4804 -68.8682 -66.6073 -64.5742 -62.6184 -60.66 -58.822 -57.2703 -55.6806 -54.469 -53.379 -52.6833 -52.0238 -51.712 -51.6303 -51.7031 -51.6427 -51.7344 -51.8449 -51.8935 -52.1289 -52.0842 -52.4013 -52.72 -53.3679 -54.2367 -55.4654 -56.5834 -57.7778 -58.9033 -59.7389 -60.6197 -61.031 -60.1364 -57.7882 -54.8002 -51.0498 -47.2266 -44.2166 -42.5347 -42.3753 -43.4972 -45.4627 -47.5085 -49.615 -51.372 -52.5215 -52.8023 -53.1442 -52.9834 -52.6248 -52.0025 -50.8504 -49.1215 -46.754 -44.368 -41.9003 -40.1537 -39.6153 -40.7384 -42.8076 -45.6104 -48.9994 -52.3106 -55.4426 -58.013 -60.0155 -60.9531 -61.2285 -60.1576 -57.5289 -53.7366 -49.076 -44.4191 -40.3786 -37.2415 -35.3146 -35.1985 -35.6972 -37.5299 -40.0733 -42.7105 -45.4913 -47.6906 -49.2032 -50.2332 -50.3573 -50.5106 -50.739 -51.2662 -52.0315 -53.0964 -54.1078 -55.2507 -56.8793 -58.2912 -60.2061 -61.7515 -63.2309 -64.2536 -64.7855 -65.4711 -65.9433 -66.8209 -67.6646 -68.8919 -70.1055 -70.9349 -71.4209 -70.9821 -69.9113 -67.7308 -65.1978 -62.642 -60.1161 -57.8375 -56.182 -55.434 -54.9375 -54.7186 -54.387 -54.0425 -53.7634 -53.8464 -53.9349 -54.0255 -53.9615 -54.0075 -54.0626 -53.8738 -53.6208 -53.2273 -52.3018 -51.2351 -49.547 -47.8129 -45.9974 -44.2559 -42.9005 -42.0767 -42.0207 -42.6923 -44.2588 -46.6687 -49.181 -51.8792 -54.209 -56.28 -57.5812 -58.7384 -59.7562 -60.447 -60.8257 -60.7305 -60.0139 -58.8071 -57.5416 -56.5233 -55.6978 -55.1032 -54.4553 -54.2654 -54.0382 -53.7404 -53.9048 -54.0099 -54.4902 -54.7458 -55.0172 -55.1928 -54.9656 -55.006 -55.0528 -54.8136 -55.2216 -55.5885 -56.0917 -56.3505 -56.874 -57.4413 -58.0119 -58.9587 -59.6404 -60.4609 -61.4228 -62.7474 -63.8936 -65.3129 -66.7378 -67.9442 -68.789 -69.5113 -69.7594 -69.869 -69.715 -69.5445 -69.5565 -69.657 -69.2966 -68.624 -67.413 -65.8647 -63.8559 -62.0367 -60.2987 -59.1615 -58.09 -57.3529 -56.8769 -56.5174 -56.3534 -56.3127 -56.2392 -56.2727 -56.3629 -56.555 -56.8134 -57.1371 -58.0181 -58.8765 -59.6071 -60.6598 -61.4813 -62.0869 -62.3714 -62.1978 -60.981 -58.8983 -55.7291 -51.9386 -47.834 
-44.1512 -40.5876 -38.2639 -36.6005 -35.9241 -35.4518 -35.6435 -35.9436 -36.9561 -38.3601 -40.2116 -42.5374 -45.1142 -47.5312 -49.648 -51.5582 -52.7352 -53.5633 -53.6725 -53.7826 -53.7846 -53.7436 -53.7155 -53.8887 -53.9794 -53.8615 -53.8605 -53.55 -53.4594 -53.2543 -53.1741 -53.4898 -53.5675 -54.2602 -54.9578 -55.7753 -56.6609 -57.5747 -58.4485 -59.1999 -59.7585 -60.3152 -61.1392 -61.8969 -62.709 -63.5452 -64.1823 -64.6701 -64.8315 -64.8666 -65.3303 -65.5983 -65.7081 -65.2188 -64.4848 -63.579 -62.3408 -61.7194 -61.5083 -62.2873 -63.4983 -64.6929 -65.9384 -67.1388 -68.3029 -69.3473 -70.5802 -72.0663 -73.4742 -75.0543 -76.3213 -77.3109 -77.8574 -77.5778 -76.5398 -74.9435 -72.8673 -71.3131 -70.1741 -69.8511 -70.0775 -70.7908 -71.7058 -72.79 -74.3639 -75.2202 -76.4785 -76.6666 -76.5556 -76.0215", "breathiness_timestep": "0.011609977324263039" }, { @@ -26,9 +26,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0", "f0_seq": "130.1 130.2 130.1 129.9 130.0 129.9 130.0 129.9 130.0 129.8 129.8 129.9 129.8 130.1 130.1 130.1 130.2 130.2 130.0 129.8 129.9 129.9 130.0 129.9 130.0 129.8 129.8 129.5 129.5 129.0 128.8 127.8 126.2 125.3 129.0 133.5 138.9 144.8 152.1 158.8 163.4 166.0 166.7 165.9 163.2 160.7 157.4 158.9 160.6 162.5 163.8 165.8 166.6 167.7 166.0 165.6 165.5 165.0 164.9 165.3 165.0 164.8 166.1 164.9 162.7 159.6 158.1 163.0 167.3 171.8 176.1 179.9 185.3 190.2 189.4 186.4 185.4 185.3 185.1 185.5 184.8 184.3 185.1 186.2 186.7 186.9 185.6 183.8 181.8 180.8 183.8 188.3 194.5 198.1 199.9 200.5 199.2 198.6 197.3 196.9 195.9 195.7 195.3 195.3 195.4 194.4 189.5 183.6 180.1 176.8 173.1 169.4 165.6 163.7 160.2 157.5 157.3 159.1 159.7 163.7 171.6 179.3 183.7 186.3 187.4 186.6 183.3 179.9 177.7 239.1 312.9 370.0 370.0 370.0 366.6 309.2 189.3 184.6 180.0 174.6 170.5 168.1 165.7 164.7 163.9 164.2 164.7 165.9 165.9 165.0 163.2 160.3 158.2 155.4 152.5 149.4 146.2 143.2 142.2 142.3 143.2 144.7 146.8 148.7 148.8 148.4 147.8 147.6 147.7 147.3 147.0 147.0 146.6 146.6 146.1 145.5 145.1 144.3 144.8 145.7 146.3 146.2 146.2 146.5 146.5 145.8 145.0 144.3 144.3 144.6 144.9 145.4 145.7 146.9 150.0 156.2 162.4 165.1 167.5 169.0 169.2 168.3 166.4 162.6 157.8 154.9 152.3 152.0 153.6 157.8 162.7 166.5 170.1 170.5 170.2 168.7 166.8 164.3 161.9 160.2 159.4 159.9 160.8 162.8 165.1 166.5 166.6 167.7 168.0 167.9 165.6 163.1 161.0 160.0 159.8 159.5 160.4 161.9 164.5 168.0 169.9 170.5 170.8 170.2 168.6 165.2 161.6 160.0 158.4 157.3 157.2 159.6 163.3 167.8 171.4 173.9 174.8 174.6 173.1 170.8 165.7 160.7 158.4 157.3 155.9 155.3 157.0 159.8 163.1 166.6 169.8 172.5 172.5 172.3 169.4 165.3 161.7 159.1 157.8 156.0 155.2 157.0 160.6 163.5 167.3 171.1 173.7 173.9 172.5 169.6 164.6 157.4 152.2 150.6 150.9 151.9 155.1 159.9 164.6 168.3 171.6 174.4 174.5 172.5 167.2 160.1 152.7 148.3 147.5 148.9 151.3 155.1 160.7 167.6 171.7 174.5 176.5 174.3 170.8 166.7 157.9 158.7 157.1 157.2 156.8 157.3 156.4 155.8 156.1", "f0_timestep": "0.011609977324263039", - "energy": "0.001 0.0011 0.0014 0.0015 0.0014 0.0022 0.0025 0.0023 0.0029 0.0031 0.0031 0.0036 0.0042 0.004 0.0041 0.0041 0.0039 0.0038 0.0032 0.0028 0.003 0.0023 0.0025 0.0023 0.0021 0.002 0.0019 0.0024 0.0016 0.0016 0.0106 0.0255 0.0354 0.0459 0.055 0.0619 0.0651 0.0654 0.0638 0.0597 0.0584 0.0569 0.0557 0.0528 0.0484 0.0426 0.0332 0.025 0.0174 0.0125 0.0101 0.0083 0.0219 0.0389 0.0527 0.0651 0.0698 0.0722 0.072 0.0688 0.0688 0.0662 0.0637 0.0614 0.0524 0.0433 0.0324 0.0202 0.0157 0.0148 0.0153 0.0128 0.0274 0.0473 0.0585 0.0687 0.0716 0.0679 0.0663 0.0633 0.0616 0.0609 0.0605 0.0607 0.0632 0.0643 0.0644 
0.0644 0.0627 0.0608 0.0602 0.0619 0.0642 0.0669 0.0707 0.0718 0.0712 0.0702 0.0688 0.0669 0.0677 0.0667 0.0675 0.0673 0.0644 0.0584 0.0487 0.0365 0.0234 0.0134 0.0135 0.0143 0.0139 0.0189 0.0411 0.0533 0.0602 0.0647 0.058 0.0508 0.0479 0.0466 0.0489 0.0521 0.0531 0.0514 0.0436 0.0345 0.0238 0.011 0.0047 0.0056 0.0108 0.0137 0.0181 0.0362 0.0478 0.0565 0.0635 0.0636 0.0627 0.0619 0.063 0.0626 0.0634 0.0656 0.0659 0.0679 0.0668 0.0645 0.0614 0.0601 0.0612 0.0645 0.0702 0.0719 0.0711 0.069 0.0629 0.0605 0.0614 0.0613 0.0646 0.0643 0.065 0.0641 0.064 0.065 0.0639 0.0637 0.0615 0.06 0.0616 0.0622 0.0621 0.0623 0.0612 0.0616 0.0609 0.0615 0.0611 0.0605 0.0598 0.0595 0.0577 0.0576 0.0578 0.0541 0.0522 0.0514 0.0516 0.0526 0.0554 0.0563 0.0575 0.0597 0.059 0.0588 0.0584 0.0594 0.0592 0.0601 0.058 0.0574 0.0562 0.0524 0.051 0.0464 0.0434 0.0418 0.0421 0.0422 0.0432 0.0435 0.0449 0.0446 0.0441 0.0433 0.0419 0.0414 0.0407 0.0411 0.0413 0.0411 0.0417 0.0414 0.0411 0.0419 0.0421 0.0429 0.0434 0.043 0.0416 0.041 0.0394 0.0383 0.0376 0.0374 0.0376 0.0375 0.0378 0.0383 0.0383 0.0389 0.0386 0.0378 0.0376 0.0374 0.0361 0.0351 0.0341 0.0334 0.0328 0.0332 0.0333 0.0331 0.0333 0.0339 0.0338 0.0354 0.0341 0.0335 0.0318 0.031 0.0305 0.029 0.0292 0.0285 0.0292 0.0298 0.0308 0.031 0.0312 0.0312 0.0309 0.0302 0.0303 0.0297 0.0284 0.0278 0.0268 0.0259 0.026 0.0258 0.0266 0.0268 0.0267 0.0268 0.0266 0.0262 0.0253 0.0242 0.0222 0.021 0.0205 0.0195 0.0191 0.0185 0.0175 0.0182 0.0189 0.0199 0.0213 0.0212 0.0203 0.0188 0.0169 0.0154 0.0136 0.0122 0.0116 0.0108 0.0109 0.011 0.0111 0.0119 0.0122 0.0128 0.0124 0.0115 0.0087 0.0061 0.0041 0.0027 0.0011 0.0014 0.0014 0.0012 0.0005", + "energy": "-64.1734 -65.216 -64.1812 -62.7385 -60.425 -58.0543 -55.7484 -53.9038 -52.4517 -51.3679 -50.205 -49.6155 -48.5423 -48.2435 -48.0869 -48.3242 -48.8713 -49.4441 -50.201 -50.8807 -51.1111 -51.4662 -51.847 -51.931 -52.7397 -52.4484 -51.1882 -48.4929 -44.6632 -39.9528 -35.1361 -30.6411 -27.5068 -25.7996 -24.8868 -24.5746 -24.3521 -24.2524 -24.486 -24.6544 -24.9749 -25.2448 -26.1826 -27.6831 -29.7284 -32.3235 -35.1133 -37.5805 -39.1566 -39.2314 -37.7942 -34.9947 -31.7005 -28.5013 -25.84 -23.9954 -22.9181 -22.5584 -22.4658 -22.5543 -22.5762 -23.1762 -23.5176 -24.4473 -26.1634 -28.0395 -29.9255 -31.5644 -32.5116 -32.5766 -31.5643 -29.9935 -28.337 -26.4802 -24.7031 -23.7022 -23.0784 -22.9064 -23.2498 -23.5788 -23.5758 -23.8228 -23.6211 -23.5057 -23.3128 -23.2973 -23.2766 -23.3805 -23.5182 -23.593 -23.5341 -23.5683 -23.3773 -23.0895 -22.9493 -22.8105 -22.6509 -22.5925 -22.5528 -22.6818 -22.7611 -22.9488 -23.2465 -24.0842 -25.1654 -26.7089 -28.8168 -31.1315 -32.9873 -33.9208 -34.5033 -33.5675 -31.9451 -29.8315 -27.8476 -26.2138 -25.0153 -24.5586 -24.5564 -24.8711 -25.0201 -25.3941 -25.6975 -25.8733 -26.6969 -28.5458 -31.313 -33.8575 -36.1464 -37.8542 -38.1904 -37.5404 -35.6418 -32.7792 -30.0581 -27.7214 -26.1275 -24.9954 -23.8815 -23.4514 -23.3125 -23.4745 -23.5844 -23.6014 -23.7385 -23.6898 -23.7514 -23.4111 -23.4145 -23.3708 -23.5265 -23.5587 -23.532 -23.4615 -23.4769 -23.407 -23.2876 -23.4792 -23.784 -23.7599 -23.6955 -23.8753 -23.8833 -23.9803 -23.8556 -23.89 -23.8448 -23.957 -23.9584 -24.0182 -23.9348 -23.9601 -23.981 -23.9139 -23.9639 -24.0258 -24.2091 -24.1872 -24.2777 -24.4066 -24.5618 -24.5783 -24.6104 -24.5129 -24.5863 -24.6319 -24.7447 -24.426 -24.4516 -24.5579 -24.4204 -24.3466 -24.1246 -24.032 -23.996 -23.5416 -23.6568 -23.5801 -23.5588 -23.4886 -23.4312 -23.5057 -23.5543 -23.6262 -23.8066 -23.9713 -24.213 -24.1506 -24.0657 
-24.1678 -23.9444 -23.7988 -23.3642 -23.3054 -23.1845 -23.337 -23.6943 -24.0383 -24.1446 -24.5307 -24.4483 -24.5852 -24.7057 -24.6281 -24.4592 -24.4208 -24.1545 -23.8134 -23.9547 -23.8505 -23.7997 -24.0975 -23.9568 -24.3729 -24.4523 -24.8523 -24.9808 -24.8916 -24.742 -24.5414 -24.3747 -24.2083 -23.9847 -24.0414 -24.3656 -24.6565 -25.1327 -25.4731 -25.985 -26.1888 -26.3249 -26.0222 -25.8974 -25.4934 -24.9725 -24.5031 -24.2817 -24.1754 -24.2049 -24.264 -24.5342 -24.8986 -25.0055 -25.356 -25.6699 -25.9767 -26.0647 -26.2118 -26.2591 -25.9461 -25.6916 -25.6032 -25.8468 -25.4601 -25.7522 -25.7937 -25.9961 -26.173 -26.5243 -26.6892 -26.9761 -26.9673 -27.2507 -27.301 -27.2003 -27.1242 -27.2306 -27.127 -27.0956 -27.4114 -27.7777 -28.2319 -28.8025 -29.5294 -29.9683 -30.6852 -31.0113 -31.4043 -31.4057 -31.4479 -31.4446 -31.1501 -30.9905 -30.8507 -30.695 -31.1027 -31.7183 -32.5904 -33.6916 -34.4289 -35.1856 -35.6858 -35.7517 -35.5857 -35.2024 -35.2878 -35.3073 -35.9081 -37.0109 -38.8727 -41.6198 -44.9601 -48.3777 -51.9885 -54.7993 -56.2711 -56.0208 -54.1574 -51.6567", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0002 0.0006 0.0006 0.0008 0.0015 0.0019 0.0027 0.0031 0.0031 0.0032 0.0033 0.0034 0.0039 0.0041 0.0044 0.0044 0.0041 0.004 0.0038 0.0037 0.0032 0.0026 0.0026 0.0024 0.0022 0.0018 0.0016 0.0011 0.0005 0.0013 0.002 0.0025 0.0024 0.0021 0.0019 0.0017 0.0019 0.0018 0.0018 0.0014 0.0014 0.0012 0.001 0.0011 0.0013 0.0018 0.0035 0.0059 0.0083 0.0099 0.0108 0.0108 0.0096 0.0075 0.006 0.0034 0.0017 0.0014 0.0013 0.0012 0.0015 0.0013 0.0013 0.0011 0.0015 0.0037 0.0059 0.0102 0.0142 0.0163 0.0171 0.015 0.012 0.008 0.0038 0.0015 0.0013 0.0013 0.001 0.0009 0.0008 0.0006 0.0006 0.0004 0.0005 0.0004 0.0004 0.0004 0.0003 0.0005 0.0004 0.0006 0.0008 0.0011 0.001 0.0013 0.0013 0.0013 0.0014 0.0013 0.0013 0.0013 0.0014 0.0012 0.0013 0.0017 0.004 0.0073 0.0104 0.0117 0.0155 0.0161 0.0147 0.0136 0.0089 0.0045 0.0022 0.0015 0.001 0.0009 0.0008 0.0006 0.0006 0.0003 0.0006 0.0003 0.0004 0.002 0.0027 0.0033 0.0035 0.0034 0.0036 0.0041 0.0041 0.0036 0.0031 0.0025 0.0019 0.0019 0.0019 0.0016 0.0012 0.0015 0.0011 0.0012 0.0012 0.0015 0.0014 0.0016 0.0014 0.0016 0.0015 0.0015 0.0016 0.0017 0.0017 0.0019 0.0019 0.002 0.0023 0.0024 0.0022 0.0022 0.0019 0.0019 0.0021 0.0018 0.0018 0.0019 0.0014 0.0018 0.0016 0.0017 0.0017 0.0016 0.0016 0.0014 0.0014 0.0013 0.0009 0.001 0.0008 0.0008 0.0008 0.0007 0.0007 0.0004 0.0003 0.0002 0.0004 0.0004 0.0003 0.0002 0.0002 0.0003 0.0007 0.0012 0.0015 0.0017 0.002 0.0022 0.0024 0.0024 0.0021 0.0019 0.0016 0.0015 0.0013 0.0015 0.0019 0.0019 0.0019 0.0019 0.0016 0.0014 0.0012 0.0014 0.0016 0.0012 0.001 0.0012 0.0011 0.0013 0.0013 0.0016 0.0016 0.0016 0.0013 0.0011 0.0012 0.0012 0.0012 0.0011 0.001 0.001 0.0009 0.001 0.0009 0.0009 0.0012 0.001 0.001 0.0011 0.0009 0.0009 0.001 0.0012 0.0011 0.0009 0.001 0.0009 0.001 0.0009 0.0007 0.0011 0.0011 0.001 0.0008 0.001 0.0008 0.0008 0.0008 0.0007 0.0007 0.0008 0.0005 0.0006 0.0007 0.0008 0.0009 0.0008 0.0008 0.0006 0.0007 0.0005 0.0007 0.0005 0.0005 0.0004 0.0005 0.0005 0.0005 0.0006 0.0005 0.0005 0.0008 0.0006 0.0008 0.0008 0.0007 0.0006 0.0004 0.0005 0.0005 0.0004 0.0003 0.0002 0.0003 0.0003 0.0002 0.0005 0.0001 0.0002 0.0002 0.0003 0.0002 0.0002 0.0003 0.0001 0.0 0.0 0.0 0.0 0.0001 0.0 0.0 0.0 0.0002 0.0002 0.0011 0.0018 0.0021 0.002 0.0014 0.0007 0.0005 0.0002 0.0002", + "breathiness": "-73.1654 -71.3893 -68.7023 -66.0819 -63.2788 -60.4863 -57.8625 -55.3371 -53.1124 -51.479 -50.1676 -49.3908 -48.8597 -48.524 -48.3306 -48.5263 
-48.8271 -49.4778 -50.1659 -50.7336 -51.2959 -51.7536 -52.1222 -52.2739 -53.0465 -53.7344 -54.545 -54.8628 -55.4497 -55.7261 -55.9454 -55.8607 -55.9282 -56.4991 -57.2076 -57.9905 -58.5816 -59.117 -59.5272 -59.9993 -60.0915 -60.0784 -60.0966 -59.1493 -57.4403 -54.8805 -52.0669 -49.0249 -46.418 -44.5474 -43.8206 -44.0533 -45.8305 -47.9999 -50.4034 -53.1104 -55.0576 -56.771 -57.4272 -57.2839 -57.2826 -56.1721 -54.0639 -51.1808 -47.6709 -43.7075 -40.0463 -36.9066 -34.9238 -34.1365 -34.1865 -35.3496 -38.1818 -41.767 -46.0551 -50.3748 -54.3032 -57.5345 -59.8841 -61.1114 -62.136 -62.7278 -62.948 -63.1461 -62.7216 -62.373 -61.966 -61.5337 -61.4693 -61.1321 -60.7637 -60.1881 -59.48 -58.307 -57.7178 -56.9745 -56.5757 -56.9776 -57.0017 -57.4041 -57.3065 -57.3021 -56.71 -55.1305 -52.6423 -49.5485 -46.0367 -42.6083 -39.4485 -37.541 -36.8127 -37.2204 -38.7728 -41.2482 -44.2356 -47.5321 -51.0546 -54.3405 -57.4066 -59.6528 -62.1014 -64.1599 -65.4712 -65.4861 -64.3833 -61.8965 -59.31 -56.2048 -53.4694 -51.326 -49.5802 -49.034 -48.6893 -48.5856 -48.6965 -49.478 -50.6852 -51.9908 -53.2231 -54.3855 -55.3827 -55.987 -56.3123 -56.4985 -56.3061 -56.3211 -56.2042 -56.0913 -56.1844 -56.4175 -56.7676 -56.782 -57.1181 -56.696 -56.0986 -55.5904 -54.9215 -54.332 -53.9569 -53.9041 -53.9664 -53.9217 -53.9366 -53.9118 -54.1546 -54.4696 -54.7781 -54.9702 -55.1716 -55.3437 -55.2937 -55.4017 -55.5564 -55.8022 -56.4342 -57.2663 -58.2084 -59.4592 -60.421 -61.139 -62.0018 -62.6407 -63.0502 -63.5875 -64.1411 -64.8428 -65.6654 -66.4252 -67.1502 -67.8101 -68.3813 -68.3999 -67.8198 -66.5094 -64.6499 -62.5445 -60.2488 -58.1775 -56.5715 -55.3793 -54.7583 -54.321 -54.3827 -54.5208 -54.9234 -55.2126 -55.7142 -55.6618 -55.5473 -54.9124 -54.004 -53.1677 -52.2443 -51.8282 -51.7827 -52.1802 -52.9165 -53.8764 -55.0834 -55.8515 -56.5325 -56.7262 -56.9909 -56.8436 -56.5634 -56.0372 -55.7449 -55.4569 -55.0088 -54.7387 -54.7006 -54.6406 -54.7385 -54.9998 -55.1714 -55.4823 -55.6791 -55.7815 -55.9539 -55.8306 -55.6391 -55.3116 -55.0932 -55.0752 -55.1432 -55.4941 -55.8431 -56.6105 -57.0604 -57.6144 -57.9657 -57.9667 -57.6448 -57.3427 -56.8959 -56.555 -56.0496 -55.3913 -54.889 -54.6491 -54.4855 -54.6813 -55.1795 -55.8578 -56.035 -56.2638 -56.1472 -56.0891 -55.9008 -56.1277 -55.9699 -55.8799 -56.029 -55.9192 -56.012 -56.0327 -56.3059 -56.8734 -57.1979 -57.6571 -58.0317 -58.2053 -58.3419 -58.3441 -58.3712 -58.3276 -58.5641 -58.7891 -59.196 -59.5122 -60.2628 -61.2237 -62.4582 -63.6266 -64.9473 -66.1019 -66.8931 -67.4189 -67.783 -67.4958 -67.176 -66.7072 -66.3626 -65.9614 -65.9756 -66.0651 -66.5907 -67.4458 -68.6145 -69.9729 -71.4711 -72.6474 -73.5949 -73.6671 -73.5501 -73.2424 -72.3225 -71.0427 -68.7221 -66.0969 -63.1065 -60.4244 -58.1822 -57.526 -57.8218 -58.1269 -58.4208 -57.9962 -56.844", "breathiness_timestep": "0.011609977324263039" }, { @@ -42,9 +42,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "154.7 154.9 154.6 154.5 154.5 154.6 154.6 154.4 154.9 154.8 155.3 155.3 155.0 155.1 155.4 155.3 155.2 155.5 155.4 155.6 155.9 155.5 155.9 155.5 155.7 155.6 155.4 155.5 155.2 155.5 156.0 155.4 155.6 155.5 155.8 155.6 155.8 155.6 155.6 155.7 155.5 155.4 155.5 155.3 155.5 155.5 155.6 154.9 155.2 154.8 154.6 154.1 154.3 159.0 167.1 178.1 192.1 203.8 220.8 235.0 242.0 246.9 248.8 249.5 247.4 244.0 241.4 237.7 234.6 233.4 228.2 224.2 223.7 221.7 219.1 218.0 217.9 218.0 218.8 220.0 220.7 220.8 220.1 218.4 213.0 204.4 203.8 205.2 219.9 231.8 247.0 259.3 252.9 251.1 253.4 254.0 253.5 252.3 250.3 245.5 239.9 234.2 230.2 226.6 226.0 
227.3 232.2 239.9 250.5 258.5 262.2 261.0 257.0 249.3 240.3 233.8 230.7 229.0 229.8 234.5 241.2 246.4 252.1 255.7 259.4 262.3 262.6 262.1 260.2 257.3 253.6 252.5 252.9 251.7 249.6 249.2 250.6 250.3 250.2 248.8 248.1 247.9 248.0 248.8 248.1 246.8 243.1 236.8 235.5 235.7 235.0 234.7 233.9 233.8 234.2 222.8 217.8 215.5 216.3 217.5 218.1 218.4 219.8 222.1 224.0 224.3 223.5 222.9 222.8 222.9 221.5 220.5 220.1 220.1 218.3 218.2 220.0 220.3 220.1 220.2 219.9 218.5 217.6 218.1 219.6 220.8 221.0 220.0 219.8 223.1 231.2 243.1 246.7 248.9 249.1 249.0 247.6 245.5 244.6 243.6 243.7 244.1 245.9 247.7 248.0 248.1 248.8 248.6 248.2 248.5 249.8 251.0 255.2 260.0 272.7 284.4 294.2 302.6 304.5 304.3 301.8 299.9 296.3 293.7 292.5 292.0 292.6 293.9 295.3 297.2 298.8 299.2 296.3 288.3 275.2 259.3 249.3 244.5 243.6 245.7 246.4 247.7 250.7 251.2 252.6 251.5 251.3 248.9 245.5 242.6 239.8 235.6 235.3 240.5 244.6 249.5 253.8 255.7 257.0 257.7 257.4 257.5 254.1 251.9 246.3 242.0 236.3 232.2 229.9 232.1 236.5 241.7 248.6 254.2 257.6 259.4 260.6 259.8 258.6 254.4 244.5 234.7 227.7 225.2 226.1 227.9 228.4 229.1 231.2 235.7 242.3 250.3 253.9 256.0 255.1 253.5 255.2 257.5 257.1 252.2 253.8 254.1 254.9 256.4 256.6 258.1 258.7 258.1 257.3 258.4 259.5 261.4 263.6 266.0 268.0 271.2 274.5 278.2 277.3 272.2 262.1 253.2 247.1 244.2 243.2 243.9 244.4 246.2 246.4 244.6 240.1 234.9 226.8 226.3 224.9 223.9 223.9 222.5 224.0 220.8 218.9 219.3 219.4 220.1 220.1 219.3 219.7 220.1 219.7 219.8 219.2 218.6 214.7 205.9 190.8 191.4 193.9 196.4 197.4 198.0 197.8 195.0 194.1 194.5 195.8 196.8 196.4 196.3 196.0 195.6 195.5 194.9 194.7 194.6 194.0 195.2 195.2 195.1 195.7 197.0 196.6 196.2 196.4 196.3 195.9 195.0 194.3 194.5 195.0 196.5 198.1 198.6 196.3 190.9 182.0 169.6 165.2 166.9 167.9 168.9 171.3 164.5 163.3 163.0 162.5 162.3 163.0 164.2 164.9 166.2 167.0 167.0 164.3 163.5 161.5 158.7 154.7 149.9 140.9 141.7 145.0 146.1 148.7 152.2 158.1 166.3 175.2 184.1 191.0 196.0 198.4 199.0 197.1 194.5 190.4 179.9 180.3 189.5 197.8 207.1 216.7 227.8 225.5 223.3 223.4 223.7 223.3 221.1 219.4 218.7 219.4 220.1 220.2 218.5 213.8 207.4 199.9 200.3 199.5 200.2 199.4 201.0 198.8 194.0 191.0 191.6 193.7 196.2 197.9 196.8 197.8 196.3 191.3 180.0 176.2 180.0 183.6 187.7 191.5 195.5 199.2 202.2 205.9 206.1 208.2 209.8 212.7 214.3 214.2 214.6 215.6 216.6 217.1 217.6 218.5 218.9 218.5 218.1 218.2 218.9 219.0 218.5 218.7 220.4 221.6 222.2 223.1 226.3 231.3 235.6 240.7 246.5 249.2 250.2 250.4 251.1 250.9 250.1 247.8 245.2 242.3 241.3 241.4 242.5 244.6 250.0 256.5 261.0 262.4 261.9 259.1 254.7 248.9 243.2 236.7 233.7 232.8 233.9 236.4 241.1 246.8 253.5 260.2 263.6 263.8 261.1 255.1 248.4 241.4 231.9 224.9 223.1 231.0 244.0 247.8 243.7 244.6 246.9 248.8 248.5 248.4 248.7 249.4 250.0 250.1 247.5 243.7 243.5 243.5 244.5 245.5 244.6 244.9 245.4 245.0 245.1 246.2 246.0 245.1 246.4 246.2", "f0_timestep": "0.011609977324263039", - "energy": "0.0004 0.0007 0.0001 0.0002 0.0003 0.0006 0.0008 0.001 0.0006 0.0008 0.0005 0.0007 0.0007 0.001 0.0008 0.0012 0.0015 0.0017 0.0024 0.0018 0.0023 0.0019 0.0024 0.0022 0.0027 0.002 0.0021 0.003 0.0022 0.0022 0.0022 0.0026 0.0028 0.0027 0.0028 0.0025 0.0022 0.0025 0.0019 0.0018 0.0017 0.0011 0.0016 0.0013 0.0011 0.001 0.0025 0.0079 0.0099 0.0103 0.0231 0.0418 0.0533 0.062 0.0667 0.0643 0.0635 0.0632 0.063 0.0631 0.067 0.0702 0.0723 0.0763 0.078 0.0792 0.0804 0.0786 0.0775 0.0761 0.0751 0.0757 0.0769 0.0785 0.0793 0.0815 0.0824 0.0813 0.081 0.0797 0.0764 0.0731 0.0663 0.0558 0.0411 0.0253 0.0136 0.0051 0.0036 0.0055 0.0176 0.0413 0.0591 
0.0728 0.0845 0.088 0.0909 0.0926 0.09 0.0881 0.0834 0.078 0.0727 0.0665 0.0608 0.0574 0.0579 0.0608 0.0636 0.0641 0.0655 0.0635 0.0623 0.0612 0.0581 0.0562 0.0557 0.0541 0.0537 0.0548 0.0549 0.0567 0.0588 0.0621 0.066 0.0692 0.072 0.0746 0.0757 0.0763 0.077 0.077 0.0797 0.0815 0.0848 0.0879 0.0898 0.0915 0.0943 0.0922 0.0924 0.0899 0.0866 0.0873 0.0854 0.0814 0.0706 0.057 0.0402 0.0252 0.022 0.0192 0.0143 0.0299 0.0524 0.0685 0.0807 0.0874 0.0875 0.0877 0.0879 0.0872 0.0861 0.0858 0.0871 0.0886 0.09 0.0907 0.0916 0.092 0.0902 0.0869 0.08 0.075 0.077 0.0795 0.0842 0.0884 0.087 0.0862 0.0855 0.0831 0.0811 0.0811 0.0784 0.0769 0.0752 0.0699 0.0656 0.0636 0.0637 0.0684 0.0725 0.0757 0.0777 0.0789 0.0809 0.0821 0.0836 0.0839 0.084 0.0832 0.0816 0.082 0.0814 0.0823 0.0822 0.0811 0.0785 0.0757 0.0705 0.0639 0.0586 0.0566 0.0613 0.0732 0.0837 0.0938 0.1007 0.1043 0.1053 0.1029 0.1006 0.0969 0.0938 0.0925 0.0901 0.0891 0.0865 0.0847 0.0855 0.0858 0.089 0.0915 0.0929 0.093 0.0919 0.0905 0.0871 0.0865 0.0849 0.0828 0.0837 0.0845 0.0873 0.0903 0.0914 0.0926 0.0895 0.0797 0.0644 0.0478 0.0286 0.0235 0.0248 0.0348 0.0526 0.069 0.0827 0.091 0.0904 0.0845 0.0797 0.0778 0.0761 0.0739 0.0697 0.0655 0.0626 0.0625 0.0637 0.0634 0.0639 0.0627 0.0599 0.0586 0.0557 0.0538 0.0531 0.0518 0.0506 0.0494 0.049 0.0493 0.0488 0.0479 0.0452 0.0427 0.0397 0.0378 0.0355 0.034 0.0329 0.0334 0.034 0.0349 0.0292 0.0252 0.0185 0.0108 0.0043 0.0038 0.0028 0.0034 0.0036 0.003 0.0025 0.0017 0.0017 0.0008 0.0012 0.002 0.0055 0.0093 0.0111 0.0138 0.0153 0.0207 0.0452 0.0622 0.077 0.0871 0.0866 0.0854 0.082 0.0786 0.077 0.0754 0.0741 0.0728 0.0681 0.0584 0.046 0.033 0.0258 0.0267 0.028 0.0319 0.0469 0.0575 0.0673 0.0761 0.0795 0.0829 0.084 0.0853 0.0855 0.0849 0.086 0.085 0.0818 0.0758 0.0635 0.0482 0.0322 0.0153 0.0086 0.0093 0.0322 0.0504 0.0617 0.0701 0.0714 0.0691 0.0676 0.0666 0.0645 0.0629 0.0611 0.0632 0.066 0.0693 0.0709 0.0721 0.0722 0.0711 0.0718 0.0704 0.0684 0.0683 0.0691 0.069 0.0691 0.0688 0.0658 0.0629 0.0613 0.0594 0.0591 0.0587 0.0584 0.0558 0.0532 0.046 0.0349 0.0231 0.0117 0.0122 0.0318 0.0485 0.0633 0.0712 0.0761 0.0764 0.0779 0.079 0.0791 0.0795 0.0779 0.0814 0.0808 0.0798 0.0819 0.0794 0.0775 0.0733 0.0674 0.0596 0.0558 0.0604 0.0665 0.0736 0.0765 0.075 0.0705 0.0667 0.0645 0.0621 0.0608 0.058 0.0555 0.0551 0.0532 0.0473 0.0389 0.0276 0.0115 0.0089 0.0109 0.0116 0.0185 0.042 0.0549 0.066 0.0725 0.0683 0.0656 0.0623 0.0606 0.0604 0.0599 0.0604 0.0603 0.059 0.0536 0.0448 0.0338 0.021 0.0122 0.0055 0.0164 0.0438 0.0598 0.0747 0.0851 0.0881 0.09 0.0881 0.0865 0.0832 0.0774 0.0685 0.0551 0.0384 0.0211 0.0099 0.0089 0.0085 0.0095 0.0095 0.0092 0.0218 0.0382 0.0551 0.0708 0.0832 0.09 0.0926 0.0943 0.0955 0.096 0.0958 0.0951 0.0941 0.0934 0.0945 0.0941 0.093 0.0906 0.088 0.0843 0.0797 0.0745 0.0671 0.0637 0.0611 0.0589 0.0589 0.0593 0.0608 0.0625 0.0664 0.0695 0.0722 0.0747 0.0745 0.0744 0.0732 0.0704 0.0669 0.0617 0.0569 0.0516 0.0491 0.0473 0.0491 0.0515 0.0545 0.058 0.0592 0.0597 0.0585 0.0548 0.0489 0.0422 0.0361 0.031 0.0282 0.0271 0.0268 0.0284 0.0301 0.0324 0.035 0.0353 0.0368 0.0359 0.0335 0.0306 0.0253 0.0202 0.0166 0.0144 0.0151 0.0165 0.0161 0.0175 0.0184 0.019 0.0208 0.0219 0.0219 0.0208 0.019 0.0159 0.0133 0.0075 0.0037 0.0022 0.0013 0.0009 0.0007 0.0007 0.0002 0.0002 0.0 0.0 0.0001 0.0 0.0 0.0003", + "energy": "-89.252 -88.1934 -84.7832 -80.8903 -76.4724 -72.9601 -70.2108 -68.5781 -67.6996 -67.2147 -66.4785 -65.6012 -64.3584 -62.5313 -60.9495 -59.1922 -57.8083 -56.9083 -56.0358 -55.3353 -54.9027 
-54.4263 -53.8403 -53.5872 -52.8632 -52.3027 -51.7714 -51.5115 -51.4057 -51.6064 -51.9786 -52.2125 -53.2219 -54.0253 -55.4653 -56.8724 -58.0493 -59.4999 -60.1405 -60.7369 -61.0301 -60.9314 -60.264 -58.5584 -56.0604 -53.001 -48.9722 -44.4398 -39.5052 -34.8771 -30.9505 -27.8255 -25.5458 -23.9223 -23.1554 -23.2069 -23.5136 -23.7696 -23.8706 -24.1053 -23.8051 -23.392 -23.2345 -22.7855 -22.6313 -22.5 -22.524 -22.783 -23.2398 -23.2871 -23.2527 -23.2201 -22.9348 -22.4939 -22.3697 -22.0569 -21.8742 -21.879 -22.0523 -22.1204 -22.6576 -23.8788 -25.305 -27.4676 -30.5755 -34.1641 -37.1138 -38.7386 -38.5096 -36.5856 -33.2826 -29.5796 -25.7308 -23.0976 -21.7462 -21.1798 -21.0408 -21.3548 -21.4816 -21.9085 -22.366 -22.9171 -23.2776 -23.8984 -24.4123 -24.5083 -24.6523 -24.7366 -24.5736 -24.1095 -23.914 -23.7824 -23.754 -23.9068 -24.0751 -24.1154 -24.6487 -24.3858 -24.3768 -24.4067 -24.0655 -24.1285 -23.876 -23.7039 -23.9562 -23.8223 -23.5761 -23.6591 -23.2944 -23.2329 -23.0318 -22.7412 -22.5003 -22.0078 -21.5567 -21.3692 -20.9157 -20.7964 -20.7363 -20.9846 -21.2645 -21.3601 -21.6125 -21.7978 -22.5147 -23.7841 -25.5096 -27.2434 -29.3504 -31.2128 -31.9894 -31.7382 -30.5805 -28.8093 -27.09 -25.0737 -23.5606 -22.3884 -21.9157 -21.7973 -21.8385 -21.7978 -21.8538 -21.7329 -21.7425 -21.7936 -21.7086 -21.5003 -21.6212 -21.5818 -21.8565 -21.9733 -22.2124 -22.181 -22.2755 -22.1938 -22.1787 -22.2355 -22.5122 -22.5672 -22.449 -22.4459 -22.6574 -22.8449 -22.878 -23.1947 -23.3358 -23.6307 -23.6744 -23.605 -23.4713 -23.0207 -22.7427 -22.4647 -22.2783 -22.2812 -22.0879 -22.0639 -22.3466 -22.4832 -22.4441 -22.5756 -22.6538 -22.6414 -22.5738 -22.9307 -23.1022 -23.3668 -23.67 -23.6911 -24.0251 -23.967 -24.0016 -23.5155 -23.1646 -22.7272 -22.4562 -21.9936 -21.5876 -21.5979 -21.4908 -21.2719 -21.3718 -21.4203 -21.536 -21.5099 -21.4316 -21.6271 -21.642 -21.546 -21.571 -21.5099 -21.3697 -21.3336 -21.2812 -21.3992 -21.1875 -21.2825 -21.2865 -21.2958 -21.3737 -21.2754 -21.2724 -21.3955 -21.0118 -20.8039 -20.96 -21.2064 -22.1616 -23.8699 -25.7936 -27.6708 -28.9697 -29.3306 -28.7528 -27.3985 -25.4114 -23.7126 -22.3059 -21.3215 -20.9918 -20.7894 -21.1451 -21.2958 -21.7728 -22.1456 -22.6122 -22.8776 -22.9651 -23.1239 -23.2456 -23.4417 -23.6581 -23.7818 -24.223 -24.644 -24.9805 -25.3221 -25.6605 -25.9163 -26.2471 -26.6946 -27.1015 -27.5224 -27.7166 -28.207 -28.3165 -28.6716 -29.1515 -29.2595 -29.7525 -29.8934 -30.3411 -30.6305 -31.2138 -32.7169 -33.8621 -36.2039 -38.7111 -41.6227 -44.2456 -46.4271 -47.9447 -48.7954 -49.7737 -50.6633 -51.5148 -51.9771 -51.4815 -50.2856 -48.2736 -45.2141 -42.2548 -39.3054 -36.8807 -34.3378 -31.9554 -29.7886 -27.5941 -25.4271 -23.6507 -22.3817 -21.6218 -21.264 -21.1467 -21.3855 -21.6649 -22.3325 -23.1324 -24.1959 -25.8874 -27.2622 -28.9699 -29.6821 -30.1992 -29.7252 -28.9234 -27.5038 -26.0204 -24.3732 -23.0377 -21.9643 -21.515 -21.1895 -21.021 -20.8972 -21.0781 -21.0833 -21.3625 -21.8723 -22.5672 -24.0926 -26.5361 -29.382 -31.6063 -33.0707 -33.1755 -31.9154 -30.0799 -27.8739 -25.7317 -24.5803 -24.1007 -23.9721 -23.9624 -23.9616 -23.7739 -23.8985 -24.0088 -23.8701 -23.6225 -23.7292 -23.7998 -23.7045 -23.7169 -23.8899 -23.752 -23.9033 -23.7389 -23.8163 -23.8025 -23.5814 -23.4773 -23.6117 -23.841 -24.1646 -24.4713 -24.6622 -24.7769 -25.0621 -25.5326 -26.3227 -28.276 -29.9921 -32.1933 -33.3491 -33.7901 -32.9427 -31.2008 -28.9724 -26.4365 -24.7047 -23.0847 -22.8224 -22.6377 -22.4679 -22.3265 -22.2226 -22.1529 -22.3701 -22.2492 -22.3678 -22.6258 -22.9326 -23.3665 -23.7692 -24.1187 -24.2238 -24.0056 -24.0687 
-23.8805 -23.7976 -23.6921 -23.8746 -24.1807 -24.3218 -24.6799 -24.998 -25.002 -24.9705 -24.9051 -25.4043 -26.3277 -28.0071 -30.2406 -32.4936 -34.391 -35.0912 -34.6668 -33.0833 -30.752 -28.107 -25.9531 -24.6297 -24.0421 -24.0014 -24.0482 -24.3114 -24.4224 -24.6012 -24.8101 -24.7855 -25.244 -26.1222 -27.6014 -29.9666 -32.493 -34.1192 -34.8822 -34.1617 -32.4581 -29.7189 -26.8945 -24.6848 -23.157 -22.4309 -22.0533 -22.244 -22.679 -23.4885 -25.0215 -27.2897 -30.1989 -33.45 -36.5028 -38.8026 -40.6024 -41.3193 -40.6532 -38.0336 -35.3386 -32.2531 -29.2734 -26.4824 -24.4255 -23.0851 -22.3686 -21.7648 -21.4492 -21.2698 -21.003 -20.9806 -20.7669 -20.74 -20.7643 -20.8684 -20.805 -20.9974 -21.134 -21.4973 -21.6632 -22.1037 -22.5288 -23.3995 -23.9238 -24.5405 -24.8627 -24.9467 -24.8605 -24.3451 -24.0725 -23.6403 -23.3044 -22.809 -22.5716 -22.7203 -22.4564 -22.7009 -23.1153 -23.464 -23.8816 -24.369 -24.8303 -24.9947 -25.0161 -24.8783 -24.3964 -23.9253 -23.5933 -23.4416 -23.5865 -23.8886 -24.786 -25.3117 -26.0114 -26.9744 -27.3974 -27.8853 -28.0282 -27.7178 -27.8564 -27.5232 -27.2102 -27.1803 -27.2162 -27.3946 -27.8585 -28.7503 -30.2167 -31.4539 -32.7443 -33.5554 -34.2137 -34.3898 -34.1382 -33.7467 -33.1305 -32.7842 -32.8838 -32.7022 -32.9108 -33.1771 -34.0117 -35.2212 -36.9384 -40.1881 -43.424 -47.2261 -51.35 -54.7795 -58.4945 -61.4173 -63.6348 -65.1273 -66.2082 -66.5349 -66.293 -65.9319 -64.883 -63.6505 -62.1704", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0001 0.0003 0.0003 0.0003 0.0005 0.0003 0.0006 0.0005 0.0005 0.0006 0.0007 0.0007 0.001 0.0009 0.001 0.001 0.0013 0.0016 0.0018 0.0018 0.0021 0.0022 0.0022 0.0026 0.0027 0.003 0.0031 0.0029 0.0029 0.0028 0.0027 0.0026 0.0028 0.0027 0.0028 0.0025 0.0023 0.0022 0.0021 0.0019 0.0015 0.0012 0.0007 0.0005 0.0007 0.0008 0.004 0.0073 0.0104 0.0115 0.0114 0.0099 0.0068 0.004 0.0022 0.0016 0.0017 0.0017 0.0013 0.0011 0.0008 0.0005 0.0003 0.0006 0.0003 0.0005 0.0006 0.0005 0.0006 0.0007 0.0006 0.0009 0.0012 0.0013 0.0013 0.0013 0.0012 0.0011 0.001 0.0008 0.0005 0.0005 0.0003 0.0005 0.0003 0.0003 0.0011 0.0013 0.0023 0.0035 0.0043 0.0042 0.0038 0.0022 0.0008 0.0007 0.0007 0.0007 0.0009 0.0011 0.0011 0.0012 0.001 0.0011 0.001 0.0011 0.0011 0.0011 0.0011 0.0012 0.0013 0.0011 0.0012 0.0012 0.0012 0.0013 0.0011 0.001 0.0009 0.001 0.0009 0.001 0.001 0.001 0.0009 0.0008 0.0011 0.0012 0.0014 0.0013 0.0013 0.0013 0.0012 0.0016 0.0016 0.0015 0.0017 0.0017 0.0015 0.0015 0.0013 0.0012 0.0009 0.0006 0.0007 0.0006 0.0054 0.0127 0.0186 0.0228 0.0234 0.0209 0.0155 0.0092 0.0055 0.0028 0.0018 0.0014 0.0012 0.0012 0.0014 0.0013 0.0011 0.0012 0.001 0.001 0.0008 0.001 0.0012 0.001 0.001 0.0009 0.0012 0.0014 0.0018 0.0022 0.002 0.002 0.0019 0.0017 0.0018 0.0018 0.0019 0.002 0.0023 0.0026 0.0026 0.0024 0.0024 0.002 0.0023 0.0026 0.0026 0.0024 0.0024 0.0018 0.0014 0.0013 0.0013 0.0013 0.0014 0.0009 0.0011 0.0013 0.0015 0.0017 0.0016 0.0013 0.0014 0.0016 0.0024 0.0025 0.0031 0.0037 0.004 0.0037 0.0028 0.002 0.0016 0.0013 0.0017 0.0021 0.0022 0.0024 0.0023 0.0023 0.0024 0.0022 0.0015 0.0009 0.0007 0.0005 0.001 0.0013 0.0015 0.0014 0.0015 0.0013 0.0009 0.0009 0.0009 0.0007 0.0008 0.0009 0.0007 0.0008 0.0008 0.0006 0.0006 0.0015 0.0058 0.0126 0.019 0.024 0.0268 0.0252 0.0208 0.015 0.0076 0.0038 0.003 0.0025 0.0023 0.0023 0.0025 0.0024 0.0027 0.0028 0.0029 0.0029 0.0029 0.0027 0.0028 0.0023 0.0021 0.0018 0.0014 0.0015 0.0016 0.0016 0.0018 0.0018 0.002 0.0019 0.0019 0.0019 0.0018 0.0018 0.0016 0.0013 0.0013 0.0012 0.001 0.001 0.0012 0.0013 0.0014 0.0019 0.0021 0.0023 
0.0024 0.0026 0.003 0.0032 0.0034 0.0033 0.0028 0.0022 0.0014 0.0007 0.0007 0.002 0.0054 0.0082 0.0113 0.0144 0.0153 0.0141 0.0118 0.0081 0.0047 0.0037 0.0028 0.0023 0.0019 0.002 0.0021 0.0023 0.0024 0.0025 0.0037 0.0114 0.0175 0.0243 0.0268 0.0296 0.033 0.0303 0.0281 0.0217 0.0103 0.0026 0.0015 0.0017 0.0016 0.0014 0.0014 0.0016 0.0014 0.0013 0.0013 0.0011 0.0012 0.0011 0.001 0.0018 0.0028 0.0042 0.0051 0.0051 0.005 0.0042 0.0032 0.0025 0.0022 0.0021 0.0019 0.0016 0.0016 0.0011 0.0009 0.0008 0.0007 0.0007 0.0006 0.0005 0.0008 0.0007 0.0011 0.0014 0.0014 0.0016 0.0017 0.0014 0.0015 0.0013 0.0013 0.001 0.0007 0.0007 0.0004 0.0004 0.0003 0.0002 0.0007 0.0015 0.0034 0.0063 0.0076 0.0068 0.0064 0.004 0.0007 0.0003 0.0 0.0001 0.0002 0.0002 0.0002 0.0003 0.0003 0.0006 0.001 0.0012 0.0024 0.0038 0.0038 0.0041 0.0043 0.0034 0.0034 0.003 0.0028 0.0023 0.0021 0.0022 0.0019 0.0014 0.0013 0.0013 0.001 0.001 0.0012 0.0011 0.0012 0.0013 0.0027 0.0067 0.0116 0.0134 0.0133 0.0124 0.0078 0.0032 0.0022 0.0018 0.0017 0.0014 0.0012 0.001 0.0008 0.0007 0.0007 0.0008 0.0005 0.0003 0.0005 0.001 0.002 0.003 0.0034 0.0034 0.0029 0.0016 0.0011 0.0011 0.0014 0.0015 0.0015 0.0013 0.0014 0.0013 0.0014 0.0018 0.0023 0.0029 0.0041 0.0055 0.0065 0.0069 0.0071 0.0064 0.0052 0.0038 0.0021 0.0017 0.0015 0.0014 0.0013 0.0014 0.0014 0.0015 0.0014 0.0014 0.0013 0.0011 0.0012 0.0012 0.0014 0.0014 0.0015 0.0018 0.0018 0.0017 0.0015 0.0016 0.0014 0.0015 0.0016 0.0017 0.0018 0.002 0.002 0.0017 0.0018 0.0017 0.0014 0.0017 0.0019 0.002 0.0019 0.0016 0.0016 0.0014 0.0012 0.0013 0.0012 0.0011 0.001 0.0012 0.0013 0.0014 0.0013 0.0014 0.0012 0.0012 0.0009 0.001 0.0009 0.0008 0.0008 0.0008 0.0006 0.001 0.0008 0.0009 0.001 0.0008 0.0008 0.0008 0.0005 0.0004 0.0004 0.0005 0.0007 0.0016 0.0003 0.0005 0.0005 0.0004 0.0006 0.0004 0.0003 0.0003 0.0005 0.0006 0.0005 0.0007 0.0006 0.0005 0.0006 0.0004 0.0004 0.0005 0.0006 0.0005 0.0003 0.0003 0.0002 0.0002 0.0004", + "breathiness": "-96.0 -96.0 -94.664 -90.6115 -86.1119 -81.608 -78.06 -75.2225 -72.7695 -70.9146 -69.7455 -68.5748 -67.022 -65.38 -63.3612 -61.5213 -59.9992 -58.6368 -57.7049 -56.8111 -56.3186 -55.6187 -54.9114 -54.1729 -53.4075 -52.7562 -51.951 -51.5151 -51.1574 -50.6906 -50.9266 -51.0095 -51.9604 -53.0865 -54.5869 -56.314 -58.4003 -60.1204 -61.6196 -62.8992 -63.6145 -64.2242 -63.9451 -62.0054 -58.8465 -55.2998 -51.0284 -46.9331 -44.0915 -43.3335 -44.1976 -45.9805 -48.0874 -50.1087 -52.0301 -53.7472 -55.1085 -56.6306 -58.4782 -60.1554 -61.7089 -62.8321 -63.258 -63.7473 -63.7901 -63.8413 -63.685 -63.2666 -62.6438 -61.925 -61.0572 -60.0642 -59.8534 -60.0206 -60.1903 -60.6665 -61.3821 -62.001 -62.8686 -63.6745 -64.395 -65.2832 -65.5144 -65.1292 -63.9092 -61.6588 -59.1777 -56.2836 -54.3815 -53.4852 -54.2612 -55.9791 -57.5451 -58.7244 -59.9077 -60.2381 -60.1739 -59.949 -59.6459 -59.3445 -59.0347 -58.9541 -58.8798 -58.889 -58.9406 -58.9858 -58.9341 -59.0738 -58.959 -59.0783 -58.9484 -58.9722 -59.0447 -58.7911 -58.9067 -59.1613 -59.4989 -59.5228 -59.4695 -59.1383 -58.694 -57.6329 -57.2209 -56.93 -56.9303 -56.9376 -57.0955 -57.0781 -57.0308 -56.6161 -56.3695 -55.7107 -55.5157 -54.9307 -54.8075 -54.5083 -54.5033 -54.8097 -55.1413 -55.9016 -56.8852 -58.1016 -58.418 -57.3649 -54.6712 -50.8625 -46.2378 -41.5739 -37.3999 -35.033 -34.7275 -36.3435 -39.0911 -42.9724 -46.856 -50.3878 -53.2836 -55.5174 -56.8612 -57.9177 -58.9125 -59.701 -60.3367 -60.8295 -61.1141 -61.0049 -61.093 -61.1636 -61.1676 -60.5151 -59.9994 -59.0573 -57.9951 -56.6705 -55.5652 -54.6237 -54.3148 -53.9958 -53.9588 -54.1504 
-53.8708 -53.8653 -53.2723 -52.9373 -52.3912 -51.6863 -51.1429 -50.6709 -50.4323 -50.4159 -50.8051 -51.5718 -52.5034 -53.4255 -54.3546 -55.2464 -55.8878 -56.3575 -56.6741 -56.962 -56.8528 -56.8461 -56.6624 -56.4005 -56.2205 -56.1723 -56.2026 -56.0211 -55.8491 -55.2913 -54.2695 -53.1982 -52.0916 -51.5297 -51.3216 -51.7828 -52.7584 -53.8983 -54.8679 -55.5457 -55.8439 -55.6393 -55.0364 -54.2177 -53.64 -53.6068 -54.0372 -54.5197 -55.7336 -56.7536 -57.8269 -58.3419 -58.0966 -57.9237 -57.5658 -57.5581 -57.6614 -58.0408 -58.9098 -59.9255 -61.0294 -62.0386 -62.9242 -63.3572 -63.6458 -63.6366 -62.0022 -59.3501 -55.3415 -50.9048 -45.7123 -40.7633 -36.5504 -33.9414 -33.3111 -33.9408 -36.0294 -38.8315 -41.9045 -44.9949 -47.6294 -49.458 -50.6004 -51.0478 -51.2179 -51.2749 -51.1943 -51.3224 -51.0264 -50.551 -50.4939 -50.415 -50.6162 -51.1831 -51.8932 -52.8613 -53.6107 -53.8369 -54.0552 -53.6918 -53.6038 -53.314 -53.101 -53.1389 -53.4965 -53.8521 -54.408 -55.138 -55.7481 -56.4059 -56.761 -57.1221 -57.4509 -57.7469 -57.9404 -57.676 -57.1471 -56.2103 -55.1529 -53.7942 -52.6406 -51.7063 -51.0244 -51.0369 -51.0942 -51.1314 -51.5206 -51.591 -51.4113 -50.3633 -48.2109 -45.7272 -42.9279 -40.6669 -38.816 -38.2299 -38.4583 -39.8168 -41.9529 -44.1263 -46.5958 -49.291 -51.1897 -52.7925 -53.6564 -53.9163 -53.1027 -51.5075 -49.25 -46.515 -42.9754 -39.7184 -36.7622 -34.4484 -32.9649 -32.754 -33.4074 -35.5939 -38.9048 -42.7527 -46.712 -50.5942 -53.7296 -55.9706 -56.9204 -57.4693 -57.6675 -57.5763 -57.5568 -57.7989 -58.1262 -58.4184 -58.2207 -56.8616 -54.8838 -52.3922 -49.7758 -48.0868 -47.0641 -47.3457 -48.3514 -50.0607 -51.8077 -53.1441 -54.5473 -55.4991 -56.6795 -58.1046 -59.448 -60.7928 -62.2052 -63.2436 -63.9085 -63.7663 -63.3636 -62.7461 -61.5921 -60.3356 -59.1047 -58.0262 -56.841 -56.3627 -56.2442 -56.7867 -57.7885 -59.1591 -60.5874 -62.5662 -64.1509 -65.6273 -66.3489 -65.8345 -64.3637 -61.5414 -57.9439 -54.042 -50.659 -48.5663 -48.5214 -49.918 -53.1969 -56.7671 -60.0966 -62.9994 -65.3782 -66.3734 -66.5369 -66.1036 -65.195 -63.5132 -61.5025 -58.6022 -55.4174 -52.5222 -50.1053 -48.7042 -48.2539 -48.6901 -49.334 -50.3028 -50.9672 -51.6802 -52.5782 -53.3974 -54.3361 -55.4459 -56.415 -57.6104 -58.3516 -59.0837 -59.4013 -59.2791 -58.7189 -57.2895 -54.7374 -51.7971 -48.2418 -44.8749 -42.1855 -41.1176 -41.9688 -44.1527 -47.3562 -50.9007 -54.2403 -57.1689 -59.4756 -61.571 -63.395 -64.854 -66.0408 -66.497 -66.8443 -66.4901 -65.0522 -62.9321 -60.3552 -57.4833 -54.5043 -52.6772 -51.6526 -52.0836 -52.9908 -54.6075 -56.1236 -57.3717 -58.6228 -58.9893 -59.1812 -59.1938 -58.8008 -57.9728 -56.3924 -54.3 -51.8979 -49.48 -47.1718 -45.5361 -44.6207 -44.1191 -44.585 -45.7238 -47.5989 -49.7485 -52.1076 -54.6551 -56.5804 -57.716 -58.103 -58.0547 -58.3926 -58.2549 -58.1865 -58.3444 -58.4464 -58.3364 -58.333 -58.5567 -58.3716 -58.5613 -58.6475 -58.3034 -58.1895 -57.7638 -57.1631 -56.5534 -56.0275 -55.583 -55.3652 -55.3888 -55.2415 -55.413 -55.4606 -55.4994 -55.7239 -56.11 -56.5095 -56.842 -57.3266 -57.6017 -57.678 -57.4492 -57.2428 -56.9377 -56.8465 -56.524 -56.3913 -56.3928 -56.1603 -56.2293 -56.3555 -56.4319 -56.6139 -56.923 -57.6856 -58.8815 -59.836 -60.9072 -61.6772 -62.1527 -62.2264 -62.0848 -62.0033 -61.4319 -61.0476 -60.6156 -60.6366 -60.923 -62.0103 -63.5044 -65.001 -66.6217 -67.9789 -68.972 -69.4057 -69.481 -69.1867 -69.2485 -69.2134 -69.5424 -69.9232 -70.1819 -69.4534 -68.0052 -65.4334 -62.682 -59.9955 -58.0249 -57.5249 -58.5911 -60.1397 -62.5194 -64.1622 -66.064 -67.1456 -68.0017 -68.8107 -70.1569 -71.7804 -72.0111 -72.7575", 
"breathiness_timestep": "0.011609977324263039" }, { @@ -58,9 +58,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0", "f0_seq": "264.3 264.5 264.9 264.7 264.0 263.9 263.8 263.7 263.6 263.5 263.3 263.1 263.0 262.2 262.1 262.2 262.4 262.1 261.7 262.1 262.0 262.3 262.0 261.7 261.7 261.9 261.1 261.5 262.0 261.6 261.1 260.9 260.5 261.4 260.8 261.2 260.6 260.8 260.6 260.1 260.0 259.3 259.5 259.0 259.2 258.4 258.0 257.5 257.0 256.2 256.3 249.7 247.1 246.4 246.6 246.3 245.2 245.1 245.4 245.6 246.0 247.3 243.9 239.3 231.1 245.7 260.2 275.7 291.2 304.0 297.8 297.4 298.9 298.5 298.4 297.2 296.5 295.2 295.6 295.6 294.3 289.0 279.3 267.0 274.0 282.6 292.0 301.6 312.2 324.2 339.4 336.1 333.4 334.2 334.2 332.0 328.0 324.5 319.7 315.9 314.4 314.7 317.2 321.6 325.4 327.3 330.2 331.7 330.8 329.7 329.1 328.4 328.3 329.2 329.6 328.7 330.1 331.6 332.2 333.0 333.6 334.5 334.3 332.1 330.4 327.9 324.5 322.1 320.6 322.3 326.4 331.0 337.2 342.4 346.1 347.6 347.5 347.1 343.7 338.2 331.5 325.8 321.6 320.6 321.8 324.2 328.4 332.9 339.7 345.6 349.6 351.9 351.6 349.0 344.6 340.2 334.1 327.5 321.6 318.3 317.5 320.0 325.5 332.6 339.8 346.2 351.4 354.0 354.5 351.7 347.2 341.6 335.7 328.4 324.3 320.1 318.3 319.1 321.8 326.3 331.3 337.1 341.9 344.8 345.4 344.6 341.9 337.4 334.4 329.5 322.5 320.2 321.1 322.5 328.0 329.3 329.4 331.9 335.2 338.4 341.4 339.9 334.5 324.0 313.3 305.5 300.9 298.5 296.5 294.6 292.8 291.2 289.4 290.6 290.1 290.6 291.0 292.3 293.2 293.5 292.9 292.0 291.1 291.2 292.3 293.4 291.2 285.6 272.9 285.6 301.5 316.1 331.3 344.5 338.7 336.1 335.5 336.0 334.4 334.7 334.3 333.5 331.3 327.3 321.6 311.6 291.5 277.8 265.5 253.1 242.4 231.8 221.1 210.4 204.5 204.3 210.1 215.8 221.1 225.5 225.7 224.9 223.2 218.2 212.5 207.4 203.3 202.3 206.8 214.3 222.9 229.8 233.1 235.4 234.9 233.3 226.2 220.1 211.8 206.6 205.5 207.7 212.8 216.2 220.5 223.8 226.4 225.8 223.8 220.7 216.1 212.6 214.2 218.0 223.0 228.1 231.9 232.4 232.2 230.3 226.4 221.5 215.1 210.1 207.2 206.3 209.0 213.2 218.4 225.3 231.9 236.5 235.8 233.6 228.6 219.3 211.6 204.8 201.5 199.3 198.5 198.7 199.5 200.1 200.0 200.6 201.4 201.6 203.4", "f0_timestep": "0.011609977324263039", - "energy": "0.0002 0.0 0.0 0.0 0.0 0.0 0.0001 0.0002 0.0 0.0003 0.0005 0.0006 0.0012 0.0012 0.0017 0.0019 0.0024 0.003 0.003 0.0031 0.0035 0.0038 0.0042 0.0042 0.0048 0.0052 0.005 0.005 0.0051 0.0056 0.0046 0.0046 0.0038 0.0038 0.0029 0.0024 0.0021 0.0018 0.0012 0.0003 0.0008 0.0007 0.0023 0.0044 0.0093 0.0141 0.0177 0.0204 0.0214 0.0455 0.0661 0.0823 0.0984 0.1039 0.1042 0.1048 0.1029 0.1014 0.0998 0.0973 0.0938 0.0867 0.075 0.0586 0.0409 0.0244 0.0117 0.0133 0.0443 0.0686 0.0881 0.1047 0.1122 0.1148 0.1168 0.1183 0.1189 0.1187 0.1191 0.1165 0.1112 0.0977 0.0781 0.0547 0.0227 0.0087 0.0059 0.0057 0.0041 0.0282 0.06 0.0898 0.1127 0.13 0.1353 0.1354 0.1338 0.1275 0.1247 0.121 0.1196 0.1196 0.1215 0.1219 0.1213 0.1234 0.1225 0.1243 0.1259 0.126 0.1264 0.1257 0.125 0.1279 0.1294 0.1302 0.1309 0.1322 0.1329 0.1358 0.1379 0.1367 0.1358 0.132 0.1281 0.1236 0.1192 0.1149 0.1131 0.1105 0.1103 0.111 0.1104 0.11 0.1082 0.1063 0.1042 0.1023 0.1005 0.1009 0.1017 0.1017 0.1031 0.1018 0.0986 0.0972 0.094 0.0922 0.091 0.0883 0.0874 0.0865 0.0871 0.088 0.0883 0.0903 0.091 0.092 0.0931 0.0926 0.0919 0.0912 0.0899 0.0877 0.0863 0.0837 0.0819 0.0811 0.082 0.0834 0.0845 0.0861 0.0859 0.0848 0.0828 0.0799 0.0797 0.078 0.0767 0.0758 0.0738 0.0721 0.071 0.0704 0.0716 0.0707 0.0704 0.0695 0.0674 0.0652 0.0624 0.0608 0.0599 0.0602 0.0619 0.0624 0.0637 0.064 0.0625 0.0619 0.0617 0.0597 0.0591 0.055 0.0454 0.0357 0.0185 0.0051 
0.004 0.0061 0.0079 0.0073 0.0186 0.049 0.0664 0.0828 0.0953 0.1006 0.1035 0.1022 0.1007 0.0964 0.0933 0.0897 0.0845 0.0751 0.0644 0.0502 0.0329 0.0195 0.0097 0.01 0.032 0.0516 0.0668 0.0818 0.0916 0.097 0.101 0.0998 0.0976 0.0945 0.0907 0.0894 0.0833 0.0706 0.0556 0.0345 0.0185 0.0136 0.0164 0.0165 0.0158 0.0242 0.0366 0.0476 0.0606 0.071 0.0767 0.0817 0.0849 0.0856 0.0868 0.0849 0.0786 0.0718 0.063 0.0557 0.0538 0.0548 0.0582 0.062 0.0644 0.0647 0.0651 0.0631 0.0625 0.0598 0.0554 0.0502 0.0453 0.0417 0.0417 0.0423 0.0431 0.0458 0.0469 0.048 0.0475 0.0453 0.0426 0.0401 0.0358 0.0335 0.0324 0.0317 0.0322 0.0318 0.033 0.0321 0.0328 0.0323 0.0304 0.0278 0.0254 0.0227 0.0211 0.0207 0.0205 0.021 0.0208 0.0213 0.0206 0.02 0.0193 0.0179 0.0157 0.0131 0.0094 0.0054 0.0027 0.0005 0.0003 0.0001 0.0 0.0002 0.0004 0.0 0.0", + "energy": "-76.4405 -76.5256 -74.4063 -73.0861 -71.7266 -70.4336 -68.8047 -66.8576 -65.0031 -62.4978 -60.3319 -58.4413 -56.4821 -55.1429 -53.416 -52.6508 -51.3696 -50.4501 -49.7025 -49.1096 -48.7279 -48.3942 -48.0149 -47.6001 -47.3578 -47.0648 -46.921 -46.9005 -46.5417 -46.8413 -47.1645 -47.7262 -48.7419 -50.0175 -51.9117 -53.7846 -55.6816 -57.1533 -58.1447 -57.8667 -56.022 -52.8884 -49.5469 -45.6288 -42.0286 -38.733 -35.5938 -32.8131 -29.899 -27.2258 -24.9457 -22.8488 -21.5842 -20.9296 -20.5896 -20.3093 -20.5122 -20.4533 -20.7249 -21.029 -21.8342 -23.1986 -25.5178 -28.6544 -30.84 -32.2506 -32.1546 -31.0378 -28.718 -25.7735 -23.1896 -21.3225 -20.2067 -19.744 -19.6112 -19.4866 -19.7968 -20.2124 -20.9382 -21.8635 -23.6913 -26.1659 -29.6741 -33.6239 -37.5591 -40.6632 -41.9958 -41.2128 -38.3191 -34.0838 -29.43 -25.1919 -22.1895 -20.3476 -19.4537 -18.9404 -18.7284 -18.3615 -18.534 -18.4151 -18.7538 -18.7528 -18.8681 -18.8964 -18.875 -18.7962 -18.7293 -18.4264 -18.4169 -18.1771 -17.8407 -17.771 -17.1816 -17.138 -16.8438 -16.6908 -16.4734 -16.2236 -16.0025 -15.7953 -15.6082 -15.3419 -15.0501 -15.107 -15.0768 -15.1229 -15.1594 -15.1789 -15.6046 -15.6469 -15.7773 -15.7358 -15.8323 -15.8902 -15.8832 -15.9121 -15.8263 -15.5001 -15.2982 -15.1784 -14.9968 -15.3575 -15.4906 -15.8244 -16.1773 -16.3759 -16.4925 -16.5525 -16.57 -16.685 -16.6792 -16.4111 -16.3307 -16.203 -15.9121 -15.5885 -15.9907 -16.0279 -16.1809 -16.8088 -17.428 -18.0712 -18.6349 -19.3269 -19.8313 -20.2442 -20.342 -20.6269 -20.7683 -20.813 -20.6706 -20.6888 -20.5447 -20.77 -20.6588 -20.8722 -21.3856 -22.0458 -22.8093 -23.5017 -24.158 -24.4992 -24.8046 -24.9057 -24.9676 -24.9614 -25.0835 -25.2507 -25.4364 -25.3639 -25.7704 -25.9107 -26.0438 -26.2896 -26.6413 -26.3642 -26.1989 -26.083 -25.806 -25.5567 -25.506 -26.1169 -27.1576 -28.8174 -31.0746 -33.6041 -36.1308 -38.4585 -39.7732 -39.6419 -38.1632 -35.7226 -32.646 -29.3845 -26.37 -24.1679 -22.651 -21.5751 -21.3026 -20.9786 -20.8505 -20.5992 -21.0919 -21.762 -22.9664 -25.181 -28.3728 -31.737 -34.8002 -36.6882 -37.7586 -36.5154 -33.9826 -31.0163 -27.9144 -25.5578 -24.171 -23.5642 -23.2288 -23.157 -23.3992 -23.7664 -24.2189 -25.1165 -26.4348 -28.6804 -31.2608 -33.7187 -35.8408 -37.3441 -37.5171 -36.5434 -34.3014 -31.7797 -29.1178 -26.7967 -24.9069 -23.646 -23.017 -22.5876 -22.6563 -22.6626 -23.0877 -23.6638 -24.2115 -24.7434 -25.0899 -25.6148 -25.632 -25.6691 -25.7574 -25.381 -25.2765 -25.2767 -25.3197 -25.2748 -25.4742 -25.5585 -25.9985 -25.8981 -25.8719 -25.9449 -26.0143 -26.0321 -25.8932 -26.0579 -26.1915 -26.1564 -26.3679 -26.577 -26.9752 -27.4948 -27.881 -27.9481 -28.4366 -28.4589 -28.5309 -28.6973 -28.596 -29.0544 -29.181 -29.6401 -29.9382 -30.4214 -31.0857 -31.7785 -32.0898 
-32.5015 -32.5936 -32.594 -32.7337 -32.8577 -33.0031 -33.3105 -34.1025 -35.7497 -38.216 -41.9699 -46.2206 -51.2796 -56.6667 -61.8138 -66.0404 -69.0089 -70.4879 -70.3407 -68.9001 -66.1306 -63.4367", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0001 0.0002 0.0002 0.0 0.0001 0.0004 0.0002 0.0004 0.0004 0.0008 0.0012 0.0014 0.0014 0.0016 0.0019 0.0023 0.0026 0.0029 0.0034 0.004 0.0041 0.0045 0.0047 0.0048 0.0051 0.0055 0.0057 0.0058 0.0058 0.0055 0.0056 0.0053 0.0049 0.0037 0.0031 0.0028 0.0023 0.0017 0.0014 0.001 0.0009 0.0013 0.0027 0.0059 0.0095 0.0149 0.0188 0.0204 0.0202 0.0168 0.0124 0.0072 0.0028 0.0024 0.0022 0.002 0.0022 0.0021 0.002 0.0021 0.0019 0.0016 0.0011 0.0014 0.003 0.0052 0.0075 0.0078 0.0074 0.0058 0.0023 0.0012 0.0011 0.0014 0.0013 0.0015 0.0015 0.0014 0.0015 0.0017 0.0021 0.0022 0.0029 0.0039 0.0048 0.0058 0.0062 0.0061 0.0054 0.0044 0.003 0.0024 0.0019 0.0022 0.0022 0.0019 0.0019 0.0021 0.002 0.0019 0.0019 0.0019 0.0019 0.0019 0.0021 0.0021 0.0021 0.0022 0.0018 0.0018 0.0018 0.0018 0.0021 0.0024 0.0021 0.0021 0.0023 0.0025 0.0025 0.0025 0.0025 0.0023 0.0024 0.0025 0.0027 0.0028 0.0028 0.0028 0.003 0.0033 0.0034 0.003 0.003 0.003 0.0027 0.0025 0.0022 0.0019 0.0021 0.0026 0.003 0.0031 0.0031 0.003 0.0027 0.0021 0.0016 0.0015 0.0015 0.0017 0.002 0.0019 0.002 0.0017 0.0018 0.0017 0.002 0.0024 0.0023 0.0023 0.0018 0.0018 0.0016 0.0015 0.0015 0.0016 0.0016 0.0015 0.0016 0.0016 0.0017 0.0018 0.0018 0.0019 0.0018 0.0018 0.0014 0.0013 0.0011 0.0012 0.0012 0.0011 0.0011 0.001 0.001 0.001 0.001 0.0009 0.0009 0.0009 0.0008 0.0008 0.0008 0.0008 0.0007 0.0006 0.0007 0.0006 0.0006 0.0008 0.0007 0.0007 0.001 0.0011 0.0018 0.0028 0.0029 0.0042 0.0055 0.0067 0.0085 0.0083 0.0074 0.0062 0.0031 0.0018 0.0015 0.0017 0.0016 0.0018 0.0015 0.0014 0.0011 0.0011 0.001 0.0007 0.0007 0.0007 0.0013 0.0022 0.0032 0.0033 0.0029 0.0021 0.001 0.0005 0.0009 0.0009 0.001 0.001 0.001 0.0008 0.001 0.0012 0.0014 0.0024 0.0033 0.0051 0.0078 0.0117 0.0162 0.0171 0.0166 0.0139 0.0088 0.0038 0.0027 0.0026 0.0026 0.0025 0.0024 0.0021 0.0022 0.0023 0.0021 0.0022 0.0021 0.0019 0.0017 0.0016 0.0015 0.0016 0.0015 0.0014 0.0015 0.0017 0.0016 0.0017 0.0017 0.0016 0.0012 0.0011 0.0011 0.001 0.0011 0.0009 0.0009 0.0009 0.001 0.0008 0.0011 0.0009 0.0008 0.0008 0.0006 0.0009 0.0008 0.0005 0.0006 0.0007 0.0006 0.0005 0.0007 0.0005 0.0005 0.0004 0.0004 0.0002 0.0002 0.0004 0.0002 0.0003 0.0006 0.0004 0.0003 0.0004 0.0008 0.0019 0.0016 0.0018 0.0015 0.0004 0.0003 0.0002 0.0 0.0 0.0001 0.0 0.0003", + "breathiness": "-82.6794 -81.3932 -79.5872 -77.2479 -74.999 -72.7115 -70.4778 -68.0253 -65.301 -62.8872 -60.5212 -58.7632 -56.9795 -55.7419 -54.1813 -53.0376 -51.8816 -50.692 -49.6188 -48.8492 -48.3782 -47.7947 -47.464 -47.2357 -47.031 -47.05 -46.8519 -46.7099 -46.5789 -46.6912 -47.0309 -47.7321 -48.6754 -49.8295 -51.42 -53.1469 -55.3033 -57.6478 -59.4143 -59.9541 -58.8824 -56.4233 -52.691 -48.3614 -43.6952 -39.582 -36.8705 -35.8454 -35.8647 -37.9792 -40.6744 -43.8962 -47.137 -50.0525 -52.5332 -53.7571 -54.4443 -54.8157 -54.9843 -55.1905 -55.1522 -54.4494 -53.2425 -51.6317 -49.7332 -47.9423 -47.0442 -47.5724 -49.2703 -51.4559 -53.6594 -56.0994 -57.4231 -58.2485 -58.5072 -58.2951 -58.0817 -58.1315 -57.8181 -57.7348 -56.5587 -55.5466 -53.7977 -51.8195 -49.9153 -48.6078 -47.9131 -48.1563 -48.6517 -49.8086 -50.9628 -52.226 -53.043 -53.8352 -53.922 -53.9862 -53.6194 -53.4517 -54.047 -54.7926 -55.3957 -56.1459 -56.4925 -56.6269 -56.4866 -56.2119 -55.8178 -55.4264 -54.8841 -54.2847 -53.6721 -52.9745 -52.0457 -51.837 
-51.3397 -51.1415 -51.291 -51.2767 -51.7718 -51.7734 -51.9769 -52.0692 -51.9028 -51.6476 -51.0971 -50.6421 -49.9081 -49.3411 -48.7255 -48.5488 -48.6024 -48.7947 -49.257 -49.8721 -50.4618 -50.9038 -51.0381 -51.1331 -50.8946 -50.5674 -49.8678 -49.3185 -48.7976 -48.4963 -48.8374 -49.5093 -49.9702 -50.6221 -51.0701 -51.3051 -51.5658 -51.2179 -51.2402 -51.2268 -51.0592 -51.1234 -50.8241 -50.9558 -50.9847 -51.4612 -52.2289 -52.987 -53.5997 -54.3009 -54.6202 -54.9032 -55.0511 -55.1946 -55.4575 -55.8394 -56.5377 -57.0416 -57.6493 -58.2927 -59.2377 -60.038 -60.706 -61.5122 -61.9356 -62.1903 -62.5744 -63.1633 -63.6203 -64.2993 -64.8893 -65.5839 -66.301 -66.9165 -67.5433 -68.2718 -68.9093 -69.5008 -69.6333 -69.6146 -68.974 -68.293 -67.2822 -66.2036 -65.0086 -64.1864 -63.3013 -62.0678 -59.9737 -57.295 -53.732 -49.9632 -46.361 -43.5196 -41.6529 -40.7855 -41.9491 -43.9733 -46.6231 -49.2735 -51.8548 -53.9148 -55.1521 -55.8149 -56.0464 -55.8997 -56.2971 -56.8227 -57.6133 -58.7356 -59.6316 -60.2414 -60.027 -58.9242 -56.94 -54.7419 -52.9615 -52.3947 -52.8349 -53.9833 -55.6802 -57.1392 -58.3314 -58.9875 -59.385 -59.4335 -59.4161 -59.2171 -58.7097 -57.3989 -55.5509 -52.9824 -49.8744 -46.7235 -44.0987 -42.1042 -41.0795 -41.1725 -42.4083 -44.1122 -46.2411 -48.2822 -50.0437 -51.0636 -51.6801 -51.5996 -51.7844 -51.7621 -52.1494 -52.658 -53.3676 -54.0201 -54.9237 -55.5268 -56.1008 -56.4458 -56.4479 -56.5898 -56.5601 -56.6007 -56.7653 -56.583 -56.5823 -56.6731 -56.9154 -57.1257 -57.1895 -57.1952 -57.2144 -57.2554 -57.6497 -58.0576 -58.165 -58.3493 -58.1897 -58.3203 -58.307 -58.1411 -58.3786 -58.6471 -59.1249 -60.0548 -60.7366 -61.3529 -61.8056 -61.9185 -62.1243 -62.1445 -62.317 -62.7353 -63.8108 -64.8773 -66.0074 -67.2416 -68.0264 -68.481 -68.7919 -68.9506 -68.1169 -66.8684 -64.9836 -62.7892 -60.381 -58.6474 -58.459 -59.5009 -62.1133 -65.1365 -68.2519 -71.1523 -73.1946 -74.2246 -74.4311 -73.9377 -74.6396", "breathiness_timestep": "0.011609977324263039" }, { @@ -74,9 +74,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0", "f0_seq": "196.8 197.5 197.4 197.9 197.2 197.3 197.2 197.1 196.7 196.6 196.4 196.4 196.2 195.9 195.9 196.1 196.1 196.3 196.6 197.0 196.6 196.5 196.2 196.6 196.9 196.8 196.8 196.6 196.4 196.5 195.9 196.1 195.7 195.8 193.3 192.3 194.9 201.7 211.9 224.0 234.0 242.5 248.6 249.7 247.7 244.4 240.8 234.4 225.1 219.5 220.8 220.5 222.6 226.9 223.0 219.4 218.5 217.8 218.9 219.1 219.3 219.2 219.4 219.8 218.7 217.6 218.8 216.2 212.5 221.4 228.1 236.0 243.7 251.0 254.8 249.3 246.1 245.1 245.5 245.2 244.4 244.7 245.7 246.5 248.0 250.2 250.5 249.4 244.7 238.0 235.8 244.1 255.4 267.4 281.9 288.7 291.3 294.3 295.2 296.7 293.9 291.1 288.0 285.2 280.5 277.1 275.0 274.7 275.6 278.2 280.8 286.4 290.1 293.8 295.0 295.3 294.4 292.7 289.5 286.5 285.2 284.3 284.9 285.9 290.1 294.2 298.3 302.8 305.9 304.7 301.5 295.9 290.5 284.0 280.7 279.5 282.8 287.0 292.8 298.7 304.8 309.0 308.2 305.8 300.7 296.4 296.7 293.7 292.3 293.1 293.9 294.2 294.5 294.6 293.1 283.6 267.8 255.0 251.9 248.4 248.1 247.3 246.6 246.8 246.0 246.3 245.4 246.1 245.2 242.8 235.7 228.6 222.6 217.2 213.8 212.0 216.5 215.4 218.4 220.6 222.4 222.8 222.5 221.3 220.9 219.7 220.3 221.6 221.3 220.7 220.6 220.6 221.3 221.0 219.9 219.5 219.7 218.6 219.4 220.4 220.1 220.2 220.7 220.5 219.6 218.4 218.5 220.1 221.4 219.1 205.4 208.1 216.8 224.2 231.9 240.8 250.2 261.7 253.3 252.8 251.5 252.4 250.5 248.1 246.7 247.6 247.5 247.3 246.3 241.6 230.4 227.6 222.7 218.1 213.1 207.9 204.1 198.3 192.7 192.4 196.2 198.7 200.5 200.9 200.3 198.4 194.9 191.0 188.0 186.3 185.5 188.3 192.7 199.1 
204.3 207.5 207.3 207.1 205.4 200.7 196.0 190.5 185.6 184.8 185.6 190.4 195.2 200.1 205.7 209.3 210.2 210.3 206.6 200.7 192.1 185.3 182.7 181.6 183.6 187.7 193.7 198.7 204.1 207.8 208.3 207.1 203.6 196.8 188.5 182.1 179.0 178.6 181.5 186.2 190.6 196.6 203.1 207.3 209.9 211.0 208.6 202.6 194.8 187.0 182.9 180.9 181.3 182.7 186.9 191.8 198.1 203.3 205.0 205.7 206.6 206.6 205.1 201.3 198.8 196.9 195.1 192.7 189.3 187.0 186.3 183.9 182.6 182.1", "f0_timestep": "0.011609977324263039", - "energy": "0.0009 0.0011 0.001 0.0017 0.0018 0.0023 0.0034 0.0039 0.0042 0.0046 0.0053 0.0058 0.0061 0.0061 0.0061 0.0062 0.0052 0.0048 0.0046 0.0037 0.003 0.0023 0.0017 0.0016 0.0016 0.0037 0.0057 0.0089 0.0117 0.0136 0.0133 0.0287 0.0482 0.0615 0.0731 0.0773 0.0764 0.0732 0.0703 0.0697 0.0711 0.0754 0.0786 0.0812 0.0823 0.0818 0.0793 0.0721 0.06 0.0458 0.028 0.0193 0.0344 0.0487 0.0624 0.0731 0.0756 0.0746 0.072 0.0703 0.0708 0.0708 0.0713 0.0729 0.0738 0.0751 0.0706 0.0609 0.0491 0.0345 0.0215 0.0193 0.0207 0.0412 0.0641 0.0788 0.0868 0.0895 0.0856 0.0821 0.0818 0.0809 0.0782 0.0773 0.0768 0.0774 0.0777 0.0738 0.0649 0.0506 0.0359 0.0206 0.0138 0.0122 0.0329 0.0534 0.0722 0.0856 0.0912 0.0954 0.0948 0.0947 0.0956 0.0932 0.0923 0.09 0.0862 0.0838 0.0823 0.0834 0.0858 0.0867 0.0884 0.0907 0.0915 0.0923 0.0929 0.0916 0.0915 0.0902 0.0886 0.0882 0.0894 0.0893 0.0897 0.089 0.0877 0.0881 0.0896 0.0897 0.0925 0.0928 0.0907 0.0893 0.0841 0.0798 0.0771 0.0754 0.0772 0.0797 0.0828 0.0847 0.0861 0.0864 0.0829 0.0747 0.0606 0.0444 0.0259 0.0143 0.0152 0.0171 0.0174 0.0151 0.0192 0.0521 0.0677 0.0795 0.0874 0.0838 0.0829 0.0808 0.0789 0.0786 0.0789 0.08 0.0817 0.083 0.0849 0.0842 0.08 0.0699 0.055 0.0367 0.0288 0.0416 0.0585 0.0739 0.0833 0.0885 0.0894 0.0881 0.0887 0.0875 0.0885 0.0893 0.0894 0.0884 0.0881 0.0896 0.0884 0.0916 0.0918 0.0909 0.0918 0.0899 0.0903 0.0902 0.0902 0.0925 0.0922 0.0937 0.094 0.0958 0.0944 0.0933 0.0951 0.0931 0.0875 0.0764 0.0583 0.0374 0.0198 0.0171 0.0194 0.0178 0.0339 0.053 0.0684 0.0832 0.0895 0.0919 0.0917 0.0917 0.0912 0.0892 0.0896 0.0882 0.0813 0.0708 0.0546 0.0371 0.0213 0.0183 0.0176 0.0161 0.0329 0.0481 0.0622 0.0728 0.0773 0.0776 0.0787 0.0807 0.0795 0.0798 0.0768 0.0719 0.0664 0.0624 0.0581 0.0574 0.0585 0.0613 0.064 0.0683 0.0723 0.0744 0.0754 0.0742 0.0696 0.0651 0.0589 0.0526 0.0486 0.0472 0.0468 0.0477 0.05 0.0539 0.0578 0.0594 0.0598 0.0579 0.0529 0.048 0.0432 0.0392 0.0353 0.0342 0.0338 0.035 0.0377 0.0413 0.0438 0.0457 0.0452 0.0423 0.039 0.035 0.0308 0.0279 0.026 0.0241 0.023 0.0215 0.0214 0.0214 0.0233 0.0244 0.0256 0.026 0.0241 0.0217 0.0181 0.0155 0.0132 0.0114 0.011 0.0112 0.0115 0.0124 0.0128 0.0129 0.0136 0.013 0.0119 0.0101 0.0074 0.0039 0.0012 0.0008 0.0004 0.0 0.0005 0.0002 0.0 0.0", + "energy": "-60.0683 -57.6949 -55.3042 -51.9782 -49.7139 -48.0388 -46.3777 -45.4232 -44.8352 -44.3521 -44.0329 -43.9521 -44.2622 -44.6129 -45.2272 -46.4559 -48.1796 -50.1593 -52.2321 -54.1932 -55.1554 -54.6364 -52.5926 -49.4155 -45.6809 -42.0208 -39.2642 -37.2285 -35.4448 -33.5056 -31.5576 -29.1449 -26.5471 -24.3391 -22.6286 -21.86 -21.391 -21.5744 -21.7903 -21.9292 -21.7714 -21.6841 -21.4486 -21.1163 -21.156 -21.5952 -22.9419 -24.5299 -26.4047 -27.6914 -28.1322 -28.0894 -27.0306 -25.297 -23.6966 -22.2553 -21.4869 -21.3013 -21.2453 -21.3647 -21.3302 -21.3613 -21.5408 -21.7208 -22.1542 -22.7901 -24.1193 -25.6945 -27.4102 -28.868 -29.6409 -29.2189 -28.1575 -26.3354 -24.4197 -22.7402 -21.5841 -20.9593 -20.6636 -20.8037 -21.1731 -21.0748 -21.2227 -21.426 -21.4867 -21.8948 -22.6005 
-23.9673 -25.6852 -28.1136 -29.977 -31.2885 -31.1673 -30.3658 -28.3736 -26.0316 -23.8805 -21.8685 -20.7084 -20.2422 -19.4498 -19.3106 -19.5782 -19.5176 -19.4708 -19.5483 -19.6501 -19.3479 -19.5164 -19.3193 -19.1494 -18.9276 -18.7716 -18.5497 -18.7428 -18.7376 -18.7709 -19.1425 -19.2529 -19.5874 -19.9308 -19.6432 -19.7468 -19.5046 -19.4483 -19.2537 -18.953 -18.7447 -18.9113 -18.9788 -19.1203 -19.3777 -19.4919 -20.0285 -20.222 -20.5213 -20.6438 -21.0315 -20.5538 -20.6317 -20.5636 -20.566 -21.1339 -21.7849 -22.8781 -24.7809 -27.0796 -29.3079 -31.6935 -33.6747 -34.8059 -34.8367 -33.4334 -31.1325 -28.6026 -25.8842 -23.5631 -21.7119 -20.6129 -20.1827 -20.1978 -20.3469 -20.1151 -20.2191 -20.0863 -20.1368 -20.3329 -20.4813 -20.8417 -21.5941 -22.9013 -24.2627 -24.9509 -25.5635 -25.0058 -24.436 -23.3506 -22.2412 -21.2013 -20.7182 -20.47 -20.0844 -20.3486 -20.3504 -20.3866 -20.3759 -20.4138 -20.3706 -20.3249 -20.2085 -20.357 -20.2525 -20.1964 -20.2076 -20.2152 -20.1716 -20.1509 -20.2261 -20.2149 -20.2242 -20.1483 -20.1886 -20.3318 -20.3683 -20.4892 -20.515 -20.7776 -21.6992 -22.7407 -25.0726 -27.1637 -29.2469 -30.7798 -31.3577 -30.8404 -29.4471 -27.242 -25.0873 -23.2613 -21.751 -20.7985 -20.3566 -20.2015 -20.3965 -20.2588 -20.5032 -20.8749 -21.3561 -22.5047 -24.0471 -26.227 -28.5532 -30.4736 -31.5163 -31.5591 -30.3673 -28.7043 -26.4517 -24.4561 -22.9613 -22.2154 -21.9512 -21.5189 -21.2793 -21.5045 -21.3151 -21.3299 -21.719 -22.0988 -22.2049 -22.6093 -22.479 -22.4573 -22.4707 -22.372 -22.3748 -21.9771 -21.8725 -22.0122 -22.3988 -22.7029 -23.46 -24.0024 -24.5991 -25.0398 -25.2278 -25.3089 -25.266 -25.2554 -24.9303 -24.8949 -24.5538 -24.6262 -24.9322 -25.2425 -25.6615 -26.2044 -26.719 -27.0872 -27.4257 -27.2567 -27.2186 -27.1986 -27.1962 -27.2337 -27.4923 -27.7144 -27.9404 -28.404 -28.6977 -28.9751 -29.4861 -29.731 -29.9905 -30.2042 -30.3648 -30.3856 -30.909 -30.9995 -31.2736 -31.7474 -32.2985 -32.8718 -33.479 -34.3521 -34.977 -35.5617 -36.1919 -36.5214 -36.3271 -36.1949 -35.7851 -35.5429 -35.451 -35.743 -36.367 -37.5461 -39.8982 -43.1233 -47.3119 -51.6217 -55.4911 -58.5283 -60.015 -60.2387 -58.1204 -55.3067 -52.3269", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0003 0.0006 0.001 0.0017 0.0022 0.0028 0.0034 0.0043 0.0051 0.0061 0.0066 0.0072 0.0076 0.0077 0.007 0.0064 0.006 0.0055 0.0053 0.0049 0.0043 0.0029 0.0026 0.0025 0.0028 0.0046 0.0066 0.0099 0.0127 0.0137 0.0138 0.0113 0.0076 0.0046 0.0025 0.002 0.0017 0.0016 0.0011 0.0013 0.0012 0.0011 0.0012 0.001 0.0009 0.0007 0.0009 0.0015 0.0056 0.0121 0.0148 0.0161 0.015 0.0104 0.0067 0.0034 0.0031 0.0027 0.0024 0.0021 0.002 0.002 0.002 0.0018 0.0017 0.002 0.0028 0.0061 0.0132 0.0166 0.0199 0.0216 0.0195 0.0167 0.0129 0.0091 0.0042 0.0033 0.0025 0.0024 0.002 0.002 0.0018 0.0014 0.0012 0.0008 0.0008 0.0009 0.0009 0.001 0.0043 0.0089 0.0126 0.013 0.0128 0.0107 0.0044 0.0034 0.0026 0.002 0.0018 0.0015 0.0013 0.0011 0.0012 0.0012 0.0014 0.0012 0.0013 0.0013 0.0009 0.0011 0.0009 0.0009 0.0011 0.0011 0.0012 0.0014 0.0013 0.0012 0.001 0.0009 0.0009 0.0008 0.0009 0.0009 0.0008 0.0007 0.0007 0.0008 0.001 0.001 0.0009 0.0012 0.0011 0.001 0.0008 0.0007 0.0008 0.0007 0.0006 0.0004 0.0005 0.0008 0.001 0.0013 0.0052 0.0083 0.0108 0.014 0.0157 0.0167 0.0173 0.0155 0.0134 0.0108 0.0073 0.0038 0.0016 0.0014 0.0011 0.0009 0.0007 0.0008 0.0006 0.0003 0.0003 0.0005 0.0 0.0 0.0001 0.0002 0.0006 0.0011 0.0017 0.0022 0.0022 0.0019 0.0016 0.0014 0.0013 0.0012 0.0012 0.0011 0.0014 0.0018 0.0014 0.0015 0.0015 0.0013 0.0014 0.0015 0.0015 0.0016 0.0018 0.0018 
0.0016 0.0017 0.0015 0.0016 0.0018 0.0017 0.0019 0.0019 0.002 0.0021 0.0023 0.0021 0.0021 0.0023 0.0043 0.01 0.0158 0.0206 0.0233 0.0233 0.0206 0.016 0.0109 0.0059 0.002 0.0015 0.0013 0.0016 0.0013 0.0011 0.0011 0.0013 0.0018 0.004 0.0076 0.0116 0.0144 0.0174 0.0177 0.0162 0.0135 0.0096 0.0061 0.0026 0.0023 0.0023 0.0024 0.0026 0.0026 0.0025 0.0027 0.003 0.0031 0.0031 0.003 0.0027 0.0028 0.0028 0.0024 0.0025 0.0023 0.0025 0.0025 0.0026 0.0027 0.0024 0.0023 0.0021 0.0019 0.0019 0.0019 0.0019 0.0019 0.0018 0.0017 0.0018 0.0017 0.0019 0.0018 0.002 0.0019 0.0017 0.0015 0.0014 0.0014 0.0011 0.001 0.001 0.0009 0.0012 0.0015 0.0011 0.0011 0.0012 0.0011 0.0011 0.001 0.0008 0.0007 0.0007 0.0008 0.0006 0.0006 0.0004 0.0006 0.0007 0.0005 0.0007 0.0005 0.0005 0.0003 0.0003 0.0004 0.0002 0.0002 0.0002 0.0001 0.0003 0.0003 0.0005 0.0014 0.0016 0.002 0.0018 0.0013 0.0008 0.0005 0.0004 0.0003 0.0001 0.0001 0.0", + "breathiness": "-60.8752 -59.1407 -55.0369 -52.2111 -49.4973 -47.4628 -45.8913 -44.4749 -43.5553 -42.731 -42.3994 -42.6083 -43.1632 -44.1372 -45.2398 -46.3536 -48.003 -49.8528 -52.2689 -54.4952 -55.5197 -55.145 -53.1517 -49.9893 -46.1224 -41.8823 -38.7654 -36.9003 -36.611 -37.2889 -38.7037 -40.7649 -43.3971 -46.0233 -48.632 -51.2114 -53.287 -55.2771 -57.0216 -58.5086 -60.1325 -61.5397 -62.533 -62.309 -60.6549 -57.4444 -53.7257 -48.9714 -44.4135 -40.6654 -38.7018 -39.1639 -41.0666 -43.7468 -46.5081 -49.0807 -50.8494 -52.4234 -52.8809 -53.1421 -53.1677 -53.2043 -53.3362 -52.5726 -51.1033 -48.6475 -45.7078 -42.6438 -39.3131 -36.9181 -35.6577 -35.8617 -37.2705 -39.6626 -42.5757 -45.8694 -49.2753 -51.9319 -54.1394 -55.8223 -56.8787 -57.6799 -58.5556 -59.239 -59.9471 -60.1957 -59.0537 -56.7436 -53.2729 -49.569 -45.5083 -42.1955 -39.9126 -39.771 -41.1168 -43.2845 -46.3639 -48.9539 -51.3978 -53.2241 -54.3758 -54.9663 -55.6342 -56.3967 -56.7313 -56.9322 -56.7382 -56.2754 -55.9399 -55.6201 -55.4452 -55.5685 -55.9473 -56.1245 -56.2927 -56.2497 -55.9464 -55.5848 -55.5387 -55.6671 -55.6884 -56.0987 -56.4084 -56.576 -56.529 -56.2292 -56.1096 -55.9926 -55.9561 -56.2694 -56.4516 -56.9168 -57.3254 -58.004 -58.5368 -59.2354 -59.8608 -60.4331 -60.9971 -61.0203 -60.9402 -60.4592 -59.2432 -56.9695 -53.8716 -50.3812 -46.2867 -42.5003 -39.5312 -37.4086 -36.3759 -36.4801 -37.3248 -38.9386 -41.5065 -44.5467 -48.1719 -51.5679 -55.0922 -57.6765 -59.7752 -60.8585 -61.4693 -61.8477 -61.6254 -61.1161 -60.8405 -60.3904 -60.2431 -59.9514 -59.6996 -59.2515 -58.3929 -57.8326 -56.5831 -55.7315 -55.3544 -55.1598 -55.423 -55.8908 -56.3169 -56.7391 -57.0277 -56.7712 -56.4992 -55.7088 -55.3045 -54.7671 -54.2336 -54.0773 -53.7686 -53.9686 -53.9733 -54.1698 -54.3736 -54.8144 -55.5021 -55.7783 -56.0545 -56.0026 -55.943 -55.5235 -55.1519 -54.8755 -54.7499 -54.5629 -53.7781 -52.749 -50.5573 -47.6995 -44.3151 -40.6912 -37.7056 -35.2194 -34.0899 -34.4839 -36.4695 -39.5766 -43.4868 -47.8552 -51.7543 -54.9808 -57.1826 -58.4729 -59.0294 -58.3995 -56.4226 -53.2217 -49.2796 -45.232 -41.2689 -38.5543 -37.0663 -36.9064 -37.7468 -39.4289 -41.617 -44.4458 -46.8532 -49.0016 -50.5217 -51.4549 -51.6844 -51.4991 -51.3315 -51.1953 -51.4241 -51.4523 -51.6802 -52.0541 -52.0986 -52.3941 -52.6184 -52.9163 -53.0145 -53.3195 -53.114 -52.668 -52.3866 -51.9886 -51.8984 -51.8961 -52.185 -52.6799 -52.9914 -53.535 -53.9614 -54.4868 -54.6617 -55.1358 -55.1736 -55.3119 -55.2794 -55.1266 -55.1989 -55.4881 -55.5791 -55.9448 -56.4149 -56.5837 -57.0853 -57.4679 -58.0667 -58.6417 -59.2008 -59.5947 -59.5942 -59.8477 -60.1239 -60.4889 -61.1059 -61.8206 -62.6826 -63.7095 
-64.6905 -65.3025 -65.5994 -65.8222 -65.7667 -65.5952 -65.3867 -65.7641 -66.4118 -67.3691 -68.8002 -69.9625 -70.857 -71.8714 -72.2643 -72.7594 -72.52 -72.2536 -71.9872 -71.7399 -70.8272 -69.4365 -67.8269 -65.6946 -63.8673 -62.3261 -61.0939 -61.2484 -62.3816 -63.4175 -64.582 -65.2483 -65.164 -64.8688", "breathiness_timestep": "0.011609977324263039" }, { @@ -90,9 +90,9 @@ "note_slur": "0 0 0 0 0 0 0 0", "f0_seq": "178.2 177.9 178.0 178.1 177.7 178.1 178.2 178.5 178.3 178.1 178.0 178.1 177.8 178.0 178.0 177.9 178.0 178.2 178.4 178.4 178.5 178.7 178.4 178.9 179.4 179.6 180.0 180.8 181.8 182.7 184.8 186.9 190.3 190.7 189.2 187.4 186.0 184.6 183.0 185.7 188.4 192.3 197.1 199.1 198.8 195.7 190.7 180.3 171.2 168.3 166.3 165.9 164.6 164.9 164.9 165.0 165.0 165.0 165.8 165.7 166.3 166.6 166.7 166.7 166.1 163.9 158.9 153.6 157.1 163.4 169.3 176.4 183.4 190.8 199.3 197.2 196.1 196.0 196.4 196.2 196.1 195.9 195.8 196.2 196.5 195.4 193.9 194.0 197.1 203.7 209.2 214.6 220.1 226.4 231.8 229.9 226.2 227.6 226.9 225.1 221.8 216.0 210.3 207.0 205.5 206.0 211.0 217.9 224.5 229.5 228.7 226.0 220.9 211.8 205.3 201.4 199.8 202.0 206.0 210.7 214.6 221.5 225.7 231.0 231.4 231.9 230.2 227.4 224.2 224.9 224.8 227.0 232.1 238.1 242.2 247.1 250.0 248.7 247.7 247.5 246.6 245.9 246.2 246.3 247.2 248.2 248.2 248.9 246.1 241.7 234.7 225.6 216.1 208.6 208.3 210.6 213.7 213.7 216.7 218.9 219.3 218.4 217.0 214.2 211.7 208.5 206.4 205.4 207.2 211.9 218.6 224.8 228.6 229.6 228.1 224.6 220.6 213.9 209.7 209.2 210.8 214.8 221.5 225.3 226.9 227.5 225.7 223.1 219.2 215.9 212.6 211.1 211.3 211.5 215.2 221.4 226.5 229.4 230.9 231.1 229.0 225.3 221.7 217.1 213.7 211.1 210.3 211.9 215.4 219.6 223.9 227.3 229.3 231.5 231.0 229.3 225.6 221.0 216.6 214.2 211.5 210.8 212.2 215.0 218.5 220.8 224.4 227.3 230.1 230.8 230.0 227.6 223.4 219.6 216.2 213.1 211.3 211.3 213.2 216.2 220.5 224.9 228.1 230.5 231.5 230.0 227.5 223.4 219.1 214.2 211.1 209.6 210.3 214.9 219.2 223.0 227.7 229.4 229.8 229.4 226.3 222.8 219.6 213.6 208.7 206.2 205.8 210.5 216.9 224.1 226.0 229.7 229.2 228.4 228.5 228.2 227.2 225.9 223.7 220.2 216.9 216.8 217.0 215.8 215.1 215.4 213.8 213.8", "f0_timestep": "0.011609977324263039", - "energy": "0.0013 0.0021 0.0027 0.0033 0.0039 0.0049 0.0057 0.0068 0.007 0.0074 0.0068 0.0071 0.0079 0.0083 0.0085 0.0088 0.0084 0.0082 0.0075 0.0071 0.0058 0.005 0.0034 0.0017 0.0014 0.0021 0.0022 0.0041 0.008 0.0123 0.0158 0.0193 0.0428 0.0578 0.0693 0.0802 0.0775 0.0744 0.0716 0.0664 0.065 0.0677 0.0693 0.0729 0.0759 0.0711 0.0635 0.05 0.0312 0.0184 0.0118 0.0202 0.041 0.0545 0.0683 0.0797 0.0834 0.0858 0.0873 0.0864 0.0864 0.0851 0.0816 0.0767 0.0675 0.0565 0.0401 0.0248 0.0106 0.008 0.0095 0.0117 0.0159 0.0414 0.0614 0.079 0.0912 0.0925 0.0925 0.0882 0.085 0.0848 0.0812 0.0785 0.0749 0.0678 0.0576 0.0444 0.0333 0.0221 0.0219 0.0241 0.0215 0.0411 0.0604 0.0722 0.0854 0.0873 0.0839 0.0817 0.0771 0.0737 0.0698 0.0663 0.0616 0.059 0.0569 0.0547 0.0546 0.0531 0.0516 0.0513 0.0513 0.0508 0.0495 0.0468 0.0442 0.0425 0.0413 0.0392 0.0373 0.0356 0.0342 0.0338 0.0347 0.034 0.0331 0.0307 0.0289 0.0251 0.023 0.0294 0.0367 0.0481 0.0606 0.0729 0.0829 0.0887 0.0924 0.0939 0.0943 0.0943 0.0944 0.0927 0.0906 0.0899 0.0876 0.0859 0.0823 0.0786 0.0752 0.0697 0.0671 0.0658 0.0649 0.0681 0.0707 0.0767 0.0807 0.0833 0.0873 0.0871 0.0874 0.0853 0.0811 0.0763 0.0702 0.0667 0.0643 0.0632 0.0646 0.0686 0.0707 0.0754 0.0778 0.0785 0.0776 0.0743 0.068 0.061 0.0581 0.0563 0.0582 0.0612 0.0646 0.0665 0.0692 0.0707 0.0722 0.0734 0.0707 0.0671 0.0612 0.0558 0.0531 0.0537 
0.0564 0.0591 0.0629 0.0661 0.0682 0.0694 0.0699 0.0667 0.0641 0.0599 0.0551 0.0522 0.0496 0.0488 0.0505 0.0521 0.0552 0.0593 0.0602 0.0622 0.0626 0.0617 0.0611 0.0588 0.0573 0.0535 0.0502 0.0476 0.0466 0.0475 0.0481 0.05 0.0517 0.0537 0.0544 0.055 0.0551 0.0545 0.0529 0.0503 0.046 0.0427 0.0399 0.0372 0.038 0.038 0.0395 0.0415 0.0437 0.0453 0.0468 0.0469 0.0451 0.042 0.0379 0.0339 0.0304 0.0275 0.0265 0.0263 0.0262 0.0265 0.0279 0.0293 0.0311 0.0311 0.0306 0.0281 0.0248 0.023 0.0212 0.0188 0.0179 0.0176 0.0176 0.0187 0.0191 0.0192 0.0193 0.0183 0.0159 0.0144 0.0127 0.0112 0.0081 0.0055 0.003 0.0009 0.0007 0.0003 0.0 0.0", + "energy": "-64.9498 -61.0856 -58.4382 -55.7203 -52.9628 -51.0196 -48.8767 -46.9911 -45.7665 -44.4903 -43.499 -42.8414 -42.7891 -42.776 -43.0174 -43.5147 -44.378 -45.6248 -47.1324 -49.2573 -51.0767 -53.1493 -54.2141 -54.4525 -53.2617 -51.3746 -48.832 -45.6852 -42.2987 -38.8279 -35.4679 -31.9468 -28.8521 -26.3618 -24.4885 -23.378 -22.7888 -22.8512 -22.9026 -23.0111 -22.9413 -22.9756 -23.1649 -23.7172 -24.645 -26.1427 -28.2047 -30.2065 -31.7897 -32.5812 -31.6836 -30.5395 -28.6406 -26.4029 -24.6446 -23.2655 -22.3812 -22.1155 -21.9826 -21.8669 -22.3161 -22.6065 -23.2768 -24.6195 -26.5305 -29.2725 -32.4238 -35.1128 -37.1192 -37.4422 -36.4477 -34.035 -30.9169 -27.7906 -25.074 -22.9636 -22.0308 -21.5162 -21.6308 -21.6173 -21.801 -21.8451 -22.2507 -22.6425 -23.4862 -24.7473 -26.453 -28.1196 -29.6082 -30.8477 -31.0724 -30.6262 -29.1785 -27.3305 -25.2393 -23.9481 -22.631 -22.28 -22.1442 -22.4106 -22.8242 -23.2341 -23.5393 -24.1029 -24.3435 -24.7011 -24.8619 -24.9144 -24.9425 -24.7611 -24.8479 -25.3057 -25.4593 -25.9287 -26.2579 -26.7506 -27.1597 -27.4858 -27.7301 -27.8896 -27.6377 -27.7664 -27.5531 -27.3844 -27.423 -27.2174 -27.1536 -27.2891 -27.5053 -27.2864 -27.1186 -26.6457 -26.1762 -24.9736 -24.0993 -22.7744 -21.8675 -20.9804 -20.3499 -20.122 -19.9444 -19.893 -19.7488 -19.8216 -19.8828 -19.9123 -20.1418 -20.3695 -20.478 -20.9134 -20.9997 -21.0984 -21.1243 -20.964 -20.982 -20.8195 -20.4824 -20.336 -20.1905 -20.2385 -20.1796 -20.0852 -20.2696 -20.3489 -20.7308 -20.8559 -21.1135 -21.2042 -21.392 -21.336 -21.0548 -20.8824 -20.7034 -20.6279 -20.2495 -20.2725 -20.5729 -20.7068 -21.0201 -21.327 -21.2293 -21.3712 -21.3739 -21.1715 -20.9684 -20.7312 -20.6801 -20.3698 -20.3848 -20.4559 -20.5696 -20.6266 -20.7899 -20.7293 -20.7316 -20.7859 -20.5684 -20.4687 -20.3839 -20.208 -20.2863 -20.314 -20.6351 -20.6877 -21.253 -21.6714 -22.0123 -22.4235 -22.6976 -22.8142 -22.7359 -22.7133 -22.6347 -22.6548 -22.4438 -22.5732 -22.6738 -22.8129 -22.9746 -23.5537 -23.6156 -23.8136 -23.9102 -23.8575 -23.7481 -23.6873 -23.5543 -23.3242 -23.3744 -23.4073 -23.2935 -23.4168 -23.5119 -23.783 -24.0192 -24.4601 -24.6565 -25.1032 -25.4145 -25.5766 -25.5532 -25.494 -25.3813 -25.0941 -24.7748 -24.749 -24.7262 -24.7689 -25.0273 -25.98 -26.5742 -27.6146 -28.4482 -28.9295 -29.303 -29.4375 -29.2736 -29.1515 -29.0494 -28.9301 -29.0395 -29.3113 -30.0077 -30.4868 -31.0892 -32.0811 -32.73 -33.266 -33.5359 -33.7349 -33.6575 -33.277 -32.9205 -32.7392 -32.9455 -33.4078 -34.0991 -35.821 -38.7057 -42.8939 -47.8543 -52.825 -57.2251 -60.4964 -62.0808 -61.557 -60.0239 -56.1395", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0008 0.0017 0.0025 0.0034 0.0043 0.005 0.0058 0.0069 0.0072 0.0078 0.0084 0.008 0.0086 0.0088 0.0087 0.0093 0.0092 0.0092 0.009 0.0081 0.007 0.0055 0.0038 0.0021 0.0012 0.001 0.0017 0.0043 0.0087 0.0137 0.0165 0.0162 0.0148 0.0105 0.0046 0.0032 0.0026 0.0022 0.0021 0.0021 0.0022 0.002 0.002 
0.0019 0.002 0.0023 0.0017 0.0016 0.0042 0.0083 0.0132 0.0279 0.0453 0.0468 0.0473 0.0426 0.0133 0.0028 0.002 0.0016 0.0016 0.0017 0.002 0.002 0.0017 0.0016 0.0023 0.003 0.0044 0.0077 0.0114 0.0133 0.0133 0.0119 0.0085 0.0044 0.0025 0.0023 0.002 0.0021 0.0025 0.0022 0.002 0.0018 0.0019 0.0027 0.0076 0.0137 0.0191 0.0233 0.0265 0.0282 0.027 0.0228 0.0172 0.0096 0.0036 0.0026 0.0024 0.0026 0.0027 0.003 0.0031 0.003 0.0025 0.0023 0.0018 0.0016 0.0015 0.0014 0.0014 0.0011 0.001 0.0009 0.0009 0.0009 0.0008 0.0006 0.0004 0.0006 0.0005 0.0004 0.0004 0.0005 0.0006 0.0007 0.0006 0.0006 0.0005 0.0006 0.0006 0.0006 0.0009 0.0014 0.0019 0.0021 0.0024 0.0024 0.0021 0.0021 0.0021 0.0018 0.0019 0.0017 0.0018 0.0016 0.0016 0.0016 0.0015 0.0015 0.0015 0.0015 0.0016 0.0015 0.0015 0.0017 0.0019 0.0018 0.0018 0.0018 0.0018 0.002 0.0021 0.0022 0.0019 0.0018 0.0017 0.0016 0.0016 0.0015 0.0015 0.0017 0.0015 0.0014 0.0014 0.0014 0.0016 0.0015 0.0015 0.0013 0.001 0.001 0.001 0.0012 0.0014 0.0012 0.0012 0.0012 0.0011 0.0013 0.0012 0.0013 0.0012 0.001 0.0012 0.0013 0.0013 0.001 0.0012 0.0011 0.0013 0.001 0.0012 0.0012 0.0011 0.0012 0.0009 0.0009 0.0008 0.0008 0.0008 0.0008 0.001 0.0009 0.0011 0.0008 0.0009 0.0007 0.001 0.0009 0.0005 0.0008 0.0007 0.0007 0.0005 0.0006 0.0007 0.0007 0.0007 0.0007 0.0007 0.0006 0.0008 0.0006 0.0009 0.0009 0.0007 0.0006 0.0005 0.0005 0.0006 0.0004 0.0004 0.0004 0.0005 0.0005 0.0005 0.0007 0.0006 0.0008 0.0007 0.0005 0.0005 0.0004 0.0003 0.0003 0.0003 0.0002 0.0004 0.0004 0.0002 0.0004 0.0003 0.0004 0.0004 0.0003 0.0003 0.0002 0.0002 0.0002 0.0003 0.0 0.0001 0.0001 0.0003 0.0001 0.0004 0.0004 0.0005 0.0014 0.0015 0.0013 0.0014 0.0008 0.0004 0.0002 0.0001 0.0", + "breathiness": "-67.8952 -63.2037 -58.9702 -55.874 -52.7344 -50.1791 -47.6635 -45.8418 -44.3407 -43.1926 -42.3234 -42.0945 -42.3678 -42.7055 -43.1856 -43.4004 -44.0144 -44.9617 -46.4633 -48.228 -50.8229 -53.5067 -55.5883 -56.7852 -56.4353 -54.9357 -52.2606 -48.4354 -44.4213 -40.7192 -38.5396 -37.6412 -38.3126 -40.1649 -42.7984 -45.5553 -48.3751 -50.6723 -52.2224 -53.1389 -53.4239 -53.5802 -53.9313 -54.4237 -55.0007 -55.0799 -53.7696 -51.3895 -47.8801 -43.3003 -38.5473 -34.5655 -32.6968 -33.9171 -37.1956 -42.2273 -47.2208 -51.7685 -55.1762 -57.1833 -57.1932 -56.8803 -56.413 -55.6219 -54.6897 -53.2537 -51.1689 -48.7833 -46.1019 -43.3215 -41.5261 -40.5776 -41.0098 -42.674 -45.0948 -47.7112 -49.9252 -51.8071 -53.0552 -53.4726 -53.4923 -52.7489 -51.8417 -49.8243 -47.1646 -44.229 -40.9404 -37.9775 -35.5007 -33.9426 -33.6556 -34.3185 -35.9654 -38.4904 -41.4227 -44.6402 -47.6346 -50.1304 -51.872 -53.0747 -53.1078 -53.2517 -53.4921 -53.676 -53.7235 -54.3233 -54.5492 -54.8855 -55.1524 -55.8301 -56.3729 -57.444 -58.7344 -60.2027 -62.048 -63.8582 -65.4486 -67.0274 -68.167 -69.0565 -69.5804 -69.6658 -69.4383 -69.0004 -68.4288 -67.7387 -66.9742 -66.124 -64.9481 -63.6229 -62.231 -60.5749 -58.9207 -57.129 -55.7699 -54.2148 -53.3623 -52.68 -52.7721 -52.95 -53.3785 -53.9231 -54.5238 -55.0185 -55.509 -56.0273 -56.4342 -56.5359 -56.7662 -56.6588 -56.3631 -56.1134 -55.6346 -55.034 -54.3393 -53.8136 -53.3766 -53.3423 -53.1659 -53.1706 -53.1819 -53.17 -53.0776 -53.4771 -53.5854 -53.9513 -54.3424 -54.1165 -53.5936 -52.8302 -52.0074 -51.5702 -51.0777 -51.0243 -50.8774 -51.5505 -52.2943 -52.8028 -53.5051 -54.2628 -54.7814 -55.2389 -55.5545 -55.7189 -55.6484 -55.4368 -55.0073 -54.6258 -54.3227 -54.2607 -54.6718 -54.8983 -55.8879 -56.3931 -56.8029 -57.0188 -57.0009 -56.9097 -56.862 -56.6015 -56.4184 -56.2674 -56.0085 -56.1922 -56.4234 -57.1096 -57.4295 
-58.1544 -58.6728 -58.9054 -59.0506 -58.7994 -58.4892 -57.9117 -57.4787 -57.1194 -56.8174 -56.8799 -57.2354 -57.6388 -58.1042 -58.5176 -58.3829 -58.3509 -58.0187 -57.8538 -57.9137 -57.727 -57.7595 -57.9922 -57.9065 -57.5927 -57.535 -57.5294 -57.7967 -58.1225 -58.7357 -59.2836 -59.6553 -59.9979 -59.9849 -60.2292 -60.0642 -59.9858 -59.6603 -59.2063 -58.8603 -58.5066 -58.5834 -59.1568 -60.0799 -61.4809 -63.1459 -64.5721 -65.6905 -66.4295 -66.9589 -67.0798 -67.2864 -67.8562 -68.1703 -68.9312 -69.5652 -70.1429 -70.7368 -71.7252 -72.6334 -73.8278 -74.8411 -75.634 -76.3652 -76.6443 -76.4118 -75.9242 -75.3208 -73.9746 -71.9318 -69.7571 -67.4407 -65.1468 -63.2201 -62.4725 -62.6043 -63.7918 -64.8387 -65.7448 -65.8257 -64.6617", "breathiness_timestep": "0.011609977324263039" }, { @@ -106,9 +106,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0", "f0_seq": "125.8 126.1 126.0 125.8 125.6 125.7 125.7 125.8 125.4 125.5 125.4 125.5 125.5 125.4 125.5 125.7 125.5 125.5 125.5 125.5 125.3 125.4 125.3 125.4 125.3 125.2 125.2 125.6 125.6 125.3 125.3 125.4 125.6 125.2 125.2 125.5 124.9 124.9 124.9 124.9 124.8 125.1 124.9 125.0 125.1 125.1 125.6 125.9 125.2 125.7 128.7 133.6 140.0 146.5 153.3 158.2 164.0 175.1 184.5 188.8 192.2 195.3 197.1 197.1 195.1 190.0 184.2 177.8 169.6 170.2 172.9 176.6 176.9 168.0 162.5 162.3 162.3 163.1 164.5 165.5 166.0 165.9 166.8 166.2 164.8 163.0 157.8 150.9 157.6 166.7 176.3 185.4 190.8 195.0 196.0 197.0 197.3 197.5 196.2 195.7 195.6 194.7 194.7 195.4 193.6 191.0 186.6 191.4 198.4 205.6 213.7 221.6 230.0 235.5 238.4 233.2 232.8 231.4 226.6 220.4 213.5 207.4 204.8 203.7 206.7 213.9 223.3 230.8 235.9 236.7 233.9 226.9 217.5 211.4 208.5 207.2 211.4 219.3 228.3 234.5 237.3 244.2 250.6 256.0 260.3 260.1 261.7 264.7 267.9 270.0 272.7 275.9 280.3 285.1 287.1 277.9 270.4 261.1 256.4 254.4 252.9 251.5 251.2 250.5 249.4 249.8 248.5 248.3 247.7 246.2 244.8 241.9 237.6 233.1 227.4 225.8 224.6 224.5 224.4 224.7 225.6 225.7 225.3 226.7 227.1 228.2 227.9 228.1 228.7 228.7 228.8 228.9 228.5 228.6 227.2 226.4 226.5 225.6 225.3 229.1 237.9 247.7 252.9 253.4 252.8 251.2 248.2 245.5 243.5 243.4 243.7 245.6 247.0 248.9 249.8 249.5 249.7 250.5 250.0 248.7 247.3 245.8 245.0 245.1 246.1 247.3 249.3 249.4 249.2 248.3 247.6 247.5 247.6 247.6 246.7 246.9 247.0 247.1 248.1 248.7 248.5 248.2 248.4 248.3 248.2 248.0 248.6 248.9 248.0 248.3 248.0 247.6 248.3 248.1 247.9 247.7 248.0 248.4 248.0 247.0 247.3 246.3 246.8 246.1 246.5 246.2 245.7 246.2 246.8 247.5 247.7 247.9 247.3 247.2 247.1 247.4 247.7 247.6 247.3 247.1 246.7 247.2 247.2 246.4 247.1 247.4 248.0 247.4 248.1 247.5 247.8 247.9 248.1 248.0 247.4 247.4 246.7 246.4 246.2 245.7 246.0 246.8 247.8 247.7 247.4 246.5 247.1 247.0 246.4 246.1 246.0 246.0 246.0 246.6 246.3 246.7 246.3 246.1 247.2 247.1 246.9 247.4 247.4 246.7 246.9 246.2 246.5 247.2 248.6 251.4 253.2 255.6 254.8 252.7 251.9 250.9 248.7 246.3 247.0 247.6 248.4 249.4 251.0 252.1 252.9 254.2 253.5 253.4 252.1 251.4 249.1 246.8 245.2 246.1 247.9 248.4 248.6 249.9 250.1 250.0 252.6 252.5 252.1 251.8 251.2 250.0 248.8 248.3 247.9 246.5 245.8 244.2 243.8 242.7 243.4 241.1", "f0_timestep": "0.011609977324263039", - "energy": "0.0 0.0004 0.0008 0.0004 0.0001 0.0001 0.0006 0.0007 0.0005 0.0008 0.0008 0.0005 0.0007 0.0007 0.001 0.0009 0.0012 0.0014 0.0011 0.0017 0.0014 0.0016 0.0016 0.0012 0.0017 0.0022 0.0017 0.0019 0.002 0.0015 0.0019 0.0025 0.0019 0.002 0.002 0.002 0.0017 0.0014 0.0015 0.001 0.001 0.0004 0.0001 0.0001 0.0 0.0013 0.002 0.0103 0.0212 0.0289 0.0386 0.0468 0.0496 0.0532 0.0542 0.0529 0.0501 0.0473 0.0445 
0.0456 0.0495 0.0526 0.0557 0.0578 0.0571 0.0525 0.0443 0.0329 0.0181 0.01 0.0258 0.0434 0.0521 0.0591 0.0611 0.0591 0.0596 0.0591 0.061 0.0611 0.0617 0.0635 0.0633 0.062 0.0571 0.049 0.0374 0.0241 0.0105 0.0089 0.0216 0.0421 0.0558 0.0663 0.0714 0.0701 0.0695 0.0687 0.0675 0.0672 0.0633 0.0608 0.0586 0.0544 0.0513 0.0449 0.035 0.0265 0.0197 0.0195 0.0226 0.022 0.0239 0.0464 0.061 0.0749 0.0841 0.0831 0.0805 0.0753 0.0711 0.0652 0.0602 0.0546 0.0527 0.0509 0.049 0.049 0.0464 0.0447 0.0421 0.0408 0.0373 0.0347 0.0312 0.0303 0.0297 0.0292 0.0297 0.0305 0.0337 0.0366 0.0361 0.0317 0.0238 0.0143 0.0045 0.0009 0.0011 0.0048 0.0071 0.0087 0.009 0.0141 0.0459 0.067 0.0795 0.089 0.0885 0.0858 0.0842 0.0842 0.0838 0.0819 0.0819 0.0825 0.0854 0.0871 0.09 0.0913 0.0924 0.0957 0.0982 0.0999 0.0981 0.0948 0.0922 0.0926 0.0976 0.1021 0.1035 0.1043 0.1058 0.1061 0.1088 0.1101 0.111 0.1092 0.1111 0.1108 0.1097 0.1124 0.1108 0.109 0.1024 0.0918 0.0784 0.067 0.0591 0.056 0.0591 0.066 0.0724 0.0797 0.0818 0.0818 0.0807 0.0781 0.0781 0.0771 0.0777 0.0783 0.0801 0.0815 0.0827 0.0845 0.0834 0.0845 0.0833 0.0835 0.0828 0.0816 0.0814 0.0787 0.0798 0.0795 0.0805 0.0816 0.0821 0.0799 0.0797 0.0794 0.079 0.0797 0.0809 0.0816 0.0822 0.0835 0.0823 0.0832 0.0822 0.0822 0.0824 0.0818 0.082 0.0812 0.082 0.0822 0.083 0.0839 0.0834 0.0834 0.085 0.0855 0.0855 0.0862 0.0861 0.086 0.0868 0.0863 0.0853 0.0851 0.085 0.0849 0.0855 0.0854 0.0852 0.0852 0.0852 0.0845 0.085 0.0844 0.0837 0.0838 0.0837 0.0844 0.0838 0.0837 0.0853 0.085 0.0847 0.0855 0.0859 0.0857 0.0864 0.0863 0.0855 0.0867 0.0867 0.0873 0.0872 0.086 0.0861 0.0846 0.084 0.0835 0.0835 0.0834 0.0825 0.0835 0.0825 0.0834 0.0831 0.083 0.083 0.0817 0.0816 0.0806 0.0805 0.0793 0.0789 0.0777 0.0775 0.0769 0.0771 0.0763 0.0753 0.074 0.0724 0.0717 0.0708 0.0701 0.0686 0.0675 0.066 0.0646 0.0642 0.0624 0.0617 0.0622 0.0612 0.0618 0.0607 0.0606 0.0594 0.058 0.0557 0.0522 0.0491 0.0465 0.0443 0.0429 0.0424 0.0415 0.0403 0.0392 0.0383 0.0384 0.0379 0.0374 0.0362 0.0345 0.0334 0.0327 0.0308 0.0297 0.0278 0.0264 0.0241 0.0212 0.0173 0.0129 0.009 0.005 0.0026 0.0019 0.0016 0.0012 0.0012 0.001 0.0004 0.0004 0.0004 0.0 0.0001 0.0003", + "energy": "-79.4123 -78.6147 -77.6421 -76.1255 -74.3339 -72.9097 -71.4942 -70.0373 -68.5843 -66.7924 -65.2784 -63.4517 -61.881 -60.4025 -58.7164 -57.3725 -56.0777 -54.8059 -53.5454 -52.7446 -52.0078 -51.4571 -51.1888 -51.155 -51.1998 -51.5804 -51.6807 -52.1785 -52.4769 -52.6492 -52.9648 -53.3055 -53.6811 -53.9255 -54.4223 -55.498 -56.6695 -57.8105 -59.2783 -60.5551 -60.9367 -60.9367 -59.49 -56.9966 -53.3045 -49.0416 -44.2405 -39.6709 -35.4626 -32.1711 -29.8388 -28.0383 -26.6881 -25.9358 -25.4507 -25.078 -24.8675 -24.8987 -24.8791 -24.9404 -24.9168 -24.9757 -25.1126 -25.224 -25.7177 -26.2623 -27.0827 -27.8622 -28.1738 -28.1075 -27.7922 -27.2669 -26.4087 -25.6694 -25.0786 -25.1646 -25.2494 -25.3118 -25.4304 -25.1355 -25.0997 -25.2152 -25.5061 -26.1288 -27.401 -29.0814 -30.9235 -32.0145 -32.0225 -31.3687 -29.8282 -27.7371 -25.5096 -24.189 -23.623 -23.4517 -23.5395 -23.5571 -23.8334 -23.9879 -24.2977 -24.6227 -25.3823 -26.1696 -27.314 -28.5168 -29.9699 -31.3755 -32.3605 -32.8261 -32.3739 -31.2091 -29.4827 -27.4428 -25.5849 -24.123 -23.1952 -22.7879 -23.2107 -23.7777 -24.3329 -24.914 -25.6055 -25.8071 -26.1576 -26.3725 -26.7722 -27.0621 -27.3514 -27.7685 -27.9402 -28.4443 -28.7054 -29.1569 -29.6262 -30.0382 -30.1584 -30.1524 -30.1554 -30.3748 -30.6075 -31.326 -32.6165 -35.6383 -39.2316 -42.9488 -45.8366 -47.3932 -48.218 -47.1798 -44.7387 -41.2722 -37.3443 
-33.5813 -29.9229 -26.2647 -23.6445 -22.474 -21.5586 -21.5202 -21.6476 -21.9995 -22.0772 -22.3639 -22.5282 -22.3528 -22.3903 -22.1843 -22.0427 -22.0929 -22.0915 -22.0576 -21.9633 -22.3083 -22.3715 -22.2057 -22.3572 -22.336 -22.2029 -22.0413 -21.8161 -21.6574 -21.8097 -21.5332 -21.5541 -21.5986 -21.5409 -21.5711 -21.6391 -21.5822 -21.6199 -21.6573 -21.9928 -22.4008 -22.7424 -23.395 -23.96 -24.3079 -24.6757 -24.9059 -24.6443 -24.5933 -24.3789 -24.1752 -24.2418 -24.3623 -24.5578 -24.6866 -24.8141 -24.7937 -24.3312 -24.1652 -23.8025 -23.5532 -23.397 -22.9374 -23.0751 -23.0403 -23.0294 -22.9414 -23.1041 -22.8776 -22.746 -22.6362 -22.2661 -22.0786 -22.0404 -21.9974 -22.0032 -21.9343 -21.9744 -22.0849 -22.0082 -22.0264 -21.9829 -21.8076 -21.6224 -21.5901 -21.2836 -21.1797 -21.2068 -20.8479 -20.9468 -21.0078 -20.9195 -20.903 -20.8911 -20.9171 -20.8992 -20.8274 -20.5907 -20.7607 -20.6572 -20.5935 -20.5866 -20.476 -20.1856 -20.3066 -20.1839 -20.0094 -20.1021 -19.9857 -20.2066 -20.0326 -20.1466 -20.2513 -20.3166 -20.2713 -20.4217 -20.3043 -20.3469 -20.2918 -20.2045 -20.4475 -20.2531 -20.3392 -20.2681 -20.22 -20.2242 -20.2819 -20.1886 -20.3823 -20.4769 -20.517 -20.5978 -20.5643 -20.7589 -20.5863 -20.7129 -20.5875 -20.6679 -20.6044 -20.7517 -20.6494 -20.6185 -20.6465 -20.7427 -20.735 -20.8588 -20.8018 -20.7195 -21.2245 -21.0286 -21.0975 -21.2462 -21.4153 -21.499 -21.4965 -21.595 -21.7436 -21.8012 -22.0034 -22.1425 -22.2652 -22.4268 -22.5683 -22.5857 -22.729 -22.8743 -23.0567 -23.1332 -23.5216 -23.7217 -23.8317 -24.0891 -24.1859 -24.4784 -24.8054 -24.996 -25.374 -25.5369 -25.7469 -26.1424 -26.4607 -26.6336 -27.183 -27.5016 -27.9313 -28.2609 -28.5274 -29.061 -29.2057 -29.3352 -29.6012 -29.6529 -29.9297 -30.1448 -30.4154 -30.9081 -31.1638 -31.5813 -31.7771 -32.0969 -32.2832 -32.9314 -33.0091 -33.8955 -34.7002 -36.3601 -38.7605 -41.969 -45.9441 -50.2654 -53.9383 -57.3537 -60.2052 -62.1064 -63.9456 -65.2932 -66.6041 -67.5043 -67.88 -68.0135 -67.3255 -66.6848 -63.3237", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0004 0.0 0.0002 0.0 0.0001 0.0002 0.0001 0.0002 0.0002 0.0005 0.0007 0.0009 0.0009 0.001 0.001 0.0012 0.0014 0.0013 0.0014 0.0014 0.0018 0.0018 0.0016 0.0018 0.0019 0.0019 0.002 0.0018 0.0018 0.0018 0.002 0.0021 0.0022 0.0023 0.0025 0.0023 0.0021 0.0019 0.0018 0.0014 0.0013 0.0011 0.0009 0.0008 0.0008 0.0009 0.0012 0.0015 0.0018 0.0017 0.0011 0.001 0.0015 0.0018 0.002 0.0016 0.0014 0.0013 0.001 0.0004 0.0002 0.0003 0.0003 0.0005 0.0005 0.0003 0.0004 0.0009 0.0035 0.0055 0.0064 0.0064 0.0059 0.0037 0.002 0.0017 0.0014 0.0012 0.0014 0.0016 0.0014 0.0015 0.0014 0.0013 0.0012 0.001 0.0007 0.0005 0.0007 0.0007 0.0009 0.0011 0.0011 0.0012 0.0013 0.0015 0.0013 0.001 0.0009 0.0009 0.0009 0.0009 0.0008 0.0008 0.0011 0.0031 0.0081 0.012 0.0169 0.0232 0.0276 0.0295 0.0288 0.0235 0.016 0.0084 0.0024 0.0018 0.0016 0.0017 0.0015 0.0014 0.0013 0.0012 0.001 0.001 0.001 0.0008 0.0007 0.0006 0.0007 0.0006 0.0006 0.0006 0.0006 0.0005 0.0002 0.0002 0.0002 0.0 0.0001 0.0002 0.0003 0.0008 0.0015 0.0017 0.0018 0.0014 0.0019 0.0051 0.0079 0.0084 0.0086 0.0066 0.0037 0.0027 0.0022 0.0024 0.0022 0.0021 0.0021 0.0014 0.0012 0.0014 0.001 0.0009 0.0008 0.0009 0.0009 0.001 0.0014 0.0016 0.0017 0.0021 0.0025 0.0026 0.0027 0.0025 0.0023 0.0022 0.0024 0.0021 0.0021 0.0023 0.0022 0.002 0.0018 0.002 0.002 0.0021 0.0024 0.0024 0.0021 0.0022 0.002 0.0023 0.0022 0.0025 0.0024 0.0024 0.0024 0.0024 0.0023 0.002 0.0021 0.0019 0.0017 0.0015 0.0015 0.0014 0.0014 0.0017 0.0019 0.0019 0.0019 0.0019 0.0018 0.002 0.0021 0.0022 
0.0024 0.0022 0.0022 0.0022 0.0022 0.0021 0.0021 0.002 0.0019 0.0019 0.0019 0.002 0.0019 0.0022 0.0024 0.0023 0.0024 0.0022 0.0023 0.0021 0.0021 0.0018 0.002 0.0021 0.0022 0.0024 0.0024 0.0026 0.0023 0.0022 0.0023 0.0023 0.0023 0.0022 0.0023 0.0021 0.0024 0.0024 0.0023 0.0023 0.002 0.0023 0.0024 0.0023 0.0023 0.0023 0.0019 0.002 0.0023 0.0021 0.0023 0.0022 0.0023 0.0021 0.0024 0.0022 0.0024 0.0022 0.0023 0.0022 0.0023 0.0024 0.0024 0.0025 0.0024 0.0025 0.0023 0.0025 0.0026 0.0027 0.0026 0.0026 0.0022 0.0024 0.0023 0.0022 0.0022 0.0022 0.0023 0.0024 0.0023 0.0021 0.0022 0.0023 0.0023 0.0021 0.0023 0.0023 0.0022 0.0019 0.002 0.0021 0.0025 0.0024 0.0023 0.0022 0.0019 0.002 0.002 0.0017 0.0017 0.0016 0.0015 0.0015 0.0014 0.0017 0.0015 0.0018 0.0017 0.0016 0.0016 0.0014 0.0012 0.0008 0.0011 0.0011 0.001 0.001 0.0009 0.0009 0.001 0.001 0.001 0.0008 0.0007 0.0006 0.0005 0.0003 0.0003 0.0004 0.0003 0.0003 0.0003 0.0004 0.0004 0.0002 0.0002 0.0002 0.0004 0.0003 0.0005 0.0012 0.0014 0.0017 0.0015 0.0009 0.001 0.001 0.0009 0.0008 0.0006 0.0004 0.0002 0.0004 0.0006 0.0003", + "breathiness": "-82.8759 -81.5084 -80.187 -79.093 -77.4811 -76.299 -75.0647 -73.2943 -71.5834 -69.5803 -67.0957 -64.5302 -62.3567 -60.2401 -58.2676 -56.6142 -55.4259 -54.3502 -53.7333 -53.0694 -52.6332 -52.229 -51.7775 -51.5657 -51.5742 -51.5262 -51.8111 -52.2039 -52.6354 -53.1083 -53.4807 -53.9912 -54.5556 -55.0967 -55.5363 -56.3784 -57.2283 -58.3852 -60.1133 -61.9474 -63.5929 -64.9596 -65.3019 -65.131 -64.1784 -62.9691 -62.0642 -61.1697 -60.8076 -60.6121 -60.0359 -59.2204 -57.8614 -56.7861 -55.7258 -55.6153 -56.4502 -57.6276 -59.376 -61.0226 -62.6431 -64.0857 -64.8481 -65.5605 -66.019 -65.864 -65.5583 -64.5219 -62.852 -61.2598 -59.3105 -57.8263 -56.4124 -55.4106 -55.034 -54.8309 -55.0045 -55.1052 -55.3341 -55.7801 -56.2612 -56.9549 -57.7689 -58.9447 -60.6133 -62.5939 -64.6089 -66.2881 -66.8345 -66.5543 -65.1706 -63.1945 -61.1001 -59.444 -58.3733 -57.8957 -57.8022 -58.1614 -58.6536 -59.4196 -59.6244 -59.2087 -58.0155 -55.467 -51.9543 -48.1532 -43.5275 -39.5585 -36.2532 -34.0911 -33.1932 -33.7731 -35.7409 -39.3235 -43.3366 -47.4606 -50.9183 -53.6967 -55.3755 -56.3343 -56.7035 -57.3539 -58.0087 -58.5155 -58.9893 -59.4369 -59.4178 -59.5694 -59.635 -60.0366 -60.5246 -61.3394 -62.3596 -63.424 -64.5845 -65.9775 -66.9974 -67.9847 -68.4447 -68.4071 -67.6112 -66.0396 -64.0 -61.5053 -59.7822 -57.4749 -55.4505 -53.3416 -51.3256 -49.7205 -48.0176 -46.9026 -47.045 -48.1579 -49.7434 -51.3404 -53.0439 -54.218 -55.2592 -55.8077 -56.7092 -58.1489 -59.6904 -61.5363 -62.9674 -64.2028 -64.5106 -64.4582 -63.601 -62.5765 -60.905 -59.2933 -57.6402 -56.1491 -55.3011 -54.6471 -54.3317 -54.3706 -54.2027 -54.0862 -54.1713 -53.9338 -53.9701 -53.6156 -53.7488 -53.7208 -53.9613 -53.9864 -54.2095 -54.197 -54.0487 -53.5128 -53.1325 -52.4622 -51.9304 -51.4839 -51.3911 -51.2925 -51.6134 -51.9385 -52.5486 -53.1911 -54.0058 -55.1078 -55.906 -56.7863 -57.4639 -57.9541 -57.8799 -57.8209 -57.4337 -57.3435 -57.2801 -57.3903 -57.4617 -57.529 -57.2705 -57.0253 -56.5003 -55.9813 -55.5473 -55.0745 -54.9553 -55.2224 -55.3346 -55.8038 -55.9761 -56.4565 -56.5321 -56.5165 -56.7619 -56.6802 -56.6536 -56.3853 -56.1846 -55.7326 -55.3617 -55.2683 -55.0131 -54.9645 -54.9246 -54.7513 -54.5352 -54.2048 -53.737 -53.0127 -52.5843 -52.1223 -51.8295 -51.4753 -51.2261 -51.2308 -50.8974 -50.7732 -50.7475 -50.6385 -50.6335 -50.6831 -50.6353 -50.5339 -50.3482 -50.2464 -50.0275 -50.0613 -50.3178 -50.4162 -50.3223 -50.0699 -49.6833 -48.9444 -48.6953 -48.4203 -48.3292 -48.5403 -48.7717 
-49.3015 -49.5234 -49.7715 -49.8724 -50.1227 -50.2818 -50.5966 -50.8174 -50.951 -51.2102 -51.2027 -51.4706 -51.4848 -51.7588 -51.7475 -51.9827 -52.1973 -52.201 -52.2453 -52.197 -52.3167 -52.6395 -52.6818 -52.8806 -52.8179 -52.6641 -52.443 -52.1897 -52.3231 -52.4426 -52.652 -53.0192 -53.193 -53.2851 -53.3946 -53.2873 -53.2961 -52.9318 -52.8552 -52.9094 -53.1403 -53.5149 -53.7756 -54.2369 -54.5763 -54.4959 -54.3717 -54.2007 -54.1052 -54.2391 -54.6583 -54.976 -55.6301 -56.3461 -57.0768 -57.6453 -58.1059 -58.9152 -59.3615 -59.867 -60.2526 -60.5127 -60.9277 -61.2928 -61.6394 -62.3609 -62.9239 -63.6677 -64.4955 -65.1396 -65.9335 -66.6198 -67.095 -67.6036 -68.1712 -68.6072 -69.1631 -69.4208 -69.3363 -69.3747 -69.352 -68.5362 -67.1794 -65.1015 -62.5165 -60.195 -58.2923 -57.2482 -57.402 -58.7212 -60.5656 -62.6637 -64.3838 -66.2228 -67.9976 -70.2682 -72.1013 -74.4848 -76.3663 -77.2687 -76.6288", "breathiness_timestep": "0.011609977324263039" }, { @@ -122,9 +122,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "147.2 147.0 147.2 147.4 147.3 147.2 147.0 147.8 147.6 147.6 147.4 147.6 148.1 147.9 147.9 147.9 147.9 148.0 147.8 147.8 147.9 148.1 148.1 148.2 147.9 148.0 148.1 148.2 148.1 148.4 148.2 148.1 148.6 148.2 148.3 148.5 148.4 148.5 148.2 148.3 148.5 148.6 148.6 148.3 148.7 148.5 148.6 148.7 148.6 148.6 148.7 148.6 147.2 146.8 151.4 160.4 173.3 188.0 206.0 220.7 235.0 244.4 248.3 247.6 243.2 238.0 232.6 228.7 225.2 222.3 220.2 219.1 217.7 216.7 218.1 219.7 220.7 220.7 220.9 221.6 220.6 219.0 219.1 218.8 218.7 219.8 219.8 219.5 218.6 217.6 218.8 226.0 242.6 251.3 255.9 260.3 261.5 259.9 255.6 248.3 242.7 238.4 237.0 237.8 238.8 242.2 246.5 250.5 253.5 253.7 252.5 250.9 249.0 246.4 244.4 242.4 241.7 242.7 245.9 248.1 248.9 248.4 248.8 249.2 249.5 250.0 248.8 247.0 245.0 245.0 245.6 246.4 246.3 245.7 244.7 244.3 245.8 246.4 246.5 245.5 246.0 246.4 246.8 247.0 245.6 242.6 237.7 232.9 232.3 232.9 233.4 233.2 233.3 233.2 234.5 227.9 223.4 222.5 220.6 220.3 221.1 220.7 220.8 220.1 219.7 215.5 203.3 203.4 203.8 207.2 210.6 217.5 221.5 227.3 221.9 223.2 224.8 224.7 224.8 224.8 223.5 223.8 224.8 224.0 222.1 221.1 220.1 219.7 219.4 225.7 234.5 240.5 245.0 247.6 249.8 250.2 248.2 246.4 244.6 243.3 243.7 244.8 245.9 246.5 246.7 246.5 246.2 243.4 238.8 232.1 223.5 211.6 203.5 196.7 193.9 194.0 194.4 196.5 196.6 196.9 197.9 198.8 198.2 198.0 195.9 193.9 194.2 193.9 193.1 194.3 195.5 196.7 198.0 199.1 199.0 199.2 199.4 200.0 199.7 199.8 199.4 199.8 201.2 202.2 202.5 202.7 201.1 195.0 182.0 179.7 176.9 174.7 172.3 169.5 167.2 165.0 163.2 161.7 163.7 166.6 168.7 170.3 170.9 170.5 167.2 161.7 156.5 151.8 150.0 152.6 157.0 163.5 171.8 179.0 182.7 183.8 182.7 179.3 173.9 167.4 160.0 153.1 150.6 151.8 153.7 158.2 162.8 167.8 173.0 176.0 177.9 178.3 176.2 170.8 164.1 160.0 161.0 163.9 167.4 171.6 172.2 172.2 172.5 172.4 171.9 171.5 171.0 171.6 171.5 170.1 169.8 170.2 170.7 170.8 170.7 171.0 170.8 170.1 169.8 169.6 169.9 169.0 170.4", "f0_timestep": "0.011609977324263039", - "energy": "0.0 0.0 0.0001 0.0002 0.0002 0.0 0.0001 0.0003 0.0001 0.0005 0.0009 0.0005 0.0008 0.0006 0.0012 0.001 0.0017 0.0018 0.0018 0.002 0.0025 0.0026 0.0027 0.0032 0.0031 0.0031 0.0036 0.0033 0.0032 0.0037 0.0033 0.0032 0.0029 0.0027 0.002 0.0028 0.0026 0.0024 0.0018 0.0015 0.0013 0.0014 0.0012 0.0013 0.0009 0.0014 0.0027 0.0056 0.0083 0.0122 0.0238 0.0382 0.0494 0.0588 0.0659 0.0692 0.0696 0.0689 0.0659 0.0655 0.0681 0.0723 0.0788 0.0809 0.0817 0.0815 0.0803 0.0793 0.0791 0.0778 0.0766 0.0767 0.0749 0.075 0.0763 0.0772 0.0782 0.0811 0.0811 
0.0804 0.082 0.079 0.0785 0.0769 0.0721 0.07 0.0662 0.0641 0.0624 0.0586 0.055 0.0581 0.073 0.0831 0.0924 0.0963 0.0935 0.09 0.0835 0.0742 0.0655 0.0594 0.0574 0.0566 0.0568 0.058 0.06 0.0624 0.0655 0.0674 0.0694 0.0715 0.0715 0.0723 0.0714 0.0717 0.0729 0.0731 0.0768 0.078 0.0789 0.0836 0.0835 0.0863 0.0907 0.0925 0.0938 0.0953 0.0947 0.0917 0.0898 0.0863 0.0861 0.0845 0.0843 0.0826 0.079 0.0766 0.0744 0.0723 0.0731 0.0739 0.0757 0.077 0.0726 0.0643 0.0506 0.0331 0.0191 0.0153 0.0192 0.0206 0.0205 0.0228 0.0431 0.0571 0.0685 0.0758 0.0761 0.075 0.0747 0.0755 0.0737 0.0706 0.0615 0.0496 0.036 0.0206 0.0119 0.0078 0.0067 0.0077 0.0196 0.0409 0.0574 0.0727 0.0812 0.0848 0.0846 0.0818 0.083 0.0813 0.0792 0.0798 0.0768 0.0749 0.0702 0.0627 0.0574 0.0543 0.0561 0.058 0.0609 0.0616 0.0627 0.0656 0.0676 0.069 0.0685 0.0672 0.0656 0.065 0.0643 0.0645 0.0632 0.0616 0.0607 0.0611 0.0631 0.0632 0.0637 0.0651 0.0639 0.0662 0.069 0.0717 0.0754 0.0772 0.0789 0.0789 0.0767 0.0771 0.0752 0.073 0.071 0.0673 0.0669 0.0669 0.0664 0.0682 0.0686 0.0696 0.0709 0.0723 0.0718 0.0736 0.0758 0.0741 0.0755 0.0742 0.0717 0.072 0.0684 0.0667 0.0658 0.062 0.0546 0.0436 0.0285 0.0139 0.0085 0.0093 0.0101 0.0102 0.009 0.0193 0.0393 0.0536 0.063 0.0695 0.0687 0.0661 0.0651 0.0629 0.0605 0.0595 0.0554 0.0509 0.0486 0.0447 0.0438 0.0439 0.0439 0.045 0.0445 0.0445 0.0435 0.0426 0.0414 0.0386 0.0361 0.0329 0.0293 0.027 0.0246 0.0237 0.0219 0.0216 0.0204 0.0196 0.0191 0.0178 0.0177 0.0171 0.0161 0.015 0.0137 0.0125 0.0107 0.0079 0.0057 0.0035 0.0016 0.0005 0.0006 0.0006 0.0005 0.0004 0.0009 0.0004 0.0008 0.0005 0.0002 0.0004 0.0002 0.0001 0.0005 0.0 0.0 0.0003 0.0002 0.0 0.0", + "energy": "-79.4568 -79.5384 -79.3348 -78.1158 -76.6991 -75.3745 -73.5211 -71.7881 -69.5246 -67.3326 -65.1125 -63.1968 -60.8837 -58.8876 -57.1724 -55.1923 -54.1859 -53.0566 -52.2278 -51.652 -51.0596 -50.5514 -50.1871 -50.0547 -50.1738 -50.2226 -50.4327 -50.9056 -51.5581 -52.0602 -52.7481 -53.5475 -54.4538 -55.5362 -56.5386 -57.8167 -58.7865 -59.9379 -60.9119 -61.8464 -62.4537 -62.4413 -61.3366 -59.282 -56.0194 -52.2883 -47.976 -43.492 -39.3229 -35.3782 -32.3495 -29.6033 -27.7895 -26.4203 -25.8965 -25.7566 -25.8307 -26.1072 -25.9932 -26.008 -25.6453 -25.2594 -24.6926 -24.3203 -23.7762 -23.5533 -23.5932 -23.571 -23.5496 -23.611 -23.7575 -23.8832 -23.7052 -23.7858 -23.5659 -23.3871 -23.3167 -23.1392 -23.1716 -23.221 -23.1999 -23.3025 -23.4093 -23.5289 -23.7822 -24.1651 -24.6253 -24.9491 -25.3634 -25.3059 -24.8264 -24.4862 -23.5831 -23.0045 -22.4844 -22.1552 -21.8871 -22.0392 -22.0966 -22.567 -22.9205 -23.213 -23.6604 -24.0536 -24.0764 -24.0787 -23.6635 -23.4554 -23.112 -22.8221 -22.6489 -22.2734 -22.3417 -22.0251 -21.8598 -21.9132 -21.7855 -21.9747 -21.7888 -21.7193 -21.6425 -21.488 -21.4479 -21.294 -21.2591 -21.2903 -21.3462 -21.386 -21.4912 -21.5404 -21.5418 -21.9118 -22.0025 -22.297 -22.3111 -22.3789 -22.5186 -22.6168 -22.6342 -22.7638 -22.9156 -22.9387 -23.1615 -24.0266 -25.1595 -27.0912 -29.3119 -31.4115 -33.4862 -34.6915 -35.0855 -34.1754 -32.2071 -29.7811 -27.5802 -25.7886 -24.3527 -23.686 -23.2738 -23.4704 -23.3323 -23.7838 -24.2452 -25.6395 -27.129 -29.5412 -32.2696 -34.3517 -35.86 -35.9547 -35.4665 -33.5276 -31.1872 -28.3444 -25.9876 -23.8659 -22.9886 -22.3935 -22.2759 -22.1705 -22.3438 -22.5437 -23.1661 -23.2117 -23.926 -24.5076 -24.7762 -25.1689 -25.069 -25.2473 -25.0844 -24.998 -24.9174 -24.5602 -24.466 -24.3602 -24.2722 -24.0816 -24.2005 -24.12 -24.3791 -24.4818 -24.6686 -24.8494 -24.7958 -24.868 -24.7395 -24.8126 -24.7607 -24.5333 -24.5374 
-24.1457 -24.188 -23.8313 -23.5896 -23.2558 -23.0488 -22.9782 -22.715 -22.8086 -23.0746 -23.0568 -23.0934 -23.2349 -23.283 -23.2212 -23.4841 -23.5688 -23.5165 -23.8337 -23.5667 -23.6911 -23.8272 -23.8824 -23.7066 -23.6635 -23.6171 -23.5531 -23.5482 -23.6034 -23.5424 -23.5505 -23.885 -24.2931 -25.1256 -26.0701 -27.8904 -30.2327 -32.6696 -35.3447 -37.5214 -38.7996 -38.5628 -37.1418 -34.8909 -32.3768 -29.4466 -27.0358 -25.7751 -24.6657 -24.3972 -24.4464 -24.6307 -25.0209 -25.1446 -25.6649 -26.001 -26.4776 -26.8383 -27.1949 -27.5836 -27.6552 -27.7134 -27.9072 -28.0464 -28.3358 -28.7426 -29.2999 -29.8346 -30.5237 -31.0458 -31.6723 -32.2548 -32.727 -33.0392 -33.4939 -33.9622 -34.6379 -35.1244 -35.6741 -36.1922 -36.9365 -37.3094 -37.8572 -38.4748 -39.0323 -40.0395 -41.2312 -43.4271 -46.4906 -50.1409 -54.4225 -58.5158 -62.2859 -65.6312 -67.7252 -68.4937 -68.7453 -68.9271 -69.0702 -69.3527 -69.577 -70.616 -71.2043 -71.8518 -71.7854 -71.6079 -71.5885 -71.8141 -71.9084 -70.5965 -68.2728 -66.6896", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0 0.0003 0.0001 0.0 0.0002 0.0 0.0 0.0001 0.0002 0.0007 0.0004 0.0006 0.0008 0.0009 0.0012 0.0017 0.0015 0.0015 0.0017 0.0025 0.0028 0.0029 0.0032 0.0034 0.0036 0.0037 0.004 0.004 0.0039 0.0036 0.0032 0.0031 0.0028 0.0026 0.0026 0.0024 0.0021 0.0017 0.0017 0.0012 0.001 0.0008 0.0004 0.0006 0.0006 0.0014 0.0028 0.0056 0.0079 0.0092 0.0099 0.0089 0.0068 0.0045 0.0028 0.002 0.002 0.002 0.0021 0.0019 0.0012 0.0009 0.0007 0.0004 0.0008 0.0005 0.0006 0.0006 0.0007 0.0008 0.0009 0.001 0.0013 0.0016 0.0019 0.0019 0.0022 0.0022 0.0024 0.0024 0.0024 0.0024 0.0025 0.002 0.002 0.0018 0.0017 0.0018 0.0018 0.002 0.0023 0.0024 0.0026 0.0028 0.0022 0.0024 0.0022 0.0023 0.0024 0.0023 0.0016 0.0018 0.0017 0.0015 0.0012 0.0008 0.0007 0.0003 0.0005 0.0005 0.0003 0.0004 0.0005 0.0006 0.0006 0.0005 0.0006 0.0005 0.0006 0.0008 0.0008 0.0007 0.0008 0.0006 0.0006 0.0006 0.0005 0.0006 0.0006 0.0004 0.0002 0.0005 0.0003 0.0005 0.0004 0.0005 0.0004 0.0004 0.0008 0.0007 0.0006 0.0004 0.0004 0.0005 0.0005 0.0009 0.002 0.004 0.0075 0.0136 0.0214 0.0234 0.0231 0.0207 0.0123 0.006 0.002 0.0013 0.001 0.0009 0.0008 0.0009 0.0005 0.0004 0.0004 0.0004 0.0004 0.0007 0.0004 0.0004 0.0003 0.0003 0.0004 0.0009 0.001 0.0008 0.0007 0.0007 0.0006 0.0007 0.0007 0.0006 0.0007 0.0008 0.0007 0.0008 0.0006 0.0007 0.0007 0.0005 0.0008 0.0006 0.0007 0.0008 0.0007 0.0007 0.0008 0.0013 0.0007 0.0007 0.0006 0.0006 0.0005 0.0005 0.0004 0.0004 0.0003 0.0001 0.0004 0.0004 0.0005 0.0005 0.0004 0.0007 0.0006 0.0006 0.0007 0.0006 0.0007 0.0007 0.0008 0.0011 0.0011 0.001 0.0012 0.001 0.0012 0.0011 0.001 0.0009 0.001 0.0008 0.001 0.001 0.0009 0.0013 0.0015 0.0014 0.0014 0.0012 0.0014 0.0014 0.0012 0.0012 0.0011 0.0011 0.0013 0.0029 0.0041 0.0058 0.0075 0.0094 0.0109 0.011 0.0106 0.0093 0.0068 0.0052 0.0043 0.0029 0.0025 0.0024 0.0025 0.0027 0.0026 0.0028 0.0026 0.0026 0.0024 0.0023 0.0022 0.0024 0.0022 0.0021 0.0018 0.002 0.002 0.0021 0.002 0.002 0.0019 0.0013 0.0011 0.0009 0.0008 0.0008 0.0008 0.0008 0.0007 0.0005 0.0004 0.0006 0.0007 0.0006 0.0005 0.0005 0.0005 0.0003 0.0005 0.0011 0.0014 0.0013 0.0013 0.0007 0.0006 0.0005 0.0003 0.0003 0.0001 0.0001 0.0003 0.0 0.0001 0.0002 0.0001 0.0001 0.0 0.0001 0.0002 0.0001 0.0002 0.0001 0.0", + "breathiness": "-88.6473 -90.7176 -91.4583 -91.981 -90.6174 -88.5107 -84.5072 -79.5176 -74.9647 -70.5323 -66.9204 -64.5351 -62.3729 -60.0711 -58.0641 -56.2069 -54.8623 -53.7061 -52.5564 -52.1407 -51.6467 -51.048 -50.5717 -50.317 -50.1463 -50.0593 -50.3275 -50.6348 -51.088 
-51.6151 -52.3873 -53.1159 -53.9708 -55.1219 -56.3015 -57.615 -58.8214 -60.5584 -62.114 -63.763 -65.2051 -65.4209 -64.6614 -62.1845 -58.7373 -54.2841 -49.8246 -45.9972 -43.8328 -42.8558 -43.1291 -44.0194 -45.6235 -47.0692 -48.7279 -50.3348 -52.3366 -54.4396 -57.0082 -59.5531 -61.6922 -63.0791 -63.7417 -63.8534 -63.5421 -62.6066 -61.6479 -60.6525 -59.4908 -58.3041 -57.2108 -55.9888 -54.9402 -54.3546 -53.83 -53.473 -53.2508 -53.0094 -52.8565 -52.837 -52.757 -52.9694 -53.1799 -53.4984 -53.9362 -54.469 -54.6665 -54.5337 -54.3502 -53.5748 -53.0407 -52.3995 -52.1476 -52.0882 -52.4724 -52.7628 -52.8678 -53.1564 -53.3598 -53.6869 -53.8377 -54.1246 -54.6859 -55.4845 -56.9459 -58.1707 -59.7388 -61.0743 -61.9503 -62.4077 -62.0493 -61.4935 -60.5359 -59.6756 -59.2383 -58.9825 -58.6291 -58.5268 -58.6492 -58.6479 -58.7861 -59.1139 -59.7952 -60.42 -61.1321 -61.6538 -62.1701 -62.6884 -63.1985 -63.4375 -63.8794 -64.3313 -64.448 -64.4034 -64.4803 -64.3335 -64.38 -64.3419 -64.5571 -64.5712 -64.7342 -64.5993 -64.2325 -63.2684 -61.3595 -58.5939 -54.8096 -50.3815 -45.878 -41.9416 -39.6009 -38.8233 -39.7358 -42.2752 -45.4857 -49.5671 -53.3816 -56.5456 -58.9181 -60.3124 -61.2088 -61.9013 -63.2781 -65.2265 -67.5584 -70.2563 -72.8252 -74.9267 -76.2726 -76.0332 -74.9711 -72.5809 -70.191 -67.6498 -66.0492 -65.5027 -65.5477 -65.7734 -65.9198 -65.9623 -65.835 -65.4355 -64.8872 -64.1953 -63.5762 -62.8702 -62.5228 -62.3133 -62.6651 -63.0562 -63.8568 -64.6435 -65.4507 -65.8499 -65.9937 -66.2957 -65.9732 -65.7196 -65.4932 -65.4973 -65.6595 -65.7873 -66.2775 -66.6771 -66.9361 -66.9036 -67.0918 -67.1669 -67.2887 -67.1579 -66.8506 -66.4004 -65.8351 -65.0595 -64.1784 -63.1028 -62.2731 -62.0055 -61.2173 -60.7046 -60.1934 -59.5473 -59.0226 -58.5626 -58.2883 -58.2031 -58.2447 -58.2138 -58.2511 -58.0701 -58.2297 -58.1647 -58.1209 -58.2695 -58.4763 -58.29 -58.4937 -58.2924 -58.4015 -58.376 -58.5421 -58.4496 -58.8728 -58.9705 -58.5123 -57.0381 -54.6732 -51.9576 -48.7514 -45.5289 -43.1038 -41.641 -41.3353 -41.7605 -43.0018 -44.7506 -47.0475 -48.9429 -50.628 -51.5911 -52.2512 -52.5094 -52.6075 -52.4685 -52.656 -52.8966 -53.1918 -53.4913 -53.9552 -54.4787 -54.92 -55.1357 -55.4732 -55.5902 -55.7239 -55.9864 -56.2249 -56.6825 -57.3983 -58.3915 -59.5562 -60.5999 -62.0556 -62.8947 -63.7347 -64.5027 -65.0797 -65.6 -66.0143 -66.5276 -67.1249 -67.6129 -68.4793 -69.5338 -70.2093 -70.6399 -69.9971 -68.2748 -66.2234 -63.918 -62.4341 -62.2123 -62.9332 -64.5763 -66.5147 -68.0941 -69.2982 -69.6976 -69.8367 -69.6353 -69.8874 -70.3055 -71.0931 -72.3325 -73.9388 -75.9394 -77.7804 -80.0611 -82.1595 -84.1916 -85.0202 -85.4632 -85.3687", "breathiness_timestep": "0.011609977324263039" }, { @@ -138,9 +138,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0", "f0_seq": "133.9 134.2 134.3 134.3 134.3 134.2 134.1 134.2 134.3 134.4 134.3 134.2 133.9 134.2 134.1 134.4 134.0 133.7 133.7 133.7 133.6 133.4 133.7 133.7 133.5 133.4 134.0 134.0 133.8 133.9 133.9 134.4 134.4 133.2 132.5 133.1 134.4 137.5 141.5 147.2 151.3 155.2 157.7 160.1 163.0 164.7 165.7 165.1 164.1 162.3 164.0 163.4 162.4 161.3 160.5 160.4 160.1 161.3 162.1 162.7 163.5 163.6 164.0 165.5 165.5 165.1 161.8 156.5 151.4 158.1 165.5 172.1 180.6 189.5 187.6 186.3 185.4 185.6 185.6 185.5 185.3 184.9 185.3 184.9 185.7 185.3 184.7 183.8 182.1 182.1 182.9 184.8 187.9 188.9 188.8 191.2 194.7 196.0 196.5 196.4 195.8 194.3 193.4 193.5 194.2 194.1 194.2 194.5 194.5 195.3 195.9 194.8 194.3 193.1 189.4 185.3 182.1 181.1 181.0 182.3 183.1 183.5 184.8 186.1 186.8 186.0 184.1 179.3 173.1 169.2 169.3 169.3 170.1 170.0 170.5 171.6 
168.0 165.8 163.6 163.2 163.7 163.3 163.3 164.1 165.5 168.1 169.1 169.1 167.2 163.9 160.8 155.3 150.1 145.4 145.9 148.2 146.3 145.7 144.5 142.5 142.8 143.0 144.1 145.3 145.5 145.8 146.1 146.3 146.0 145.4 145.1 145.0 145.3 145.7 146.2 146.2 146.5 146.2 146.8 147.1 147.7 148.2 148.6 148.5 146.9 144.9 145.2 147.0 149.5 151.8 154.3 156.5 159.2 161.1 164.3 167.2 170.6 172.2 168.1 166.7 165.9 164.8 163.4 162.1 161.7 161.6 161.8 161.5 161.9 161.4 163.6 164.5 164.4 164.8 166.3 166.9 166.3 165.8 165.1 165.2 165.2 164.7 163.4 161.9 161.0 161.0 160.7 159.1 159.5 160.7 162.8 166.2 168.9 171.4 172.9 173.1 172.7 169.8 167.4 164.7 160.8 157.9 156.5 157.1 159.9 164.1 168.1 170.9 173.1 174.6 173.8 171.4 168.2 164.9 159.5 156.2 155.6 155.9 159.3 162.9 167.1 171.2 173.7 175.5 175.2 171.2 167.5 163.8 159.1 156.2 155.3 156.8 159.8 164.5 168.7 171.7 174.1 174.5 172.8 169.0 165.4 160.5 156.4 154.6 155.1 158.1 163.0 166.5 169.9 171.8 173.4 171.7 167.8 162.8 159.3 156.8 155.5 154.7 156.6 161.1 165.7 169.7 172.6 174.6 173.0 169.1 164.0 156.9 154.1 150.3 149.1 151.7 154.6 160.1 169.4 173.7 177.1 178.3 174.9 168.6 160.7 153.1 151.8 151.6 152.4 153.0 153.5 152.9 154.1", "f0_timestep": "0.011609977324263039", - "energy": "0.0014 0.0017 0.0027 0.003 0.0037 0.0043 0.0048 0.0066 0.0073 0.0068 0.0071 0.0073 0.0081 0.0079 0.0073 0.0075 0.0078 0.0073 0.0069 0.006 0.0049 0.0039 0.0034 0.004 0.0035 0.0023 0.0029 0.0057 0.0147 0.0198 0.0218 0.0331 0.0453 0.056 0.0651 0.0677 0.0672 0.0668 0.0643 0.0629 0.0624 0.0616 0.0633 0.0638 0.0642 0.0656 0.0664 0.0658 0.0652 0.0645 0.0635 0.0643 0.0664 0.0693 0.0717 0.0732 0.0724 0.0706 0.0691 0.0678 0.0676 0.0671 0.0676 0.0691 0.0694 0.0659 0.0567 0.0477 0.0329 0.0266 0.027 0.0259 0.0375 0.0483 0.061 0.0719 0.0769 0.0778 0.077 0.0766 0.0752 0.0751 0.0751 0.0746 0.0739 0.074 0.0715 0.0692 0.0682 0.067 0.069 0.0701 0.071 0.0714 0.0711 0.071 0.0717 0.0713 0.072 0.0722 0.0718 0.0721 0.0715 0.0728 0.0721 0.0721 0.0727 0.071 0.0703 0.0695 0.0668 0.0673 0.0645 0.0616 0.0634 0.063 0.0671 0.0711 0.0734 0.0752 0.0752 0.074 0.0717 0.0697 0.0679 0.0653 0.0598 0.0493 0.039 0.0262 0.0153 0.0123 0.0125 0.0107 0.0219 0.0402 0.0492 0.0586 0.0615 0.0601 0.0593 0.0564 0.056 0.053 0.053 0.0541 0.0536 0.054 0.0546 0.0561 0.0551 0.0527 0.0491 0.0434 0.0488 0.0547 0.0635 0.071 0.0696 0.0716 0.0682 0.067 0.0706 0.0692 0.0704 0.0706 0.0697 0.0685 0.0674 0.0662 0.0634 0.0637 0.0611 0.0614 0.0604 0.0596 0.0616 0.0618 0.0626 0.0635 0.0646 0.0644 0.064 0.0604 0.0531 0.0427 0.0306 0.0222 0.0189 0.021 0.0222 0.021 0.0181 0.0148 0.0089 0.0224 0.039 0.0498 0.06 0.0645 0.0643 0.0652 0.0657 0.0664 0.0676 0.0679 0.0666 0.0668 0.0641 0.0637 0.0645 0.0643 0.0663 0.068 0.0698 0.0701 0.0704 0.0697 0.069 0.0702 0.0715 0.0719 0.0726 0.0722 0.0707 0.0705 0.0698 0.0689 0.0689 0.0677 0.0667 0.0667 0.0667 0.0674 0.0675 0.0665 0.0664 0.0657 0.0647 0.0636 0.0602 0.0566 0.0527 0.0496 0.0472 0.0478 0.0482 0.0486 0.0497 0.05 0.0511 0.0522 0.0508 0.0481 0.0454 0.0426 0.0398 0.0376 0.0362 0.0371 0.0378 0.039 0.0412 0.0433 0.0448 0.0453 0.0447 0.0419 0.0382 0.035 0.0321 0.0307 0.0296 0.031 0.0315 0.0343 0.0365 0.0381 0.0399 0.0401 0.0373 0.0347 0.0309 0.0272 0.0253 0.0255 0.0265 0.0274 0.0294 0.0316 0.0332 0.0344 0.0343 0.0322 0.0292 0.0261 0.0228 0.02 0.0187 0.0183 0.0186 0.0195 0.0217 0.0226 0.0236 0.0229 0.0213 0.0193 0.0171 0.0153 0.0134 0.0114 0.0113 0.0114 0.0121 0.0132 0.0136 0.014 0.0138 0.0129 0.0101 0.0073 0.0044 0.0015 0.0006 0.0003 0.0006 0.0 0.0", + "energy": "-59.9544 -56.4974 -54.1024 -51.9043 -50.2592 -48.9099 -47.8119 
-47.3643 -46.637 -46.232 -45.7724 -45.8199 -46.0032 -45.9755 -46.5538 -47.0068 -47.5336 -47.9974 -48.2819 -48.3502 -48.4421 -48.3372 -48.505 -48.999 -48.98 -48.4707 -46.9585 -44.1943 -40.9468 -36.5966 -32.882 -29.4611 -27.1338 -25.4859 -24.5315 -24.1134 -23.9785 -23.6818 -23.8249 -23.8755 -23.8721 -24.0372 -23.9757 -23.7894 -23.8657 -23.4998 -23.6521 -23.4514 -23.7068 -23.9632 -23.9073 -23.9649 -23.9561 -23.6902 -23.5264 -23.3505 -23.4394 -23.4371 -23.448 -23.4532 -23.661 -23.7919 -24.0436 -24.6134 -25.2049 -26.2541 -27.6812 -28.8415 -30.0223 -30.4808 -30.2463 -29.3508 -28.015 -26.1809 -24.7556 -23.4745 -22.4759 -22.0437 -22.2911 -22.0458 -22.3021 -22.3632 -22.329 -22.3645 -22.4947 -22.5054 -22.3931 -22.5591 -22.7426 -22.7181 -22.5507 -22.5025 -22.5165 -22.5999 -22.5498 -22.9713 -23.1623 -23.2887 -23.4204 -23.5563 -23.684 -23.4822 -23.2312 -23.288 -22.9184 -22.8194 -22.7572 -22.4116 -22.4351 -22.3995 -22.5135 -22.6844 -23.1061 -23.1082 -23.3895 -23.4226 -23.6263 -23.1159 -22.9109 -22.5826 -22.4978 -22.5028 -22.7358 -23.2107 -23.9235 -25.2356 -27.0291 -29.003 -31.3454 -33.3503 -35.1539 -35.8025 -35.1249 -33.5836 -31.3562 -29.1295 -27.1836 -25.7048 -24.9247 -24.5819 -24.4958 -24.4943 -24.6669 -24.611 -24.8237 -24.9206 -25.0764 -25.3645 -25.4917 -26.0242 -26.3845 -26.5687 -26.6195 -26.2708 -26.1346 -25.47 -24.748 -24.0641 -23.6872 -23.7171 -23.5296 -23.654 -23.4201 -23.4329 -23.526 -23.5208 -23.6748 -23.7718 -23.8712 -23.8649 -23.7827 -23.7978 -23.8139 -23.635 -23.7598 -23.8711 -24.0025 -23.6248 -23.8072 -23.8426 -24.1966 -24.5158 -25.4166 -26.6209 -28.2048 -30.1043 -31.7648 -32.966 -33.5471 -34.4207 -34.8473 -35.1025 -35.3609 -35.0253 -34.0462 -32.7322 -30.6214 -28.0997 -25.932 -24.6185 -23.8971 -23.5738 -23.2851 -23.1853 -23.1251 -22.9786 -22.7334 -22.6956 -22.4824 -22.306 -22.4351 -22.2607 -22.2646 -22.1521 -22.3215 -22.0485 -22.203 -22.3874 -22.2534 -22.3625 -22.2813 -22.4237 -22.5772 -22.5966 -22.7646 -22.976 -23.3738 -23.5505 -23.6003 -23.3922 -23.3406 -23.3925 -23.4167 -23.3848 -23.4415 -23.5079 -23.7263 -23.9401 -24.3544 -24.6898 -25.2725 -25.6531 -26.1688 -26.584 -26.684 -26.3899 -26.4305 -26.1552 -26.1218 -25.8242 -25.7868 -25.6819 -25.8789 -25.9537 -25.8788 -26.0196 -26.3516 -26.2622 -26.2846 -26.2012 -26.1988 -25.9518 -26.0034 -25.8266 -25.7974 -25.6688 -26.0541 -26.0033 -26.4055 -26.5223 -26.712 -26.889 -26.8341 -26.6458 -26.3247 -26.2922 -26.1451 -26.1108 -26.0998 -25.9499 -26.1658 -26.2625 -26.4205 -26.5968 -27.1836 -27.7695 -27.8522 -28.2838 -28.3482 -28.2764 -27.9751 -27.8672 -27.7537 -27.885 -28.359 -28.7547 -29.21 -29.6461 -30.2272 -30.229 -30.2021 -30.1061 -29.9969 -29.4712 -29.4703 -29.4999 -29.9741 -30.5446 -31.402 -32.4858 -33.4626 -34.1279 -34.6061 -34.8403 -34.5867 -34.5736 -34.3927 -34.354 -34.8679 -36.2429 -38.1835 -41.8774 -45.3963 -48.9079 -52.1077 -54.053 -53.5036 -51.6994 -49.9105", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0007 0.0019 0.0027 0.0032 0.0037 0.0042 0.0052 0.0065 0.0073 0.0077 0.0075 0.0074 0.0073 0.0071 0.0075 0.0074 0.0074 0.0076 0.007 0.0062 0.0049 0.0036 0.0029 0.0023 0.0021 0.0019 0.0021 0.0043 0.0059 0.0073 0.0076 0.007 0.006 0.0046 0.0034 0.0026 0.0023 0.0019 0.0016 0.0011 0.0008 0.0008 0.0009 0.0007 0.0007 0.0005 0.0005 0.0003 0.0004 0.0006 0.0003 0.0005 0.0005 0.0011 0.0012 0.0014 0.0015 0.0013 0.0014 0.0013 0.0011 0.001 0.0006 0.0005 0.0002 0.0004 0.0005 0.004 0.0102 0.0167 0.0234 0.0247 0.0231 0.019 0.0082 0.0026 0.0015 0.0013 0.0013 0.0012 0.0013 0.0014 0.0013 0.0013 0.0013 0.0012 0.0012 0.0012 0.0011 0.0011 0.0011 
0.0008 0.0009 0.0009 0.0012 0.0007 0.001 0.001 0.0012 0.0013 0.0013 0.0014 0.0013 0.0013 0.0013 0.0015 0.0013 0.0014 0.0014 0.0015 0.0016 0.0017 0.0016 0.0018 0.0018 0.0019 0.0018 0.0019 0.0018 0.002 0.002 0.0022 0.0025 0.0027 0.0025 0.0022 0.0023 0.0044 0.0088 0.011 0.0123 0.0131 0.0123 0.0116 0.0101 0.0076 0.0051 0.0028 0.0022 0.0017 0.0017 0.0014 0.0012 0.0011 0.0007 0.0005 0.0005 0.0006 0.0005 0.0004 0.0004 0.0004 0.0003 0.0002 0.0003 0.0005 0.0006 0.0006 0.0004 0.0008 0.0008 0.0009 0.0009 0.0009 0.0007 0.0008 0.0005 0.0004 0.0004 0.0006 0.0003 0.0004 0.0005 0.0006 0.0007 0.0006 0.0005 0.0003 0.0004 0.0004 0.0003 0.0005 0.0005 0.0007 0.0017 0.0066 0.0113 0.0143 0.0175 0.0202 0.0205 0.021 0.0194 0.0161 0.0115 0.0082 0.0044 0.0022 0.0015 0.0016 0.0019 0.0018 0.0019 0.0019 0.0018 0.0017 0.0019 0.0019 0.002 0.002 0.0018 0.0018 0.0017 0.002 0.0018 0.002 0.0022 0.002 0.0019 0.0018 0.0022 0.0022 0.0024 0.0023 0.002 0.0019 0.0018 0.0018 0.0017 0.0018 0.0019 0.002 0.0022 0.0023 0.0024 0.0025 0.0022 0.0023 0.0023 0.0023 0.0021 0.0019 0.0017 0.0017 0.0017 0.0017 0.0015 0.0018 0.0016 0.0012 0.0015 0.0015 0.0015 0.0017 0.0018 0.0016 0.0017 0.0013 0.0013 0.0014 0.0012 0.0012 0.0012 0.0013 0.0012 0.0011 0.0012 0.0014 0.0013 0.0012 0.0009 0.0011 0.0011 0.0008 0.001 0.0007 0.0009 0.0009 0.0009 0.0007 0.001 0.0009 0.001 0.0007 0.0005 0.0004 0.0005 0.0006 0.0006 0.0006 0.0007 0.0009 0.0009 0.0011 0.0009 0.0008 0.0005 0.0006 0.0005 0.0004 0.0004 0.0002 0.0005 0.0006 0.0007 0.0008 0.0006 0.0006 0.0007 0.0005 0.0002 0.0002 0.0002 0.0002 0.0001 0.0001 0.0003 0.0003 0.0002 0.0002 0.0002 0.0004 0.0005 0.0003 0.0004 0.0001 0.0 0.0 0.0", + "breathiness": "-61.3265 -58.2349 -54.8128 -51.7422 -49.3541 -47.6554 -46.3462 -45.717 -45.3057 -45.4701 -45.2281 -45.3327 -45.338 -45.5031 -45.8681 -46.3337 -46.8998 -47.4954 -48.2763 -49.2788 -50.4607 -52.0827 -54.4433 -56.7448 -58.6813 -59.2327 -58.48 -56.3998 -53.6939 -50.5156 -48.2097 -47.5053 -48.2483 -49.321 -51.1131 -52.932 -54.5577 -56.2509 -57.6365 -58.9372 -59.8381 -60.4415 -61.2115 -61.3286 -61.7848 -62.3358 -62.8502 -63.4805 -63.7315 -63.7378 -63.4426 -62.3204 -61.0055 -59.3933 -58.041 -56.9706 -56.4409 -56.473 -56.9128 -57.856 -58.8936 -60.5567 -62.1318 -62.7358 -61.5483 -58.3524 -53.7652 -48.3303 -42.8624 -38.2038 -35.4984 -35.1526 -37.5089 -40.6745 -44.7484 -48.6572 -52.269 -55.0154 -56.4971 -57.2731 -57.5426 -57.5362 -57.5359 -57.1311 -56.8664 -56.3209 -56.2114 -56.0996 -56.2193 -56.7876 -57.5378 -57.9429 -58.4978 -58.6587 -58.5847 -58.6092 -58.5589 -58.1493 -57.9601 -57.9455 -57.7356 -57.737 -57.9443 -58.2692 -58.3922 -58.5066 -58.3382 -58.2238 -57.6855 -57.0815 -56.5561 -55.8731 -55.1504 -54.2246 -53.8881 -53.4768 -53.3044 -53.1164 -53.3248 -53.1389 -53.1081 -53.1383 -53.0494 -52.6537 -52.2508 -50.8208 -48.7948 -46.5166 -43.9477 -41.4103 -39.7327 -38.94 -38.9454 -39.7968 -41.5267 -43.777 -46.5213 -48.9691 -51.2623 -53.141 -54.7351 -55.9709 -57.071 -58.3814 -59.5877 -61.1127 -62.7734 -63.807 -64.9667 -65.7251 -66.29 -66.6077 -66.5934 -66.0715 -65.5685 -64.7972 -63.7669 -62.8222 -62.1027 -61.3783 -60.5166 -60.0942 -59.8243 -59.6262 -59.6304 -59.7971 -60.1904 -60.2259 -60.4783 -60.4605 -60.6425 -60.6908 -60.7197 -61.1311 -61.2661 -61.9888 -62.5529 -62.9071 -63.1666 -63.0382 -62.0081 -59.521 -56.0279 -51.7509 -47.213 -42.9969 -39.4355 -36.8065 -35.5389 -34.7492 -34.6247 -35.1537 -36.3683 -38.4229 -41.357 -44.6714 -48.2102 -51.0643 -53.3456 -54.9455 -55.8271 -56.1229 -56.2831 -56.1655 -56.3766 -56.4145 -56.3262 -55.835 -55.6032 -55.5529 -55.3521 -55.1605 -55.268 
-55.211 -55.2091 -55.4629 -55.5587 -55.6935 -55.6831 -55.5447 -55.2281 -55.2169 -55.092 -55.3324 -55.4648 -55.9004 -56.2447 -56.551 -56.5405 -56.5867 -56.4247 -56.2029 -56.1286 -56.1458 -56.3556 -56.6867 -56.9605 -57.3248 -57.4528 -57.8432 -58.0748 -58.4594 -58.7967 -59.0035 -59.0293 -59.0376 -58.8828 -58.4697 -58.205 -57.7149 -57.5365 -56.723 -56.301 -56.0976 -55.8614 -56.0227 -56.1745 -56.5616 -56.8237 -57.1711 -57.1739 -57.3035 -57.1633 -57.057 -57.0662 -56.673 -56.5468 -56.5368 -56.6286 -56.7397 -56.6384 -56.6877 -56.1507 -55.7307 -54.8214 -54.2701 -53.5829 -53.2422 -53.047 -53.2028 -53.7841 -54.6269 -55.3974 -56.3811 -57.3381 -57.9224 -58.5597 -58.6293 -58.8673 -58.5853 -58.1801 -57.9239 -57.5261 -57.9849 -58.7463 -59.797 -61.1317 -62.6696 -63.8124 -64.4835 -64.598 -64.1251 -63.3704 -62.4693 -61.9143 -61.5651 -61.8972 -62.598 -63.8799 -65.4237 -66.7508 -68.165 -69.1722 -69.4816 -69.4075 -68.8566 -67.8858 -66.375 -64.4695 -62.2554 -60.2459 -58.5416 -58.0324 -58.6161 -59.9562 -61.4231 -62.8023 -62.9842 -63.859", "breathiness_timestep": "0.011609977324263039" }, { @@ -154,9 +154,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "134.4 134.5 133.9 133.8 133.9 133.9 134.1 133.8 134.0 133.8 133.8 134.0 134.0 134.3 134.2 134.1 133.5 133.8 133.9 133.8 133.9 134.1 133.9 134.0 133.9 133.7 133.7 133.8 133.8 133.6 133.8 133.6 133.6 133.6 133.6 133.5 133.7 133.8 133.8 133.9 133.4 133.5 133.5 133.5 133.4 133.3 132.6 132.2 131.6 133.3 140.1 147.8 156.9 166.8 177.7 188.1 199.9 215.4 230.2 240.4 247.0 250.3 250.6 249.5 245.4 240.4 236.1 229.7 226.4 225.7 226.0 226.4 225.5 223.5 220.5 221.2 220.6 219.9 219.9 220.0 221.4 221.7 222.3 221.0 217.4 210.0 201.2 208.7 216.8 224.7 232.8 238.9 239.4 231.2 224.7 224.9 225.0 225.7 225.9 225.7 223.7 222.0 222.2 223.5 228.0 236.0 244.3 249.7 253.1 253.7 253.0 250.0 246.4 243.8 242.4 244.2 247.8 250.3 250.9 250.6 250.3 249.8 248.9 246.5 241.6 234.9 227.6 229.1 237.7 246.7 256.5 267.6 276.0 267.2 261.8 262.4 259.4 256.0 251.3 250.2 247.3 248.1 245.8 245.9 246.1 244.2 244.4 243.8 237.8 226.7 218.2 219.9 221.1 223.3 226.4 220.9 219.9 219.7 219.0 217.5 216.6 216.8 218.1 220.4 221.7 222.1 220.9 213.1 199.6 193.0 200.2 207.5 214.1 223.3 225.3 222.4 221.8 222.5 223.3 223.4 222.8 222.7 221.4 220.1 218.7 217.5 216.5 218.3 224.1 231.8 240.7 246.5 247.7 247.4 246.7 245.0 243.9 242.8 242.2 239.3 240.4 242.6 245.5 248.1 250.5 250.7 250.0 249.4 246.6 247.2 244.9 262.4 282.8 303.4 324.6 335.8 324.5 316.7 308.8 304.4 299.0 296.6 294.0 294.1 293.3 291.8 290.8 289.8 291.1 292.2 294.5 295.0 292.0 286.9 277.2 262.6 255.4 249.1 248.5 248.8 249.4 251.3 252.3 251.9 250.6 250.8 247.8 244.5 241.7 242.2 241.6 243.5 245.1 249.1 252.3 255.3 258.0 257.0 257.1 260.3 262.6 262.4 262.3 259.6 254.1 247.5 239.0 233.0 231.6 230.8 232.0 236.3 244.1 253.0 260.0 264.3 265.9 264.5 263.3 258.7 249.6 238.3 230.4 223.5 220.6 221.3 228.9 239.1 250.2 257.5 262.0 262.8 260.5 255.4 251.2 249.3 245.5 241.5 235.8 232.1 227.4 222.8 217.0 212.5 207.5 202.8 198.5 193.9 188.7 184.3 180.3 176.4 173.1 169.3 166.2 162.9 159.9 158.0 154.8 156.1 161.6 172.9 184.4 198.5 209.8 222.2 234.2 244.1 250.4 250.7 248.4 243.7 238.5 233.9 236.6 238.7 242.8 245.6 251.3 249.5 243.5 234.4 229.2 222.8 220.4 218.8 218.6 218.2 218.0 220.7 223.2 222.4 218.6 212.3 204.3 195.0 194.3 195.3 196.6 199.7 201.4 196.6 196.4 196.4 196.5 196.9 196.7 197.0 198.4 198.2 197.8 197.8 197.1 195.1 189.5 180.2 180.1 179.8 205.3 206.0 203.5 200.5 198.5 197.1 196.4 196.4 195.7 194.9 195.4 194.9 195.8 197.0 198.4 198.6 196.8 191.6 
181.1 179.8 182.9 189.4 197.0 190.1 187.4 181.0 175.4 171.0 168.4 166.5 164.7 164.2 164.2 164.6 164.4 163.0 160.8 154.8 161.0 167.7 175.4 184.4 194.8 202.8 197.6 196.8 196.8 196.6 196.5 196.9 196.8 196.8 196.0 195.6 194.5 193.1 189.8 183.1 189.6 196.8 205.0 212.6 221.2 229.5 235.8 227.7 224.8 222.0 220.5 218.6 218.3 217.7 217.9 218.5 220.0 221.4 222.2 222.3 218.1 213.0 204.6 200.9 195.7 193.7 191.5 191.4 192.0 193.8 195.5 197.1 197.3 197.2 198.9 199.8 200.9 198.9 195.1 187.8 196.6 205.7 215.4 226.2 237.2 248.7 260.8 252.7 250.2 250.8 249.7 247.9 246.5 245.5 243.7 243.0 242.0 241.4 241.2 241.8 242.4 244.1 245.0 245.7 245.8 246.7 247.6 248.0 247.8 248.3 248.0 247.0 245.6 244.2 244.4 244.4 242.2 242.0 243.2 243.8 244.8 245.9 247.5 247.5 248.4 249.6 249.7 249.3 248.7 246.3 243.6 242.2 242.5 243.5 244.4 244.4 244.7 245.5 245.7 245.5 244.7 245.8 245.3 244.4 244.6 244.9 246.0 247.2 249.0 250.6 251.0 251.2 250.4 249.2 247.4 245.1 243.0 241.0 239.8 239.2 240.3 242.6 245.7 250.5 255.3 257.9 258.8 258.4 255.6 252.2 246.9 243.4 242.7 242.0 241.8 243.8 245.8 247.7 248.4 249.8 250.0 249.6 250.2 248.7 247.6 246.9 247.9 247.9 250.2 253.6 259.9 258.4 256.2 254.8 254.3 253.9 250.4 248.5 248.5 247.8 247.6 247.5 247.7 248.5 249.7 247.9", "f0_timestep": "0.011609977324263039", - "energy": "0.0003 0.0007 0.0006 0.0008 0.0006 0.0004 0.0001 0.0002 0.0001 0.0004 0.0006 0.0011 0.0009 0.0013 0.0021 0.0019 0.0017 0.0023 0.0019 0.0021 0.0024 0.0029 0.0032 0.0038 0.0032 0.0038 0.0031 0.0038 0.0038 0.0042 0.0037 0.0028 0.0028 0.0026 0.0027 0.0021 0.0021 0.002 0.002 0.0009 0.0007 0.0009 0.0008 0.0009 0.0039 0.0199 0.0334 0.044 0.0522 0.0598 0.0662 0.0721 0.0766 0.0771 0.0734 0.0685 0.063 0.0596 0.061 0.0637 0.0676 0.0733 0.0776 0.0808 0.0824 0.0837 0.0831 0.0834 0.0835 0.0814 0.0807 0.0795 0.078 0.0767 0.0763 0.0746 0.076 0.0765 0.077 0.0779 0.0788 0.0783 0.0767 0.073 0.0633 0.0509 0.0356 0.0217 0.0166 0.0182 0.0182 0.0301 0.0523 0.0677 0.0802 0.0902 0.0898 0.0893 0.088 0.083 0.0792 0.0758 0.0703 0.0679 0.0652 0.0655 0.0672 0.0696 0.0741 0.0759 0.0767 0.0766 0.0738 0.0715 0.0712 0.0714 0.0724 0.0731 0.0733 0.074 0.0754 0.0765 0.074 0.0671 0.056 0.0421 0.0264 0.0134 0.0066 0.004 0.0079 0.0265 0.0508 0.0658 0.0788 0.0858 0.0857 0.0844 0.0822 0.0786 0.0765 0.0751 0.075 0.0742 0.0729 0.0747 0.0752 0.0726 0.0637 0.051 0.0321 0.0161 0.0127 0.0247 0.0446 0.058 0.0684 0.0762 0.0771 0.0775 0.0784 0.0782 0.0772 0.0765 0.0764 0.0732 0.0667 0.0558 0.0425 0.0259 0.0124 0.0073 0.0089 0.0352 0.0593 0.0743 0.0859 0.091 0.0928 0.0927 0.0933 0.0925 0.0895 0.0855 0.0784 0.0719 0.0623 0.0553 0.0517 0.051 0.0552 0.0612 0.0662 0.0711 0.072 0.0728 0.0719 0.0691 0.0679 0.0668 0.0663 0.0675 0.0666 0.0663 0.0654 0.0633 0.0594 0.0537 0.044 0.0319 0.0194 0.0081 0.0064 0.0054 0.0136 0.0424 0.0584 0.0697 0.0795 0.0787 0.0773 0.0788 0.0796 0.0805 0.0828 0.0822 0.0821 0.0819 0.0813 0.0814 0.0823 0.0823 0.0818 0.0813 0.0825 0.085 0.0867 0.0874 0.0866 0.0867 0.0866 0.0857 0.0853 0.0825 0.0773 0.0742 0.0688 0.0603 0.0487 0.0341 0.0159 0.0093 0.0047 0.0052 0.0074 0.028 0.0498 0.0678 0.0808 0.0874 0.0896 0.0894 0.087 0.0841 0.0822 0.0764 0.0737 0.0718 0.069 0.069 0.0663 0.0579 1.0 0.0583 0.0677 0.0652 0.0609 0.057 0.055 0.0523 0.0486 0.0451 0.0391 0.0326 0.0292 0.0267 0.0257 0.0254 0.0252 0.0251 0.0256 0.027 0.0262 0.0234 0.0195 0.0135 0.0065 0.0036 0.0028 0.0035 0.0042 0.0046 0.0046 0.0045 0.0037 0.0031 0.0025 0.0017 0.0012 0.0014 0.0021 0.0017 0.0021 0.0048 0.0069 0.0102 0.019 0.0426 0.0608 0.0716 0.0798 0.0782 0.0726 0.0682 0.0626 0.0589 0.0612 0.0632 
0.0665 0.0698 0.0687 0.0685 0.0609 0.0512 0.0403 0.027 0.0211 0.0205 0.0433 0.0595 0.0725 0.0796 0.0772 0.0747 0.0709 0.0696 0.0667 0.0653 0.0651 0.066 0.0706 0.0724 0.0691 0.064 0.0512 0.0359 0.023 0.0077 0.0108 0.0371 0.0516 0.0612 0.0695 0.0673 0.0652 0.064 0.063 0.0637 0.0641 0.0669 0.0688 0.0696 0.0667 0.0578 0.0465 0.0334 0.0199 0.0191 0.0389 0.0588 0.0693 0.0747 0.0733 0.0669 0.0623 0.0621 0.0599 0.0586 0.0566 0.0568 0.0586 0.0615 0.0624 0.058 0.0498 0.0376 0.0234 0.0102 0.011 0.0337 0.0465 0.0544 0.0612 0.0588 0.0567 0.057 0.0562 0.058 0.0592 0.0609 0.0627 0.062 0.0596 0.0526 0.041 0.0295 0.0146 0.0129 0.0117 0.0214 0.0465 0.063 0.0756 0.0855 0.0858 0.085 0.0853 0.0829 0.0818 0.08 0.0779 0.0738 0.0684 0.0586 0.0467 0.0351 0.0237 0.0222 0.0221 0.0223 0.0229 0.0437 0.056 0.0653 0.0717 0.0673 0.0652 0.0631 0.0632 0.0639 0.0656 0.0683 0.0681 0.0711 0.0719 0.0718 0.0715 0.0702 0.0708 0.0722 0.0719 0.0718 0.0741 0.0734 0.0765 0.0791 0.0792 0.0811 0.0819 0.0798 0.0799 0.0767 0.0711 0.0649 0.0538 0.0413 0.0293 0.02 0.0179 0.0176 0.0162 0.0284 0.0467 0.0584 0.0706 0.0764 0.077 0.0758 0.0741 0.0722 0.0703 0.0691 0.0685 0.0679 0.0691 0.0703 0.0735 0.0725 0.0748 0.0755 0.0763 0.0794 0.0798 0.0821 0.0816 0.0817 0.082 0.0816 0.0809 0.0803 0.0794 0.0779 0.0776 0.0774 0.0771 0.078 0.0794 0.0797 0.0791 0.0787 0.0787 0.0784 0.0781 0.0785 0.0788 0.078 0.0779 0.0769 0.0772 0.0769 0.0782 0.0791 0.0788 0.0789 0.0781 0.0779 0.0779 0.0775 0.0769 0.0764 0.0763 0.0773 0.0789 0.0797 0.0802 0.0803 0.0808 0.0815 0.0826 0.0826 0.081 0.0796 0.0764 0.0737 0.0719 0.0699 0.0676 0.0669 0.0649 0.0654 0.064 0.0624 0.0619 0.0593 0.0572 0.0541 0.05 0.0469 0.0438 0.0413 0.041 0.0399 0.0408 0.04 0.0401 0.0395 0.0383 0.0387 0.039 0.0382 0.0377 0.0345 0.0313 0.0285 0.0246 0.0228 0.0224 0.0072 0.4223 0.0089 0.006 0.0027 0.0013 0.002 0.0001 0.0003 0.0 0.0 0.0 0.0 0.0 0.0", + "energy": "-96.0 -94.3094 -91.4866 -87.593 -83.7124 -80.1443 -76.7181 -73.3169 -70.0865 -66.3284 -63.5008 -60.5257 -58.2108 -56.4144 -54.681 -53.6093 -52.706 -51.8749 -51.2954 -50.8493 -50.4417 -49.845 -49.4263 -49.1795 -48.875 -48.8167 -48.7717 -49.1033 -49.2032 -49.6492 -49.7477 -50.107 -50.5461 -51.0187 -52.0365 -53.2795 -54.6915 -56.4358 -57.7396 -58.6593 -57.803 -55.2013 -50.6697 -45.3961 -39.0419 -33.0644 -27.9217 -24.8297 -22.9372 -21.9994 -21.6064 -21.6795 -21.6807 -22.2252 -22.6057 -23.2264 -23.5747 -23.8081 -23.7341 -23.5387 -23.1131 -22.4831 -21.9055 -21.5075 -21.0767 -20.9267 -20.9925 -21.2772 -21.322 -21.3312 -21.4209 -21.6098 -21.2952 -21.4722 -21.3591 -21.3085 -21.3206 -21.6574 -21.5534 -21.8364 -21.858 -22.3334 -23.1435 -24.8258 -26.6609 -28.8362 -30.9749 -32.8539 -33.4175 -32.7396 -31.3989 -29.3983 -27.1279 -24.9274 -23.4634 -22.3246 -21.8604 -21.9051 -22.1782 -22.4839 -22.9073 -23.2415 -23.4812 -23.5142 -23.6022 -23.2775 -23.0808 -22.8177 -22.4549 -22.5543 -22.4247 -22.5145 -22.4891 -22.5867 -22.6718 -22.6534 -22.5508 -22.466 -22.4108 -22.4942 -22.708 -23.0823 -24.0613 -25.3593 -27.7398 -31.6162 -36.4248 -40.9584 -43.4326 -43.7385 -41.6823 -37.7677 -32.5333 -27.7852 -24.2671 -22.5665 -21.8949 -21.8696 -21.9017 -21.8548 -21.5516 -21.6329 -21.3323 -21.2752 -21.3572 -21.4797 -22.5516 -24.3195 -26.6704 -29.0188 -31.2933 -32.3513 -32.2923 -31.1726 -28.9956 -26.8619 -24.8301 -23.5164 -22.9586 -22.6113 -22.5098 -22.4587 -22.5374 -22.6804 -23.3375 -24.644 -26.7988 -29.6147 -32.5282 -34.6042 -35.5079 -34.8751 -32.5111 -29.3533 -26.1912 -23.9147 -22.2221 -21.4901 -21.0064 -20.656 -20.7627 -21.0411 -21.4755 -22.0904 -22.8754 -23.673 -24.3966 
-24.8451 -24.9288 -24.8663 -24.4166 -24.0147 -23.7793 -23.3292 -23.052 -23.0945 -22.9931 -22.854 -22.8534 -22.7822 -22.8158 -22.8699 -23.2052 -23.563 -24.64 -26.0256 -28.1872 -30.8897 -34.3772 -37.505 -39.7763 -40.4036 -39.4142 -36.8748 -33.1605 -29.4727 -25.9237 -23.5103 -22.0914 -21.2229 -21.148 -20.7352 -20.7635 -20.7697 -20.7249 -20.8605 -21.0169 -21.2934 -21.4264 -21.5461 -21.794 -21.88 -21.6129 -21.5168 -21.3764 -21.1076 -20.8444 -20.952 -21.0237 -21.1402 -21.2607 -21.6573 -21.8406 -22.4044 -23.5623 -24.9062 -27.2305 -31.0204 -35.0994 -39.1748 -42.5405 -44.0155 -43.5456 -40.9404 -37.0381 -32.3696 -28.3362 -25.779 -23.5267 -22.6248 -21.9412 -21.7669 -21.9932 -22.2826 -22.6001 -23.0183 -23.3716 -23.7563 -24.186 -24.5261 -24.9193 -24.8793 -24.914 -25.1953 -25.2274 -25.3731 -25.8529 -26.3837 -26.9708 -27.6503 -28.4625 -29.3892 -30.1614 -31.3621 -32.5686 -33.5461 -34.3843 -35.0504 -35.6719 -36.1154 -36.6717 -37.9856 -39.7849 -42.4572 -45.4958 -48.3312 -50.7322 -51.9741 -52.533 -51.8899 -51.2068 -50.4966 -50.3749 -50.9832 -52.6534 -54.7873 -57.1491 -59.342 -60.2941 -59.5633 -56.9921 -53.4562 -48.5186 -43.6782 -38.8766 -34.8675 -31.6468 -28.7209 -26.4567 -24.5755 -23.7697 -23.2059 -23.1856 -23.2743 -23.069 -23.0366 -22.9308 -22.9276 -22.9328 -23.2874 -23.885 -24.8236 -26.3616 -27.9219 -29.3304 -29.8152 -29.7674 -29.0716 -27.6017 -25.9094 -24.479 -23.3372 -23.1264 -23.0659 -23.2326 -23.6086 -23.4549 -23.4006 -23.3399 -23.1065 -23.012 -23.4532 -24.3447 -26.0364 -28.9081 -31.3041 -32.9874 -33.5583 -32.5992 -30.9567 -28.3621 -25.7497 -23.991 -23.642 -23.2454 -23.3457 -23.3022 -23.2959 -23.6512 -23.5033 -23.7108 -24.0837 -25.1663 -26.5935 -28.0406 -29.2811 -29.5699 -29.149 -27.9214 -26.496 -24.9229 -23.8912 -23.5067 -23.6679 -23.8387 -24.101 -24.35 -24.5816 -24.7405 -24.4973 -24.5763 -24.7133 -25.134 -26.1943 -27.7701 -29.3356 -30.5819 -30.8162 -30.2292 -29.1632 -27.5731 -25.9215 -24.5888 -24.0311 -24.0658 -24.3242 -24.197 -24.3185 -24.274 -24.2825 -24.6029 -25.0865 -26.139 -27.9494 -30.2926 -32.1667 -33.4004 -33.2121 -32.3707 -30.145 -27.5372 -25.3552 -23.1395 -22.199 -21.6892 -21.8502 -21.7848 -21.8918 -22.1733 -22.3725 -22.6845 -23.4263 -24.3424 -25.7907 -27.0389 -28.9681 -30.4717 -30.9309 -30.587 -29.6789 -28.002 -26.2783 -24.8825 -24.0363 -23.2902 -23.1651 -23.034 -23.1563 -23.2354 -23.1736 -23.0723 -23.0305 -22.5941 -22.6641 -22.3776 -22.2924 -22.3727 -22.2988 -22.0695 -21.9348 -21.7091 -21.5049 -21.3021 -20.8582 -21.0457 -20.5669 -20.7773 -20.9628 -20.8248 -21.1862 -21.4747 -22.1572 -23.3171 -24.7564 -26.8687 -28.9967 -31.3806 -33.2379 -33.629 -32.7745 -31.3742 -28.9555 -26.4404 -24.3809 -22.4727 -21.7897 -21.3844 -21.3925 -21.6221 -22.1254 -22.1394 -22.4532 -22.6467 -22.7413 -22.7894 -22.7501 -22.6329 -22.4831 -22.2452 -22.0661 -21.9257 -21.775 -21.6527 -21.775 -21.6643 -21.6077 -22.0243 -21.8202 -22.0283 -21.9755 -21.9954 -21.8907 -22.0036 -21.8643 -22.0231 -21.9376 -21.9986 -21.8793 -21.682 -21.612 -21.4028 -21.3655 -21.3146 -21.291 -21.3788 -21.4606 -21.3979 -21.5754 -21.3581 -21.3943 -21.1746 -21.0962 -21.1905 -21.0177 -21.3985 -21.5049 -21.6329 -21.5835 -21.6411 -21.734 -21.6144 -21.7485 -21.4774 -21.4128 -21.3326 -20.9606 -21.1287 -21.0763 -21.3575 -21.5728 -21.7628 -22.3901 -22.8265 -23.7498 -24.4077 -24.9543 -25.647 -26.0988 -26.2178 -26.1102 -25.6717 -25.2077 -25.1223 -25.1478 -25.123 -25.5269 -26.0628 -26.9476 -27.5021 -27.836 -28.4804 -28.6477 -28.9468 -29.0354 -29.065 -29.3314 -29.788 -30.2143 -30.8451 -31.3604 -32.1164 -32.5067 -33.273 -34.022 -35.4948 -37.9737 -41.1373 
-45.4677 -49.9762 -54.2961 -57.9349 -60.6471 -62.5381 -63.8931 -64.4426 -64.7795 -64.8878 -64.7184 -63.8492 -62.0426 -58.8358 -57.3239", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0001 0.0003 0.0002 0.0001 0.0002 0.0003 0.0002 0.0002 0.0001 0.0003 0.0004 0.0004 0.0005 0.0005 0.0006 0.0006 0.0008 0.001 0.0013 0.0016 0.0021 0.0027 0.003 0.0033 0.0033 0.0038 0.004 0.0042 0.0042 0.0041 0.0037 0.0034 0.0031 0.0025 0.0022 0.0024 0.0022 0.0019 0.0016 0.0012 0.0008 0.0004 0.0005 0.0011 0.0048 0.009 0.0102 0.0106 0.0096 0.006 0.0027 0.0022 0.0025 0.0023 0.0019 0.0014 0.001 0.0006 0.0006 0.0003 0.0004 0.0003 0.0002 0.0001 0.0004 0.0004 0.0004 0.0006 0.0006 0.0008 0.0012 0.0015 0.0015 0.0014 0.0014 0.0016 0.0017 0.0018 0.0016 0.0015 0.0015 0.0016 0.0021 0.0023 0.0026 0.0056 0.0084 0.0123 0.0163 0.0188 0.0197 0.0177 0.014 0.0087 0.0036 0.0025 0.0024 0.0022 0.0018 0.0018 0.0017 0.0019 0.0015 0.0019 0.002 0.002 0.0019 0.0019 0.0015 0.0014 0.0014 0.0013 0.0013 0.0014 0.0013 0.0011 0.0011 0.0013 0.0013 0.0011 0.0012 0.001 0.0012 0.0014 0.001 0.0006 0.001 0.002 0.0017 0.0016 0.0014 0.0009 0.001 0.0009 0.0007 0.0006 0.0005 0.0006 0.0006 0.0005 0.0005 0.0006 0.0005 0.0006 0.0003 0.0007 0.0012 0.0015 0.0019 0.0039 0.008 0.0119 0.0142 0.0144 0.0127 0.0099 0.0054 0.0027 0.0021 0.0016 0.0016 0.0013 0.0019 0.0018 0.0016 0.0017 0.0021 0.0021 0.0026 0.0025 0.0021 0.004 0.0056 0.0059 0.0057 0.0049 0.0022 0.0011 0.0009 0.0012 0.001 0.0009 0.0011 0.001 0.0011 0.0012 0.0013 0.0012 0.001 0.0009 0.001 0.001 0.001 0.001 0.0009 0.001 0.0009 0.0008 0.0008 0.0008 0.0007 0.0007 0.0007 0.0007 0.0007 0.0009 0.0009 0.0007 0.0009 0.0022 0.0033 0.0039 0.0046 0.0045 0.0038 0.0028 0.0014 0.0009 0.0009 0.001 0.0009 0.0009 0.0008 0.0009 0.0008 0.0005 0.0007 0.0007 0.0007 0.0007 0.0007 0.0008 0.0005 0.0012 0.0016 0.0017 0.0017 0.0017 0.0013 0.0011 0.001 0.0008 0.0008 0.0006 0.0006 0.0008 0.0007 0.0008 0.0009 0.0017 0.0022 0.0032 0.0051 0.0061 0.0059 0.0048 0.0031 0.0017 0.0012 0.0016 0.0018 0.0017 0.0018 0.0021 0.0021 0.0023 0.0026 0.0026 0.0031 0.0034 0.0037 0.0051 0.2838 0.006 0.0042 0.0027 0.0023 0.0018 0.0016 0.0016 0.0014 0.0013 0.0015 0.0012 0.001 0.0009 0.0008 0.0007 0.0008 0.0008 0.001 0.001 0.001 0.0007 0.0011 0.0015 0.0016 0.002 0.0019 0.0027 0.0036 0.0044 0.005 0.005 0.0045 0.0034 0.0025 0.0018 0.0012 0.0005 0.0006 0.0006 0.0017 0.0041 0.0068 0.0084 0.0094 0.0087 0.0074 0.0058 0.0038 0.0035 0.0033 0.003 0.0029 0.0023 0.0017 0.0011 0.001 0.0012 0.0013 0.0012 0.0032 0.0072 0.0121 0.0167 0.0179 0.0173 0.0133 0.0061 0.0041 0.0032 0.0031 0.0025 0.0022 0.0017 0.0014 0.0013 0.001 0.0007 0.0007 0.0006 0.0006 0.0007 0.0006 0.0009 0.0017 0.0036 0.0055 0.006 0.0059 0.0047 0.0021 0.0014 0.0013 0.0011 0.0008 0.0008 0.0006 0.0005 0.0004 0.0003 0.0004 0.0003 0.0004 0.0005 0.0002 0.0005 0.001 0.0015 0.0018 0.0017 0.0016 0.0016 0.0015 0.0017 0.0013 0.0014 0.0014 0.0013 0.0011 0.0009 0.0009 0.0008 0.0005 0.0005 0.0024 0.0051 0.0054 0.0053 0.0048 0.0032 0.0019 0.002 0.002 0.0017 0.0016 0.0015 0.0014 0.0014 0.0012 0.0008 0.0007 0.0005 0.0021 0.0047 0.0078 0.0108 0.0134 0.0135 0.0115 0.0089 0.0038 0.0014 0.0011 0.001 0.0011 0.0009 0.0009 0.0008 0.0006 0.0008 0.0009 0.0027 0.0049 0.0078 0.0144 0.0185 0.021 0.0207 0.0164 0.0118 0.0058 0.0028 0.0022 0.0019 0.0017 0.0016 0.002 0.0019 0.0018 0.0013 0.0012 0.001 0.001 0.0009 0.0008 0.0008 0.0008 0.0007 0.0009 0.0009 0.0007 0.001 0.0011 0.0013 0.0013 0.0016 0.0014 0.0015 0.0016 0.0017 0.0016 0.0019 0.0035 0.0079 0.0118 0.0147 0.0168 0.0172 0.0154 0.0135 0.0101 0.0061 0.0031 0.0021 
0.0017 0.0017 0.0017 0.0017 0.0017 0.0019 0.0019 0.0015 0.0015 0.0014 0.0014 0.0015 0.0015 0.0015 0.0017 0.0016 0.0017 0.0018 0.0017 0.0016 0.0015 0.0015 0.0015 0.0016 0.0017 0.0016 0.0017 0.0016 0.0017 0.0016 0.0018 0.0017 0.0018 0.0017 0.0017 0.0017 0.0017 0.0017 0.0021 0.0019 0.0017 0.0016 0.0013 0.0014 0.0015 0.0017 0.0017 0.0017 0.0017 0.0017 0.0016 0.0016 0.0017 0.0016 0.0015 0.0016 0.0016 0.0015 0.0015 0.0016 0.0016 0.0019 0.0018 0.0019 0.0019 0.0017 0.0018 0.0018 0.0016 0.0015 0.0014 0.0013 0.0014 0.0017 0.0015 0.0013 0.0012 0.001 0.0008 0.001 0.0008 0.0009 0.0008 0.001 0.0007 0.0007 0.0006 0.0008 0.0007 0.0006 0.0007 0.0008 0.0006 0.0009 0.0007 0.0007 0.0006 0.0005 0.0006 0.0008 0.0011 0.0017 0.0577 0.001 0.0022 0.0017 0.0008 0.0008 0.0006 0.0002 0.0002 0.0002 0.0001 0.0003 0.0002 0.0", + "breathiness": "-96.0 -96.0 -95.0884 -92.2524 -88.4698 -84.9465 -80.939 -76.4849 -71.6702 -66.9052 -63.1381 -60.0484 -57.5892 -55.8429 -54.6579 -53.7123 -52.896 -52.2144 -51.6089 -50.9458 -50.1861 -49.717 -49.1855 -48.8367 -48.5799 -48.4008 -48.3384 -48.3678 -48.7104 -49.1017 -49.6066 -50.2842 -50.7714 -51.2139 -51.8211 -52.9828 -54.6101 -56.8691 -59.063 -60.3045 -59.5893 -57.3869 -53.4624 -49.2564 -45.2754 -42.4361 -42.3456 -43.7269 -46.0084 -48.3241 -50.5917 -52.2261 -53.3582 -54.4393 -55.6651 -57.5732 -59.7209 -61.7065 -63.353 -64.2174 -64.451 -64.053 -63.4421 -62.8385 -62.1768 -61.5894 -60.6909 -59.9732 -58.9666 -57.7939 -57.0478 -55.8644 -55.4673 -55.0964 -55.1204 -55.3011 -55.2799 -55.3541 -55.2464 -54.9976 -54.7155 -53.4709 -51.8788 -49.3661 -46.4869 -43.5089 -40.8304 -38.5027 -37.2989 -37.0284 -38.4179 -40.3924 -43.2769 -46.3238 -48.9102 -51.3081 -52.876 -53.9053 -54.1143 -54.3953 -54.5589 -54.4407 -54.2057 -54.2951 -54.3404 -54.4664 -54.9216 -55.2034 -55.698 -56.1617 -56.7367 -57.3505 -58.0117 -58.4867 -58.8267 -59.2596 -59.3483 -59.5805 -59.0798 -59.0104 -59.1316 -59.1567 -59.2543 -59.0333 -58.7074 -58.4895 -58.3625 -58.1606 -57.6768 -57.3516 -57.1147 -56.8419 -56.8169 -57.3653 -58.3405 -59.4162 -60.6102 -61.6609 -62.2367 -62.6477 -62.8894 -63.0254 -62.9144 -62.6081 -61.3677 -59.6461 -57.1913 -54.2723 -50.8144 -47.4864 -44.5148 -42.0168 -40.4142 -40.4122 -41.7591 -44.048 -46.8698 -49.7619 -52.2996 -54.3829 -55.6204 -56.2321 -56.1468 -55.8652 -55.8319 -55.9473 -55.8325 -55.2563 -54.0931 -52.3212 -49.818 -47.6635 -46.3727 -45.8951 -47.5874 -49.9418 -52.5521 -55.0512 -57.0531 -58.5316 -59.0658 -59.2315 -59.1545 -59.177 -59.0653 -59.0494 -58.8612 -58.8437 -58.9703 -59.2067 -59.1639 -59.3707 -59.4222 -59.8033 -59.9755 -60.151 -60.1516 -60.2606 -60.2458 -60.4637 -60.7614 -61.2204 -61.8603 -62.46 -62.8507 -62.3974 -61.2372 -59.1621 -56.2755 -53.6919 -51.1931 -50.1957 -49.7251 -51.006 -52.9078 -54.7774 -56.4839 -57.875 -58.797 -59.1391 -58.9583 -58.7142 -58.8237 -58.9102 -59.1005 -59.3312 -59.6466 -60.014 -60.1928 -60.2347 -60.0028 -59.7276 -58.8282 -57.8865 -57.238 -56.5934 -56.618 -57.1834 -57.9195 -58.9272 -60.2184 -61.3767 -62.3142 -62.4721 -62.172 -61.1186 -59.7367 -58.3843 -56.6416 -54.7917 -53.0496 -51.5544 -50.4577 -50.0589 -50.2403 -51.295 -52.4072 -53.7683 -54.8499 -55.3139 -55.2309 -54.6725 -54.3313 -54.0551 -54.1014 -54.0775 -53.9525 -53.8438 -53.5734 -53.2917 -53.193 -53.1459 -53.4116 -53.7882 -54.2797 -54.9812 -55.5287 -56.0191 -56.2494 -56.0903 -56.1033 -56.3356 -56.9393 -57.7372 -58.8363 -59.9883 -61.1059 -62.2556 -63.0842 -63.7208 -63.6581 -63.0532 -62.3751 -61.5183 -61.1458 -60.285 -59.3617 -58.0694 -56.2094 -54.5003 -52.3228 -50.9368 -50.0251 -50.1574 -51.2499 -53.1929 
-56.4099 -59.1262 -60.9686 -61.0853 -59.6957 -56.4738 -52.4807 -48.0489 -44.8441 -42.6981 -41.7481 -42.2367 -43.0292 -44.2285 -45.6795 -47.251 -48.8662 -50.5349 -52.1947 -53.8 -55.3105 -55.9038 -55.9912 -54.4756 -52.1625 -49.0516 -45.1704 -41.608 -38.5574 -36.6936 -36.5616 -37.8596 -40.1401 -42.8963 -46.1316 -49.1898 -51.5588 -53.7507 -55.221 -56.797 -58.6318 -60.0515 -61.3598 -62.6194 -63.6157 -63.9172 -63.1845 -61.171 -58.3886 -55.274 -52.0095 -48.9648 -47.5761 -47.6098 -49.2869 -51.5612 -54.235 -56.5729 -58.5831 -60.1346 -61.2605 -62.3819 -63.6315 -64.935 -66.4498 -67.8921 -69.646 -70.8718 -71.1137 -70.1119 -68.1204 -65.0334 -61.866 -59.0618 -56.9255 -56.0303 -55.3877 -55.379 -55.5306 -55.6501 -56.0784 -56.9485 -57.8492 -58.956 -60.3587 -61.4581 -62.0468 -61.5672 -59.74 -57.0099 -53.8609 -50.7353 -48.3515 -47.4324 -47.6883 -48.9049 -50.6221 -52.2311 -53.5669 -54.7429 -55.607 -56.5274 -57.5262 -58.7202 -59.5308 -59.7425 -58.5995 -55.884 -52.1769 -47.6303 -43.579 -40.5227 -39.4247 -40.2743 -42.8607 -46.3124 -50.1471 -53.7282 -56.4194 -58.4949 -59.571 -59.8412 -59.8169 -59.5889 -58.3387 -56.0311 -52.7838 -48.9694 -44.6272 -40.717 -37.7348 -35.8237 -35.6034 -36.7859 -39.3621 -42.1662 -45.256 -48.3285 -50.6931 -52.4418 -53.5794 -54.8431 -55.7174 -56.7598 -57.5167 -58.2858 -59.0077 -59.5047 -60.064 -60.1344 -60.2859 -60.2067 -60.2439 -60.1972 -59.7828 -59.3158 -58.6862 -58.2901 -57.5819 -57.3017 -56.9311 -57.0248 -56.8634 -56.8595 -56.3874 -54.9203 -52.9506 -49.9927 -46.7389 -43.2304 -40.1943 -37.9677 -36.6354 -36.5109 -37.5086 -39.2965 -41.9805 -44.7524 -47.9791 -50.2859 -52.3472 -53.9609 -55.053 -55.6941 -56.4477 -57.114 -57.77 -58.1774 -58.4893 -58.9793 -59.0953 -59.3455 -59.3341 -59.063 -59.0306 -58.551 -58.0814 -57.6582 -57.5959 -57.2671 -56.9154 -56.9169 -56.8491 -57.0436 -57.299 -57.9049 -58.1391 -58.4784 -58.5681 -58.3861 -58.123 -57.8873 -57.4313 -57.3372 -57.2659 -56.8742 -56.938 -56.4582 -56.2968 -56.0027 -55.893 -55.7934 -55.9137 -56.2238 -56.5388 -56.7769 -57.2014 -57.382 -57.6168 -57.6266 -57.6083 -57.7081 -57.6318 -57.5922 -57.7364 -57.6837 -57.8539 -57.8429 -57.8481 -57.7208 -57.8161 -57.6589 -57.5079 -57.5248 -57.3807 -57.4643 -57.3505 -57.3867 -57.5991 -57.5118 -57.4584 -57.5625 -57.7531 -58.193 -59.181 -59.9235 -60.8211 -61.5712 -62.4329 -63.2358 -63.8347 -64.3692 -64.7146 -65.0974 -65.3948 -65.7603 -66.2145 -66.7592 -67.4511 -68.3875 -69.075 -69.9462 -70.5831 -71.0974 -71.3741 -71.5572 -70.6916 -68.9802 -66.9586 -64.5929 -62.4639 -60.9293 -60.4328 -61.1203 -61.9961 -63.4248 -64.8708 -66.2132 -67.3648 -68.5579 -69.7185 -70.6682 -71.2543 -70.5794 -69.5379 -69.0751", "breathiness_timestep": "0.011609977324263039" }, { @@ -170,9 +170,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "149.7 149.7 149.5 149.5 149.3 149.5 149.2 149.4 149.6 149.8 149.7 149.7 149.7 150.0 149.7 149.6 149.4 149.6 149.1 149.7 149.6 149.3 149.3 149.0 149.0 148.6 148.5 148.9 148.4 148.3 148.4 148.6 148.2 150.8 161.2 173.4 189.4 203.1 219.8 234.6 247.3 251.5 254.5 253.8 250.6 247.5 244.9 238.4 226.7 237.7 249.2 260.3 270.6 283.2 288.6 293.7 294.9 295.2 295.5 294.2 292.7 292.2 291.7 290.5 286.4 279.7 270.5 273.4 278.7 283.0 287.7 289.5 294.8 300.4 297.7 294.7 296.0 297.5 297.6 297.8 297.7 297.1 297.2 297.1 297.6 298.2 298.9 298.7 298.2 296.5 296.7 299.4 304.6 312.7 322.6 330.9 335.4 337.1 337.4 336.3 333.8 331.3 329.1 328.1 327.9 327.1 327.6 328.2 328.9 329.5 330.4 330.8 331.2 332.4 332.7 332.3 332.2 330.6 331.0 331.6 333.3 331.6 330.8 330.3 328.5 328.0 328.7 329.7 331.1 331.3 332.3 333.1 
334.6 334.9 335.6 335.1 334.9 333.8 331.3 328.9 327.3 326.6 326.9 326.8 327.4 328.9 331.5 335.1 338.7 341.1 341.7 343.3 344.5 342.8 340.0 335.3 328.6 323.2 320.3 320.2 321.6 325.0 328.4 333.8 338.1 343.3 346.2 348.4 346.6 341.6 333.8 327.1 322.2 320.1 321.8 324.4 328.3 334.2 337.6 339.2 340.1 341.5 340.4 337.5 332.8 327.3 319.3 306.4 301.7 302.0 300.4 299.9 298.7 297.6 295.8 296.0 295.0 291.4 290.2 291.5 290.9 293.1 294.5 296.1 295.6 295.4 293.1 292.3 291.3 292.2 291.5 290.0 289.3 287.0 290.3 296.6 303.2 313.4 324.1 328.3 329.7 329.8 328.9 329.1 330.2 329.0 328.1 326.3 316.4 300.9 279.4 271.9 265.3 259.5 253.3 246.2 239.4 230.4 219.7 215.2 217.0 218.1 220.5 221.3 220.1 219.3 217.4 213.4 209.3 206.1 204.1 206.1 210.3 216.8 222.7 226.7 229.9 231.7 232.9 231.0 226.8 221.5 216.7 212.8 210.2 210.6 212.9 216.5 220.5 225.0 227.9 229.2 229.2 228.5 225.2 221.7 217.5 214.4 211.3 210.6 211.1 212.8 215.9 219.5 224.9 229.2 231.2 231.7 232.2 229.1 223.7 216.5 209.4 205.9 203.1 203.6 206.6 213.1 218.6 224.9 230.9 235.5 238.2 237.6 233.4 227.5 219.3 209.4 202.8 201.1 203.6 208.7 214.1 220.4 227.6 229.6 230.2 229.5 225.2 222.8 222.0 224.5 226.8 229.0 230.5 231.8 233.3 234.6 237.0 240.1 246.7 254.1 258.5 264.3 260.5 257.1 255.4 253.0 251.0 248.4 246.6 246.4 245.4 244.6 244.1 243.8 244.7 245.1 245.6 246.4 245.5 244.6 243.3 242.1 242.4 243.7 243.0 239.0 233.2 229.0 226.6 221.9 217.6 217.4 218.0 218.9 220.5 221.5 222.2 223.6 223.7 223.3 222.3 218.7 210.6 216.1 222.4 228.8 234.9 241.4 248.6 258.0 250.4 244.6 243.6 243.7 244.2 244.4 245.1 245.3 245.6 245.9 247.2 247.2 247.4 247.5 245.8 246.5 246.9 249.0 251.6 256.2 259.3 259.4 261.3 262.2 263.3 262.7 262.0 261.1 260.3 259.8 260.5 261.2 262.1 265.4 269.5 277.0 285.1 297.0 303.0 305.0 304.3 302.0 297.0 292.4 290.9 291.5 292.2 293.4 295.0 297.2 299.4 299.6 298.4 296.5 295.4 292.4 290.9 289.6 289.0 289.5 290.4 290.1 290.0 290.8 291.4 293.0 293.1 293.8 294.1 294.8 294.5 296.6 298.7 298.5 298.3 296.9 293.3 287.4 282.1 276.6 270.0 263.1 257.0 249.2 246.6 245.7 246.3 245.8 247.0 248.9 249.7 249.1 248.9 246.3 244.6 239.2 230.7 221.2 212.3 215.9 216.8 217.4 216.6 216.3 218.7 220.0 220.1 220.3 220.5 220.2 219.9 219.9 219.3 219.6 219.5 219.6 220.3 220.1 219.0 218.8 218.8 219.4 219.2 220.6 220.4 220.1 219.0 218.7 218.9 218.8 219.3 219.1 218.5 213.9 207.2 214.6 221.1 227.6 233.7 241.3 247.3 254.9 259.9 250.4 249.0 247.7 247.2 246.8 246.5 247.1 246.9 245.5 244.6 244.3 240.3 234.4 228.6 224.4 219.8 214.7 210.3 205.9 201.7 198.3 193.0 193.4 194.3 197.0 198.9 198.7 197.6 197.5 194.7 191.0 187.6 185.7 185.2 185.6 188.3 193.9 199.9 203.9 206.7 207.5 206.7 203.7 198.4 192.6 186.8 182.4 181.6 182.8 186.0 191.0 198.0 204.1 207.7 209.3 209.7 208.0 203.9 198.3 190.7 184.1 181.4 181.5 183.3 187.4 193.2 200.4 205.4 207.9 209.4 208.7 205.7 199.7 192.5 183.6 178.8 177.7 179.2 183.4 187.1 192.9 198.8 203.9 207.2 207.9 207.5 204.0 198.3 190.5 181.8 177.8 177.5 178.8 182.8 185.7 190.5 198.8 203.6 205.0 205.4 202.3 198.0 193.0 190.1 188.5 186.2 185.3 185.7 184.8 184.7 185.2", "f0_timestep": "0.011609977324263039", - "energy": "0.0009 0.0009 0.0012 0.002 0.0021 0.0025 0.0029 0.0037 0.0032 0.0038 0.0044 0.0046 0.0044 0.0048 0.0052 0.0039 0.0035 0.0032 0.0026 0.0028 0.0021 0.0025 0.002 0.0015 0.0005 0.0009 0.0008 0.0012 0.0004 0.0023 0.0044 0.0206 0.037 0.0484 0.061 0.069 0.0744 0.0779 0.0783 0.0809 0.0821 0.0846 0.0859 0.0867 0.087 0.0814 0.0699 0.0552 0.0371 0.0215 0.0142 0.0117 0.02 0.051 0.0703 0.0862 0.0989 0.1006 0.1029 0.103 0.1016 0.101 0.0985 0.0925 0.0799 0.0624 0.0409 0.0171 0.0082 
0.0121 0.022 0.023 0.0294 0.0564 0.0715 0.0842 0.0966 0.099 0.1016 0.1034 0.103 0.1036 0.1031 0.1057 0.1043 0.1055 0.1038 0.0984 0.0935 0.0836 0.077 0.0721 0.0709 0.074 0.0776 0.0801 0.0817 0.0821 0.0815 0.0833 0.0852 0.0867 0.0869 0.0866 0.0853 0.0841 0.0839 0.0837 0.083 0.0817 0.0818 0.082 0.0846 0.0858 0.0879 0.0902 0.0911 0.0916 0.091 0.0911 0.0897 0.0894 0.089 0.0885 0.0892 0.0896 0.09 0.0904 0.091 0.092 0.0924 0.0925 0.0925 0.0927 0.0941 0.095 0.0962 0.0959 0.0944 0.093 0.0921 0.0923 0.0937 0.0954 0.0956 0.0944 0.0931 0.0908 0.0878 0.0859 0.0827 0.0805 0.0797 0.0798 0.0818 0.0839 0.0861 0.0864 0.0842 0.0803 0.0769 0.0735 0.0717 0.0702 0.0687 0.068 0.0696 0.0704 0.0739 0.0763 0.0763 0.0761 0.073 0.0687 0.0647 0.0597 0.058 0.0579 0.0575 0.0566 0.0555 0.0542 0.0533 0.0535 0.0553 0.0532 0.0478 0.0383 0.0247 0.0108 0.0097 0.0131 0.0171 0.0188 0.0185 0.0307 0.0541 0.0694 0.0821 0.089 0.089 0.0881 0.0879 0.086 0.0831 0.0839 0.0849 0.0866 0.09 0.0895 0.0892 0.0884 0.0854 0.0851 0.0831 0.0826 0.0827 0.0849 0.0878 0.091 0.0942 0.0954 0.095 0.0919 0.0893 0.0858 0.0852 0.0832 0.0784 0.0692 0.054 0.0368 0.0258 0.0221 0.0245 0.0219 0.025 0.0414 0.0578 0.0728 0.0841 0.0907 0.093 0.0936 0.0939 0.0903 0.0878 0.0845 0.0801 0.0782 0.0753 0.0756 0.0755 0.0772 0.0787 0.0795 0.0808 0.0815 0.0821 0.0836 0.0846 0.0842 0.0821 0.0783 0.0747 0.0716 0.0695 0.0696 0.0694 0.0711 0.0749 0.0768 0.0784 0.078 0.0764 0.0754 0.0742 0.0731 0.0714 0.0691 0.0658 0.0639 0.0624 0.0617 0.0633 0.0655 0.0677 0.0694 0.0694 0.0684 0.0656 0.0613 0.0566 0.0511 0.0474 0.0437 0.0418 0.0412 0.0426 0.0467 0.0512 0.0549 0.0567 0.057 0.0562 0.0535 0.0497 0.0454 0.041 0.0375 0.0345 0.0327 0.0337 0.0356 0.0383 0.039 0.0391 0.0375 0.0322 0.0266 0.02 0.0121 0.0066 0.0056 0.0053 0.0063 0.0061 0.0061 0.0062 0.0058 0.0056 0.0044 0.0068 0.0303 0.0481 0.0695 0.0831 0.0932 0.0978 0.0969 0.0975 0.0953 0.0932 0.0913 0.0903 0.0902 0.0913 0.0902 0.0899 0.089 0.0857 0.0857 0.0843 0.0842 0.085 0.0842 0.0847 0.0842 0.0842 0.0838 0.0863 0.0897 0.0898 0.0931 0.0921 0.0908 0.0915 0.0918 0.0916 0.0904 0.0874 0.0821 0.0756 0.0647 0.0503 0.0347 0.0197 0.015 0.0152 0.0138 0.0191 0.045 0.0644 0.0803 0.0909 0.0923 0.0908 0.0874 0.0861 0.0818 0.0789 0.0771 0.0753 0.0776 0.0794 0.0819 0.0835 0.084 0.0827 0.0809 0.0812 0.0871 0.0942 0.1007 0.1059 0.1074 0.1051 0.1059 0.1048 0.1006 0.1002 0.0954 0.0897 0.0859 0.0772 0.0722 0.0691 0.0687 0.0723 0.0753 0.0775 0.0795 0.0812 0.0814 0.0821 0.0822 0.0808 0.0815 0.0811 0.0804 0.081 0.0799 0.0801 0.0799 0.0805 0.0812 0.0814 0.0846 0.0851 0.0899 0.093 0.0857 0.9791 0.0945 0.1011 0.0955 0.0895 0.0886 0.0884 0.0894 0.0906 0.0932 0.0922 0.094 0.0956 0.0964 0.0995 0.0987 0.0988 0.1003 0.0966 0.0979 0.0979 0.0962 0.0958 0.0921 0.0883 0.0858 0.0859 0.0853 0.0855 0.0862 0.0881 0.0894 0.0904 0.0909 0.0851 0.0755 0.0619 0.0437 0.0246 0.018 0.0436 0.0622 0.0794 0.0941 0.098 0.1002 0.0976 0.0953 0.0944 0.0917 0.0895 0.0885 0.087 0.0876 0.0888 0.0884 0.0872 0.0869 0.0858 0.0849 0.0839 0.0852 0.0856 0.087 0.0877 0.087 0.087 0.0865 0.0872 0.0868 0.086 0.0825 0.0742 0.0631 0.0484 0.0318 0.02 0.0206 0.0231 0.0241 0.023 0.0366 0.0549 0.0686 0.0806 0.0864 0.0867 0.0862 0.0866 0.0856 0.0827 0.0806 0.0773 0.0739 0.0712 0.0615 0.05 0.0402 0.0297 0.0272 0.0286 0.0258 0.0296 0.0445 0.058 0.0674 0.077 0.0781 0.0788 0.0801 0.0793 0.0788 0.0767 0.074 0.0694 0.0658 0.0619 0.0597 0.0599 0.0604 0.0636 0.0663 0.0688 0.0714 0.0722 0.0724 0.0699 0.0649 0.0591 0.0511 0.0462 0.0423 0.0416 0.0413 0.0433 0.0473 0.0515 0.0544 0.0572 0.0574 0.056 0.0525 0.0476 
0.0429 0.038 0.0347 0.0314 0.0306 0.0308 0.0334 0.037 0.0399 0.0431 0.045 0.0449 0.0433 0.0396 0.0354 0.0304 0.0277 0.026 0.0245 0.0238 0.0234 0.0231 0.0257 0.0278 0.0288 0.0297 0.0293 0.0269 0.0237 0.0201 0.0171 0.0142 0.0138 0.0141 0.0143 0.0143 0.0145 0.0144 0.0139 0.0138 0.0137 0.0129 0.0121 0.0093 0.0067 0.0034 0.0012 0.0 0.0 0.0 0.0", + "energy": "-69.7716 -67.4253 -64.0391 -61.2484 -58.5513 -56.3528 -54.4246 -53.0137 -51.5748 -50.5218 -49.3598 -48.4889 -47.6152 -47.2428 -46.9001 -47.1551 -47.5841 -48.0028 -49.221 -50.5246 -52.1032 -54.1453 -55.7999 -57.8098 -59.3621 -59.8461 -58.2623 -55.2829 -50.2948 -44.4977 -38.4069 -32.7739 -27.8815 -24.8121 -23.0526 -21.6547 -21.3935 -21.2538 -21.2573 -21.2334 -21.5432 -21.6272 -22.0846 -22.6384 -23.6054 -25.6002 -28.0885 -31.3158 -34.0447 -36.4626 -36.7328 -35.7138 -33.1534 -29.9616 -26.6688 -23.7424 -22.1135 -21.1326 -20.6544 -20.92 -21.0996 -21.5725 -22.5667 -24.1308 -27.0217 -29.7283 -32.6049 -34.6972 -35.957 -35.8253 -34.4693 -32.0034 -29.3681 -26.8042 -24.4117 -22.4627 -21.2675 -20.7761 -20.4639 -19.9069 -19.7632 -20.1093 -20.3445 -20.275 -20.5367 -20.7949 -21.0169 -21.2421 -21.4771 -21.7614 -22.0397 -22.551 -22.8732 -22.8361 -23.0645 -22.9518 -22.7762 -22.5004 -22.5253 -22.5022 -22.3044 -22.3609 -22.248 -21.9661 -22.1673 -21.8861 -21.7658 -21.8103 -21.9405 -21.938 -21.9056 -21.9128 -21.5593 -21.4282 -21.7011 -21.5343 -21.5499 -21.8234 -21.9148 -21.8569 -21.9392 -21.5892 -21.6424 -21.7781 -21.6913 -21.771 -21.7482 -21.6673 -21.7811 -21.6871 -21.6663 -21.5864 -21.569 -21.5807 -21.5979 -21.4041 -21.2865 -21.0575 -20.8281 -20.6862 -20.4907 -20.3713 -20.3063 -20.1118 -20.1347 -19.8552 -20.0096 -20.0315 -20.1169 -20.4207 -20.5397 -20.5727 -20.8145 -20.6865 -20.8016 -20.7365 -20.5657 -20.3927 -20.5422 -20.559 -20.8776 -21.0277 -21.5842 -21.8867 -22.5176 -22.6921 -22.8881 -22.9688 -22.7915 -22.421 -22.1518 -21.6812 -21.7135 -21.6302 -21.9164 -22.2081 -22.7536 -23.2082 -23.4561 -23.7174 -23.8679 -23.521 -23.409 -23.454 -23.8591 -24.9294 -27.0429 -29.5158 -31.6645 -33.2532 -34.2935 -34.6901 -33.9276 -32.2348 -30.112 -27.847 -25.6097 -23.7091 -22.1657 -21.34 -20.7894 -20.6665 -20.6421 -20.4911 -20.556 -20.0758 -20.4218 -20.0997 -20.0411 -20.0904 -19.9326 -20.1912 -20.2348 -20.4547 -20.4767 -20.3571 -20.4073 -19.9646 -20.0107 -19.8145 -19.6492 -19.6938 -19.764 -19.8236 -19.928 -20.1305 -20.2495 -21.2934 -22.4756 -24.2504 -26.8021 -28.9327 -30.7133 -31.9886 -31.7581 -30.7634 -29.1827 -26.8332 -24.8978 -22.9468 -21.9237 -21.1238 -20.8066 -20.7183 -20.8245 -21.1125 -21.3687 -21.5751 -22.1152 -22.2607 -22.482 -22.4162 -22.1022 -21.8944 -21.6007 -21.3086 -20.8697 -20.5706 -20.8091 -20.7542 -20.8828 -20.9846 -21.1545 -21.2875 -21.3064 -21.1506 -20.8849 -20.7225 -20.2922 -20.2352 -19.9949 -19.8051 -19.6879 -19.5542 -19.3474 -19.6441 -19.5654 -19.6468 -19.889 -20.0684 -20.1683 -20.102 -20.4869 -20.5736 -20.8977 -20.926 -21.1316 -21.3884 -21.5254 -21.7634 -21.8004 -22.2879 -22.6356 -23.0511 -23.3517 -24.034 -24.4571 -24.8313 -25.2813 -25.5709 -25.8859 -25.9338 -26.4391 -26.6826 -27.2202 -28.0936 -29.1484 -30.0142 -31.0006 -31.8873 -32.83 -33.4551 -33.7278 -34.1928 -34.3191 -35.1017 -36.0054 -38.0539 -40.0456 -42.5566 -44.7574 -46.5214 -47.7103 -48.2508 -48.2798 -48.3445 -48.4084 -48.2576 -47.7206 -45.8846 -43.2183 -39.0205 -34.1743 -29.5425 -25.612 -22.2982 -20.5787 -19.7423 -19.5883 -19.8481 -20.2502 -20.6665 -20.8572 -21.1279 -21.3128 -21.4735 -21.6776 -21.7511 -21.8677 -21.7843 -21.7984 -21.9576 -21.7045 -21.9144 -21.9483 -21.782 -21.8446 -21.6991 
-21.6571 -21.5993 -21.4505 -21.6597 -21.4434 -21.3631 -21.1419 -20.8699 -20.8012 -21.1014 -21.0896 -21.4393 -22.058 -22.9637 -24.7259 -26.4797 -28.7141 -31.2493 -33.4716 -34.9009 -34.4951 -33.1001 -30.7783 -27.9236 -24.9997 -23.1388 -21.8426 -21.2583 -21.1379 -21.0953 -21.2943 -21.5163 -21.6757 -21.8409 -21.7924 -21.8785 -21.7897 -21.689 -21.7854 -21.5166 -21.5411 -21.3016 -20.9149 -20.3756 -20.0994 -19.5974 -19.0476 -19.0026 -18.8576 -19.1991 -19.4518 -19.8276 -20.3238 -21.0204 -21.9734 -22.669 -22.9672 -23.431 -23.3204 -23.1645 -22.6127 -22.1965 -21.8509 -21.4409 -21.0251 -20.8 -21.1585 -21.3297 -21.575 -22.0484 -21.9959 -22.3644 -22.4073 -22.4251 -22.3176 -22.1228 -22.024 -21.9838 -22.0663 -22.122 -22.3747 -22.631 -22.7415 -22.9145 -23.2552 -23.6361 -23.8828 -24.1769 -24.4078 -24.1123 -23.9961 -24.0552 -23.9187 -23.8589 -23.7683 -23.7726 -23.6985 -23.3133 -23.016 -22.8491 -22.8274 -22.5953 -22.4162 -22.1602 -21.8736 -21.8779 -21.4929 -21.4684 -21.5097 -21.5327 -21.7881 -21.7983 -21.821 -21.716 -21.704 -21.5864 -21.8286 -22.8723 -24.4512 -26.736 -28.699 -30.0705 -30.1363 -29.3081 -27.8405 -25.3104 -23.1496 -21.6618 -20.8319 -20.7207 -20.4188 -20.2723 -20.1657 -20.1766 -20.0461 -20.1456 -20.2382 -20.2571 -20.3744 -20.3589 -20.3241 -20.1551 -20.1693 -20.0802 -20.3559 -20.3301 -20.4229 -20.5682 -20.7601 -20.7446 -21.0195 -20.8255 -20.932 -21.0174 -21.5954 -22.6539 -24.0413 -26.0251 -27.7973 -29.6565 -31.0657 -31.7832 -31.7544 -30.3672 -28.5116 -26.5567 -24.686 -22.8747 -21.7177 -21.1738 -20.7469 -20.6923 -21.0399 -21.2822 -21.46 -21.5537 -22.1134 -22.3969 -23.4327 -24.4469 -25.787 -27.3743 -28.9626 -30.0569 -30.3887 -29.9417 -28.7923 -27.1179 -25.4199 -23.8978 -22.4904 -21.8655 -21.8003 -21.7116 -22.0165 -22.299 -22.6487 -23.0387 -23.4013 -23.6941 -24.0399 -24.3019 -24.4159 -24.3043 -24.1811 -23.7767 -23.3374 -22.9178 -22.8152 -22.8717 -23.0226 -23.8607 -24.4644 -25.2307 -25.9537 -26.5679 -26.9697 -26.8823 -26.7042 -26.4722 -26.1184 -26.0087 -25.9239 -25.9142 -25.9729 -26.349 -26.822 -27.5931 -28.3187 -28.9995 -29.6152 -29.9819 -30.0032 -29.7457 -29.6086 -29.2202 -28.7854 -28.739 -29.0821 -29.5212 -30.333 -31.1882 -32.179 -32.8071 -33.2347 -33.3331 -33.284 -33.1272 -32.8125 -32.8047 -32.7381 -32.7523 -33.1829 -33.3749 -33.9929 -34.6285 -35.2461 -36.0495 -36.9015 -37.6329 -38.4881 -39.0363 -39.1892 -38.8316 -38.3616 -37.9105 -37.3518 -37.4249 -38.2357 -39.942 -42.8567 -46.5664 -50.6124 -54.1943 -56.6746 -57.0695 -56.0401 -54.7119", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0004 0.0008 0.0011 0.0016 0.0026 0.0034 0.0039 0.0042 0.0043 0.0044 0.0045 0.0047 0.0051 0.0052 0.0054 0.0049 0.0046 0.0043 0.0037 0.0034 0.0027 0.0022 0.0018 0.0016 0.0012 0.0012 0.0013 0.001 0.0009 0.0018 0.0047 0.018 0.0268 0.0313 0.0312 0.0272 0.0212 0.009 0.0031 0.0031 0.0028 0.0023 0.0016 0.0016 0.0015 0.0016 0.0018 0.0029 0.0043 0.006 0.0078 0.0084 0.0082 0.0071 0.0054 0.0028 0.0026 0.0022 0.0021 0.0018 0.0017 0.0016 0.0019 0.002 0.0021 0.0023 0.0024 0.0024 0.0063 0.0164 0.0265 0.0295 0.0292 0.0239 0.014 0.0045 0.0023 0.0019 0.0017 0.0016 0.0014 0.0014 0.0011 0.0013 0.0012 0.0016 0.0014 0.0014 0.0015 0.0015 0.0013 0.0014 0.0011 0.0013 0.0012 0.0013 0.0013 0.0012 0.0014 0.0012 0.0011 0.0011 0.0011 0.001 0.001 0.001 0.0011 0.0011 0.0013 0.0013 0.0011 0.0012 0.0012 0.0013 0.0011 0.0013 0.0011 0.001 0.0009 0.001 0.0008 0.0009 0.0007 0.001 0.0009 0.001 0.0009 0.001 0.001 0.0014 0.0015 0.0016 0.0017 0.0015 0.0013 0.0013 0.0011 0.0013 0.0012 0.0013 0.0014 0.0014 0.0014 0.0015 0.0015 0.0016 0.0018 
0.0019 0.0018 0.0016 0.0014 0.0013 0.0013 0.0009 0.0012 0.0011 0.0011 0.0009 0.0012 0.0012 0.0014 0.0015 0.0011 0.001 0.0012 0.0009 0.0011 0.0012 0.0012 0.0014 0.0016 0.0015 0.0013 0.0011 0.0008 0.0009 0.0008 0.0008 0.0008 0.001 0.0007 0.0009 0.0006 0.0008 0.001 0.0012 0.0014 0.0027 0.0038 0.006 0.0092 0.0123 0.0147 0.0165 0.0153 0.013 0.0101 0.0046 0.0025 0.0019 0.0017 0.0019 0.0019 0.0017 0.0014 0.0013 0.0012 0.0009 0.0009 0.0008 0.0008 0.0006 0.0006 0.0005 0.0007 0.0012 0.0016 0.0018 0.0019 0.0022 0.002 0.0019 0.0017 0.0017 0.0018 0.0016 0.0014 0.0012 0.0022 0.006 0.0141 0.0185 0.0224 0.0259 0.0262 0.0246 0.0213 0.0156 0.0105 0.0047 0.0035 0.0027 0.0022 0.0022 0.002 0.0019 0.0022 0.0024 0.0025 0.0023 0.0022 0.0021 0.002 0.0023 0.0022 0.0021 0.0019 0.0019 0.002 0.0021 0.0022 0.0021 0.002 0.0019 0.0017 0.0014 0.0016 0.0016 0.0018 0.0019 0.0019 0.0018 0.002 0.0021 0.0021 0.0021 0.002 0.0018 0.002 0.0016 0.0016 0.0015 0.0018 0.0016 0.0017 0.0016 0.0016 0.0014 0.0014 0.0014 0.0018 0.0018 0.0019 0.0015 0.0012 0.0012 0.0009 0.001 0.001 0.0011 0.0013 0.0013 0.0012 0.0014 0.0013 0.0014 0.0015 0.0014 0.0012 0.0011 0.0009 0.0006 0.0004 0.0006 0.0007 0.0007 0.0006 0.0007 0.001 0.0011 0.0015 0.0024 0.0034 0.0047 0.0055 0.0065 0.0069 0.0069 0.0064 0.0058 0.0052 0.0039 0.0034 0.0026 0.0012 0.0011 0.0015 0.0018 0.0018 0.0018 0.0018 0.0015 0.0016 0.0012 0.0012 0.0011 0.001 0.0011 0.0009 0.001 0.0008 0.001 0.0008 0.0007 0.0007 0.0009 0.001 0.0012 0.0013 0.0014 0.0014 0.0015 0.0015 0.0015 0.0013 0.0011 0.0013 0.0012 0.0013 0.0014 0.0016 0.0017 0.002 0.0045 0.0078 0.0103 0.0121 0.013 0.0128 0.0118 0.0112 0.0096 0.007 0.0045 0.0021 0.0014 0.0015 0.0012 0.0011 0.0013 0.0009 0.0009 0.0007 0.0008 0.0007 0.0008 0.0007 0.0007 0.0008 0.001 0.001 0.0015 0.002 0.0023 0.0022 0.0022 0.002 0.002 0.0019 0.0022 0.002 0.0018 0.002 0.0021 0.0021 0.0021 0.002 0.0021 0.0023 0.0023 0.0024 0.0024 0.002 0.0019 0.0016 0.0016 0.0016 0.0015 0.0013 0.0012 0.0013 0.0011 0.0009 0.0008 0.0007 0.0008 0.0007 0.001 0.0011 0.0017 0.0025 0.0033 0.1469 0.001 0.0027 0.0016 0.0009 0.001 0.0009 0.0009 0.0008 0.0011 0.0011 0.001 0.0008 0.0007 0.0006 0.0007 0.0006 0.0005 0.0005 0.0005 0.0008 0.0009 0.0009 0.0011 0.0013 0.0012 0.001 0.001 0.0007 0.0004 0.0008 0.0005 0.0002 0.0005 0.0005 0.0006 0.0006 0.0018 0.0033 0.0043 0.0044 0.0043 0.0034 0.0016 0.0015 0.0013 0.0013 0.0011 0.0011 0.0012 0.0013 0.0014 0.0015 0.0011 0.0013 0.0013 0.0014 0.0013 0.0011 0.0011 0.0012 0.0012 0.0011 0.0016 0.0013 0.0014 0.0013 0.0013 0.0012 0.0011 0.0011 0.0017 0.0037 0.0082 0.0132 0.0159 0.0196 0.0232 0.0261 0.0282 0.0275 0.0227 0.0174 0.0089 0.0026 0.0018 0.0017 0.0018 0.0014 0.0011 0.0012 0.0013 0.0013 0.0013 0.0019 0.0042 0.0108 0.0173 0.0225 0.0261 0.0274 0.0251 0.0212 0.0162 0.0092 0.0038 0.0025 0.0022 0.0024 0.0022 0.0021 0.0022 0.0022 0.0022 0.0024 0.0022 0.002 0.0021 0.002 0.0022 0.0024 0.0021 0.002 0.0022 0.0021 0.0022 0.0021 0.0023 0.0018 0.0018 0.0016 0.0012 0.0011 0.0014 0.0015 0.0015 0.0015 0.0014 0.0013 0.0014 0.0014 0.0016 0.0015 0.0014 0.0014 0.0012 0.0011 0.0009 0.0008 0.0009 0.0008 0.0009 0.0009 0.001 0.0009 0.0013 0.0012 0.001 0.001 0.0009 0.0005 0.0004 0.0005 0.0006 0.0006 0.0007 0.0007 0.0006 0.0006 0.0006 0.0005 0.0005 0.0002 0.0003 0.0002 0.0003 0.0002 0.0002 0.0003 0.0002 0.0005 0.0004 0.0003 0.0003 0.0005 0.0008 0.0016 0.0017 0.0015 0.0014 0.0007 0.0006 0.0004 0.0003", + "breathiness": "-82.2954 -79.2073 -74.1486 -68.7278 -63.2737 -58.8326 -55.5474 -52.8528 -50.9938 -49.2801 -48.114 -47.3358 -46.8284 -46.2226 -46.1786 -46.4497 -47.0033 
-48.1381 -49.6158 -51.5208 -54.2517 -57.1805 -60.1768 -63.8553 -67.1185 -68.8794 -67.9857 -65.4345 -61.5822 -57.1243 -53.044 -49.9053 -48.873 -48.8957 -49.2011 -49.1957 -48.9753 -48.8351 -49.1434 -50.3596 -51.7424 -53.3773 -54.5686 -54.8756 -54.3031 -52.802 -51.0866 -49.2783 -47.5184 -46.1796 -45.6353 -45.7282 -46.7509 -47.9926 -49.9531 -51.6702 -52.8703 -54.0146 -54.6498 -54.5041 -54.0012 -53.5616 -53.0266 -52.4734 -51.432 -49.742 -46.9948 -43.7152 -40.6027 -37.698 -35.4579 -34.9053 -36.6575 -40.0374 -44.0644 -48.4416 -52.6285 -55.8492 -58.2557 -59.571 -60.0696 -60.4925 -60.5968 -60.4864 -60.3669 -60.1496 -59.9919 -59.5597 -59.2901 -59.4172 -59.3093 -59.0644 -59.0028 -59.0252 -58.9892 -59.0701 -59.3345 -59.4967 -59.5787 -59.5899 -59.7821 -60.0741 -60.2454 -60.4242 -60.6185 -60.8434 -60.7998 -60.8206 -60.7117 -60.7449 -60.5562 -60.525 -60.587 -60.3338 -60.2784 -60.009 -59.7331 -59.6166 -59.37 -59.1721 -59.2423 -59.0867 -58.6386 -58.3462 -57.9692 -57.6946 -57.6794 -57.4938 -57.3612 -57.4091 -57.3266 -57.4963 -57.3068 -57.4318 -57.6468 -57.7604 -57.8026 -57.8797 -57.8748 -57.8053 -57.5407 -57.2724 -56.8285 -56.69 -56.3513 -56.3126 -56.3232 -56.5894 -56.9844 -57.2995 -57.4963 -57.231 -57.0171 -56.6811 -56.4761 -56.579 -56.5192 -56.6256 -56.7874 -56.9592 -56.855 -56.9438 -57.3546 -57.5305 -57.6631 -57.5566 -57.31 -57.0012 -56.6457 -56.3922 -56.44 -56.7831 -57.5175 -58.3851 -59.0055 -59.5189 -59.7328 -59.6221 -59.5415 -59.2359 -59.4499 -59.2422 -58.6294 -57.8795 -56.698 -55.0614 -52.5986 -49.4368 -46.1584 -42.8482 -39.9157 -37.6001 -36.6378 -37.0874 -38.3146 -40.7338 -43.6866 -47.2835 -50.8123 -54.1534 -57.3535 -59.8306 -61.5768 -63.0927 -64.3654 -65.3914 -65.3498 -65.3698 -65.0854 -64.6691 -63.9767 -63.2056 -61.9211 -60.1688 -58.0779 -56.2422 -54.5003 -53.2992 -52.6734 -52.5348 -52.8562 -53.3377 -53.8291 -54.4856 -54.6642 -54.5384 -53.5562 -51.4006 -48.5705 -45.2206 -41.2551 -37.8631 -35.3388 -34.2271 -34.5824 -35.9186 -38.3216 -41.4322 -44.4807 -47.3708 -49.6227 -51.0721 -52.1605 -52.6169 -52.8679 -53.2078 -53.4739 -53.3557 -53.3898 -53.1476 -52.8941 -52.6043 -52.3118 -52.0208 -52.1029 -52.5261 -52.9124 -53.4181 -53.8867 -53.9555 -53.9259 -53.974 -54.11 -53.8412 -53.8171 -53.8552 -53.3848 -53.3192 -53.1279 -53.1945 -53.2741 -53.5651 -53.7315 -54.1714 -53.9972 -54.0964 -53.9902 -54.1734 -54.1179 -53.9566 -53.5758 -53.3655 -53.1249 -53.1327 -53.4745 -53.5573 -53.9925 -54.0394 -53.9868 -53.5453 -53.3412 -53.1566 -53.4633 -53.7376 -54.2042 -54.9552 -55.3285 -55.6762 -56.1901 -56.9071 -57.5181 -58.2686 -58.8742 -59.226 -59.53 -59.6679 -59.8242 -60.5367 -61.6164 -62.6975 -63.5299 -64.5946 -65.2406 -65.4887 -65.2561 -64.3141 -63.0548 -61.268 -59.2161 -57.1037 -55.1076 -53.1117 -51.5289 -50.2222 -49.4485 -49.3866 -49.1461 -48.9611 -48.7133 -48.4158 -48.0374 -47.703 -47.5399 -48.0713 -49.0791 -49.9033 -51.02 -52.041 -53.0944 -54.0872 -55.2198 -56.0959 -56.9098 -57.9951 -58.7338 -59.4382 -59.9806 -60.6519 -60.7523 -61.1134 -61.3159 -61.4456 -61.1709 -60.4987 -59.3876 -58.1161 -56.8052 -55.6434 -54.8674 -54.6091 -54.603 -54.7026 -54.6207 -54.5953 -54.6916 -54.5099 -54.6206 -54.7718 -54.5227 -54.1567 -52.8769 -50.743 -48.4078 -45.8724 -43.3753 -41.475 -40.0759 -39.407 -39.4957 -40.2116 -41.5415 -43.4919 -45.8219 -48.4657 -51.208 -53.4813 -55.3289 -56.8066 -57.7923 -58.6487 -59.5445 -60.2058 -60.9935 -61.9092 -62.3746 -62.461 -62.2254 -61.7794 -60.6821 -59.6206 -57.9259 -56.6218 -55.3922 -54.5971 -54.4308 -54.2514 -54.2718 -54.377 -54.015 -53.6477 -53.0811 -52.8008 -52.4576 -52.1451 -52.1677 -52.0366 
-51.8486 -51.9845 -52.5675 -53.5288 -54.6039 -56.0442 -57.2038 -58.2595 -58.6386 -59.0859 -59.1349 -59.0583 -58.8883 -58.7294 -58.4513 -57.9951 -57.8077 -57.6433 -57.5677 -57.7826 -58.1181 -58.7595 -59.3495 -60.006 -60.5209 -61.1607 -61.9166 -62.8973 -63.7559 -64.8595 -65.6697 -66.2425 -66.7248 -66.9279 -67.2586 -67.2978 -67.3229 -66.9424 -66.6068 -66.0352 -65.2424 -64.6406 -63.8901 -63.1559 -62.145 -61.4115 -60.3965 -59.7336 -59.1894 -59.1388 -59.6884 -60.6752 -61.8597 -63.0816 -63.9991 -64.4907 -64.321 -63.85 -62.7505 -61.5806 -59.5688 -56.9992 -54.2606 -51.9291 -50.0949 -49.3877 -49.8072 -51.1428 -52.8171 -54.5261 -55.7106 -56.6512 -56.9606 -56.9032 -56.7162 -56.4881 -56.479 -56.0619 -55.8296 -55.8951 -56.1618 -56.5992 -56.7107 -57.0331 -57.2755 -57.404 -57.1823 -57.3534 -57.3151 -57.288 -57.3655 -57.4892 -57.6092 -57.5552 -57.0955 -55.8909 -53.6424 -50.4886 -46.6883 -42.7963 -39.0525 -35.8894 -33.7453 -32.7944 -32.6027 -33.3739 -35.2828 -38.1771 -41.8508 -45.5529 -49.6493 -52.9232 -55.6609 -57.4498 -58.6533 -59.1665 -59.6783 -58.8777 -56.1999 -52.1094 -46.9827 -41.5337 -36.6094 -32.8084 -30.9831 -31.1485 -32.5662 -34.9218 -37.7276 -41.0534 -43.9773 -46.5125 -48.1751 -49.2043 -49.9654 -50.6468 -51.4805 -52.5711 -53.5573 -54.9039 -55.6756 -56.1468 -56.3409 -56.1388 -56.1364 -55.7564 -55.5288 -55.314 -55.0635 -54.9932 -54.6579 -54.4753 -54.3375 -54.6447 -54.9441 -55.8086 -56.6548 -57.5567 -58.1579 -58.5062 -58.5328 -58.4038 -58.1596 -57.9174 -57.5512 -57.8154 -58.1121 -58.6366 -59.3712 -59.8711 -60.4577 -60.9565 -61.3176 -61.5968 -61.5973 -61.3659 -61.467 -61.1354 -60.9829 -61.0335 -61.3734 -62.0419 -62.8512 -63.7097 -64.5136 -65.5345 -66.2434 -66.9194 -67.4353 -67.5141 -67.8243 -67.7643 -68.015 -68.0547 -68.154 -68.2689 -68.645 -69.2279 -70.0236 -71.1376 -72.2512 -73.4913 -74.8604 -76.0445 -76.9058 -77.0172 -76.8811 -75.9988 -74.3314 -71.8163 -68.773 -65.363 -62.3287 -59.9695 -58.9025 -58.8411 -59.3735 -60.1998 -60.7748 -61.1564", "breathiness_timestep": "0.011609977324263039" }, { @@ -186,9 +186,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "186.0 185.8 185.9 185.8 185.8 185.8 186.2 185.9 186.0 186.1 186.1 186.3 186.4 186.7 186.6 186.3 186.5 186.8 187.0 187.0 187.1 187.3 187.8 187.7 187.8 187.9 187.9 187.2 186.7 186.3 184.3 181.0 178.8 175.4 176.3 177.9 179.1 182.2 184.3 188.6 191.5 193.7 195.7 196.4 195.9 194.5 190.4 185.4 175.1 166.3 167.2 167.0 166.4 169.3 166.4 163.4 162.6 162.6 163.4 164.5 165.5 166.6 166.7 166.2 165.5 164.6 163.1 158.0 149.2 142.0 145.9 151.2 166.1 179.1 188.4 194.3 195.3 195.7 196.0 196.5 196.8 196.5 196.8 197.1 197.1 196.3 193.5 188.0 189.5 194.9 200.9 207.0 214.9 223.3 228.9 233.9 228.5 228.0 224.7 220.6 215.6 210.8 207.9 208.9 210.1 212.7 218.4 224.8 227.4 229.1 227.6 221.8 214.7 206.4 201.9 200.6 201.0 203.0 206.4 213.0 217.7 221.6 223.3 223.1 222.1 218.9 217.3 218.9 222.6 225.9 229.3 234.3 237.3 241.9 246.0 246.2 246.8 247.6 247.3 247.4 247.6 246.9 247.4 248.2 248.3 247.6 244.8 238.7 229.3 219.0 219.3 221.4 224.0 225.8 227.0 227.8 224.6 226.9 227.1 228.1 226.3 223.0 216.7 208.6 201.0 195.4 194.4 196.1 201.9 210.1 218.7 227.0 232.9 235.0 230.2 225.2 218.0 211.1 205.5 200.8 197.6 194.8 191.3 188.4 185.8 183.4 181.0 177.5 175.1 172.7 171.0 168.2 165.9 164.2 161.7 159.4 157.3 154.9 154.3 157.6 160.2 163.3 168.3 176.6 186.2 192.7 197.4 199.1 198.8 196.3 191.6 186.6 181.6 176.4 172.3 168.7 166.7 166.6 164.4 163.5 163.1 162.9 165.1 166.9 168.0 167.5 166.5 165.6 165.6 163.9 160.9 156.1 152.4 162.3 172.1 182.0 192.7 201.1 200.8 195.9 194.8 194.4 
194.0 194.9 196.1 196.5 196.9 197.0 196.2 194.3 187.8 181.1 184.0 187.1 190.3 193.0 195.4 197.4 199.2 200.3 197.7 197.8 198.2 198.3 198.5 198.1 196.9 196.3 195.2 195.0 196.5 200.7 207.9 214.7 218.6 221.0 221.6 220.9 218.2 216.1 214.3 213.2 213.4 215.1 217.8 220.4 221.8 222.1 220.7 216.7 221.1 225.1 230.1 234.0 239.4 244.1 248.8 253.5 258.9 261.9 253.6 249.2 248.7 248.2 247.8 246.7 245.7 245.6 246.8 246.8 247.1 245.6 242.8 227.0 230.8 242.2 254.1 265.7 280.4 266.2 262.4 261.7 262.0 262.3 261.6 261.5 261.2 262.2 262.9 262.8 263.2 263.1 263.6 262.7 263.0 269.1 276.1 286.5 292.8 296.2 297.0 296.9 295.1 293.3 290.9 290.5 289.5 289.4 291.4 293.1 295.8 296.1 296.2 294.9 294.7 292.5 289.6 286.1 283.9 283.5 284.3 286.6 288.8 290.7 293.7 295.2 294.4 290.6 286.9 279.4 266.7 252.5 241.9 230.8 219.7 210.0 200.8 193.1 185.4 178.2 171.4 158.1 147.7 143.0 143.2 143.4 143.8 143.7 144.0 143.8 143.8 142.3 141.6 141.2 142.3 143.5 144.1 145.4 146.6 147.9 148.6 148.8 148.5 148.5 148.0 147.6 146.7 147.2 148.1 149.0 149.0 148.7 146.5 140.5 140.7 142.4 144.6 146.4 148.0 149.7 151.8 153.8 153.8 152.9 147.3 146.5 146.8 147.3 148.0 148.3 148.4 147.9 147.5 147.0 147.0 146.7 146.7 147.0 147.8 149.4 151.9 155.2 159.0 162.6 164.0 165.2 166.4 167.8 167.4 166.6 166.2 164.8 162.8 160.8 159.2 158.6 158.3 158.1 159.4 161.6 164.9 167.6 169.9 172.0 173.4 173.0 172.2 169.8 166.3 162.0 160.2 158.5 157.6 158.7 161.1 164.8 168.3 170.9 173.0 174.4 174.4 172.7 169.9 165.7 161.6 158.2 156.1 155.8 156.7 160.9 165.3 169.9 173.2 175.8 178.1 177.9 176.8 172.4 166.6 160.9 156.7 154.4 154.1 155.7 158.8 163.5 167.9 171.8 174.8 177.1 176.6 174.8 170.7 165.7 157.9 152.1 150.0 150.6 152.9 156.5 161.5 166.9 172.8 177.3 179.5 178.5 175.4 170.7 165.0 156.5 152.2 150.3 149.0 148.9 151.1 154.9 160.2 163.3 166.1 170.5 172.3 172.7 171.8 169.1 164.7 162.4 161.7 161.3 159.8 158.3 158.1 157.5 156.8 158.0", "f0_timestep": "0.011609977324263039", - "energy": "0.0016 0.0026 0.0025 0.0034 0.004 0.0044 0.006 0.007 0.0077 0.0084 0.0091 0.0097 0.0101 0.0095 0.0095 0.0085 0.0079 0.0079 0.0066 0.0056 0.0058 0.004 0.0034 0.0032 0.0022 0.0025 0.0017 0.0126 0.0417 0.0613 0.076 0.0887 0.0924 0.0916 0.0918 0.0866 0.0814 0.0753 0.0694 0.0655 0.065 0.0649 0.0676 0.069 0.0688 0.0675 0.0597 0.0507 0.0374 0.0226 0.0164 0.0172 0.0428 0.065 0.078 0.0897 0.0901 0.0902 0.0907 0.091 0.093 0.0916 0.0907 0.0864 0.083 0.0774 0.0693 0.06 0.0467 0.0324 0.022 0.0203 0.0324 0.0505 0.063 0.0728 0.0783 0.0797 0.08 0.0806 0.0814 0.078 0.0788 0.079 0.0753 0.0733 0.0659 0.0532 0.0431 0.0316 0.0274 0.0298 0.0303 0.0363 0.0614 0.0764 0.0872 0.0946 0.0893 0.0865 0.0813 0.0773 0.0741 0.0703 0.0687 0.067 0.0653 0.0632 0.0636 0.0639 0.0643 0.0669 0.0692 0.0726 0.0714 0.0694 0.0661 0.0637 0.0664 0.067 0.0676 0.0673 0.0662 0.0666 0.0694 0.072 0.0728 0.0771 0.0779 0.0788 0.0824 0.0821 0.0844 0.0868 0.0892 0.0904 0.09 0.0913 0.0898 0.0885 0.0875 0.0858 0.0842 0.0826 0.0823 0.0792 0.0759 0.0668 0.052 0.0376 0.0193 0.011 0.0099 0.0083 0.0252 0.0498 0.0652 0.0795 0.0877 0.0886 0.0857 0.082 0.0789 0.0731 0.0692 0.0635 0.0565 0.0533 0.0497 0.0498 0.0502 0.0501 0.0511 0.0472 0.0418 0.0334 0.0222 0.0105 0.0048 0.0037 0.0048 0.0042 0.0038 0.004 0.0033 0.0024 0.0022 0.002 0.0018 0.0021 0.0022 0.001 0.0018 0.0018 0.0105 0.0293 0.0445 0.0559 0.0614 0.0631 0.061 0.0581 0.0562 0.054 0.0535 0.0555 0.0584 0.0629 0.0639 0.0634 0.0659 0.064 0.0658 0.0655 0.0635 0.0637 0.0618 0.0639 0.0653 0.0676 0.0668 0.0665 0.0662 0.0651 0.0638 0.0624 0.0603 0.059 0.058 0.0533 0.0467 0.0377 0.0261 0.0189 0.0143 0.0137 0.0277 0.045 
0.0577 0.0692 0.0743 0.0738 0.074 0.073 0.073 0.0713 0.0705 0.0691 0.064 0.055 0.0434 0.0269 0.0136 0.0109 0.0133 0.0145 0.0155 0.0336 0.0511 0.0655 0.0743 0.0764 0.0762 0.0739 0.0753 0.0741 0.0724 0.0681 0.0618 0.0576 0.0545 0.0548 0.0577 0.0615 0.0642 0.0649 0.0643 0.064 0.0637 0.0651 0.0658 0.067 0.0675 0.0666 0.0672 0.0653 0.0614 0.0551 0.0444 0.034 0.0243 0.0164 0.0151 0.0186 0.0195 0.0208 0.0219 0.0192 0.027 0.0451 0.0579 0.0691 0.0758 0.0766 0.0773 0.0773 0.0778 0.0777 0.0775 0.0776 0.0737 0.0656 0.054 0.0394 0.0224 0.0106 0.0085 0.0158 0.0441 0.0612 0.0737 0.0855 0.0881 0.0908 0.0958 0.0979 0.0991 0.1002 0.097 0.0943 0.093 0.0887 0.0832 0.0773 0.0699 0.0676 0.0688 0.0718 0.0746 0.0778 0.08 0.0813 0.0826 0.0839 0.0839 0.0846 0.0859 0.0854 0.0868 0.0869 0.0872 0.0891 0.0873 0.0868 0.0857 0.0855 0.0852 0.0856 0.0852 0.0834 0.0819 0.0798 0.0799 0.077 0.0773 0.078 0.0796 0.0788 0.0707 0.0587 0.0418 0.0223 0.0076 0.0089 0.0107 0.0126 0.0139 0.0141 0.0124 0.0109 0.0255 0.0404 0.0517 0.0604 0.0642 0.0635 0.064 0.0642 0.0618 0.0628 0.0622 0.0613 0.0608 0.0589 0.0571 0.0557 0.0567 0.0555 0.0577 0.0571 0.058 0.0581 0.0572 0.0587 0.058 0.0592 0.0595 0.0588 0.06 0.0574 0.0563 0.0502 0.0415 0.0315 0.0176 0.0107 0.0077 0.0087 0.01 0.0089 0.0093 0.0254 0.0435 0.053 0.0629 0.0666 0.0663 0.066 0.0664 0.067 0.0665 0.0671 0.0659 0.0639 0.0627 0.0613 0.0595 0.0595 0.058 0.0574 0.0593 0.0605 0.0618 0.0628 0.0624 0.0627 0.0629 0.0633 0.0652 0.0645 0.0646 0.0644 0.063 0.0635 0.0624 0.0598 0.0582 0.056 0.0546 0.0539 0.0554 0.0551 0.0558 0.0571 0.0586 0.0601 0.0611 0.0609 0.0587 0.0558 0.0524 0.05 0.0478 0.0466 0.0464 0.0476 0.0483 0.0504 0.0543 0.0567 0.0584 0.0578 0.0543 0.051 0.0471 0.0452 0.0425 0.0406 0.0394 0.0379 0.0382 0.0411 0.0445 0.0491 0.0533 0.0544 0.0539 0.0501 0.0459 0.0415 0.0373 0.0353 0.0329 0.0322 0.031 0.032 0.0338 0.0365 0.0398 0.042 0.0424 0.041 0.0377 0.0333 0.0298 0.0273 0.0251 0.0238 0.0233 0.022 0.0233 0.0233 0.0237 0.0241 0.0244 0.0236 0.0229 0.0219 0.0201 0.0183 0.0169 0.0153 0.0141 0.0135 0.0134 0.0141 0.0147 0.0144 0.014 0.0137 0.0133 0.0131 0.0128 0.0112 0.0093 0.0061 0.0031 0.0017 0.001 0.0006 0.0008 0.0009 0.0005 0.0", + "energy": "-69.6202 -63.9466 -62.3796 -58.7964 -55.8835 -53.1044 -51.0651 -49.3564 -47.845 -46.7122 -45.7976 -45.576 -45.6027 -45.851 -46.525 -47.2397 -48.4085 -49.8802 -51.5763 -53.7268 -55.7543 -57.5055 -57.8497 -56.3819 -52.2057 -46.9958 -40.5806 -34.541 -29.1939 -24.8764 -22.578 -21.3436 -21.0302 -20.7794 -20.9473 -21.026 -21.3873 -21.6293 -21.799 -22.0638 -22.1881 -22.3298 -22.4994 -22.7969 -23.2814 -24.5472 -26.1382 -28.6157 -30.5882 -31.8652 -31.9253 -30.9331 -29.2708 -26.7869 -24.5196 -22.1836 -21.2314 -20.7818 -20.4511 -20.1813 -20.2882 -20.1868 -20.5754 -20.8622 -21.5365 -22.6425 -23.4225 -24.6719 -25.7343 -26.6637 -26.9805 -26.8538 -26.1035 -25.1971 -23.7931 -22.9489 -22.3512 -22.0481 -22.054 -22.0914 -22.1489 -22.2351 -22.437 -22.7029 -23.1225 -24.0965 -25.2344 -26.8592 -28.2403 -29.5747 -30.4033 -30.0885 -28.9408 -27.0699 -25.2263 -23.7831 -22.7241 -22.3255 -22.3313 -22.4533 -22.5701 -22.7784 -23.0173 -23.2346 -23.6896 -23.6847 -23.9791 -24.0928 -24.1176 -24.2551 -24.2985 -24.3666 -24.4633 -24.8331 -25.1488 -25.2791 -25.5879 -25.7248 -25.7292 -25.5139 -25.2536 -25.08 -24.5691 -24.2795 -23.7987 -23.329 -23.0406 -22.6454 -22.2993 -22.2545 -22.0862 -22.0803 -22.1505 -22.0592 -22.0371 -21.7154 -21.7033 -21.7505 -21.7749 -21.688 -21.8825 -21.8574 -21.9456 -22.1361 -22.5468 -23.1322 -24.5317 -26.4242 -29.3831 -32.4724 -34.8501 -36.227 -35.8903 
-34.3969 -31.4948 -28.1892 -25.4863 -23.6667 -22.7714 -22.4342 -22.3678 -22.6002 -23.0465 -23.4461 -24.2503 -24.6842 -25.1785 -25.3219 -25.5869 -25.7963 -26.0401 -26.4468 -26.8191 -27.5882 -28.7361 -30.8805 -33.3778 -36.6108 -39.9073 -43.239 -45.6524 -47.3869 -48.3586 -48.5005 -48.3695 -48.4571 -48.5049 -48.7254 -49.2884 -48.863 -48.1244 -46.2737 -43.0415 -39.1463 -34.4939 -30.499 -27.5761 -25.4325 -24.5358 -23.9778 -24.1454 -24.1921 -24.5414 -24.9272 -24.9628 -24.9664 -25.1059 -25.0252 -24.65 -24.3183 -24.0844 -23.9627 -23.5924 -23.5279 -23.5627 -23.3386 -23.3211 -23.3534 -23.5196 -23.5218 -23.7949 -23.7005 -23.8751 -23.8914 -24.0012 -24.1336 -24.5816 -25.1548 -25.9422 -27.4544 -28.927 -30.5877 -32.0421 -32.6985 -32.4905 -31.2813 -29.6014 -27.5251 -25.5729 -23.8577 -23.0915 -22.7481 -22.5944 -22.8141 -22.8415 -23.3262 -23.8998 -24.8066 -26.5458 -28.6259 -30.8974 -33.0781 -34.5501 -35.3322 -35.223 -33.755 -31.6127 -29.0274 -26.792 -24.6688 -23.3718 -22.5678 -22.358 -22.0166 -22.4563 -22.6959 -23.1518 -23.2076 -23.6054 -24.2456 -24.4157 -24.4269 -24.4462 -24.2186 -24.1157 -23.7628 -23.5415 -23.1506 -23.2018 -23.1983 -23.1056 -23.069 -23.3113 -23.3102 -23.7166 -24.3186 -25.6515 -27.1557 -29.3545 -31.3141 -33.0329 -34.5342 -35.2572 -35.5898 -35.4972 -34.6217 -33.693 -32.2002 -30.2038 -28.0234 -26.024 -24.0727 -22.6953 -22.2109 -21.5733 -21.524 -21.3435 -21.5278 -21.744 -22.3052 -23.6163 -25.7449 -28.7138 -31.1791 -33.4216 -34.1099 -33.4919 -31.3237 -28.6385 -25.509 -22.9918 -21.4692 -20.9999 -20.2854 -20.1255 -20.0731 -20.2106 -20.0656 -20.2118 -20.3292 -20.6344 -21.0583 -21.5518 -21.7534 -22.1307 -22.2997 -22.1908 -22.0629 -21.6593 -21.3133 -21.2252 -20.9256 -21.0727 -21.1407 -21.282 -21.2959 -21.3301 -21.5671 -21.5145 -21.3911 -21.3948 -21.3303 -21.111 -21.0847 -21.098 -21.2035 -21.2812 -21.5516 -21.7096 -21.9459 -22.0803 -22.0835 -22.1077 -22.2704 -22.4017 -22.1345 -22.5498 -23.2099 -24.6951 -26.947 -29.6928 -32.758 -35.7269 -37.8457 -39.2117 -39.644 -38.8282 -37.206 -34.943 -32.1855 -29.5453 -26.9974 -24.845 -23.2927 -22.6216 -22.3827 -22.5174 -22.8434 -23.2258 -23.1707 -23.4444 -23.7006 -23.7857 -23.856 -23.81 -24.0347 -23.8685 -23.801 -23.8515 -23.8356 -23.7241 -23.6739 -23.6835 -23.7688 -23.6582 -23.7351 -23.8921 -23.9802 -24.4607 -25.0857 -26.273 -28.1446 -30.2619 -33.1003 -35.8305 -38.2073 -40.2524 -41.1404 -40.4243 -38.3209 -35.8303 -32.751 -29.81 -26.8896 -25.0616 -24.1726 -23.8229 -23.4143 -23.3143 -23.2623 -23.2007 -23.4863 -23.4114 -23.7771 -24.1315 -24.5797 -24.6828 -25.3138 -25.4991 -25.6298 -25.6233 -25.7275 -25.7956 -25.9043 -25.935 -25.766 -25.7255 -25.8104 -25.6948 -25.7942 -25.6969 -25.7702 -25.6379 -25.5068 -25.7385 -25.7674 -25.7302 -25.8311 -25.6988 -25.7722 -25.7166 -25.5452 -25.2941 -25.1727 -25.1455 -25.3893 -25.3024 -25.6789 -25.7513 -26.2555 -26.6898 -26.8743 -26.8121 -27.0295 -27.086 -27.1142 -26.9276 -27.0572 -27.2241 -27.4852 -27.7216 -27.6788 -28.1102 -28.0814 -28.4302 -28.4683 -28.454 -28.5268 -28.5978 -28.6873 -28.7596 -28.6734 -28.7048 -28.5137 -28.7536 -28.8008 -29.0557 -29.09 -29.4423 -29.5892 -29.7574 -30.2199 -30.281 -30.3461 -30.1956 -30.2485 -29.8416 -29.713 -29.2151 -29.2643 -29.1049 -29.3618 -29.6304 -30.0743 -30.5277 -31.1519 -31.8331 -32.2826 -32.6809 -32.8236 -33.1363 -32.8892 -32.8096 -32.8815 -32.6337 -32.6606 -32.599 -32.683 -33.0251 -33.3549 -34.0681 -34.7259 -35.5168 -36.2324 -36.8805 -37.7591 -38.0935 -38.2499 -38.3326 -38.2127 -38.2333 -38.466 -38.7665 -39.5126 -40.7685 -43.2257 -46.3094 -50.4087 -54.6939 -58.1996 -60.8122 -61.0912 -61.014 
-57.93", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0006 0.0012 0.002 0.0025 0.0033 0.0044 0.0056 0.0063 0.0076 0.0085 0.0087 0.0094 0.0092 0.0085 0.0085 0.0083 0.008 0.0076 0.007 0.0058 0.005 0.004 0.0032 0.0024 0.0014 0.0011 0.001 0.001 0.0014 0.0017 0.0016 0.0015 0.0017 0.0022 0.0025 0.0024 0.0024 0.002 0.0017 0.0016 0.0015 0.0015 0.0014 0.0015 0.0012 0.0009 0.0009 0.0008 0.0007 0.0006 0.0013 0.0026 0.0033 0.0038 0.0034 0.0025 0.0016 0.0015 0.0015 0.0017 0.0019 0.0018 0.0018 0.0016 0.0016 0.0018 0.0023 0.0041 0.0124 0.0222 0.0266 0.0301 0.0286 0.0227 0.0174 0.0068 0.0024 0.0016 0.0014 0.0014 0.0013 0.0012 0.0013 0.0014 0.0019 0.0028 0.0059 0.0107 0.0162 0.0209 0.0253 0.0282 0.0301 0.029 0.0251 0.0191 0.0111 0.0035 0.0021 0.0024 0.0021 0.0019 0.0016 0.0014 0.0011 0.0011 0.0011 0.0011 0.0011 0.0012 0.001 0.001 0.0009 0.0011 0.001 0.0009 0.0008 0.0006 0.0005 0.0005 0.0005 0.0004 0.0006 0.0004 0.0005 0.0005 0.0005 0.0006 0.0005 0.0006 0.0008 0.0007 0.0009 0.0012 0.0015 0.0017 0.0016 0.0017 0.0016 0.0011 0.0012 0.001 0.0009 0.0008 0.0007 0.0007 0.0008 0.0008 0.0038 0.0096 0.0124 0.0135 0.0135 0.0098 0.0062 0.004 0.0023 0.0016 0.0015 0.0018 0.002 0.0025 0.0027 0.0027 0.0024 0.0024 0.0019 0.0016 0.0015 0.0016 0.0015 0.0014 0.0011 0.001 0.001 0.0012 0.0016 0.0019 0.0025 0.0028 0.0029 0.0035 0.0038 0.0037 0.0037 0.003 0.0027 0.0022 0.0019 0.0018 0.0016 0.001 0.0007 0.002 0.0099 0.0157 0.0161 0.0157 0.0117 0.0034 0.0019 0.0015 0.0012 0.0011 0.001 0.0008 0.0007 0.0007 0.0006 0.0007 0.0007 0.0007 0.0006 0.0006 0.0007 0.001 0.0012 0.0014 0.0016 0.0019 0.0018 0.002 0.0017 0.0015 0.0017 0.0017 0.0015 0.0014 0.0012 0.0013 0.0027 0.0079 0.0128 0.0156 0.0173 0.0162 0.0123 0.0093 0.0058 0.0037 0.0023 0.0016 0.0016 0.0016 0.002 0.0023 0.0026 0.0024 0.0021 0.0017 0.0015 0.002 0.0033 0.0087 0.0136 0.0162 0.0166 0.0158 0.0119 0.0076 0.0062 0.0023 0.0019 0.0017 0.0016 0.0018 0.0018 0.0017 0.0017 0.0013 0.0011 0.0012 0.0012 0.0014 0.0012 0.0013 0.0012 0.0011 0.0011 0.0009 0.001 0.001 0.0009 0.001 0.0009 0.0009 0.0006 0.0009 0.003 0.0064 0.0104 0.013 0.0155 0.0182 0.0195 0.0218 0.0233 0.0216 0.0192 0.0149 0.0091 0.0054 0.0027 0.0018 0.0018 0.0015 0.0017 0.0017 0.0017 0.0016 0.0014 0.0015 0.0015 0.0011 0.0006 0.0015 0.0039 0.005 0.0047 0.0048 0.0034 0.0022 0.0025 0.0025 0.0023 0.0019 0.0019 0.0019 0.0019 0.0021 0.0025 0.0026 0.0022 0.0019 0.0017 0.0021 0.0021 0.0023 0.0024 0.0021 0.002 0.0019 0.0016 0.0018 0.0018 0.0018 0.0016 0.0016 0.0017 0.0018 0.0019 0.002 0.002 0.0015 0.0014 0.0012 0.0014 0.0014 0.0016 0.0016 0.0015 0.0017 0.0019 0.002 0.0019 0.0018 0.0017 0.0016 0.0017 0.0022 0.0027 0.0033 0.0055 0.0084 0.011 0.0137 0.0153 0.0152 0.0144 0.0122 0.0097 0.0074 0.0059 0.0034 0.0018 0.0011 0.0012 0.001 0.001 0.001 0.0011 0.0012 0.0012 0.0011 0.0011 0.0008 0.0009 0.0012 0.001 0.001 0.0011 0.001 0.0009 0.0011 0.001 0.0009 0.001 0.0007 0.0007 0.0011 0.0007 0.0007 0.0008 0.0019 0.0035 0.0058 0.0072 0.0083 0.0096 0.0089 0.0082 0.0068 0.0043 0.0034 0.0025 0.0021 0.0018 0.0016 0.0014 0.0015 0.0013 0.0016 0.0016 0.0016 0.0014 0.0013 0.0015 0.0013 0.0015 0.0016 0.0018 0.0019 0.002 0.0021 0.0021 0.0019 0.002 0.0016 0.0014 0.0012 0.0013 0.0014 0.0015 0.0015 0.0014 0.0014 0.0014 0.0013 0.0014 0.0016 0.0018 0.002 0.0021 0.0021 0.0021 0.002 0.002 0.0019 0.002 0.0018 0.0017 0.0016 0.0015 0.0014 0.0014 0.0016 0.0016 0.0018 0.0019 0.0018 0.0018 0.002 0.002 0.0022 0.002 0.0021 0.0016 0.0014 0.0012 0.0011 0.0015 0.0014 0.0016 0.0018 0.0016 0.0014 0.0014 0.0016 0.0017 0.0017 0.0015 0.0014 0.0012 0.0012 0.0012 
0.0018 0.0015 0.0017 0.0014 0.0015 0.0013 0.0012 0.0013 0.0013 0.0011 0.0013 0.0012 0.0005 0.0007 0.0006 0.0008 0.0008 0.0009 0.0008 0.001 0.0012 0.001 0.0009 0.0007 0.0009 0.0007 0.0006 0.0004 0.0004 0.0003 0.0003 0.0004 0.0005 0.0004 0.0004 0.0004 0.0004 0.0005 0.0014 0.0014 0.0016 0.0016 0.0009 0.0008 0.0005 0.0004 0.0003 0.0002 0.0", + "breathiness": "-72.0557 -67.207 -63.8697 -59.7345 -56.1637 -53.386 -51.0564 -49.4711 -48.0998 -47.0812 -46.1599 -45.5155 -45.5413 -45.6302 -46.0712 -46.8848 -47.9576 -49.2444 -51.1624 -53.6891 -56.4814 -59.2603 -61.8423 -63.7406 -64.8043 -65.1785 -64.6812 -63.8528 -62.3832 -60.7794 -59.1228 -57.6604 -56.1753 -54.7688 -54.0763 -53.9089 -53.8981 -54.3392 -54.6609 -54.9676 -55.0669 -55.3188 -55.3607 -55.5836 -56.2978 -57.19 -58.148 -58.5295 -58.0012 -56.9824 -55.4464 -54.1759 -52.7283 -52.3232 -52.3493 -52.7724 -53.3571 -53.8476 -54.1424 -54.0321 -53.9244 -53.7056 -53.781 -53.3636 -52.3319 -50.2743 -47.6851 -44.981 -42.9589 -41.7264 -41.4153 -42.5788 -44.5869 -46.8924 -49.4613 -51.6382 -53.8379 -55.8547 -57.8344 -59.3964 -60.2621 -60.7825 -60.2829 -58.9283 -55.9451 -52.2106 -47.7784 -43.5081 -39.3909 -36.1686 -34.2834 -33.4063 -33.6837 -35.2389 -37.98 -41.4944 -44.941 -48.1042 -50.7375 -52.7079 -53.8202 -54.7725 -55.8067 -57.1517 -58.3841 -59.6016 -60.6537 -61.423 -62.0778 -62.438 -62.6577 -63.0066 -63.4016 -63.9358 -64.7756 -65.9607 -67.0682 -68.3223 -68.9204 -69.7402 -69.6312 -69.2604 -68.9225 -68.0025 -67.3792 -66.7834 -66.3174 -65.7635 -65.5483 -64.9156 -64.1514 -63.0227 -61.9075 -60.9446 -60.1224 -59.572 -59.4858 -59.7548 -60.3183 -61.1044 -61.9644 -63.1631 -64.3335 -65.1182 -64.9863 -63.4212 -60.3889 -56.3349 -51.9631 -47.9066 -44.8213 -43.4899 -44.1727 -45.9489 -48.8309 -51.6137 -54.1729 -56.1285 -56.6706 -56.2804 -55.5347 -54.5375 -53.8805 -53.562 -53.8987 -54.7053 -55.5236 -56.2704 -56.832 -57.4364 -57.8057 -58.2918 -58.6734 -58.945 -59.0419 -58.6485 -58.0714 -57.0622 -55.9394 -54.471 -53.0485 -52.0629 -50.9985 -50.1518 -49.53 -49.2116 -49.7827 -50.6337 -51.5867 -51.7878 -50.6738 -48.6714 -45.6076 -42.7572 -40.6545 -40.2291 -41.7798 -43.9454 -46.7671 -49.8002 -52.2825 -54.6193 -56.5571 -58.1568 -59.8991 -61.4597 -63.0883 -63.8076 -63.9327 -63.8377 -63.1331 -62.3427 -61.6681 -60.8571 -59.7528 -58.5266 -57.157 -56.1439 -55.3747 -54.8136 -54.9809 -55.4979 -55.8985 -56.5201 -57.0188 -57.6387 -57.8651 -57.2097 -55.417 -52.5748 -49.0912 -44.8978 -41.3482 -38.5834 -37.7212 -38.7592 -41.1284 -43.8506 -46.9192 -49.6571 -51.8771 -53.2221 -54.1016 -54.1858 -54.2263 -54.3823 -54.531 -54.7843 -54.5957 -53.4635 -51.0366 -48.017 -43.9938 -40.4286 -37.5214 -36.3015 -36.3576 -38.1491 -41.1517 -44.2819 -47.7704 -50.8181 -53.1036 -54.8924 -56.2251 -56.8722 -57.7226 -58.3484 -58.7267 -59.0176 -59.3841 -59.5505 -60.0212 -60.4243 -60.9026 -61.7517 -62.334 -63.1445 -63.6989 -63.9825 -64.1872 -64.1126 -64.0401 -63.3787 -61.887 -59.1669 -55.8996 -51.9052 -47.7256 -43.8147 -40.3095 -37.8082 -36.3169 -35.706 -35.1273 -35.2643 -35.9461 -37.5373 -39.5376 -42.1848 -44.9734 -47.7999 -50.3119 -52.6305 -54.2615 -55.5835 -56.5549 -57.2988 -58.031 -58.2908 -57.8156 -56.7733 -54.6913 -52.5763 -49.7462 -47.6904 -46.1208 -46.1601 -46.9017 -48.1577 -50.02 -51.6884 -53.192 -54.0616 -54.4726 -54.5512 -54.3746 -54.1294 -53.9012 -53.689 -53.603 -53.4724 -53.6379 -53.6423 -53.8298 -54.1116 -54.2241 -54.7727 -55.2665 -55.8766 -56.3858 -57.3896 -57.827 -58.4266 -58.7512 -58.7045 -58.9081 -58.7309 -58.5233 -58.2769 -58.0304 -57.9441 -57.9532 -57.8385 -57.933 -58.0084 -58.2321 -58.4651 -58.2853 
-57.9696 -57.8823 -57.4541 -57.2234 -57.0535 -56.8182 -56.4328 -55.9556 -55.1878 -54.0945 -52.4796 -50.0557 -47.6149 -45.0273 -42.6696 -41.0033 -39.697 -39.0522 -38.8915 -39.7724 -41.2851 -43.4118 -45.9508 -49.1114 -51.5421 -53.7196 -55.5736 -56.8796 -57.5271 -58.1966 -59.0521 -59.8198 -60.7151 -61.3381 -61.8865 -62.2628 -62.5649 -62.8615 -63.3327 -63.6323 -64.0802 -64.4802 -64.6978 -64.8313 -65.0219 -64.9586 -64.9505 -65.0667 -64.7332 -64.4989 -63.2866 -60.9493 -58.526 -55.155 -51.6123 -48.1685 -45.4632 -43.7051 -42.5768 -42.9236 -43.9505 -45.7588 -47.9646 -50.0635 -52.1321 -53.9654 -55.0452 -55.6489 -55.7556 -56.0553 -55.8388 -55.8755 -55.7075 -55.7067 -55.6372 -55.7747 -55.537 -55.4694 -55.1839 -55.0164 -54.6725 -54.6202 -54.3628 -54.5683 -54.81 -55.2069 -55.8325 -56.4092 -57.1651 -57.4471 -57.9498 -58.0211 -58.0357 -58.0241 -57.5509 -57.2027 -56.758 -56.2704 -55.9694 -55.6505 -55.4687 -55.2779 -55.0334 -54.8386 -54.9044 -55.041 -55.2827 -55.6328 -56.0877 -56.3666 -56.8954 -57.3649 -57.7356 -58.0901 -58.106 -58.035 -58.0302 -57.9354 -58.1454 -58.3574 -58.4003 -58.1098 -57.7717 -57.2164 -57.1246 -57.3783 -57.9929 -58.7801 -59.3894 -60.0602 -60.2041 -60.2868 -60.1553 -59.8082 -59.7799 -59.793 -59.618 -59.6499 -59.5184 -59.3302 -59.4228 -59.7132 -60.2003 -60.9104 -61.5238 -61.776 -62.1465 -62.3491 -62.4719 -62.0922 -62.0717 -61.7058 -61.3637 -61.2936 -61.4463 -62.0505 -62.7773 -63.5881 -64.2835 -65.1902 -65.0489 -65.1804 -65.0133 -64.5525 -64.3369 -64.2929 -64.0683 -64.0949 -64.3102 -64.6393 -65.4184 -66.1335 -67.2752 -68.3945 -69.6079 -70.8854 -71.9183 -72.8218 -73.5719 -73.8369 -73.9717 -73.9039 -73.3957 -72.5763 -71.1226 -69.2791 -67.014 -64.7134 -63.6388 -63.4272 -64.0991 -65.3194 -66.8023 -67.6874 -67.3781", "breathiness_timestep": "0.011609977324263039" }, { @@ -202,9 +202,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "147.5 147.5 147.5 147.3 147.4 147.1 147.0 147.1 147.0 146.8 147.0 146.9 147.5 147.5 147.7 147.8 147.3 147.3 147.4 147.4 147.0 147.2 147.2 147.2 147.4 147.5 147.3 147.3 147.3 147.5 147.6 147.3 147.6 147.6 147.6 147.8 147.7 147.6 147.9 147.5 147.4 147.5 147.4 147.6 147.0 147.0 147.0 147.1 146.6 146.6 147.2 147.1 147.6 148.1 152.6 161.5 177.9 190.9 209.0 224.5 237.8 249.8 251.0 249.3 245.1 240.2 233.1 221.7 212.2 222.6 233.0 244.8 254.0 255.2 253.3 253.0 251.5 247.1 244.5 242.7 243.1 244.1 244.9 246.4 251.8 256.2 256.4 255.3 252.7 248.2 242.1 236.1 228.0 220.5 217.6 216.5 216.7 219.0 221.1 221.9 222.1 222.1 221.6 219.3 217.4 214.4 210.2 204.2 196.4 194.7 194.8 195.2 195.6 196.7 192.9 193.1 193.2 194.4 196.2 197.0 197.8 197.5 196.9 196.1 194.7 192.8 189.5 186.3 181.0 187.7 195.2 203.2 212.9 221.3 221.9 222.4 222.5 221.5 221.6 221.2 220.3 220.0 219.7 218.4 218.1 216.4 212.2 203.5 207.2 213.1 218.8 226.0 233.5 241.0 251.2 252.7 255.3 257.6 258.5 257.6 253.4 247.8 242.0 237.9 233.8 232.5 234.4 239.2 244.5 251.1 255.9 259.0 260.1 258.6 255.3 249.3 242.2 233.4 227.2 223.3 222.6 225.8 232.1 240.6 251.3 259.9 264.5 264.3 261.0 252.6 244.4 235.7 231.7 227.2 223.9 219.3 215.9 211.8 208.7 205.3 201.7 197.9 194.7 191.4 188.0 184.8 181.3 178.0 174.7 171.1 168.6 165.4 162.4 159.3 157.3 154.0 152.4 159.2 173.5 188.0 205.4 221.7 237.7 248.4 253.6 256.0 253.1 247.2 239.6 223.6 226.6 231.3 236.0 241.9 247.9 253.4 258.1 254.7 251.3 250.3 247.8 247.8 246.7 246.7 246.7 246.8 247.3 243.9 236.9 228.0 
212.6 203.8 210.1 214.1 218.3 222.5 221.9 217.6 219.5 219.8 219.8 218.9 219.4 220.8 220.5 220.0 220.1 220.7 221.6 221.9 223.1 223.1 225.9 225.4 226.3 229.7 237.4 243.0 244.4 244.9 245.5 245.4 246.1 246.8 247.0 248.0 249.5 249.4 249.6 249.2 245.4 239.3 230.4 229.2 227.3 225.3 223.2 219.1 218.8 223.3 241.4 256.6 277.4 298.4 312.0 322.5 328.7 331.5 331.8 332.1 330.0 329.8 329.1 327.6 324.7 319.6 312.8 310.9 306.9 301.3 294.1 290.2 290.4 291.9 293.3 293.4 293.3 292.3 289.6 288.3 287.9 288.1 288.4 289.3 290.6 292.8 296.6 300.5 304.5 305.4 304.0 300.1 295.3 290.3 286.6 283.9 285.1 287.4 292.0 298.0 302.6 306.7 309.5 310.6 309.7 306.2 301.3 295.8 290.9 289.7 278.5 269.2 262.8 256.6 250.8 244.8 240.0 234.9 230.2 225.4 219.2 214.2 209.2 204.3 199.3 194.3 189.9 184.8 180.2 177.6 178.0 197.6 220.6 246.3 274.0 292.1 303.1 308.9 308.0 303.5 297.3 291.3 284.8 277.3 271.4 278.0 282.1 287.3 292.6 298.0 298.8 296.2 295.3 294.6 294.9 294.5 295.5 294.8 294.8 293.5 293.2 292.6 290.3 286.2 277.9 267.9 264.2 273.9 285.2 295.0 308.5 305.1 302.4 300.7 293.4 283.6 272.0 264.4 258.8 254.8 251.9 250.0 247.4 242.1 248.5 256.2 264.3 271.3 280.0 287.4 296.5 305.9 297.7 292.0 292.4 292.5 293.3 294.9 295.6 295.8 294.9 294.9 294.2 293.7 294.4 292.0 288.6 286.9 292.9 301.5 313.5 323.4 329.8 331.6 333.7 332.6 330.9 329.9 328.9 328.7 329.4 329.8 329.7 329.0 326.7 320.9 302.4 314.9 327.7 345.3 362.4 382.0 390.8 392.9 397.0 397.4 395.4 393.3 389.4 383.9 380.5 377.8 378.2 380.7 386.8 390.9 392.4 394.2 394.2 392.9 389.9 390.4 392.0 392.4 391.4 389.0 387.8 388.1 386.9 387.5 389.7 390.1 390.9 391.5 392.4 392.9 393.0 391.5 390.0 387.6 383.1 379.0 370.5 363.9 358.2 353.9 348.8 345.5 341.3 338.5 335.5 334.1 333.4 331.1 329.5 328.0 328.0 328.5 330.7 332.2 330.6 327.0 318.4 306.3 299.3 292.0 292.1 291.1 291.2 293.1 294.8 294.7 293.5 291.0 287.4 283.3 279.9 277.1 273.6 269.9 265.0 259.5 254.3 249.3 246.6 245.8 245.3 245.9 245.5 245.0 244.1 244.5 245.6 247.1 249.1 250.0 249.1 249.2 248.4 248.2 246.5 245.0 244.1 244.9 245.0 245.9 247.2 249.1 251.3 250.2 249.0 248.5 247.8 247.3 245.4 238.1 229.8 223.2 221.8 220.3 219.1 218.4 217.1 215.9 215.9 214.8 217.1 218.4 219.2 218.7 218.4 217.8 217.5 218.1 218.4 217.8 218.6 218.5 219.1 219.4 220.4 220.7 220.4 219.4 219.1 218.8 219.0 220.3 220.8 220.5 222.0 222.6 223.2 223.4 223.0 221.3 217.8 212.0 207.1 212.1 217.9 223.2 229.1 234.5 239.4 245.9 252.1 258.9 257.3 258.9 258.0 255.1 252.2 247.8 242.8 240.6 238.6 237.8 240.3 242.7 247.2 252.0 257.1 259.7 260.6 259.9 257.1 252.2 243.8 232.5 226.0 223.8 225.6 231.5 239.9 249.2 257.4 263.4 267.1 267.9 265.5 258.8 248.3 233.5 224.2 216.3 210.3 203.5 199.1 194.1 190.1 186.0 182.4 178.9 175.4 171.6 168.4 165.3 161.7 158.7 155.7 153.1 150.3 147.6 144.6 142.1 146.8 153.3 162.0 169.0 181.1 194.5 215.3 232.6 249.6 258.6 257.0 251.6 240.8 234.3 231.2 230.4 234.6 237.7 241.5 245.1 248.8 252.9 257.6 259.4 256.6 257.4 256.5 254.1 251.9 250.0 248.3 248.7 249.7 250.1 249.8 247.0 239.3 218.4 220.3 222.5 224.9 227.7 229.2 222.0 219.5 218.2 219.8 221.4 221.9 221.0 219.7 217.1 215.9 214.3 212.8 207.1 196.9 193.8 192.4 190.9 189.7 188.9 186.9 186.1 183.6 184.8 186.8 189.1 191.5 193.3 195.0 196.3 196.1 195.3 194.3 193.8 193.4 193.6 194.3 195.2 195.4 195.1 194.9 195.4 196.1 195.3 195.1 195.4 195.8 196.5 197.4 198.1 197.4 196.7 195.6 193.4 186.9 176.6 170.6 179.5 190.0 200.4 212.2 224.8 236.0 230.9 228.5 226.7 223.9 221.4 219.0 216.7 215.8 216.5 217.2 219.6 222.6 226.0 227.7 226.7 227.3 227.2 225.5 224.5 221.4 219.0 217.2 215.7 215.9 216.8 218.2 220.5 222.7 224.4 225.3 223.4 
220.3 215.8 213.1 212.7 214.5 217.7 221.4 226.0 228.3 231.1 230.5 228.9 226.5 222.7 221.0 220.5 220.3 218.8 218.9 220.5 222.0 220.3 216.9 213.4 215.8 217.9 221.1 224.6 228.1 233.2 227.3 221.8 222.7 222.5 222.0 221.3 220.8 220.7 220.9 220.8 220.8 221.1 220.4 216.9 209.1 200.4 206.6 212.9 218.8 226.1 229.6 224.0 222.8 222.4 222.3 221.8 221.0 220.9 220.5 219.8 221.2 221.2 219.3 210.6 202.5 201.5 201.9 200.9 200.3 199.1 198.6 196.1 193.4 193.9 194.1 195.4 196.4 197.7 198.1 198.0 197.8 196.8 195.8 195.0 195.9 195.9 196.1 197.4 199.5 203.2 211.3 216.2 219.4 220.2 221.8 222.7 222.1 221.7 220.8 220.4 219.5 219.5 219.3 218.7 212.9 206.2 213.0 220.7 229.2 236.5 245.5 254.4 265.2 258.0 249.7 247.0 247.7 247.6 247.8 248.7 249.5 249.9 248.3 245.1 237.0 216.1 211.8 205.8 194.5 184.8 175.8 168.1 158.5 156.9 158.4 163.4 165.3 167.4 168.6 167.4 162.9 157.2 152.6 149.1 148.7 150.1 154.3 160.5 168.2 175.0 180.5 182.6 181.1 176.0 167.7 159.9 151.9 148.9 148.6 151.4 155.1 161.3 167.6 172.5 175.0 177.3 178.5 177.4 173.7 168.6 169.4 169.0 169.1 168.8 168.5 168.3 167.7 166.5 165.5 164.4 163.0 160.6 158.6 156.7 155.2 154.5 153.9 153.6 153.2 153.2 152.9 153.1 152.8 152.2 150.0 149.7 150.2 150.2 150.1 149.7 149.5 151.9 156.3 161.7 165.8 167.4 167.5 167.0 166.9 167.1 167.1 166.4 167.2 166.2 165.4 163.7 162.6 162.5 163.3 165.0 166.9 166.8 166.1 165.2 164.6 163.3 163.0 158.3 153.9 148.7 146.9 144.0 142.3 140.6 140.6 141.5 143.4 144.9 145.2 146.1 146.4 146.7 147.2 147.8 148.4 147.8 147.0 146.2 144.7 143.6 141.2 141.0 142.8 144.7 147.1 152.6 159.0 160.3 162.1 162.4 162.9 163.5 163.8 164.6 164.5 164.0 164.2 163.9 162.8 161.7 157.0 163.5 169.4 176.2 182.7 189.5 196.8 197.7 197.7 197.3 196.3 195.9 195.3 194.4 195.4 196.7 197.4 197.7 197.5 195.1 182.6 186.0 189.2 195.4 200.1 205.1 210.4 204.5 201.2 202.3 202.1 201.5 201.2 200.8 199.8 198.0 196.0 194.6 193.6 193.7 198.4 205.4 214.9 220.9 225.4 225.4 223.1 219.6 215.9 213.4 212.4 213.5 215.4 218.5 220.8 221.3 221.7 221.7 222.7 223.1 222.5 221.0 219.4 218.2 217.8 218.3 219.2 221.0 218.9 220.5 223.1 222.7 223.8 222.5 220.8 218.4 218.6 219.4 219.5 219.9 219.6 218.1 213.5 207.9 201.9 196.5 193.9 192.0 192.4 193.4 194.1 195.3 196.2 196.5 196.5 196.4 196.4 196.3 194.9 195.1 195.7 196.4 195.5 195.2 195.3 193.8 208.8 221.2 226.0 225.6 225.2 225.3 225.3 224.6 222.6 221.1 219.6 219.2 218.7 217.9 217.0 217.6 218.1 217.5 218.4 218.1 218.8 219.7 220.3 220.4 220.6 220.1 220.6 221.1 221.8 221.9 221.5 221.0 220.4 220.6 221.2 221.1 218.9 217.8 215.4 211.8 211.2 220.5 232.3 241.4 247.3 251.5 254.4 256.3 257.4 258.8 257.6 255.4 254.5 254.8 255.0 256.1 258.8 262.5 265.7 277.0 286.3 294.4 297.6 300.2 298.5 297.4 294.7 292.7 291.7 293.3 294.3 294.9 292.3 286.4 277.9 270.5 268.0 265.2 263.2 260.9 258.9 256.1 253.5 251.8 248.1 242.9 242.8 247.3 250.8 253.0 254.1 253.3 249.4 243.9 237.9 233.8 233.3 236.2 241.7 247.9 254.4 259.9 263.8 264.4 263.3 259.9 253.0 242.7 232.3 226.0 224.3 225.6 227.3 234.2 242.7 251.5 259.5 265.0 265.9 264.3 259.2 251.0 243.0 234.1 223.9 219.6 216.4 214.1 211.7 209.4 207.1 204.7 202.8 201.0 198.7 196.5 194.4 192.2 190.1 188.1 185.3 183.0 180.7 177.7 174.6 171.1 168.6 166.5 168.8 177.1 188.6 197.6 208.2 219.5 235.1 245.9 251.0 250.6 247.7 243.1 237.7 230.0 223.8 223.5 231.7 241.5 251.0 253.4 250.4 249.6 248.0 246.3 245.2 244.8 244.3 244.2 245.0 246.1 246.9 248.6 246.9 243.4 235.8 228.3 228.6 230.9 245.0 233.0 226.5 221.3 220.3 219.5 218.0 218.5 219.2 220.1 220.4 219.5 219.6 218.8 219.3 216.1 212.4 206.5 201.1 196.5 191.2 190.4 189.9 190.1 191.1 193.5 195.8 196.9 197.0 196.7 
196.9 196.0 196.4 196.5 195.1 193.4 186.3 186.4 196.8 204.7 214.4 224.1 226.9 223.2 221.2 220.8 220.7 220.7 221.3 222.0 221.5 221.3 221.4 221.2 220.5 220.3 221.0 219.7 221.4 222.2 223.8 223.2 224.1 222.1 222.6 223.7 224.9 225.0 224.5 223.3 223.0 223.1 223.8 224.7 227.2 229.7 234.8 241.3 247.6 253.1 256.4 257.0 256.4 256.0 253.9 248.6 242.4 237.6 234.9 234.9 236.3 242.0 248.9 254.6 258.7 261.0 262.0 262.2 257.2 249.7 238.7 233.3 228.6 222.8 218.2 213.7 209.3 205.6 202.0 198.5 195.0 192.1 188.9 185.6 182.4 179.2 176.0 172.9 169.8 166.9 163.9 161.3 157.7 155.1 151.7 152.4 160.8 177.5 191.1 208.6 225.5 240.5 251.1 254.8 255.2 250.9 245.6 238.2 225.4 228.0 232.8 237.5 243.2 248.2 254.9 251.5 248.5 249.7 248.7 247.9 247.2 246.1 246.8 247.0 247.0 246.0 243.7 238.7 229.6 211.8 213.0 215.1 217.4 219.9 221.7 218.6 218.1 219.0 219.4 220.1 220.1 220.8 221.9 221.0 219.9 219.5 220.1 220.2 221.0 221.2 223.4 224.6 226.6 229.1 239.2 244.6 247.6 247.0 248.7 248.9 248.0 247.8 247.9 248.1 248.7 249.7 249.8 248.3 242.7 235.9 227.9 241.1 252.9 266.4 282.2 298.8 322.1 323.5 327.7 330.9 332.1 330.0 330.0 330.4 329.8 330.1 329.7 326.9 321.6 311.5 301.2 303.0 305.3 307.8 310.5 312.2 309.7 301.5 297.7 298.5 300.1 302.2 302.0 300.4 297.1 293.7 290.0 283.4 279.6 278.3 279.8 283.4 289.3 295.8 302.2 307.0 310.4 311.0 308.8 303.0 293.7 286.3 279.0 274.6 276.0 281.0 288.8 295.6 302.9 309.9 315.1 316.5 311.5 298.7 283.1 277.7 271.7 266.8 260.8 254.0 248.9 244.7 240.4 236.6 232.5 229.1 224.6 220.6 216.3 212.2 208.7 205.0 201.9 198.7 195.6 193.1 189.7 191.4 200.8 210.5 222.1 238.0 258.2 277.1 293.3 299.1 301.7 299.8 293.1 288.8 282.3 274.6 268.6 275.7 282.3 290.0 296.6 303.9 307.7 304.7 300.3 300.3 297.1 296.2 294.7 294.8 293.7 293.2 293.9 293.8 292.7 291.7 288.7 286.2 281.1 274.1 270.6 269.9 276.6 280.3 282.2 282.2 282.6 276.6 270.3 264.4 261.0 256.3 252.9 251.7 249.0 244.8 239.1 246.9 257.2 267.4 276.0 285.3 295.4 304.0 297.3 292.4 292.5 292.2 293.1 293.5 294.2 295.2 294.8 294.9 294.7 292.6 291.0 291.4 289.8 288.7 292.0 298.4 308.2 317.9 323.4 328.9 330.6 331.1 330.8 330.8 330.2 330.3 330.9 330.6 329.8 330.6 326.7 320.0 310.3 320.4 333.5 346.2 358.3 373.6 389.6 395.8 393.9 398.3 400.5 402.0 401.7 399.8 397.5 392.6 389.3 386.8 386.3 389.1 389.9 391.7 393.7 396.0 394.0 394.7 393.7 392.8 389.6 387.1 385.7 387.1 388.1 390.9 392.2 390.9 390.8 390.2 390.0 388.1 382.6 375.3 364.8 336.3 327.7 331.1 334.9 338.1 336.4 332.5 333.9 334.0 334.9 334.0 333.8 330.4 328.2 326.6 327.7 327.7 327.4 324.7 317.1 303.3 288.6 278.9 285.4 300.2 301.3 297.1 295.3 294.1 292.5 293.4 294.4 296.5 297.6 297.6 296.1 293.8 294.0 278.0 253.3 232.9 235.5 236.9 236.3 236.5 235.8 237.1 239.6 243.1 246.7 248.1 248.1 247.8 248.2 247.7 244.0 242.9 241.8 241.3 238.7 238.9 239.0 241.9 242.1 244.0 244.9 245.4 245.2 246.9 248.8 249.9 250.2 249.9 249.6 249.3 247.5 243.6 236.4 231.3 222.8 219.2 216.9 214.3 211.8 209.3 205.9 205.9 209.2 215.3 219.6 224.5 225.8 223.7 220.9 218.0 215.4 214.3 213.9 214.9 217.4 220.0 220.9 223.1 224.4 224.6 223.9 222.3 221.1 219.2 218.3 217.9 217.2 218.1 219.0 220.1 221.8 221.4 220.8 220.2 217.0 210.9 202.1 192.3 199.2 206.8 215.8 225.2 233.7 240.4 249.1 254.3 256.9 257.4 255.5 251.4 247.3 243.7 238.2 235.4 235.7 238.9 243.3 250.0 254.9 260.5 262.7 263.3 261.9 257.6 251.3 242.8 232.6 227.2 225.8 226.9 232.2 241.9 251.9 259.3 263.7 266.5 265.9 262.2 252.3 240.0 228.1 218.9 213.3 207.6 203.3 199.8 196.3 192.9 190.3 186.8 183.8 180.8 178.1 175.1 172.4 169.6 166.9 164.4 161.1 158.3 154.2 150.4 149.4 152.0 158.6 166.2 171.0 181.2 193.9 206.1 
223.4 236.6 249.2 254.0 252.7 248.1 241.8 233.4 227.3 232.0 237.0 242.0 246.8 251.9 256.6 257.6 252.2 251.8 251.0 250.0 249.1 247.7 246.9 247.3 248.2 248.8 249.2 249.1 247.6 246.3 244.9 240.8 234.6 233.0 225.5 224.7 219.2 217.6 216.1 217.5 219.4 221.2 221.8 220.8 220.2 219.8 219.4 215.5 208.5 196.9 194.4 193.3 192.3 192.3 191.2 189.9 192.2 189.8 190.1 190.2 190.7 192.4 193.5 194.7 195.1 194.9 194.1 193.7 193.7 193.4 193.0 193.6 194.9 194.6 193.8 194.5 195.3 195.5 195.2 195.3 195.6 197.2 198.1 198.7 198.5 197.6 196.2 195.9 192.9 187.8 178.9 171.0 180.5 190.8 201.9 214.8 228.8 231.5 232.5 232.5 230.2 226.7 220.6 213.8 210.8 208.9 209.7 213.2 217.8 224.9 230.7 235.5 238.3 238.5 235.5 228.1 219.6 210.2 206.1 204.1 203.9 206.2 211.1 218.7 224.7 230.0 234.6 235.3 232.6 231.2 224.9 222.1 218.7 214.5 210.7 206.2 203.2 200.4 198.0 195.6 193.1 190.5 187.9 186.2 184.2 181.9 180.7 179.2 175.3 170.8 166.6 164.2 162.3 160.6 159.2 157.9 155.6 153.4 151.6 149.4 150.2 155.6 161.6 167.6 174.3 181.8 189.3 195.7 198.5 199.0 197.5 195.8 191.5 184.1 187.4 191.5 196.5 201.2 206.8 214.2 217.4 218.9 220.0 220.1 220.3 219.3 219.5 219.6 221.0 221.3 220.8 219.3 217.9 213.7 207.9 218.2 227.3 237.3 247.8 260.7 253.5 249.9 249.2 246.9 246.7 246.6 246.5 245.3 246.0 247.4 246.9 245.1 242.7 237.8 228.6 229.0 230.6 230.5 232.2 230.4 224.6 218.8 218.4 218.5 220.1 221.2 221.4 221.7 221.8 221.6 220.7 218.5 216.0 207.9 211.0 216.0 223.6 231.9 241.2 251.0 250.0 249.6 250.8 250.1 249.7 248.7 246.5 245.7 246.9 248.3 248.8 250.4 247.8 242.4 235.6 228.5 238.8 250.1 262.8 275.7 293.4 297.8 296.5 300.2 304.1 307.1 306.7 305.0 300.5 296.0 291.4 286.7 284.8 285.6 289.5 294.8 298.5 302.2 305.2 304.3 301.1 300.0 297.7 292.3 285.6 280.2 276.4 275.7 278.0 283.6 289.2 294.9 299.0 304.5 309.0 310.9 308.0 303.5 292.3 284.1 277.9 270.6 263.4 257.1 252.0 247.0 243.1 239.8 235.8 232.4 228.5 225.3 221.0 218.5 215.7 212.8 210.0 208.4 205.7 202.9 200.4 197.4 194.5 197.8 208.4 224.8 238.5 259.2 279.4 292.6 298.5 300.1 293.7 291.3 291.7 291.9 291.2 288.1 287.1 284.4 286.0 288.5 289.5 291.3 295.1 296.4 293.6 292.4 291.3 291.8 293.6 294.0 292.9 292.1 290.4 287.7 287.2 278.4 270.8 257.5 252.3 250.4 249.7 248.4 248.6 247.5 246.3 245.7 247.0 248.3 249.7 249.3 247.2 245.8 243.8 241.9 238.8 232.9 229.9 229.1 227.7 226.6 226.1 224.5 224.6 223.1 220.1 218.4 216.7 217.4 218.2 219.3 221.3 222.1 222.0 221.4 220.8 218.0 211.4 216.8 223.0 229.6 236.3 245.3 249.2 248.2 247.3 247.5 247.4 247.4 247.5 248.6 248.4 249.6 249.8 251.2 250.4 244.0 231.6 242.7 253.8 265.8 280.7 294.7 311.3 328.1 339.1 334.1 335.4 335.4 334.2 331.8 328.4 326.1 321.0 321.8 319.3 323.5 326.6 329.5 332.0 336.8 339.0 337.7 335.0 334.2 332.3 330.0 326.7 323.5 322.3 324.3 326.9 330.2 330.8 329.5 330.2 328.6 328.0 322.7 314.3 294.8 281.7 284.6 286.6 288.2 290.2 289.7 289.2 289.8 291.2 292.2 293.6 294.9 296.0 295.4 294.5 293.6 294.8 295.6 294.8 293.8 295.1 296.3 304.3 313.4 323.1 329.6 332.8 332.1 332.2 332.4 330.0 328.9 327.4 326.5 329.3 331.1 330.2 327.1 325.0 321.5 312.5 317.8 337.3 352.7 370.7 388.8 378.4 371.5 371.5 370.9 369.9 367.1 365.6 365.4 366.0 366.8 367.3 367.5 365.9 364.4 360.8 355.6 352.0 350.2 345.6 341.1 336.6 330.8 329.1 329.0 328.3 330.8 331.9 332.8 334.9 333.6 331.3 328.0 322.2 306.7 286.9 267.9 267.4 270.2 274.9 283.1 283.8 284.8 287.0 291.5 297.5 300.0 302.3 300.2 298.2 293.4 290.8 289.3 288.7 289.9 292.1 292.3 292.5 292.3 290.9 291.4 292.2 292.9 292.7 292.5 293.3 294.4 295.1 294.7 295.4 295.9 294.0 293.8 291.7 286.4 274.1 278.0 284.9 287.0 294.7 302.8 309.9 304.1 296.3 295.5 
296.8 298.1 299.0 296.5 294.7 294.0 293.4 294.4 294.3 295.2 295.3 296.7 298.8 300.8 302.6 308.0 313.6 322.8 330.2 337.3 339.5 339.3 337.7 334.0 330.5 326.7 323.6 322.1 322.0 324.1 325.3 326.8 328.1 329.3 329.9 330.3 330.3 329.3 328.7 327.9 328.6 329.1 329.0 329.2 329.7 329.7 329.0 328.2 328.3 329.0 328.3 328.2 327.9 327.1 325.8 326.9 327.1 330.4 334.2 336.6 338.8 338.6 337.6 334.5 329.4 325.9 321.8 317.8 315.1 314.5 315.0 317.0 321.0 325.4 332.0 337.2 341.5 343.1 342.3 340.3 339.2 335.8 332.2 328.7 324.2 319.1 317.8 317.8 318.6 322.4 326.3 331.5 336.3 338.8 339.1 337.6 336.8 334.9 333.0 331.3 328.7 328.5 328.5 327.5 326.6 327.4 327.2 327.4 330.3 333.9 337.3 339.4 341.1 341.4 340.9 337.7 332.8 326.0 318.6 314.0 312.6 315.3 320.3 326.6 333.7 341.0 346.1 349.9 351.7 349.4 344.4 338.7 331.3 326.0 321.4 318.9 319.4 321.5 326.0 334.5 339.9 343.5 345.9 346.8 345.0 342.4 338.1 333.2 327.5 323.1 320.3 318.8 321.4 325.2 330.8 335.2 337.9 341.9 344.1 343.5 341.8 339.7 336.8 333.7 330.1 327.0 326.6 326.0 327.8 330.8 334.3 337.2 339.0 339.2 338.5 337.1 335.2 333.9 332.4 330.0 327.0 325.0 323.4 321.2 318.0 315.8 314.3 311.6 308.7 305.0 300.0 294.2 287.7 284.3 282.5 280.0 277.5 276.6 274.9 273.2 270.2 266.9 262.5 256.0 247.4 247.9 247.6 248.0 248.4 249.1 249.1 249.4 249.1 250.1 250.8 250.8 251.0 251.8 251.3 252.3 252.2 251.7 250.3", "f0_timestep": "0.011609977324263039", - "energy": "0.0004 0.0002 0.0006 0.0007 0.0004 0.0004 0.0005 0.0005 0.0008 0.0014 0.0011 0.0008 0.0014 0.0017 0.0021 0.002 0.0023 0.0022 0.0025 0.0031 0.003 0.0032 0.0034 0.0034 0.0031 0.0037 0.0031 0.0031 0.0026 0.0028 0.0025 0.0026 0.0025 0.0014 0.0018 0.0019 0.0011 0.0008 0.001 0.0014 0.0021 0.0022 0.0042 0.006 0.008 0.0103 0.011 0.0121 0.0126 0.0118 0.0139 0.0281 0.039 0.0517 0.063 0.0713 0.0769 0.077 0.0734 0.0683 0.0667 0.0672 0.0707 0.0744 0.0751 0.0712 0.0622 0.0486 0.0333 0.0221 0.0186 0.0259 0.0453 0.0639 0.0838 0.0927 0.0944 0.0909 0.0799 0.0738 0.0713 0.069 0.0683 0.0702 0.0711 0.0714 0.0718 0.0711 0.0711 0.0728 0.0735 0.0741 0.0729 0.0715 0.0722 0.0702 0.0699 0.0664 0.0618 0.0602 0.0559 0.0579 0.0596 0.0613 0.0666 0.067 0.0671 0.0607 0.0487 0.0355 0.0204 0.0147 0.0254 0.0481 0.0665 0.0801 0.0906 0.092 0.0903 0.0875 0.0849 0.0814 0.0786 0.0764 0.0699 0.061 0.0512 0.0365 0.0211 0.0128 0.0088 0.0124 0.0431 0.0564 0.0726 0.0848 0.0922 0.0972 0.0995 0.1003 0.0977 0.0953 0.0915 0.0853 0.0776 0.0657 0.0516 0.0373 0.0242 0.0143 0.0125 0.0124 0.0129 0.0286 0.0503 0.0738 0.0935 0.1078 0.114 0.1131 0.1077 0.1021 0.0943 0.0878 0.0813 0.0774 0.0763 0.0762 0.0779 0.0785 0.082 0.082 0.0805 0.0784 0.0752 0.0729 0.0712 0.0692 0.069 0.0652 0.0616 0.0573 0.0528 0.0526 0.0559 0.0579 0.0589 0.0599 0.0592 0.059 0.0562 0.0492 0.0389 0.0275 0.0143 0.0048 0.0028 0.0033 0.0045 0.0049 0.005 0.0051 0.0054 0.0045 0.0035 0.0029 0.0029 0.0021 0.0021 0.0021 0.0011 0.0007 0.004 0.0102 0.0427 0.0624 0.0787 0.0906 0.0911 0.0894 0.0864 0.0825 0.0807 0.0822 0.082 0.0854 0.0832 0.0768 0.068 0.0522 0.0386 0.0297 0.0292 0.032 0.0324 0.0355 0.055 0.0723 0.0862 0.0958 0.0957 0.0917 0.089 0.0847 0.0807 0.0802 0.0792 0.0766 0.07 0.0593 0.0445 0.0288 0.0157 0.01 0.0134 0.0386 0.0591 0.074 0.0855 0.0876 0.0851 0.0822 0.0789 0.0774 0.0768 0.0782 0.0795 0.0813 0.0827 0.0817 0.0817 0.0804 0.0773 0.0728 0.0663 0.0619 0.0614 0.0679 0.0764 0.0835 0.0896 0.0923 0.0929 0.0925 0.0915 0.0894 0.0869 0.0852 0.0847 0.0873 0.0875 0.0801 0.0672 0.048 0.0262 0.0153 0.0308 0.0512 0.0691 0.0838 0.0934 0.0957 0.0934 0.092 0.0881 0.0843 0.0834 0.0828 0.0843 0.0866 0.089 0.0914 0.0941 
0.0954 0.0946 0.0923 0.0887 0.0891 0.0983 0.1055 0.1154 0.1211 0.1198 0.1205 0.1173 0.1156 0.1139 0.1094 0.1051 0.1004 0.0948 0.0915 0.0905 0.092 0.0947 0.0992 0.1017 0.1035 0.1043 0.1023 0.1005 0.0987 0.0935 0.0886 0.0812 0.0731 0.0677 0.0648 0.0644 0.0646 0.0644 0.064 0.0623 0.0611 0.0599 0.056 0.0492 0.0393 0.0258 0.0125 0.0044 0.0038 0.0041 0.0058 0.0057 0.0055 0.0054 0.0046 0.0041 0.0022 0.0016 0.0004 0.0007 0.0 0.0 0.0002 0.0086 0.0257 0.0509 0.0682 0.0843 0.0939 0.1017 0.1016 0.1001 0.1004 0.0953 0.097 0.0993 0.1023 0.1017 0.0959 0.0833 0.0634 0.0443 0.0208 0.0158 0.0185 0.026 0.0566 0.0812 0.099 0.1126 0.1131 0.1105 0.1082 0.1068 0.1064 0.1056 0.1052 0.105 0.1062 0.1065 0.1055 0.0961 0.0801 0.0594 0.0356 0.0201 0.0398 0.0621 0.0774 0.0896 0.0961 0.0943 0.0928 0.0915 0.0883 0.0879 0.0866 0.0849 0.0788 0.0691 0.0569 0.0434 0.0362 0.0374 0.0409 0.0434 0.0447 0.0445 0.0603 0.0766 0.0909 0.1002 0.1033 0.1025 0.1005 0.1015 0.0992 0.0986 0.0995 0.0988 0.1 0.1006 0.0991 0.0971 0.0945 0.0934 0.0965 0.1015 0.109 0.1135 0.116 0.1193 0.117 0.1166 0.1137 0.1102 0.1097 0.1093 0.1098 0.1106 0.1044 0.0894 0.0705 0.0437 0.0188 0.01 0.0074 0.0044 0.0324 0.0651 0.0911 0.1111 0.1241 0.1267 0.1254 0.1237 0.1212 0.1182 0.1171 0.1154 0.1153 0.1142 0.1122 0.1107 0.1102 0.1091 0.1073 0.1048 0.099 0.0952 0.0933 0.0925 0.0932 0.0942 0.0955 0.0959 0.0961 0.0956 0.0955 0.0956 0.0954 0.0965 0.0962 0.0972 0.0979 0.0983 0.0989 0.1 0.1007 0.0999 0.0987 0.0981 0.0992 0.1006 0.1019 0.1039 0.1048 0.1049 0.1071 0.1094 0.1105 0.1129 0.1113 0.1089 0.1064 0.1034 0.102 0.1005 0.1 0.1009 0.1012 0.1014 0.1012 0.1 0.1 0.0995 0.0999 0.1011 0.1016 0.1029 0.1027 0.1019 0.102 0.0998 0.0988 0.0993 0.0982 0.099 0.099 0.0993 0.0991 0.0992 0.0991 0.0994 0.0983 0.0966 0.0958 0.0945 0.0946 0.0943 0.0947 0.096 0.0965 0.0978 0.1003 0.1004 0.1011 0.1017 0.1002 0.1013 0.1017 0.1025 0.103 0.1021 0.1026 0.1018 0.1008 0.1001 0.0989 0.0974 0.0932 0.0863 0.071 0.0553 0.039 0.0213 0.0121 0.0122 0.013 0.0147 0.0154 0.039 0.061 0.078 0.0919 0.097 0.0984 0.0971 0.0954 0.0942 0.0929 0.0911 0.0922 0.092 0.0935 0.0943 0.0933 0.0941 0.0939 0.0922 0.0946 0.0948 0.0949 0.0959 0.0931 0.0922 0.0917 0.0897 0.09 0.089 0.088 0.087 0.0824 0.0717 0.0586 0.0424 0.0305 0.029 0.0314 0.0354 0.0373 0.0362 0.0398 0.0607 0.0809 0.1023 0.1121 0.1126 0.1093 0.1038 0.1016 0.0981 0.0948 0.0912 0.086 0.084 0.0806 0.0784 0.0771 0.0765 0.0759 0.0763 0.0758 0.0749 0.0734 0.0723 0.0715 0.0683 0.0645 0.0625 0.0591 0.0589 0.0588 0.0582 0.0585 0.0567 0.0561 0.055 0.0516 0.0476 0.0412 0.0327 0.0235 0.0149 0.0081 0.0059 0.0066 0.0072 0.0078 0.008 0.0089 0.0087 0.0081 0.0072 0.0071 0.007 0.0062 0.006 0.005 0.0043 0.014 0.0253 0.0438 0.0657 0.0831 0.0986 0.1086 0.104 0.0952 0.0809 0.0673 0.0631 0.0674 0.0725 0.0769 0.0781 0.0733 0.0681 0.0644 0.0667 0.0731 0.0806 0.0833 0.0744 0.0656 0.0592 0.0653 0.0785 0.0914 0.0979 0.0996 0.099 0.0975 0.0966 0.0955 0.0946 0.0933 0.0889 0.0775 0.0634 0.0443 0.0243 0.0166 0.0176 0.0399 0.0596 0.0721 0.0781 0.0762 0.0704 0.0668 0.0697 0.071 0.0718 0.0732 0.0742 0.0749 0.0717 0.0637 0.0514 0.037 0.0279 0.0279 0.0302 0.0287 0.041 0.0574 0.0686 0.0816 0.0893 0.0901 0.0934 0.091 0.0899 0.0887 0.0879 0.0895 0.0882 0.0884 0.0869 0.0859 0.0867 0.0854 0.0864 0.0868 0.0862 0.0859 0.0816 0.0771 0.0737 0.0705 0.0723 0.0725 0.0758 0.0765 0.0751 0.0757 0.0738 0.0688 0.0592 0.0455 0.0297 0.0133 0.0066 0.0055 0.0048 0.0304 0.0552 0.0762 0.0945 0.1034 0.1048 0.1036 0.0996 0.097 0.0965 0.0953 0.0944 0.0955 0.0957 0.0973 0.0976 0.0993 0.1004 0.0997 0.1001 0.0973 
0.0946 0.0925 0.09 0.0881 0.0886 0.0878 0.0858 0.0853 0.0836 0.0834 0.0836 0.0844 0.0824 0.0818 0.0785 0.0748 0.0738 0.0714 0.0708 0.0719 0.0729 0.0747 0.0765 0.0761 0.0756 0.0743 0.0725 0.071 0.0711 0.0697 0.0684 0.0677 0.0617 0.0533 0.0419 0.0276 0.0117 0.0106 0.0129 0.0154 0.0202 0.0448 0.0645 0.0793 0.0926 0.096 0.0974 0.0961 0.0937 0.0933 0.0907 0.0906 0.0902 0.0861 0.0814 0.0698 0.0556 0.039 0.0238 0.0171 0.0175 0.0229 0.0442 0.0598 0.0739 0.0845 0.0868 0.0871 0.0874 0.0862 0.0836 0.0835 0.08 0.0749 0.069 0.0574 0.0451 0.0331 0.0236 0.0215 0.0226 0.0227 0.036 0.0534 0.0669 0.0787 0.0832 0.0828 0.0824 0.0799 0.0808 0.0803 0.0791 0.0805 0.0789 0.0788 0.0781 0.0787 0.078 0.0778 0.0793 0.0806 0.0852 0.0889 0.0885 0.0877 0.0838 0.0807 0.0813 0.0807 0.0809 0.0807 0.08 0.0795 0.0764 0.0686 0.0568 0.0419 0.026 0.0213 0.0234 0.0261 0.0248 0.0281 0.0501 0.0665 0.0794 0.09 0.0884 0.087 0.0859 0.0839 0.0817 0.0825 0.0811 0.0753 0.0647 0.0499 0.0307 0.0117 0.0074 0.003 0.0179 0.0408 0.0565 0.0687 0.0762 0.0772 0.0764 0.0757 0.075 0.0715 0.0704 0.0688 0.0654 0.0628 0.0615 0.0578 0.0565 0.0566 0.0552 0.0558 0.0553 0.0542 0.054 0.0521 0.0492 0.0463 0.0432 0.0387 0.035 0.0312 0.0294 0.0283 0.0279 0.0269 0.0263 0.0255 0.0256 0.0241 0.0219 0.0191 0.0154 0.0115 0.0076 0.0039 0.0028 0.0028 0.003 0.0031 0.0039 0.0038 0.0036 0.0031 0.0018 0.0019 0.0012 0.0005 0.0002 0.0001 0.0001 0.0002 0.002 0.01 0.0287 0.0424 0.055 0.0631 0.0661 0.0659 0.064 0.0616 0.0584 0.0558 0.0539 0.0534 0.0533 0.0541 0.0553 0.0558 0.0565 0.0568 0.0575 0.0587 0.0572 0.0565 0.0579 0.0592 0.0627 0.0656 0.0683 0.0693 0.0708 0.072 0.0706 0.0712 0.0679 0.0647 0.0584 0.0471 0.0365 0.0232 0.0173 0.0169 0.0189 0.0184 0.0269 0.0381 0.0477 0.0566 0.0614 0.0635 0.0631 0.0634 0.0633 0.0623 0.0643 0.0626 0.0613 0.059 0.0532 0.0468 0.0383 0.0275 0.0167 0.0123 0.026 0.0394 0.05 0.0593 0.0627 0.0631 0.0634 0.0614 0.0613 0.0589 0.057 0.0559 0.0545 0.0523 0.0474 0.0405 0.0297 0.0197 0.0125 0.0117 0.0124 0.0252 0.0428 0.0564 0.0672 0.0736 0.0736 0.0742 0.0735 0.0729 0.0724 0.0721 0.0726 0.0712 0.0648 0.0558 0.0429 0.0279 0.0164 0.0166 0.0166 0.0248 0.047 0.0619 0.0748 0.0847 0.0868 0.0858 0.083 0.0815 0.0769 0.0745 0.0708 0.0662 0.063 0.0604 0.0626 0.064 0.0695 0.0733 0.0746 0.0744 0.0729 0.0712 0.0715 0.0721 0.0715 0.0707 0.0711 0.0722 0.0724 0.0737 0.0737 0.0742 0.0751 0.0755 0.074 0.0748 0.074 0.0736 0.0739 0.0745 0.0761 0.0778 0.0789 0.0798 0.0816 0.0826 0.0838 0.0836 0.0825 0.0807 0.0784 0.0769 0.0751 0.0734 0.0721 0.0711 0.0708 0.0719 0.0717 0.0702 0.0713 0.0761 0.082 0.087 0.0905 0.0899 0.0883 0.0876 0.085 0.0816 0.0792 0.0743 0.0711 0.0698 0.0685 0.0682 0.065 0.059 0.051 0.0409 0.0378 0.0556 0.0697 0.084 0.0931 0.0944 0.0921 0.0898 0.0885 0.0867 0.0863 0.0869 0.0863 0.0863 0.088 0.0877 0.0889 0.0906 0.0901 0.0905 0.0911 0.0906 0.0898 0.0896 0.089 0.0884 0.0887 0.0894 0.0891 0.0877 0.0863 0.0847 0.0827 0.0787 0.0783 0.0746 0.0705 0.0668 0.0632 0.0591 0.0595 0.0667 0.0756 0.0864 0.0963 0.1033 0.1087 0.1094 0.109 0.107 0.1032 0.0997 0.095 0.0886 0.0828 0.0761 0.0715 0.0687 0.0703 0.0737 0.0783 0.0829 0.087 0.0888 0.0899 0.0905 0.09 0.0888 0.0856 0.0829 0.0806 0.0791 0.0765 0.0679 0.0567 0.0413 0.025 0.019 0.0195 0.0198 0.0204 0.0205 0.0182 0.0264 0.044 0.0588 0.0745 0.0874 0.0924 0.0951 0.0949 0.0921 0.0896 0.0864 0.0831 0.0792 0.0766 0.0742 0.0725 0.0724 0.0717 0.0735 0.0747 0.0739 0.0743 0.0721 0.0715 0.0701 0.068 0.0652 0.0612 0.0582 0.0558 0.0539 0.0519 0.0513 0.0508 0.05 0.0505 0.0496 0.0499 0.0489 0.0445 0.0367 0.0271 0.0172 0.0067 0.0048 
0.0048 0.0048 0.0045 0.005 0.0043 0.0035 0.0031 0.0024 0.0021 0.0022 0.0036 0.0052 0.0079 0.0113 0.0138 0.0159 0.0155 0.0272 0.0459 0.0615 0.0741 0.0817 0.0815 0.0784 0.0749 0.0695 0.0678 0.0685 0.0703 0.0737 0.0745 0.0739 0.0702 0.0603 0.0474 0.0335 0.018 0.0229 0.0459 0.0626 0.0778 0.0883 0.09 0.09 0.0862 0.0831 0.0798 0.0779 0.0775 0.0767 0.0775 0.0779 0.0741 0.0683 0.0579 0.0432 0.0477 0.0388 0.0614 0.0734 0.0828 0.088 0.0865 0.0865 0.0843 0.0834 0.0821 0.0815 0.0806 0.0807 0.0807 0.0803 0.0802 0.0809 0.0776 0.0734 0.0697 0.0643 0.0657 0.0684 0.0714 0.0761 0.0751 0.0755 0.0762 0.0741 0.0766 0.0748 0.073 0.0723 0.0697 0.0686 0.0635 0.0545 0.0424 0.0274 0.0147 0.0098 0.0108 0.0314 0.0533 0.0704 0.0836 0.09 0.092 0.0918 0.092 0.0917 0.0914 0.0897 0.0888 0.0885 0.086 0.0849 0.0825 0.0809 0.0825 0.0819 0.0837 0.0879 0.091 0.0994 0.1065 0.1102 0.1143 0.1128 0.1122 0.11 0.1055 0.1012 0.0944 0.0889 0.0834 0.0796 0.0792 0.0798 0.0833 0.0872 0.0894 0.0901 0.0883 0.0841 0.0814 0.0773 0.0742 0.0702 0.0638 0.059 0.0549 0.0533 0.0526 0.0555 0.0573 0.0562 0.0555 0.0535 0.0501 0.0459 0.0378 0.0271 0.0155 0.0053 0.0027 0.0032 0.0044 0.0044 0.0051 0.0049 0.0049 0.0045 0.0041 0.0041 0.003 0.0027 0.0026 0.002 0.0015 0.0007 0.0012 0.0038 0.0086 0.0289 0.0536 0.069 0.0822 0.0876 0.0867 0.084 0.0812 0.0781 0.0804 0.0825 0.083 0.0804 0.0753 0.0637 0.051 0.0358 0.0247 0.0219 0.023 0.0297 0.0494 0.0673 0.0803 0.0904 0.0913 0.0905 0.0896 0.0864 0.0846 0.0823 0.081 0.0819 0.0796 0.0713 0.0589 0.0415 0.0223 0.0109 0.0065 0.0128 0.0394 0.0566 0.0716 0.0831 0.0836 0.0835 0.0797 0.0759 0.0744 0.0737 0.0738 0.074 0.074 0.0732 0.0723 0.0697 0.0671 0.064 0.0623 0.065 0.0693 0.0742 0.0803 0.0836 0.0874 0.089 0.0883 0.0896 0.0871 0.0852 0.0843 0.0822 0.0807 0.0762 0.0661 0.0525 0.0356 0.0183 0.0123 0.0112 0.0119 0.0267 0.0493 0.0691 0.0846 0.0949 0.0984 0.0971 0.0952 0.0935 0.0881 0.0856 0.0833 0.081 0.0799 0.0734 0.0616 0.0479 0.0305 0.0185 0.0178 0.0203 0.0389 0.0601 0.0813 0.098 0.1072 0.1126 0.1135 0.1144 0.1169 0.1154 0.1138 0.1076 0.1011 0.0981 0.0954 0.0958 0.0964 0.0986 0.0992 0.1015 0.101 0.0999 0.0998 0.0978 0.0948 0.0902 0.0842 0.0763 0.0732 0.069 0.0681 0.0684 0.0677 0.0669 0.065 0.0621 0.0588 0.0543 0.046 0.0361 0.0232 0.0097 0.0041 0.0037 0.005 0.0057 0.0053 0.0055 0.0057 0.006 0.0049 0.0045 0.0039 0.0035 0.0026 0.0021 0.0014 0.0008 0.0034 0.0211 0.0414 0.0607 0.0773 0.0875 0.093 0.0929 0.0904 0.0898 0.0873 0.0859 0.0879 0.0898 0.0916 0.0886 0.0775 0.0621 0.0427 0.0222 0.0168 0.0215 0.0221 0.04 0.0649 0.0816 0.0945 0.0986 0.0955 0.0918 0.0889 0.0896 0.0884 0.0883 0.0895 0.0898 0.0906 0.0907 0.0911 0.0905 0.0884 0.0866 0.0843 0.0849 0.0889 0.0921 0.0953 0.0988 0.099 0.098 0.0982 0.096 0.0962 0.095 0.0941 0.0915 0.0844 0.0734 0.0596 0.0437 0.0329 0.031 0.0321 0.0324 0.0312 0.0473 0.0646 0.0793 0.0919 0.0967 0.0969 0.0969 0.0952 0.0951 0.0941 0.0947 0.0964 0.0957 0.0961 0.0954 0.0938 0.0934 0.0909 0.0909 0.0942 0.0974 0.105 0.108 0.1106 0.113 0.1128 0.111 0.1125 0.1116 0.1095 0.1084 0.1042 0.1005 0.0929 0.0789 0.0621 0.0421 0.0268 0.0253 0.025 0.0235 0.0428 0.0668 0.0873 0.1053 0.1135 0.1155 0.115 0.1168 0.1168 0.1176 0.1186 0.1196 0.1198 0.1199 0.1182 0.1154 0.1111 0.1066 0.1037 0.1006 0.0977 0.0959 0.0912 0.0895 0.0875 0.0851 0.0839 0.0824 0.0826 0.0811 0.0808 0.0812 0.0826 0.0847 0.0818 0.0723 0.0569 0.0377 0.0197 0.0172 0.0224 0.0475 0.0639 0.078 0.0911 0.0952 0.097 0.0953 0.0907 0.0833 0.079 0.0758 0.0746 0.0758 0.0768 0.0759 0.0703 0.0614 0.0491 0.0336 0.0264 0.0459 0.0611 0.0747 0.0843 0.0867 0.0856 
0.0835 0.0821 0.0808 0.0802 0.0792 0.078 0.0765 0.0703 0.0595 0.0464 0.0283 0.0146 0.0134 0.0145 0.0249 0.0448 0.0616 0.0782 0.0895 0.095 0.098 0.098 0.098 0.096 0.0943 0.092 0.0884 0.0856 0.0828 0.0788 0.0755 0.072 0.0705 0.0686 0.0674 0.0674 0.0667 0.067 0.0682 0.0683 0.0677 0.0671 0.0658 0.0651 0.0644 0.0639 0.0615 0.055 0.0447 0.0327 0.0182 0.0089 0.0082 0.0102 0.0139 0.0266 0.0407 0.0558 0.0698 0.078 0.0848 0.0886 0.09 0.09 0.0887 0.086 0.084 0.0825 0.082 0.0815 0.0831 0.0853 0.0861 0.0904 0.092 0.0898 0.0898 0.0849 0.0822 0.083 0.0818 0.0818 0.082 0.0797 0.0797 0.0787 0.0769 0.0748 0.0716 0.0643 0.0529 0.0401 0.0249 0.0145 0.0102 0.0096 0.0221 0.0436 0.065 0.0843 0.1001 0.1077 0.1123 0.1119 0.1081 0.1023 0.0939 0.0858 0.08 0.0762 0.0749 0.0745 0.0763 0.0794 0.0817 0.082 0.0809 0.0776 0.0738 0.0721 0.0684 0.0669 0.0659 0.0623 0.0594 0.0565 0.0537 0.0549 0.0552 0.0548 0.0558 0.0553 0.0548 0.0529 0.0471 0.0392 0.0289 0.017 0.0065 0.0046 0.0053 0.0053 0.0056 0.0048 0.0051 0.0049 0.0042 0.004 0.0037 0.0032 0.0025 0.0025 0.0026 0.0022 0.0031 0.012 0.0241 0.041 0.0601 0.0758 0.0861 0.0907 0.0868 0.0791 0.071 0.0645 0.0598 0.0628 0.0665 0.0705 0.0716 0.0686 0.061 0.0505 0.0413 0.0349 0.0349 0.0364 0.0353 0.0448 0.058 0.0723 0.0851 0.092 0.095 0.0932 0.0915 0.0898 0.0877 0.0893 0.0898 0.0896 0.0888 0.0886 0.0873 0.087 0.0882 0.0857 0.0871 0.0887 0.0911 0.0927 0.0924 0.0874 0.0853 0.0827 0.08 0.0803 0.0772 0.0749 0.0739 0.0741 0.0699 0.0639 0.0541 0.0398 0.0268 0.0228 0.0199 0.0256 0.0468 0.0626 0.0744 0.0848 0.0864 0.0876 0.0869 0.084 0.0828 0.0823 0.0819 0.0825 0.0823 0.0813 0.0807 0.0815 0.0799 0.0785 0.078 0.0769 0.0757 0.0761 0.071 0.0695 0.0665 0.0655 0.0669 0.0662 0.0687 0.0697 0.0705 0.0724 0.0716 0.0678 0.0591 0.0472 0.0314 0.0161 0.0058 0.005 0.029 0.0546 0.0758 0.0936 0.1029 0.104 0.1016 0.0959 0.0898 0.0847 0.0793 0.0762 0.0715 0.0712 0.0716 0.071 0.0745 0.0762 0.0773 0.0773 0.0745 0.0708 0.0647 0.0599 0.0553 0.0512 0.0503 0.0491 0.0507 0.0503 0.0513 0.0511 0.0505 0.0501 0.048 0.0455 0.0439 0.0406 0.0366 0.0298 0.0217 0.013 0.0063 0.0036 0.0036 0.004 0.0044 0.0042 0.0041 0.0036 0.0025 0.0021 0.0015 0.0018 0.002 0.0033 0.0054 0.0077 0.0101 0.0119 0.0136 0.0122 0.0221 0.0398 0.0525 0.0642 0.0714 0.0723 0.0727 0.0713 0.0695 0.0695 0.0713 0.0733 0.0729 0.0679 0.058 0.0447 0.0288 0.0161 0.0144 0.0146 0.016 0.0302 0.0538 0.0689 0.0824 0.0879 0.0876 0.0862 0.0856 0.084 0.0827 0.082 0.0809 0.0797 0.0763 0.0678 0.0555 0.042 0.0289 0.0209 0.0199 0.0288 0.0525 0.071 0.0866 0.0968 0.0978 0.096 0.095 0.0902 0.0849 0.0821 0.0778 0.0764 0.0757 0.0719 0.0616 0.0506 0.0348 0.0214 0.0186 0.0274 0.0506 0.0674 0.083 0.0936 0.094 0.0936 0.0913 0.0896 0.0881 0.0868 0.0864 0.0849 0.0766 0.0639 0.0462 0.0247 0.0079 0.004 0.0059 0.0295 0.0556 0.0748 0.0915 0.1013 0.1027 0.1017 0.0972 0.095 0.0927 0.0908 0.0899 0.0893 0.0895 0.0839 0.0746 0.0604 0.0419 0.0221 0.0086 0.0048 0.0065 0.0415 0.0712 0.0918 0.1101 0.1179 0.1167 0.1153 0.1141 0.1103 0.1067 0.102 0.0937 0.0909 0.0925 0.0962 0.1017 0.107 0.1094 0.1112 0.1111 0.1078 0.1059 0.1048 0.103 0.1002 0.0985 0.0931 0.0883 0.0851 0.082 0.0796 0.0804 0.079 0.076 0.0729 0.0693 0.0671 0.0647 0.0581 0.048 0.0346 0.0194 0.008 0.0047 0.0052 0.0044 0.0036 0.0038 0.0035 0.0036 0.0033 0.0028 0.0029 0.0021 0.0015 0.0016 0.0029 0.0037 0.0082 0.0103 0.011 0.0262 0.0425 0.0516 0.0636 0.0701 0.0748 0.0788 0.0765 0.0773 0.0733 0.0696 0.0676 0.0656 0.0651 0.0646 0.0619 0.0546 0.0444 0.0299 0.012 0.019 0.0501 0.0762 0.0936 0.1053 0.1059 0.103 0.0983 0.0959 0.0921 0.0889 0.0878 
0.0864 0.0866 0.0873 0.0867 0.0844 0.0829 0.0808 0.0788 0.0817 0.0842 0.0897 0.0926 0.0918 0.0917 0.0881 0.0853 0.0846 0.0831 0.081 0.0806 0.0804 0.0789 0.0748 0.0641 0.0496 0.0352 0.0229 0.0212 0.0222 0.0198 0.0261 0.0456 0.0594 0.072 0.0813 0.0817 0.0805 0.078 0.076 0.073 0.0731 0.0723 0.0711 0.0685 0.0603 0.0482 0.0354 0.0239 0.0168 0.0154 0.026 0.047 0.0644 0.0783 0.0873 0.0886 0.0891 0.0904 0.0906 0.0895 0.0885 0.0878 0.0846 0.0809 0.0694 0.0539 0.0378 0.024 0.0212 0.0249 0.0252 0.0253 0.0499 0.0691 0.0856 0.1018 0.1063 0.1094 0.1088 0.1083 0.106 0.1031 0.1036 0.103 0.1048 0.1035 0.102 0.0999 0.0976 0.0967 0.0958 0.0943 0.0911 0.0884 0.0847 0.0809 0.0807 0.0795 0.0785 0.0775 0.0762 0.0761 0.0775 0.0784 0.081 0.0827 0.0789 0.0705 0.0564 0.0384 0.0229 0.0165 0.0293 0.0485 0.066 0.0834 0.0967 0.1022 0.1048 0.1022 0.0966 0.0913 0.0858 0.0844 0.0851 0.0867 0.0879 0.0862 0.085 0.0838 0.0856 0.0874 0.0909 0.0952 0.0991 0.1049 0.1087 0.1116 0.1143 0.1148 0.1138 0.1096 0.1047 0.0981 0.094 0.0915 0.0859 0.0747 0.0601 0.0422 0.0257 0.0193 0.0193 0.0363 0.0586 0.0741 0.088 0.0963 0.0994 0.1019 0.1019 0.1004 0.0971 0.0937 0.0929 0.0917 0.0918 0.0917 0.0898 0.0881 0.0861 0.0839 0.0849 0.0872 0.0892 0.0908 0.0896 0.0891 0.0897 0.0895 0.0895 0.0902 0.0911 0.0934 0.0946 0.096 0.0928 0.0839 0.0707 0.0519 0.0305 0.0154 0.0066 0.008 0.0372 0.0734 0.099 0.1196 0.1313 0.1324 0.1301 0.1266 0.1226 0.1177 0.1137 0.1091 0.1075 0.1064 0.1067 0.1083 0.1058 0.1053 0.1048 0.1012 0.1026 0.1026 0.1037 0.1057 0.1059 0.1067 0.1052 0.105 0.1028 0.1011 0.0998 0.0964 0.0905 0.0784 0.0614 0.0403 0.0188 0.0051 0.0014 0.0009 0.016 0.0465 0.0765 0.0995 0.1202 0.1308 0.1331 0.1339 0.1321 0.1309 0.1308 0.1299 0.1296 0.1257 0.1204 0.1126 0.1039 0.0955 0.0878 0.0833 0.0814 0.0816 0.0824 0.0856 0.0893 0.0924 0.0976 0.0999 0.102 0.1015 0.1003 0.0984 0.0957 0.096 0.0967 0.0983 0.1014 0.1046 0.1068 0.1091 0.1096 0.1106 0.1118 0.1142 0.1166 0.1206 0.1224 0.1246 0.1259 0.1255 0.1245 0.1234 0.1243 0.1259 0.1279 0.1303 0.1327 0.1347 0.1373 0.1387 0.1397 0.1411 0.1429 0.146 0.1495 0.1515 0.1525 0.1525 0.1524 0.1487 0.1461 0.1435 0.1398 0.1394 0.1399 0.1404 0.1409 0.1418 0.1402 0.1393 0.1378 0.1374 0.138 0.14 0.1427 0.1474 0.1513 0.1521 0.1512 0.1482 0.1436 0.1405 0.1377 0.137 0.1375 0.1364 0.1357 0.1335 0.133 0.1347 0.1388 0.1434 0.1516 0.1562 0.1604 0.162 0.1593 0.1587 0.1581 0.1564 0.1579 0.1576 0.156 0.1564 0.1561 0.1583 0.1592 0.1605 0.1622 0.1631 0.1619 0.1594 0.1547 0.1498 0.1455 0.1423 0.1393 0.1383 0.14 0.1452 0.156 0.1658 0.1736 0.1802 0.1802 0.1748 0.1668 0.1556 0.1467 0.1409 0.1372 0.135 0.1319 0.132 0.1359 0.1422 0.1474 0.1512 0.1526 0.1545 0.156 0.1549 0.1495 0.1439 0.138 0.1318 0.1281 0.1238 0.122 0.1233 0.1247 0.1264 0.1279 0.1287 0.1287 0.1288 0.1293 0.1288 0.1276 0.1255 0.1237 0.122 0.1202 0.1183 0.1157 0.117 0.1161 0.117 0.1165 0.1149 0.1156 0.1153 0.1144 0.1125 0.1091 0.1046 0.1027 0.101 0.0994 0.0969 0.0951 0.0914 0.0869 0.0841 0.0772 0.0727 0.0686 0.0642 0.0608 0.0573 0.0529 0.049 0.0461 0.0435 0.0422 0.037 0.0335 0.0281 0.0208 0.0153 0.0098 0.0054 0.003 0.0022 0.0013 0.0014 0.0005 0.0007 0.0006 0.0003 0.0002 0.0006 0.0 0.0 0.0 0.0004 0.0 0.0005", + "energy": "-72.4997 -73.225 -71.964 -71.0829 -70.1428 -68.5593 -66.4865 -64.0694 -61.9648 -59.7575 -58.1241 -56.3718 -55.1209 -54.0302 -53.065 -52.3517 -51.665 -50.8945 -50.2099 -49.7794 -49.3388 -48.9774 -48.9762 -48.7147 -48.8239 -49.1226 -49.2181 -49.5701 -50.1103 -50.6867 -51.3404 -52.1127 -53.3516 -54.4485 -56.0905 -57.8805 -59.3243 -60.174 -60.1472 -58.7571 -55.8455 
-52.3167 -48.5144 -44.9793 -42.5784 -40.4502 -39.4799 -38.5814 -37.2586 -35.7659 -33.8002 -31.3396 -29.4347 -27.4034 -26.3761 -25.5459 -25.049 -24.7385 -24.2037 -23.9926 -23.213 -22.7831 -22.4911 -22.7536 -23.4182 -24.98 -27.0193 -29.1998 -30.372 -30.8915 -30.0494 -28.594 -26.1593 -23.8883 -22.0089 -20.8313 -20.7628 -21.0394 -21.8435 -22.7567 -23.1442 -23.5502 -23.5227 -23.6798 -23.2295 -23.0258 -22.8027 -22.487 -22.3105 -22.0811 -22.0723 -21.9504 -21.8866 -21.9861 -22.0333 -22.2026 -22.3794 -22.48 -22.6917 -22.7756 -22.7835 -22.6319 -22.9248 -23.0108 -23.5223 -24.3199 -25.752 -27.5153 -29.3618 -30.532 -30.7906 -30.2814 -28.7462 -26.659 -24.6322 -22.9962 -22.1305 -21.9203 -21.6674 -21.5738 -21.6567 -21.8717 -22.2901 -23.2616 -24.672 -26.8484 -29.7592 -32.9351 -35.0785 -36.0332 -35.5399 -33.4604 -30.5306 -27.2602 -24.3538 -22.2626 -21.6429 -21.2467 -20.9308 -20.6982 -20.6673 -20.7799 -21.4315 -22.4161 -24.2501 -26.5453 -29.4021 -32.2309 -34.6427 -36.1484 -36.2966 -34.8264 -32.2114 -29.0657 -25.9015 -23.0906 -21.0878 -20.0429 -19.5969 -19.709 -19.9813 -20.4585 -20.9513 -21.5917 -21.8663 -22.4725 -22.5761 -22.606 -22.6595 -22.4686 -22.1478 -22.2006 -22.2897 -22.497 -22.7976 -23.346 -23.7517 -24.2546 -24.745 -25.3692 -25.9509 -26.5005 -27.1588 -27.4551 -27.94 -27.9719 -28.2239 -28.38 -28.9211 -29.8214 -31.5462 -34.1739 -37.3383 -40.8578 -43.5862 -46.4091 -47.4606 -48.3949 -48.1365 -47.605 -47.1715 -47.0337 -47.3181 -47.8712 -48.7841 -49.4563 -50.3979 -51.4401 -52.3169 -52.0167 -50.5479 -47.6617 -43.1551 -38.2299 -33.24 -28.7869 -25.3935 -23.5002 -22.7987 -22.5459 -22.3233 -22.3736 -22.3362 -22.436 -22.5505 -23.0821 -23.6808 -25.3332 -27.1166 -29.2247 -31.0408 -32.5191 -32.6631 -31.9541 -30.2512 -27.9716 -25.7838 -23.6698 -22.3405 -21.8259 -21.5433 -21.6476 -21.9582 -21.9531 -22.2588 -22.4314 -22.9429 -23.9815 -25.8967 -28.4631 -31.4235 -33.9005 -34.9749 -34.9284 -33.3048 -30.8661 -27.6311 -25.0835 -23.223 -22.4266 -22.0535 -21.9298 -21.71 -21.8564 -21.8758 -22.0381 -22.156 -22.3186 -22.4668 -22.5738 -22.7801 -23.3235 -23.751 -24.1951 -24.3314 -24.3973 -24.0335 -23.7192 -22.942 -22.3918 -21.851 -21.6589 -21.281 -21.1769 -21.2289 -21.3138 -21.1455 -21.3671 -21.6646 -22.2128 -23.3786 -25.0896 -27.0381 -28.3794 -29.3816 -29.6041 -28.6492 -26.9597 -24.8957 -23.002 -21.8112 -21.2015 -21.1015 -21.2084 -21.2396 -21.238 -21.1545 -21.0415 -20.8995 -20.9479 -20.8509 -20.8035 -20.7387 -20.8035 -20.7729 -20.6748 -20.4635 -20.1484 -19.6642 -19.4193 -19.1669 -19.0693 -19.0784 -18.9701 -19.0951 -18.9187 -18.8553 -19.1146 -19.2741 -19.3924 -19.8363 -20.1845 -20.3629 -20.5209 -20.5888 -20.5626 -20.4078 -20.1454 -20.378 -20.3603 -20.3449 -20.5034 -21.0006 -21.3843 -21.7815 -22.023 -22.5504 -22.6653 -22.9313 -23.0811 -23.2722 -23.3785 -23.6945 -24.2648 -25.3836 -27.3336 -30.0121 -33.469 -37.2894 -40.9461 -43.9186 -45.6599 -46.4979 -46.4119 -46.2439 -46.4149 -46.836 -47.7207 -49.1449 -50.9756 -53.1954 -55.7253 -57.2469 -57.145 -54.8402 -50.7157 -44.8347 -37.6439 -31.3997 -25.9803 -22.3467 -20.2256 -19.7442 -19.8133 -19.6531 -19.7633 -19.6888 -19.558 -19.4655 -19.8269 -20.5041 -22.4102 -24.5951 -27.4234 -30.0144 -31.3756 -31.6986 -30.6379 -28.3559 -25.5529 -22.6334 -20.6827 -19.669 -19.3868 -19.2542 -19.2035 -19.3101 -19.2234 -19.2171 -19.2226 -19.2692 -19.4368 -19.9456 -21.0736 -22.8423 -24.6535 -26.3822 -27.46 -27.3887 -26.8774 -25.6124 -23.8909 -22.4451 -21.3828 -21.1487 -21.2254 -21.2347 -21.5754 -21.7956 -22.3405 -22.9615 -24.3013 -24.9416 -25.8491 -26.767 -27.3545 -27.7429 -28.0347 -27.3248 -26.8162 -25.372 
-24.3429 -23.0383 -21.9059 -20.8684 -20.4223 -19.9997 -19.9971 -20.1149 -20.2055 -20.1653 -20.3057 -20.3752 -20.1789 -20.5594 -20.5615 -20.5615 -20.4768 -20.3552 -20.0386 -19.7656 -19.5384 -19.3481 -18.9669 -18.6849 -18.8741 -18.9791 -19.1532 -19.2464 -19.4434 -19.4293 -19.5811 -20.3144 -21.2114 -22.9428 -25.962 -29.6466 -32.9245 -34.6476 -34.7731 -32.9216 -29.8175 -25.7145 -22.4047 -20.1156 -19.3905 -19.2258 -19.0589 -19.1555 -19.341 -19.2912 -19.4784 -19.7883 -19.6824 -20.0463 -20.3198 -20.3485 -20.5466 -20.9977 -20.7657 -21.3056 -21.555 -21.5835 -21.6566 -21.8335 -21.8348 -21.8018 -21.7239 -21.6303 -21.4269 -21.2862 -21.2157 -21.0318 -21.0247 -20.9666 -21.0227 -20.6337 -20.73 -20.6853 -20.5307 -20.528 -20.2744 -20.0934 -19.9069 -19.817 -19.6128 -19.2522 -19.229 -19.1442 -19.2309 -19.1233 -18.9799 -19.4681 -19.5785 -19.6175 -19.8496 -19.9442 -20.0575 -20.1834 -20.0258 -20.1082 -19.9994 -19.8288 -19.9913 -19.8566 -19.7335 -19.89 -19.742 -19.8238 -19.5306 -19.5218 -19.4218 -19.3738 -19.1876 -19.299 -19.1562 -19.3307 -19.4761 -19.7217 -20.0136 -20.5628 -20.8482 -20.9676 -21.0452 -21.0362 -21.0225 -20.8795 -20.7948 -20.8749 -20.8786 -20.7004 -20.6592 -20.5655 -20.7153 -20.6302 -20.3325 -20.37 -20.0938 -20.1759 -20.0507 -20.1909 -20.0717 -20.0139 -19.9717 -19.8431 -19.6027 -19.4513 -19.1247 -18.8099 -18.59 -18.6272 -18.9154 -19.8368 -21.3286 -23.5693 -26.4455 -29.5272 -32.3072 -34.6422 -36.1071 -36.5643 -35.126 -33.2717 -30.423 -27.765 -25.1657 -23.2277 -21.7765 -21.1218 -20.6174 -20.4405 -20.1161 -20.2554 -20.319 -20.2382 -20.244 -20.2772 -20.1868 -20.2393 -20.2665 -20.0318 -20.3436 -20.5083 -20.7151 -20.9332 -21.2392 -20.9903 -21.3231 -21.2922 -21.1927 -20.9127 -20.8353 -20.8485 -21.1295 -21.4921 -22.2786 -23.8312 -24.9597 -26.4732 -27.5349 -28.5263 -28.9062 -28.7283 -27.758 -26.5571 -25.0262 -23.4617 -22.0242 -20.9412 -20.1257 -19.6851 -19.7847 -19.9068 -19.8563 -20.1673 -20.6301 -21.0454 -21.3536 -21.853 -22.318 -22.406 -22.4983 -22.5581 -22.7265 -22.5648 -22.6627 -22.4483 -22.3827 -22.6122 -23.2441 -23.5845 -24.1346 -24.6519 -24.9944 -24.9972 -24.9988 -24.7258 -24.8853 -24.8811 -25.208 -25.8908 -26.7121 -28.0212 -30.2642 -32.947 -36.2713 -39.2295 -41.7286 -43.4625 -44.3418 -44.1558 -43.5303 -42.7743 -42.3885 -42.464 -42.796 -43.7463 -45.5527 -46.5823 -46.7975 -45.4476 -42.9175 -38.9762 -34.415 -30.069 -26.4816 -24.1018 -22.8111 -22.1538 -22.0691 -22.2823 -22.7509 -22.6596 -23.1103 -23.2913 -23.3135 -23.2115 -23.2871 -23.7702 -24.035 -24.6376 -24.9058 -24.8744 -24.9102 -25.1026 -25.0934 -24.8505 -24.4557 -23.7807 -23.183 -22.1736 -21.223 -20.4076 -19.8956 -19.6226 -19.4468 -19.6938 -19.8642 -20.233 -21.099 -22.3423 -24.1499 -26.3981 -28.1748 -29.6975 -29.7962 -29.0232 -27.7076 -25.8625 -23.8642 -22.4535 -21.9209 -21.8944 -22.1367 -21.8726 -21.9483 -22.0041 -21.8965 -21.9964 -22.3772 -23.2772 -24.6246 -26.0059 -27.7266 -29.5311 -30.1434 -29.7349 -28.7707 -27.0048 -25.2594 -23.3983 -21.8825 -20.8758 -20.1496 -20.0711 -19.95 -19.9748 -19.9657 -20.1773 -20.1931 -20.3427 -20.2576 -20.6346 -20.9627 -20.966 -21.1476 -21.2612 -21.5428 -21.7913 -21.9892 -22.2574 -22.5672 -22.6474 -22.7805 -22.8606 -22.8433 -22.7619 -22.7389 -22.7194 -23.1786 -24.2233 -25.8927 -28.4387 -32.3393 -36.2756 -39.0262 -39.8322 -38.4579 -35.365 -31.2649 -27.0126 -23.0157 -20.6914 -19.6078 -19.3598 -18.9197 -18.7425 -18.6706 -18.6454 -18.9352 -18.9319 -19.1018 -18.9696 -19.1074 -19.1276 -19.267 -19.3722 -19.3741 -19.2503 -19.3845 -19.2645 -19.1467 -19.1155 -18.9132 -18.9445 -18.7727 -18.6826 -18.5982 -18.7656 -18.8663 
-19.1834 -19.0797 -19.3022 -19.6216 -19.7454 -20.1951 -20.2946 -20.5603 -20.7926 -21.1427 -21.2986 -21.3842 -21.3978 -21.4767 -21.7823 -21.5032 -21.5596 -21.3274 -21.4373 -21.4207 -21.3309 -21.8158 -22.5399 -23.9417 -26.1264 -28.6523 -31.1017 -32.3461 -32.4221 -31.2362 -28.9677 -26.3231 -23.8885 -21.6476 -20.4256 -19.9338 -19.484 -19.2604 -19.3793 -19.375 -19.4202 -19.4432 -19.8214 -20.6257 -21.6439 -23.3073 -25.3815 -27.8478 -29.3529 -30.1143 -29.5639 -28.3203 -26.2246 -23.9285 -21.7263 -20.3265 -19.5167 -19.1806 -19.2028 -19.2672 -19.5537 -19.6621 -20.3457 -21.245 -22.7295 -24.6121 -26.5945 -28.7487 -30.1143 -30.7196 -30.317 -29.1471 -27.1717 -25.109 -23.2307 -21.8955 -20.9031 -20.7174 -20.7777 -20.8651 -21.2989 -21.5034 -21.8386 -22.028 -22.1501 -22.0459 -22.0016 -21.7579 -21.962 -21.8976 -21.5971 -21.5547 -21.5237 -21.0415 -20.9034 -20.9037 -20.7681 -20.7038 -20.6553 -20.4348 -20.45 -20.63 -21.0333 -21.58 -22.8316 -24.6535 -26.6476 -28.6482 -30.728 -32.1453 -32.8122 -31.9415 -30.5302 -28.5753 -26.6083 -24.6679 -22.8416 -21.8934 -21.3471 -20.9173 -21.1476 -21.2427 -21.7433 -22.326 -23.2198 -25.3507 -28.6239 -32.587 -35.6844 -37.7925 -37.878 -36.2857 -33.2279 -29.6158 -26.065 -23.7287 -22.8737 -22.1605 -22.0776 -22.0703 -22.2158 -22.2932 -22.6388 -22.9949 -23.466 -23.8731 -24.3544 -24.6258 -24.9135 -24.9722 -24.953 -25.1326 -25.0204 -25.0928 -25.1893 -25.4829 -25.6755 -25.9373 -26.6554 -27.2009 -27.9396 -28.9084 -29.491 -29.9277 -30.8692 -31.1386 -31.487 -31.7849 -32.447 -33.118 -34.4469 -36.3637 -39.0076 -41.8119 -44.2287 -46.4176 -47.531 -47.9906 -47.8165 -47.4378 -47.2486 -47.6644 -48.5534 -49.3574 -50.7125 -51.9283 -52.7773 -53.4706 -53.045 -50.9673 -47.7154 -43.1254 -38.2519 -33.0521 -28.3476 -24.81 -22.6381 -22.0898 -21.9712 -22.1707 -22.4681 -22.7284 -23.0201 -23.1567 -23.2467 -23.4333 -23.0715 -23.26 -23.2127 -23.0767 -22.9272 -22.845 -22.8036 -23.0923 -23.0578 -23.1887 -23.4399 -23.3984 -23.212 -23.1538 -22.9452 -22.8857 -22.6367 -22.6678 -22.656 -23.3944 -24.4307 -26.8553 -29.8544 -32.8855 -35.2428 -36.9066 -36.9647 -35.9595 -34.024 -32.1461 -29.9779 -28.2521 -26.7575 -25.5868 -24.8419 -24.3688 -24.1821 -24.0633 -23.9936 -23.9325 -23.8192 -23.9536 -24.6271 -25.6454 -27.1846 -29.4108 -31.5564 -32.8631 -33.1643 -32.3956 -31.2204 -29.0084 -27.3929 -26.0244 -25.3984 -25.095 -25.1848 -24.9755 -25.0606 -25.0889 -25.1606 -25.2773 -25.6755 -26.4139 -27.5372 -29.2493 -31.2359 -33.2292 -34.4364 -34.0299 -32.7778 -30.7835 -28.1135 -25.7587 -23.646 -22.7304 -22.1232 -21.8488 -21.9044 -22.0665 -22.0253 -22.2465 -22.8033 -23.6386 -25.373 -27.4866 -29.6566 -31.4953 -32.3387 -32.283 -30.7329 -28.5669 -26.027 -23.9142 -22.4392 -21.9268 -21.5796 -21.6074 -21.6007 -22.1422 -22.3184 -22.5193 -22.8543 -23.0726 -23.5195 -23.6969 -23.5841 -23.3637 -23.264 -23.3565 -23.1904 -22.9979 -23.0396 -23.0215 -22.9848 -23.0264 -23.0087 -22.9612 -22.7029 -22.5708 -22.1397 -21.9777 -21.8873 -22.1283 -22.0827 -22.2603 -22.0895 -22.1695 -22.0676 -21.8413 -21.7667 -21.7464 -21.7348 -21.7102 -21.6425 -21.3206 -21.3871 -21.1132 -21.1022 -21.1097 -21.4713 -21.7807 -21.8129 -21.8901 -22.071 -22.2223 -22.2961 -22.4126 -22.6124 -22.8667 -22.8246 -22.6835 -22.4313 -22.1544 -21.8808 -21.7207 -21.4367 -21.3329 -21.4248 -21.4671 -21.9496 -22.1951 -22.6866 -23.2499 -23.8259 -24.4599 -25.2978 -26.4766 -27.6236 -29.3184 -29.9124 -29.8878 -29.0761 -27.4537 -25.3803 -23.2933 -21.8488 -21.1429 -20.8933 -21.1562 -21.1599 -21.4909 -21.6109 -21.6957 -21.7467 -21.8467 -21.712 -21.7104 -21.7001 -21.5566 -21.5797 -21.4681 -21.4523 
-21.2742 -21.2486 -21.0615 -21.1077 -21.0966 -21.0492 -21.1237 -20.8405 -20.654 -20.7 -20.5975 -20.892 -21.1952 -21.2134 -21.6397 -22.1344 -22.6774 -23.0334 -23.2704 -23.0083 -22.6636 -21.909 -21.1167 -20.3459 -19.6159 -19.2271 -19.2412 -19.218 -19.4535 -19.676 -19.7696 -20.293 -20.741 -21.1601 -21.5744 -21.9129 -22.3097 -22.6306 -22.4654 -22.6067 -22.4351 -22.2771 -22.1951 -22.1803 -21.8856 -21.8403 -21.8366 -21.7596 -21.87 -22.0776 -22.4242 -22.692 -23.8057 -25.0891 -26.8717 -28.5468 -30.0838 -31.668 -32.709 -32.8203 -32.0912 -30.6653 -28.6574 -26.2547 -23.8731 -22.0805 -20.9305 -20.1875 -19.9602 -19.966 -20.2254 -20.438 -20.6394 -21.1826 -21.6141 -21.8887 -22.1615 -22.4917 -22.3946 -22.4295 -22.3559 -22.5574 -22.4699 -22.5502 -22.3998 -22.7649 -23.0248 -23.4176 -23.6954 -24.344 -24.8204 -25.0058 -25.3291 -25.4576 -25.2057 -25.2481 -25.3941 -25.3318 -25.6867 -26.3924 -27.0386 -28.6805 -30.9647 -33.7167 -36.7644 -39.5754 -41.8636 -43.7078 -44.7737 -46.1822 -46.3888 -47.4202 -48.3412 -49.5901 -50.7004 -50.9518 -50.5593 -49.1057 -46.7475 -43.7782 -40.8343 -37.7521 -34.6995 -31.9161 -29.3275 -26.9014 -24.8163 -23.5497 -22.9411 -22.8803 -23.0141 -23.408 -23.3981 -23.2594 -22.805 -22.5151 -22.6398 -22.6189 -23.3472 -25.2135 -27.3345 -28.9889 -30.3112 -29.8664 -28.9133 -27.1908 -24.9345 -22.8628 -21.6481 -21.0081 -21.0893 -21.2958 -21.5616 -21.843 -22.1106 -22.3042 -22.5877 -23.2156 -23.5905 -24.9822 -26.4137 -27.8679 -28.4098 -28.3728 -27.2596 -25.9037 -24.1839 -22.8227 -22.1078 -21.9389 -22.2656 -22.4038 -22.3667 -21.9981 -21.9078 -21.4796 -21.4852 -21.32 -21.4987 -21.6576 -21.6511 -21.9071 -22.0057 -22.174 -22.327 -22.4862 -22.5505 -22.8589 -22.8308 -23.019 -22.9894 -22.6244 -22.5539 -22.5709 -22.322 -22.5534 -22.6448 -23.5786 -24.6156 -26.0945 -28.5122 -31.3519 -33.3831 -34.0946 -33.4127 -31.2708 -28.4149 -25.2616 -22.1921 -20.2358 -19.1816 -18.9728 -18.7731 -18.9053 -18.8771 -19.1717 -19.1836 -19.4515 -19.7739 -19.8914 -19.8473 -20.5026 -20.639 -20.9363 -21.2244 -21.0937 -20.7057 -20.4007 -19.9843 -19.562 -19.3386 -19.2519 -19.2342 -19.2605 -19.3147 -19.5607 -20.1187 -20.8118 -21.554 -22.2547 -22.614 -23.0229 -23.0558 -23.1627 -23.0031 -23.0336 -22.8327 -23.0555 -23.2668 -23.5012 -23.5268 -24.028 -24.4995 -24.9876 -25.578 -25.7518 -26.2175 -26.4972 -26.4367 -26.9189 -27.1104 -27.5843 -28.1405 -29.2979 -31.4051 -34.1749 -37.4981 -40.411 -43.2913 -45.3354 -46.5613 -47.2214 -46.9403 -46.3138 -46.0298 -45.9031 -45.7561 -45.8446 -46.3206 -47.2121 -48.0052 -49.6052 -51.1313 -51.9746 -51.1486 -48.2063 -44.185 -38.9947 -33.4027 -28.4415 -24.5492 -22.5754 -21.6982 -21.3107 -21.2138 -21.4593 -21.3836 -21.5236 -21.7624 -22.0201 -22.7336 -23.9108 -25.8956 -27.8999 -29.9118 -30.9353 -30.8031 -30.0405 -28.3721 -26.1752 -24.207 -22.7469 -21.8225 -21.324 -20.9814 -21.037 -20.868 -21.2884 -21.1728 -21.6511 -22.2819 -23.4868 -25.5903 -28.4283 -31.5276 -33.7747 -34.8222 -34.4991 -32.8223 -30.2089 -27.1693 -24.4474 -22.9894 -22.3561 -22.007 -21.8989 -21.8467 -22.0035 -22.2292 -22.2376 -22.2664 -22.1924 -22.3179 -22.3201 -22.3104 -22.383 -22.367 -22.3593 -22.4677 -22.0988 -21.7442 -21.6733 -21.5271 -21.4181 -21.1441 -21.0527 -21.0225 -21.1931 -21.4222 -21.5593 -22.2516 -23.5379 -25.3644 -27.5492 -30.0913 -32.8973 -35.1276 -36.337 -35.954 -34.1675 -31.1922 -27.7722 -24.8936 -22.4351 -20.9767 -20.3548 -20.0714 -19.9129 -19.917 -19.8803 -19.8648 -20.2346 -20.5601 -21.7528 -23.3649 -25.3024 -27.5428 -29.438 -30.6339 -30.5553 -29.2668 -27.2779 -24.9261 -22.4031 -20.4206 -19.0671 -18.2755 -18.0935 -18.1858 -18.3393 
-19.0222 -19.1449 -19.8649 -20.4437 -20.8987 -21.1234 -21.1421 -21.0486 -21.0095 -20.7564 -20.8026 -20.608 -20.5799 -20.4957 -20.6634 -21.2122 -22.2266 -23.0502 -23.9488 -24.9334 -25.3712 -25.8727 -26.1248 -26.3705 -26.3652 -26.3118 -27.0861 -28.041 -30.312 -33.18 -37.2248 -40.75 -43.6773 -46.0337 -47.0019 -47.309 -47.0027 -46.6424 -46.5302 -46.9432 -47.4367 -48.1723 -49.248 -50.198 -51.0495 -51.2402 -50.6765 -48.7705 -45.423 -40.5568 -35.7747 -30.5752 -26.1747 -22.8305 -20.9574 -20.0759 -19.7151 -19.5901 -19.824 -20.0124 -20.4147 -20.4781 -21.0235 -21.4792 -22.6804 -24.4689 -26.5101 -28.6973 -30.1367 -30.6135 -30.1262 -28.3835 -26.2958 -23.9146 -22.051 -20.691 -20.1197 -20.1316 -20.1999 -20.2206 -20.3207 -20.4822 -20.2558 -20.4087 -20.2295 -20.3183 -20.24 -20.2315 -20.4714 -20.5172 -20.7049 -20.7266 -20.7658 -20.6936 -20.6237 -20.3882 -20.412 -20.1002 -20.1027 -19.8696 -19.8863 -19.7635 -19.8728 -20.5158 -21.5666 -23.1104 -24.9666 -27.03 -28.8205 -30.2311 -30.9703 -30.6289 -29.6178 -27.9624 -26.1527 -24.4641 -22.8361 -21.6956 -20.9549 -20.824 -20.5143 -20.4208 -20.3308 -20.3163 -20.2866 -20.1523 -20.1712 -20.1053 -20.2345 -20.3004 -20.1896 -20.3797 -20.3115 -20.032 -19.4407 -19.1463 -18.4545 -18.0213 -17.8674 -17.7656 -17.7381 -17.6932 -17.5527 -17.6203 -17.5097 -17.6873 -18.6424 -19.8586 -21.7441 -24.3978 -26.6724 -28.3283 -29.202 -28.5398 -27.422 -25.38 -23.3002 -21.6221 -20.4412 -19.8088 -19.6279 -19.5582 -19.644 -19.7096 -19.6369 -19.8891 -19.6045 -19.5535 -19.763 -19.7417 -19.9576 -20.1563 -20.571 -20.888 -21.2834 -21.6177 -22.0542 -22.5426 -22.8337 -23.0867 -23.4312 -23.4123 -23.2039 -22.9621 -22.4974 -22.2889 -22.2329 -21.8562 -21.8207 -22.9309 -23.922 -26.0897 -27.669 -28.7873 -28.97 -27.9551 -26.1465 -23.7945 -21.7028 -20.4795 -19.8308 -19.8222 -20.0806 -20.123 -20.2511 -20.6994 -20.8559 -21.0775 -21.2857 -21.4912 -22.194 -23.3272 -24.5815 -25.8802 -26.2265 -26.1124 -25.143 -23.7056 -22.2246 -21.0643 -20.2193 -20.1056 -20.1188 -20.3276 -20.7469 -20.9253 -21.6546 -22.0737 -22.8986 -24.3206 -26.1829 -28.4747 -30.4271 -31.3789 -31.4089 -30.0853 -28.0071 -25.7284 -23.4735 -21.5318 -20.2046 -19.5515 -19.1096 -18.9212 -18.7513 -18.8114 -18.8316 -18.9428 -19.2475 -19.5534 -20.1966 -20.681 -21.4061 -21.9882 -22.2953 -22.7102 -23.0407 -22.9474 -23.2313 -23.1994 -22.9595 -23.0093 -22.876 -22.6732 -22.2733 -22.5216 -22.6692 -23.1197 -23.4444 -24.5566 -26.2498 -28.3268 -31.0214 -32.9374 -33.7831 -33.7162 -32.0496 -29.8875 -27.4406 -25.0337 -23.1768 -22.0368 -21.2971 -20.836 -20.4926 -20.4204 -20.2266 -20.4397 -20.4839 -20.5432 -20.5948 -20.4918 -20.2818 -20.2788 -20.2471 -20.3685 -20.2346 -20.1788 -19.9604 -19.7036 -19.6991 -19.5165 -19.9015 -19.7025 -19.9595 -20.1602 -20.343 -20.8004 -21.2864 -22.2266 -23.3216 -25.1028 -27.9154 -31.0924 -34.0665 -35.5778 -35.7213 -34.301 -31.7274 -28.364 -25.0928 -22.2342 -20.5478 -19.8902 -19.7392 -19.8463 -20.1685 -20.399 -20.9947 -21.5486 -21.9653 -22.4103 -22.5786 -22.4377 -22.5547 -22.2949 -22.3785 -22.3186 -22.2336 -22.3465 -22.4771 -22.7199 -22.9897 -23.3741 -23.5563 -24.1509 -24.3223 -24.7054 -24.8903 -25.1302 -25.2796 -25.5296 -25.9007 -26.3648 -27.2413 -28.0844 -29.9031 -32.6112 -35.9225 -39.1242 -42.4219 -44.5324 -46.2161 -46.5627 -46.6145 -45.9309 -45.6338 -45.6631 -46.2013 -47.2141 -48.4422 -49.1028 -48.8699 -46.7584 -43.3337 -39.0158 -34.3335 -30.0779 -26.5508 -24.3929 -23.1583 -22.3457 -21.8324 -21.7746 -21.8265 -21.9037 -22.2459 -22.6413 -22.801 -22.8584 -22.8017 -23.0455 -23.2485 -24.1251 -25.2289 -26.7058 -28.1148 -29.0048 -29.4603 
-29.0708 -28.2634 -26.7676 -25.0601 -23.0056 -21.6207 -20.648 -20.397 -20.3041 -20.4839 -20.4757 -20.1481 -20.1723 -20.0899 -19.986 -19.9445 -20.1692 -20.2454 -20.3102 -20.6476 -20.619 -20.6795 -20.7044 -20.7208 -20.6405 -20.7435 -20.6233 -20.8825 -20.9402 -21.2397 -21.5371 -21.5924 -22.0394 -22.5663 -23.2762 -24.4228 -25.9231 -27.4541 -29.3668 -30.5434 -31.0554 -30.4255 -29.1807 -27.291 -25.3257 -23.2987 -22.0974 -21.4162 -21.2105 -21.2119 -21.217 -21.3016 -21.2444 -21.2567 -21.4757 -21.5372 -21.8161 -21.9496 -21.9884 -22.1929 -22.4222 -22.5672 -22.8694 -22.9431 -23.0406 -22.9464 -23.2324 -23.1844 -23.1699 -23.4735 -23.3957 -23.6445 -23.62 -23.829 -24.1976 -24.9243 -26.4009 -28.8654 -32.2066 -34.9147 -36.607 -36.9018 -35.4928 -32.8486 -29.2742 -25.7215 -22.9248 -21.2214 -20.717 -20.5522 -20.7312 -21.1415 -21.5736 -22.1697 -22.6567 -23.0521 -23.2197 -23.3181 -23.6965 -23.9164 -23.8725 -23.7909 -23.7261 -23.7867 -23.9668 -24.0538 -24.2001 -24.9195 -25.1489 -26.0119 -26.6908 -27.0409 -27.0144 -27.2707 -27.0392 -27.0192 -27.3494 -27.3571 -28.413 -29.8284 -32.1071 -35.2479 -39.01 -42.4544 -45.3989 -47.4569 -48.5636 -48.5711 -48.2882 -47.7489 -47.6329 -47.9391 -48.6718 -49.7832 -50.8008 -51.5365 -51.5328 -50.1989 -48.2494 -45.7373 -43.3628 -40.6686 -38.379 -35.7044 -33.1213 -30.2798 -27.6917 -25.7811 -24.4378 -23.8558 -23.6499 -23.9154 -23.6931 -23.4787 -23.4999 -23.3919 -23.2469 -23.7359 -24.9305 -26.657 -28.7087 -30.7994 -32.1954 -32.8529 -32.2751 -30.6423 -28.5061 -26.487 -24.3015 -22.7936 -21.8387 -21.3404 -21.2891 -21.2178 -21.3762 -21.3419 -21.3412 -21.5506 -21.979 -22.8317 -23.9398 -25.4535 -27.3119 -28.3193 -28.8111 -28.2406 -26.881 -24.87 -22.9551 -21.2168 -20.6698 -20.4336 -20.7428 -20.9452 -21.2691 -21.7942 -21.9487 -22.3257 -22.5855 -23.3582 -24.535 -26.5586 -28.5384 -29.9448 -30.5658 -30.0922 -28.9038 -26.6735 -24.6759 -22.7862 -21.7269 -21.2374 -20.8842 -20.9859 -20.8961 -20.9546 -21.0683 -21.5429 -22.4211 -24.3371 -27.4064 -30.9419 -33.7571 -35.106 -34.8687 -33.1535 -29.7172 -25.6333 -22.0792 -19.5666 -18.6446 -18.1285 -17.9646 -18.1171 -17.9842 -17.8409 -18.2013 -18.5943 -19.3449 -20.4297 -22.2866 -24.9342 -28.2928 -31.5582 -34.2483 -35.1041 -34.6156 -32.4671 -29.138 -25.3866 -22.2611 -20.1136 -18.9423 -18.2687 -17.9453 -18.1954 -18.2527 -18.4278 -18.6133 -18.7461 -18.8294 -18.9704 -18.9455 -18.857 -18.709 -18.593 -18.7097 -18.7698 -18.8405 -19.2203 -19.5248 -19.9415 -20.3721 -20.6432 -21.193 -21.5041 -21.7406 -21.8686 -21.8948 -21.8729 -21.7851 -21.94 -22.0253 -22.0181 -22.4716 -23.305 -24.611 -26.6851 -29.7051 -33.2653 -36.8917 -40.5819 -42.9852 -44.9865 -45.9116 -46.4825 -46.648 -47.1871 -48.46 -49.859 -51.1955 -52.1713 -52.4083 -51.0343 -48.8567 -45.5879 -41.3435 -36.8518 -32.5032 -29.0944 -26.0556 -23.5297 -21.9812 -20.9558 -20.8353 -20.873 -21.0356 -21.186 -21.6574 -22.0093 -22.1645 -22.1264 -22.5914 -22.9942 -24.0633 -25.4615 -26.6148 -27.3159 -27.3348 -26.9548 -25.5235 -23.9071 -22.1662 -20.7911 -19.8776 -19.8179 -19.6771 -19.6619 -20.1135 -20.3222 -20.3784 -20.4083 -20.5135 -20.9458 -20.9526 -20.9561 -21.1945 -21.1606 -21.1697 -21.0777 -20.8511 -20.71 -20.5126 -20.3959 -20.3413 -20.5111 -20.584 -20.5526 -20.8034 -20.9558 -21.2693 -21.9217 -22.7744 -24.2457 -25.867 -28.032 -30.194 -32.2917 -33.0355 -32.8247 -31.6701 -29.7024 -27.4298 -25.1584 -23.5401 -22.7128 -22.4439 -22.4972 -22.5194 -22.4358 -22.5268 -22.8933 -23.0513 -23.8916 -24.9856 -26.7815 -28.9833 -30.8411 -32.1409 -32.4006 -31.6256 -29.68 -27.2251 -25.0591 -23.0599 -21.4589 -20.9191 -20.3594 -20.2521 -20.016 
-20.0551 -20.1226 -20.5942 -21.4282 -22.8124 -24.4481 -26.6704 -29.0592 -31.0412 -32.1482 -32.0669 -31.0183 -29.0592 -26.6572 -24.4362 -22.7327 -21.3986 -20.5267 -20.0606 -19.9354 -19.6793 -19.6176 -19.6893 -19.9176 -20.0952 -20.4235 -20.4553 -20.7924 -20.9422 -20.9956 -21.1243 -21.1001 -21.224 -21.3669 -21.528 -21.6789 -22.218 -22.4183 -22.638 -22.8258 -22.8857 -22.5989 -22.5936 -22.4509 -22.347 -22.5088 -22.3252 -23.1242 -24.5164 -26.2628 -28.3093 -29.7821 -30.0373 -29.7492 -28.4781 -26.5604 -24.0614 -22.408 -21.2736 -20.6846 -20.6478 -20.8639 -21.0965 -21.1657 -21.2341 -21.3703 -21.1754 -21.3198 -21.175 -20.9253 -20.7044 -20.3483 -20.1647 -19.9845 -19.6449 -19.4974 -19.4781 -19.6419 -19.7715 -19.8556 -19.9207 -19.9866 -20.1355 -20.1747 -20.3495 -20.5399 -21.1029 -22.2393 -24.482 -26.6722 -28.9889 -30.692 -31.0054 -30.4281 -28.9175 -26.9229 -25.0801 -23.3031 -22.3503 -21.9727 -21.9861 -22.0857 -22.4922 -22.5327 -22.3686 -22.2393 -21.9573 -21.9412 -21.8774 -21.7758 -21.5388 -21.545 -21.4365 -21.31 -21.1727 -21.0289 -20.9658 -20.9108 -21.0923 -20.9017 -20.9963 -21.1316 -20.8765 -20.9394 -20.688 -20.592 -20.9351 -21.5011 -22.8193 -24.9935 -28.1789 -30.6333 -32.524 -32.6726 -31.5693 -28.7565 -25.5549 -22.1215 -19.4686 -18.163 -17.491 -16.9407 -16.8624 -16.8692 -17.1338 -17.2466 -17.4773 -17.4826 -17.509 -17.4833 -17.4558 -17.4735 -17.4069 -17.3298 -17.572 -17.6657 -17.8344 -18.2184 -18.5656 -18.6712 -18.9732 -19.0354 -19.2468 -19.3999 -19.4524 -19.9901 -20.6287 -21.7894 -24.2594 -28.3779 -33.9111 -38.6767 -41.502 -42.0432 -40.2207 -36.4981 -31.5262 -26.5605 -22.9959 -20.9515 -20.3338 -19.9532 -19.6885 -19.3499 -19.0158 -18.8146 -18.5349 -18.4715 -18.1868 -18.7325 -19.045 -19.8292 -20.2952 -20.9126 -21.7584 -21.8957 -22.0489 -22.2059 -22.1009 -21.8633 -21.6318 -21.5602 -21.3992 -21.2076 -21.0672 -20.9347 -20.8166 -20.5758 -20.2813 -20.3379 -20.0369 -20.0075 -19.8698 -19.5329 -19.4441 -19.3658 -19.046 -18.8047 -18.711 -18.3154 -18.206 -18.1479 -18.0611 -17.9949 -17.7135 -17.5775 -17.4245 -17.2514 -17.189 -16.9313 -16.8479 -16.9536 -16.6754 -16.6234 -16.6361 -16.7052 -16.7367 -16.5196 -16.4336 -16.402 -16.3603 -16.1315 -16.0321 -15.8852 -15.686 -15.5527 -15.3773 -15.4216 -15.5753 -15.2598 -15.1943 -15.3451 -15.1008 -15.2729 -15.4111 -15.5343 -15.7617 -15.766 -16.0803 -15.9896 -15.8683 -15.7223 -15.5416 -15.4376 -15.4056 -15.1784 -15.1737 -15.071 -15.1839 -15.1609 -15.4086 -15.4503 -15.5523 -15.7291 -15.7538 -15.9709 -16.0739 -16.0522 -16.0448 -15.6376 -15.7446 -15.5151 -15.3057 -15.2511 -15.1944 -15.1017 -15.2386 -15.2368 -15.4075 -15.5158 -15.5491 -15.7406 -15.8611 -15.6685 -15.7399 -15.7308 -15.6791 -15.5692 -15.6825 -15.7217 -15.8015 -15.9737 -15.8891 -16.0962 -16.1868 -16.4529 -16.7366 -16.8795 -17.1457 -17.4431 -17.4794 -17.5586 -17.4992 -17.3998 -17.4586 -17.347 -17.356 -17.5228 -17.4412 -17.4839 -17.8964 -17.6974 -17.8667 -17.9973 -18.0317 -18.128 -18.1694 -18.3192 -18.4033 -18.545 -18.6477 -18.7514 -19.0617 -19.4731 -19.6273 -20.1261 -20.3343 -20.4907 -20.7653 -20.7012 -20.6098 -20.5546 -20.462 -20.3225 -20.1441 -20.4147 -20.4387 -20.648 -20.9547 -21.1054 -21.2697 -21.4236 -21.4288 -21.6741 -21.9373 -22.0145 -22.1626 -22.4258 -22.6249 -22.8185 -22.5843 -22.7238 -22.9103 -22.8881 -23.0891 -23.1915 -23.5486 -24.0598 -24.3521 -24.8229 -25.3661 -25.7397 -25.9552 -26.1832 -26.6024 -27.1053 -27.6884 -28.0866 -29.0412 -29.9554 -31.1469 -32.6502 -35.1575 -38.664 -43.2092 -49.0954 -54.8888 -60.2771 -64.6058 -67.2165 -68.2256 -68.3448 -68.2906 -68.4592 -69.406 -70.4136 -71.3055 -71.2604 -70.4234 
-68.3838 -65.2275 -61.101 -55.1712", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0002 0.0002 0.0 0.0003 0.0003 0.0004 0.0004 0.0004 0.0007 0.0013 0.0014 0.0013 0.0013 0.0014 0.0018 0.0019 0.002 0.0021 0.0022 0.0024 0.0027 0.0028 0.003 0.0031 0.0033 0.0035 0.0035 0.0033 0.0028 0.0024 0.002 0.002 0.002 0.002 0.0016 0.0013 0.0012 0.0009 0.0007 0.001 0.0013 0.002 0.0035 0.0057 0.008 0.0098 0.0113 0.0131 0.0137 0.0128 0.0131 0.0162 0.0227 0.0311 0.0537 0.0719 0.0839 0.0817 0.0703 0.0471 0.0107 0.0013 0.0006 0.0004 0.001 0.0008 0.001 0.004 0.0087 0.0135 0.0163 0.0191 0.0179 0.0153 0.0117 0.0039 0.0021 0.0021 0.0021 0.0019 0.0016 0.0016 0.0015 0.0013 0.0008 0.0007 0.0004 0.0005 0.0003 0.0004 0.0006 0.0009 0.0013 0.0017 0.0018 0.0018 0.0021 0.002 0.0018 0.0016 0.0014 0.0011 0.0011 0.0011 0.001 0.001 0.0011 0.0017 0.0042 0.0069 0.0118 0.0157 0.0168 0.0164 0.0132 0.0081 0.0029 0.0018 0.002 0.0022 0.0023 0.0022 0.0019 0.0017 0.0015 0.0016 0.0013 0.0013 0.0016 0.0025 0.0036 0.004 0.0043 0.0031 0.002 0.0011 0.001 0.0011 0.0013 0.0012 0.0013 0.0013 0.0013 0.0014 0.0016 0.0014 0.0015 0.0032 0.0056 0.0082 0.0107 0.0127 0.0131 0.0118 0.0101 0.0063 0.0024 0.0021 0.0022 0.0024 0.0023 0.0026 0.0026 0.0025 0.0026 0.0022 0.002 0.0019 0.0022 0.0021 0.0021 0.0022 0.002 0.0019 0.002 0.0023 0.0022 0.0024 0.0021 0.0022 0.0022 0.002 0.0021 0.0017 0.0016 0.0017 0.0016 0.0017 0.0017 0.0017 0.0013 0.0013 0.0011 0.0014 0.002 0.002 0.0025 0.0031 0.004 0.0047 0.005 0.0057 0.0052 0.0047 0.004 0.0029 0.0028 0.002 0.0016 0.0009 0.0005 0.0006 0.0023 0.0066 0.0127 0.0136 0.0138 0.0117 0.0046 0.0038 0.004 0.0038 0.0033 0.0024 0.0018 0.0015 0.0014 0.0016 0.0046 0.0082 0.0164 0.0238 0.031 0.0357 0.0349 0.0309 0.0228 0.011 0.0038 0.0021 0.0014 0.0012 0.001 0.001 0.001 0.0009 0.0006 0.0008 0.0007 0.0006 0.0009 0.0019 0.004 0.0054 0.0057 0.0053 0.0041 0.0019 0.0012 0.0011 0.0012 0.0011 0.0011 0.0011 0.001 0.001 0.001 0.0009 0.0009 0.001 0.0009 0.0009 0.0008 0.0006 0.0008 0.0012 0.0013 0.0017 0.0019 0.0017 0.0016 0.0017 0.0012 0.0015 0.0017 0.0017 0.0018 0.0018 0.0019 0.0022 0.0022 0.0032 0.0075 0.0112 0.0119 0.0121 0.0105 0.0069 0.0051 0.0039 0.0032 0.0029 0.0026 0.0017 0.0011 0.0012 0.0013 0.0014 0.0013 0.0015 0.0013 0.0016 0.0015 0.0015 0.0016 0.0014 0.0016 0.0019 0.0023 0.0026 0.0024 0.0025 0.0024 0.0023 0.0021 0.0022 0.0021 0.0022 0.0021 0.0019 0.0022 0.0021 0.0018 0.0017 0.0011 0.0013 0.0015 0.0016 0.0014 0.0014 0.0015 0.0012 0.0013 0.0015 0.0014 0.0013 0.0015 0.0014 0.0014 0.0013 0.0013 0.0012 0.0012 0.0013 0.0012 0.0012 0.0013 0.0018 0.0023 0.0027 0.0034 0.0038 0.0044 0.0053 0.0055 0.0056 0.0052 0.0041 0.0033 0.0023 0.0015 0.0009 0.0008 0.0004 0.0005 0.0026 0.0059 0.0132 0.017 0.0167 0.016 0.0113 0.0056 0.0042 0.0033 0.003 0.0024 0.0018 0.0018 0.0017 0.0022 0.0032 0.0043 0.0068 0.0093 0.0116 0.0155 0.0192 0.0202 0.019 0.0155 0.0103 0.0038 0.0024 0.0019 0.0015 0.0014 0.0017 0.0013 0.0014 0.0013 0.0011 0.0008 0.0009 0.0013 0.0032 0.0062 0.0097 0.0126 0.0127 0.0116 0.0086 0.0033 0.0021 0.0015 0.0014 0.0014 0.0014 0.0015 0.0017 0.002 0.0037 0.0117 0.0218 0.0307 0.0384 0.045 0.049 0.0492 0.0477 0.0413 0.0324 0.0233 0.0109 0.0038 0.0025 0.0021 0.0017 0.0016 0.0017 0.0015 0.0015 0.0017 0.0016 0.0016 0.0016 0.0014 0.0014 0.0014 0.0014 0.0013 0.0014 0.0014 0.0013 0.0012 0.0014 0.0014 0.0014 0.0015 0.0016 0.0017 0.0018 0.0018 0.0018 0.0017 0.0038 0.0057 0.0063 0.0069 0.0062 0.0054 0.0044 0.0033 0.0023 0.0018 0.0017 0.002 0.002 0.002 0.0017 0.0017 0.0016 0.0015 0.0015 0.0014 0.0013 0.0011 0.0011 0.001 0.0011 0.0012 
0.0014 0.0017 0.0018 0.0019 0.0017 0.0016 0.0016 0.0015 0.0015 0.0016 0.0018 0.0018 0.0019 0.002 0.0018 0.0019 0.0018 0.0018 0.002 0.0021 0.0021 0.002 0.0021 0.0027 0.0025 0.0028 0.0027 0.0024 0.0023 0.0021 0.002 0.002 0.0018 0.0019 0.0022 0.0021 0.002 0.0016 0.0016 0.0014 0.0015 0.0015 0.0015 0.0016 0.0015 0.0013 0.0011 0.001 0.0009 0.001 0.001 0.001 0.001 0.0011 0.001 0.001 0.001 0.0014 0.0017 0.0023 0.0028 0.0031 0.003 0.0029 0.0027 0.0024 0.0023 0.0021 0.0019 0.0019 0.0022 0.0025 0.0026 0.0028 0.0027 0.0025 0.0024 0.0026 0.0023 0.0023 0.0026 0.0022 0.0025 0.0027 0.0026 0.0028 0.0027 0.0023 0.0022 0.002 0.0023 0.0025 0.0025 0.0023 0.0019 0.0027 0.0043 0.0061 0.008 0.0098 0.0113 0.0119 0.0111 0.0099 0.0075 0.0048 0.0037 0.0027 0.0023 0.0023 0.0025 0.0023 0.0024 0.0024 0.0021 0.0021 0.0023 0.0022 0.0021 0.002 0.0021 0.002 0.0021 0.002 0.0022 0.0022 0.002 0.0017 0.0019 0.0018 0.0016 0.0017 0.0017 0.0019 0.0027 0.0088 0.0147 0.0179 0.0254 0.0301 0.034 0.0398 0.0427 0.0422 0.0384 0.0303 0.0194 0.0092 0.0041 0.0035 0.0034 0.0036 0.0037 0.0037 0.0037 0.0031 0.0026 0.0027 0.0027 0.0027 0.0029 0.0025 0.0022 0.0019 0.0023 0.0026 0.003 0.0031 0.003 0.0029 0.0029 0.003 0.003 0.003 0.0029 0.0029 0.0025 0.0021 0.0017 0.0018 0.0019 0.0022 0.0022 0.0019 0.0023 0.0027 0.0034 0.0056 0.0063 0.0069 0.0074 0.0069 0.008 0.0091 0.0095 0.0094 0.0089 0.0072 0.0061 0.0047 0.0028 0.0021 0.0017 0.0018 0.0019 0.0023 0.0025 0.0035 0.0055 0.0063 0.0067 0.0061 0.0047 0.0019 0.001 0.0009 0.0012 0.0038 0.0149 0.0282 0.0396 0.0541 0.0662 0.0813 0.0877 0.0842 0.0739 0.0517 0.0312 0.0189 0.0125 0.0049 0.0032 0.003 0.0028 0.0025 0.0025 0.0027 0.0031 0.0035 0.0038 0.0045 0.0062 0.0089 0.0118 0.0131 0.0125 0.011 0.0075 0.0038 0.0033 0.0028 0.0022 0.0022 0.0019 0.0019 0.0021 0.0021 0.002 0.0027 0.0053 0.0118 0.0176 0.0226 0.0275 0.0294 0.03 0.0271 0.0217 0.0154 0.0079 0.0042 0.0033 0.0034 0.0032 0.003 0.0025 0.0027 0.0026 0.0026 0.0023 0.0021 0.0018 0.0018 0.002 0.0018 0.0017 0.0015 0.0014 0.0012 0.0012 0.0013 0.0014 0.0018 0.0017 0.0012 0.0009 0.001 0.0012 0.0013 0.0011 0.0011 0.0019 0.0033 0.0042 0.005 0.0055 0.0049 0.0043 0.0033 0.002 0.0016 0.0017 0.0016 0.0017 0.0022 0.0019 0.0015 0.0016 0.0016 0.0017 0.0015 0.0014 0.0013 0.0013 0.0013 0.0011 0.0013 0.0014 0.0016 0.0016 0.0016 0.0015 0.0011 0.0012 0.0009 0.001 0.0008 0.0009 0.0009 0.0012 0.001 0.0011 0.001 0.0009 0.0009 0.0007 0.0007 0.0006 0.0006 0.0007 0.0008 0.0008 0.0006 0.0008 0.0008 0.0008 0.0006 0.0005 0.0006 0.0005 0.0005 0.0008 0.001 0.002 0.0057 0.0102 0.0132 0.0153 0.0161 0.014 0.0111 0.0071 0.0035 0.0022 0.0021 0.002 0.0018 0.0019 0.002 0.0022 0.0023 0.0025 0.0025 0.0025 0.0039 0.0063 0.0093 0.0139 0.0178 0.0186 0.0172 0.0143 0.0086 0.0028 0.0021 0.002 0.0017 0.0017 0.0016 0.0013 0.0016 0.0021 0.0047 0.0104 0.0169 0.0202 0.0227 0.0228 0.0211 0.0201 0.018 0.0148 0.0112 0.0057 0.0029 0.0024 0.0019 0.0018 0.0011 0.001 0.0008 0.0008 0.0005 0.0006 0.0005 0.0008 0.0007 0.0008 0.0012 0.0017 0.0022 0.0024 0.0023 0.0019 0.0018 0.0019 0.0018 0.0018 0.0015 0.0017 0.0016 0.0015 0.0022 0.0038 0.0075 0.0118 0.0168 0.0202 0.0236 0.0263 0.0269 0.0254 0.0216 0.0151 0.0079 0.0028 0.0018 0.0015 0.0013 0.0012 0.0016 0.0013 0.0014 0.0012 0.0011 0.0011 0.0015 0.0015 0.0013 0.0014 0.0021 0.0023 0.0031 0.0035 0.0034 0.0034 0.0036 0.0038 0.0037 0.0037 0.0034 0.0032 0.003 0.0027 0.0026 0.0025 0.0024 0.0023 0.0023 0.0022 0.0021 0.0021 0.0017 0.0016 0.0015 0.0014 0.0013 0.0013 0.001 0.0009 0.0009 0.001 0.0008 0.0008 0.0006 0.0006 0.0007 0.0008 0.0009 0.0011 0.0016 0.0015 0.0018 0.0017 
0.0016 0.0018 0.0024 0.0027 0.0034 0.0039 0.0037 0.0035 0.0028 0.002 0.0014 0.0008 0.0008 0.0005 0.0001 0.0004 0.0024 0.0093 0.0129 0.0136 0.0131 0.0096 0.0048 0.0027 0.0023 0.0018 0.0015 0.0011 0.0009 0.0007 0.0007 0.0006 0.0007 0.0005 0.0007 0.0005 0.0006 0.0006 0.0005 0.0005 0.0007 0.0007 0.0005 0.0009 0.0008 0.0011 0.0011 0.0012 0.0009 0.0011 0.0014 0.0014 0.0016 0.0015 0.0014 0.0018 0.0023 0.0036 0.0051 0.0056 0.0053 0.0049 0.0039 0.0027 0.0029 0.0027 0.0024 0.0019 0.0019 0.002 0.0022 0.0021 0.0022 0.0018 0.0019 0.0017 0.0026 0.0033 0.0042 0.0041 0.0042 0.0036 0.0028 0.0023 0.0024 0.002 0.0016 0.0015 0.0014 0.0013 0.0012 0.0008 0.0008 0.0005 0.0004 0.0026 0.0065 0.0097 0.0119 0.013 0.0121 0.0104 0.0082 0.006 0.0038 0.0017 0.0015 0.0014 0.0012 0.0013 0.0012 0.0014 0.0013 0.0013 0.0015 0.0019 0.0028 0.005 0.0085 0.0125 0.0149 0.0145 0.0128 0.0087 0.0033 0.0019 0.0017 0.002 0.0017 0.0017 0.002 0.0019 0.0018 0.0017 0.0021 0.002 0.0023 0.0018 0.0019 0.0017 0.0019 0.0018 0.0018 0.002 0.0018 0.0016 0.0016 0.0017 0.0014 0.0015 0.0013 0.0013 0.0013 0.0015 0.0014 0.0014 0.0015 0.0015 0.0016 0.0013 0.0015 0.0016 0.0017 0.0018 0.0017 0.0019 0.0016 0.0017 0.0017 0.0018 0.0014 0.0013 0.0014 0.0013 0.0012 0.0013 0.001 0.0007 0.0007 0.0007 0.0008 0.0007 0.0009 0.0011 0.0013 0.0014 0.0017 0.0019 0.0018 0.0019 0.0017 0.0015 0.0014 0.0013 0.0009 0.0008 0.0007 0.0007 0.0008 0.0005 0.0005 0.001 0.0017 0.0024 0.0027 0.0027 0.0023 0.0018 0.0019 0.0016 0.0014 0.0014 0.0013 0.0013 0.0012 0.0011 0.0011 0.0012 0.0014 0.0014 0.001 0.0011 0.0009 0.0011 0.0011 0.001 0.0013 0.0011 0.0011 0.0009 0.0012 0.0011 0.0013 0.0013 0.0015 0.0011 0.0021 0.0011 0.0013 0.0016 0.0017 0.0018 0.0023 0.0024 0.0029 0.003 0.0028 0.0027 0.0029 0.0032 0.0032 0.0032 0.003 0.0027 0.0028 0.0024 0.0021 0.0019 0.0014 0.0013 0.0014 0.0015 0.0016 0.0014 0.0012 0.0011 0.0009 0.0008 0.0009 0.001 0.0008 0.0009 0.0005 0.0006 0.0011 0.0022 0.0062 0.0113 0.0142 0.0172 0.0211 0.0218 0.0231 0.0226 0.0203 0.0173 0.0129 0.0096 0.0042 0.0036 0.0036 0.0039 0.0035 0.0035 0.0038 0.0035 0.0038 0.0034 0.0031 0.0029 0.0028 0.0028 0.0026 0.0026 0.0022 0.0023 0.0023 0.0024 0.0026 0.0028 0.003 0.0032 0.0027 0.0028 0.0024 0.0025 0.0024 0.0021 0.0018 0.0016 0.0014 0.0017 0.0017 0.0017 0.0017 0.0018 0.0022 0.002 0.0021 0.0027 0.0033 0.0042 0.0049 0.0051 0.0048 0.0037 0.0029 0.0024 0.0022 0.0024 0.0042 0.0065 0.0088 0.0123 0.0151 0.0163 0.016 0.0143 0.0111 0.0084 0.0057 0.0044 0.0044 0.0045 0.0045 0.0038 0.0029 0.0017 0.0013 0.0012 0.001 0.001 0.0009 0.002 0.0058 0.0121 0.0157 0.017 0.0162 0.012 0.0074 0.003 0.0025 0.0022 0.0023 0.002 0.0017 0.0016 0.0013 0.0016 0.0013 0.001 0.0007 0.0007 0.0006 0.0011 0.0028 0.0024 0.0026 0.0031 0.003 0.0026 0.0025 0.0025 0.0019 0.002 0.0016 0.0018 0.0016 0.0016 0.0019 0.0019 0.0018 0.002 0.0018 0.0019 0.0019 0.0022 0.0024 0.0021 0.0023 0.002 0.0019 0.0018 0.0014 0.0013 0.0009 0.001 0.0008 0.0005 0.0006 0.0006 0.0004 0.0005 0.0012 0.002 0.0055 0.0078 0.0093 0.0094 0.0082 0.0063 0.002 0.0012 0.0011 0.001 0.0009 0.001 0.0009 0.001 0.001 0.0011 0.0012 0.0011 0.0011 0.0008 0.0008 0.0008 0.001 0.0011 0.0014 0.0024 0.0027 0.003 0.0029 0.0028 0.0028 0.0028 0.0026 0.0025 0.0024 0.0023 0.0025 0.0025 0.0024 0.0025 0.0022 0.002 0.0022 0.0021 0.002 0.0021 0.0019 0.002 0.0018 0.0018 0.0018 0.0018 0.0018 0.0017 0.0016 0.0015 0.0013 0.0014 0.0012 0.0012 0.0012 0.0012 0.0017 0.0021 0.0019 0.002 0.0023 0.0027 0.0034 0.0042 0.0046 0.0051 0.0053 0.0048 0.0045 0.0034 0.0028 0.0023 0.0019 0.0015 0.0009 0.0004 0.0011 0.0024 0.0094 0.0263 0.0128 0.0123 
0.0107 0.0046 0.0034 0.0036 0.0039 0.0033 0.0024 0.0015 0.0016 0.0018 0.0026 0.0068 0.0113 0.0165 0.0202 0.0232 0.0258 0.0241 0.0211 0.0165 0.0086 0.0041 0.0031 0.0022 0.0018 0.0017 0.0017 0.0016 0.0015 0.0016 0.0018 0.0016 0.0013 0.0023 0.0031 0.0046 0.0057 0.0055 0.005 0.004 0.0022 0.0017 0.0017 0.0016 0.0015 0.0013 0.0012 0.0011 0.0009 0.0008 0.0008 0.0006 0.0006 0.0005 0.0007 0.0006 0.0008 0.0009 0.0011 0.0014 0.0016 0.0014 0.0013 0.0012 0.0013 0.0015 0.0017 0.0017 0.0016 0.0014 0.0014 0.0016 0.0018 0.0041 0.0069 0.0085 0.0102 0.0113 0.0109 0.0097 0.0079 0.0049 0.0026 0.002 0.0018 0.0016 0.0017 0.0013 0.0013 0.0013 0.0013 0.0014 0.0016 0.0025 0.0058 0.0104 0.0157 0.0193 0.0211 0.0197 0.0155 0.0118 0.0067 0.0035 0.0037 0.0031 0.0027 0.0027 0.0025 0.0024 0.0024 0.0023 0.0023 0.0024 0.0023 0.0023 0.0027 0.0025 0.0021 0.002 0.0016 0.0016 0.0015 0.0015 0.002 0.0019 0.0019 0.0018 0.0017 0.0016 0.0017 0.0016 0.0015 0.0016 0.0014 0.0014 0.0012 0.0013 0.0014 0.0014 0.0018 0.0017 0.0021 0.0033 0.0041 0.005 0.0054 0.0059 0.006 0.0058 0.0057 0.0049 0.0043 0.0038 0.0032 0.0026 0.0015 0.0011 0.0014 0.0018 0.0022 0.0021 0.0016 0.0019 0.0024 0.0029 0.003 0.0029 0.0023 0.0016 0.0013 0.0013 0.0018 0.002 0.0031 0.005 0.0075 0.0108 0.0152 0.0206 0.0219 0.0208 0.0174 0.0102 0.0045 0.0025 0.0021 0.0019 0.002 0.0017 0.0015 0.0013 0.0009 0.0008 0.001 0.001 0.0012 0.0016 0.0015 0.0016 0.0019 0.0019 0.0022 0.0019 0.002 0.002 0.0021 0.0025 0.0028 0.0032 0.0028 0.0026 0.0025 0.0026 0.0031 0.0096 0.0186 0.025 0.0312 0.0345 0.0347 0.0341 0.0295 0.0237 0.0181 0.0104 0.0038 0.002 0.0015 0.0018 0.0018 0.0018 0.002 0.0019 0.0019 0.0019 0.0017 0.0016 0.0016 0.0017 0.0019 0.0017 0.0017 0.0018 0.0018 0.0015 0.002 0.0019 0.0022 0.0021 0.0022 0.0023 0.0026 0.003 0.0033 0.0036 0.004 0.0089 0.0155 0.0198 0.0235 0.026 0.0256 0.0242 0.0203 0.015 0.009 0.0052 0.0047 0.004 0.0034 0.0034 0.0031 0.0031 0.0027 0.0027 0.0023 0.0019 0.0019 0.0022 0.0018 0.0017 0.0017 0.0014 0.0015 0.0017 0.0014 0.0017 0.0016 0.0017 0.0018 0.0016 0.0014 0.0013 0.0011 0.001 0.0011 0.0012 0.0016 0.0028 0.0063 0.0112 0.0146 0.018 0.0186 0.0165 0.0137 0.0084 0.0052 0.0036 0.0027 0.0022 0.0023 0.0022 0.0022 0.0017 0.0016 0.0011 0.0009 0.0008 0.0007 0.0004 0.001 0.0032 0.0042 0.0044 0.0046 0.004 0.0026 0.0026 0.0021 0.0019 0.0017 0.0018 0.0018 0.0017 0.0017 0.002 0.0025 0.0031 0.0046 0.0059 0.0083 0.0124 0.0147 0.0148 0.0136 0.0104 0.0054 0.0037 0.0034 0.0035 0.003 0.0029 0.0025 0.0023 0.0021 0.0023 0.0023 0.0023 0.0019 0.0017 0.0015 0.0012 0.0015 0.0012 0.0011 0.001 0.0008 0.0008 0.0006 0.0004 0.0005 0.0005 0.0004 0.0002 0.0005 0.0005 0.0006 0.0007 0.0014 0.0022 0.0037 0.0056 0.0068 0.0069 0.0064 0.0052 0.0031 0.0025 0.0025 0.0023 0.0019 0.0016 0.0015 0.0013 0.0013 0.0013 0.0014 0.0016 0.0014 0.0015 0.0016 0.0013 0.0015 0.0015 0.0013 0.0014 0.001 0.0008 0.0012 0.0009 0.0012 0.001 0.001 0.0012 0.0008 0.0009 0.001 0.001 0.0011 0.0016 0.0042 0.0065 0.0073 0.0087 0.009 0.0078 0.0073 0.0056 0.0028 0.0026 0.0025 0.0024 0.0026 0.0027 0.0029 0.0028 0.0028 0.0026 0.0021 0.0019 0.0021 0.0021 0.0022 0.0024 0.0022 0.0021 0.002 0.0022 0.0022 0.0023 0.0024 0.0023 0.0024 0.0023 0.0021 0.0022 0.0019 0.0018 0.0018 0.0017 0.0015 0.0017 0.0018 0.0016 0.0016 0.0018 0.002 0.0022 0.0032 0.0041 0.0048 0.0055 0.0057 0.0055 0.0057 0.0053 0.0046 0.0041 0.0037 0.003 0.0024 0.0018 0.0011 0.0013 0.0014 0.0016 0.0016 0.0018 0.0021 0.0033 0.0043 0.0051 0.0052 0.0055 0.0048 0.0031 0.0014 0.0011 0.0008 0.0012 0.0019 0.0126 0.0219 0.0294 0.0357 0.0401 0.0406 0.0369 0.0311 0.0227 0.0136 
0.0084 0.0039 0.0028 0.0023 0.0024 0.0024 0.0025 0.0026 0.0027 0.0026 0.0023 0.0021 0.0019 0.0018 0.0014 0.0011 0.0011 0.0014 0.0016 0.0019 0.0021 0.0021 0.0022 0.0024 0.0021 0.0024 0.0021 0.002 0.0018 0.0018 0.0018 0.0043 0.011 0.0164 0.0201 0.0229 0.0219 0.019 0.0146 0.0097 0.0053 0.0039 0.003 0.0025 0.0023 0.0019 0.0021 0.0022 0.0021 0.002 0.0017 0.0014 0.0016 0.0015 0.0013 0.0013 0.0011 0.0012 0.0011 0.0008 0.0007 0.0007 0.0009 0.001 0.0009 0.001 0.0009 0.0008 0.0008 0.0006 0.0008 0.0007 0.0008 0.0015 0.0028 0.0035 0.0045 0.0045 0.0039 0.0029 0.0023 0.002 0.002 0.002 0.0022 0.0023 0.0023 0.0021 0.0018 0.0012 0.0012 0.0012 0.0012 0.0013 0.0011 0.0013 0.0011 0.001 0.0011 0.001 0.001 0.0009 0.0008 0.0008 0.0005 0.0008 0.0007 0.0005 0.0006 0.0007 0.0005 0.0006 0.0005 0.0008 0.0006 0.0006 0.0007 0.0018 0.0024 0.0024 0.0024 0.002 0.0024 0.0033 0.0042 0.0045 0.0043 0.0037 0.0028 0.0018 0.0015 0.0012 0.0019 0.0035 0.0055 0.0085 0.0108 0.013 0.0139 0.0129 0.0125 0.0103 0.0081 0.0062 0.0033 0.0025 0.0026 0.0023 0.0019 0.0019 0.0017 0.0017 0.0015 0.0015 0.0018 0.0036 0.0064 0.0103 0.0135 0.0158 0.0164 0.0146 0.0116 0.0065 0.0035 0.0022 0.0019 0.0016 0.0013 0.0014 0.0013 0.0013 0.0013 0.0011 0.0014 0.0021 0.0065 0.0116 0.0153 0.0179 0.018 0.0152 0.0117 0.0073 0.0036 0.0027 0.0026 0.0023 0.002 0.0019 0.0018 0.0015 0.0011 0.0008 0.0006 0.0009 0.0033 0.0075 0.0127 0.0174 0.0191 0.0183 0.0147 0.0092 0.0047 0.0029 0.0028 0.0029 0.0025 0.0023 0.0022 0.002 0.002 0.0019 0.0017 0.0016 0.0026 0.0031 0.0031 0.0023 0.0031 0.0041 0.0046 0.0047 0.0039 0.0026 0.0028 0.0029 0.0025 0.002 0.0017 0.0018 0.0022 0.002 0.0019 0.0016 0.0015 0.0015 0.0026 0.0033 0.0041 0.0045 0.0046 0.004 0.0032 0.0024 0.0024 0.0027 0.0025 0.0025 0.0025 0.0024 0.0025 0.0022 0.0018 0.0016 0.0016 0.0017 0.0019 0.0017 0.0019 0.0015 0.0015 0.0014 0.0015 0.0017 0.0018 0.0017 0.0017 0.0015 0.0016 0.0017 0.0019 0.002 0.0018 0.0017 0.0015 0.0012 0.0013 0.0013 0.0012 0.0017 0.0021 0.0027 0.0024 0.0024 0.0026 0.003 0.0034 0.0035 0.0034 0.0032 0.0029 0.0029 0.0025 0.0019 0.0017 0.0015 0.0013 0.0027 0.0033 0.0055 0.0068 0.0076 0.0096 0.0202 0.0363 0.05 0.0568 0.0548 0.0484 0.0341 0.0107 0.002 0.001 0.0009 0.001 0.0007 0.0007 0.0012 0.0013 0.0025 0.0049 0.007 0.0082 0.0083 0.0075 0.0046 0.0036 0.0032 0.0027 0.0027 0.0022 0.0023 0.002 0.0019 0.002 0.0021 0.002 0.0022 0.0019 0.0015 0.0013 0.001 0.001 0.0012 0.0013 0.0015 0.0017 0.0018 0.0019 0.002 0.0019 0.0019 0.0018 0.0018 0.0019 0.0024 0.0023 0.0045 0.0097 0.0135 0.0179 0.0215 0.0223 0.0212 0.0179 0.0127 0.007 0.0032 0.0023 0.0017 0.0015 0.0014 0.0014 0.0017 0.0019 0.0017 0.0015 0.0014 0.0023 0.0058 0.0104 0.0132 0.0148 0.0151 0.013 0.0102 0.007 0.0037 0.0026 0.0022 0.0019 0.002 0.0021 0.002 0.0023 0.0021 0.0024 0.0028 0.0056 0.0109 0.015 0.0191 0.023 0.0249 0.0256 0.0245 0.0206 0.0158 0.0087 0.0037 0.0033 0.003 0.0035 0.0033 0.0029 0.0027 0.0021 0.0021 0.0019 0.0018 0.0018 0.0019 0.0018 0.0016 0.0015 0.0012 0.0013 0.0011 0.0011 0.0011 0.001 0.0011 0.0011 0.001 0.001 0.0009 0.0007 0.0009 0.001 0.0007 0.0009 0.001 0.0048 0.009 0.0136 0.0161 0.0163 0.0145 0.0104 0.0059 0.0026 0.0024 0.0025 0.0028 0.0024 0.0045 0.0021 0.0018 0.0017 0.0016 0.0014 0.0013 0.0009 0.0009 0.0006 0.0011 0.0015 0.0021 0.0024 0.0025 0.002 0.0023 0.0026 0.0028 0.0028 0.0027 0.0025 0.0025 0.0021 0.0024 0.0025 0.003 0.006 0.0107 0.0156 0.0195 0.0212 0.0204 0.0167 0.0123 0.0079 0.0038 0.0036 0.0035 0.0031 0.0032 0.003 0.0027 0.0023 0.0021 0.0017 0.0015 0.0015 0.0017 0.0017 0.0019 0.002 0.0021 0.0025 0.0027 0.0026 0.0026 0.0023 
0.0021 0.0019 0.0018 0.002 0.002 0.0021 0.0021 0.0022 0.0024 0.0023 0.0031 0.003 0.0031 0.0038 0.0053 0.0059 0.0062 0.006 0.0045 0.0041 0.0034 0.0031 0.0026 0.0026 0.0024 0.0022 0.002 0.0019 0.0018 0.0019 0.002 0.0024 0.0027 0.003 0.0028 0.0025 0.0022 0.0018 0.0018 0.0016 0.0017 0.0015 0.0015 0.0015 0.0016 0.0017 0.0016 0.0015 0.0017 0.0022 0.0025 0.0026 0.0027 0.0026 0.0019 0.0025 0.003 0.0032 0.0031 0.0028 0.0026 0.0022 0.002 0.0022 0.0023 0.0026 0.0026 0.0025 0.0023 0.0021 0.002 0.0021 0.0021 0.0021 0.0019 0.0018 0.0015 0.0018 0.0017 0.0019 0.0019 0.002 0.0021 0.0022 0.0018 0.0017 0.0019 0.0018 0.0019 0.0022 0.0019 0.0019 0.0018 0.0018 0.002 0.0017 0.0017 0.0016 0.0015 0.0017 0.0016 0.0017 0.0019 0.0019 0.0022 0.0022 0.0026 0.0027 0.0027 0.0027 0.0028 0.0028 0.0026 0.0026 0.0028 0.0029 0.0031 0.0033 0.0033 0.0032 0.0032 0.0031 0.0029 0.0029 0.003 0.0032 0.0035 0.0035 0.0032 0.0031 0.0031 0.0034 0.0036 0.0036 0.0032 0.0029 0.0025 0.0023 0.0023 0.0025 0.0029 0.0032 0.0033 0.0032 0.0031 0.0026 0.0026 0.0023 0.0023 0.0027 0.0032 0.0037 0.0039 0.0037 0.0035 0.0029 0.0029 0.0031 0.0031 0.0035 0.0034 0.0033 0.0027 0.0026 0.0025 0.0028 0.0027 0.0029 0.0034 0.0036 0.0038 0.0034 0.0029 0.0028 0.0029 0.0029 0.0036 0.0039 0.004 0.0042 0.0044 0.0045 0.0043 0.0041 0.0035 0.0035 0.0034 0.0032 0.0036 0.0036 0.0035 0.0034 0.0029 0.003 0.0031 0.0031 0.0031 0.0036 0.0035 0.0039 0.0039 0.0035 0.0032 0.0027 0.003 0.0029 0.003 0.0026 0.0024 0.0022 0.0022 0.0024 0.0023 0.0022 0.0022 0.0022 0.0021 0.0018 0.0016 0.0016 0.0018 0.002 0.0021 0.002 0.0018 0.0018 0.0016 0.0015 0.0017 0.0017 0.0019 0.0019 0.0015 0.0015 0.0016 0.0017 0.0017 0.0018 0.0015 0.0015 0.0018 0.0018 0.002 0.0019 0.0017 0.0018 0.0017 0.0016 0.0015 0.0017 0.0017 0.0014 0.0013 0.0012 0.0013 0.0009 0.0011 0.0009 0.0008 0.0008 0.001 0.0009 0.0015 0.0026 0.004 0.0044 0.0043 0.004 0.0023 0.0015 0.0011 0.0009 0.0005 0.0005 0.0006 0.0003 0.0002 0.0002 0.0003 0.0002 0.0003 0.0002 0.0004", + "breathiness": "-77.0975 -78.8382 -78.3582 -77.5376 -75.8485 -73.7377 -70.6094 -67.0688 -63.6513 -60.6329 -58.1063 -56.289 -54.9085 -53.9091 -53.1697 -52.3094 -51.5902 -50.9198 -50.2428 -49.9215 -49.3722 -49.0216 -48.8688 -48.8848 -48.907 -49.0633 -49.1441 -49.3333 -49.7143 -50.2545 -50.5759 -51.3953 -52.3662 -53.5363 -54.9505 -56.7609 -58.8004 -60.4678 -60.8286 -59.7619 -57.401 -54.06 -50.0029 -46.2652 -42.6077 -40.4859 -39.2831 -38.8886 -38.2971 -37.0827 -35.6564 -33.5832 -31.6631 -29.9732 -28.9536 -29.1355 -30.8579 -34.1892 -39.0923 -44.8051 -50.1401 -54.5979 -57.8447 -59.3995 -59.0342 -56.7493 -53.2406 -49.2149 -45.0033 -41.6311 -39.6246 -39.6768 -41.3105 -43.6429 -46.4393 -49.3989 -51.7978 -53.8896 -55.1554 -56.2591 -57.3919 -58.6515 -59.9107 -61.0367 -62.355 -63.659 -64.3682 -64.6163 -64.0386 -62.7336 -61.0264 -58.8772 -56.9181 -55.492 -54.5095 -54.0263 -53.921 -54.2266 -54.6657 -54.9341 -55.7746 -56.4964 -57.5728 -58.3763 -58.1998 -57.1587 -54.705 -51.4072 -47.562 -43.7489 -40.7202 -39.554 -39.874 -41.5888 -44.0001 -46.5498 -48.9848 -50.9271 -52.2482 -52.9068 -53.2225 -53.7391 -54.7271 -55.8598 -56.5734 -56.7947 -56.4592 -55.0719 -53.6947 -52.1564 -51.4929 -51.9523 -53.0361 -55.1259 -56.9012 -58.424 -59.2773 -59.7166 -59.6387 -59.51 -59.4202 -59.4385 -59.0827 -58.4 -56.9474 -54.8822 -52.1389 -48.8928 -45.5641 -43.0362 -41.6294 -41.5723 -42.2644 -44.5737 -46.8339 -49.2179 -51.4138 -52.7456 -53.483 -53.3763 -53.2447 -53.2618 -53.324 -53.4732 -53.7601 -54.0348 -54.5628 -54.8844 -55.0928 -55.3449 -55.4875 -55.4452 -55.239 -55.1988 -54.9974 -54.9029 -54.9239 
-54.9851 -55.5971 -56.1401 -56.8338 -57.4579 -58.3666 -58.9678 -59.7963 -60.1536 -60.405 -60.4113 -60.0621 -59.7773 -59.2805 -58.7894 -58.3134 -57.7221 -56.6249 -55.5876 -53.9698 -52.3913 -50.8817 -49.4422 -48.6747 -48.1076 -48.3467 -49.2397 -50.6334 -52.2063 -54.1197 -55.6063 -56.6651 -56.959 -56.4119 -55.1008 -53.8093 -52.4302 -51.2648 -50.7986 -50.5424 -51.1815 -51.7699 -52.4875 -53.364 -54.1761 -55.1004 -55.8253 -56.0578 -55.6131 -54.3578 -51.6713 -48.0488 -44.0549 -40.4399 -37.7277 -35.8597 -35.8873 -37.7291 -40.672 -44.571 -48.2843 -51.8535 -55.056 -57.1232 -58.822 -59.9936 -60.9873 -61.9821 -62.6078 -62.9948 -62.933 -61.9219 -59.9168 -57.7464 -55.211 -53.0509 -51.6456 -51.4883 -52.1965 -53.9009 -55.5198 -57.0716 -58.2551 -59.1654 -59.8971 -60.4266 -60.8021 -60.8923 -61.1636 -61.258 -61.4345 -62.132 -62.0977 -62.549 -62.8142 -62.9457 -62.5672 -61.8797 -60.8465 -59.7357 -58.6969 -57.725 -57.2207 -56.988 -56.8352 -56.8139 -56.4429 -56.3208 -55.9422 -56.0407 -55.3878 -54.4535 -52.964 -50.4047 -47.8468 -45.3714 -43.1237 -42.2654 -42.2709 -43.6437 -45.4222 -47.2374 -49.3183 -51.3116 -53.3992 -55.1398 -56.5662 -57.5229 -58.3638 -58.9048 -58.7818 -58.9304 -58.6755 -58.4235 -58.1353 -57.7298 -57.4069 -57.0039 -56.7487 -56.2483 -55.8864 -55.3632 -54.446 -54.215 -53.9984 -53.7954 -54.0028 -54.2669 -54.5496 -55.092 -55.5189 -55.8744 -56.2013 -56.716 -56.992 -57.1384 -57.1812 -57.1272 -56.9349 -56.7421 -56.38 -56.2623 -56.3054 -56.3238 -56.1858 -56.6461 -57.0433 -57.3685 -57.6684 -58.0129 -58.1144 -58.6769 -59.0019 -59.3916 -59.6989 -60.1216 -60.1258 -60.143 -59.71 -59.1189 -58.0309 -56.4233 -54.6234 -51.9674 -50.0856 -48.3799 -46.9606 -46.3272 -46.1654 -46.904 -47.8903 -49.8258 -52.0605 -54.7386 -57.1917 -58.1162 -57.197 -54.8096 -51.3102 -47.2519 -43.1212 -40.2031 -39.7758 -40.4881 -42.4471 -45.2412 -48.1507 -50.771 -53.1023 -54.7612 -55.2533 -55.0737 -54.0337 -51.9086 -49.4026 -46.6232 -43.8811 -41.3866 -39.1287 -38.1549 -37.9636 -39.0452 -40.8503 -43.6308 -46.8846 -49.8542 -52.7054 -54.9331 -56.4592 -57.6034 -58.2784 -58.6837 -58.9739 -58.6377 -57.4667 -55.4257 -52.3891 -48.5786 -44.451 -40.9323 -38.4079 -37.8034 -39.2025 -41.7241 -45.1107 -48.7245 -52.0098 -54.741 -56.2422 -56.5394 -55.7555 -53.6325 -50.2377 -46.1288 -41.3293 -37.0225 -33.0922 -30.2284 -28.9165 -27.975 -27.3132 -27.5842 -28.7153 -30.9977 -34.5222 -38.8831 -43.4588 -47.6221 -51.2556 -54.0378 -55.4949 -56.2579 -56.7294 -56.7831 -56.5587 -56.1811 -55.6952 -55.4822 -55.358 -55.4953 -55.7418 -55.7457 -55.709 -56.1002 -56.2681 -56.8866 -57.3041 -57.6754 -57.8074 -57.8256 -57.5974 -57.7567 -57.5406 -57.2452 -56.5623 -55.3109 -53.9545 -51.7989 -49.5302 -47.4521 -46.0066 -45.5107 -45.997 -47.3848 -49.1556 -51.1986 -52.8865 -54.356 -55.2861 -55.8972 -55.9731 -56.1022 -56.4985 -56.8059 -57.2912 -57.5305 -57.6186 -57.7261 -57.6844 -57.6259 -57.4496 -56.8798 -56.3192 -55.9986 -55.3057 -54.9445 -54.648 -54.9647 -55.4995 -55.7427 -56.136 -56.3302 -56.2336 -55.9875 -55.9677 -55.9754 -56.3176 -56.7915 -57.1389 -57.3348 -57.1473 -56.8215 -56.0257 -55.2133 -54.2972 -53.4626 -52.7976 -52.4195 -52.2296 -52.0231 -52.2278 -52.791 -53.4061 -54.0737 -54.8296 -55.2895 -56.0466 -56.6902 -57.236 -57.6633 -58.0779 -58.3666 -58.2354 -58.0639 -57.8836 -57.6389 -57.7933 -58.2105 -58.5173 -59.227 -60.1938 -60.9849 -61.8208 -61.9434 -62.0215 -61.7389 -60.9675 -60.3476 -59.4601 -58.7575 -58.1602 -57.7433 -57.2862 -56.8299 -56.4064 -55.8371 -55.4995 -55.3627 -55.2588 -55.479 -55.5092 -55.2645 -55.3701 -55.1165 -54.9648 -54.8303 -54.6843 -54.6673 -54.7618 
-54.7514 -54.4478 -54.295 -53.9074 -53.5297 -53.0856 -52.7541 -52.6146 -52.4507 -52.2708 -52.3738 -52.3038 -52.0568 -51.6776 -51.2813 -50.806 -50.6086 -50.0614 -49.3339 -48.1185 -46.6527 -45.1876 -43.3403 -41.7389 -40.6865 -40.3686 -40.5248 -41.962 -43.6015 -45.6934 -47.6143 -49.7106 -51.0938 -52.1045 -52.8151 -53.2245 -53.4491 -53.5584 -53.5305 -53.3922 -52.9232 -52.9094 -52.8303 -52.7592 -52.7558 -52.5495 -52.568 -52.8423 -52.9072 -53.2645 -53.7106 -54.5196 -55.0952 -55.355 -55.2351 -54.4437 -52.9792 -50.6693 -47.6602 -44.0197 -40.3361 -37.0621 -33.8898 -31.4019 -29.9687 -29.0753 -28.6355 -28.9351 -30.0925 -32.1956 -34.8067 -38.2034 -41.4067 -44.2073 -46.5593 -48.1466 -49.4784 -50.0664 -50.8967 -51.7179 -52.1916 -52.6257 -52.8404 -52.8037 -53.0219 -53.0657 -53.1386 -53.3323 -53.2458 -52.9205 -52.4619 -51.8101 -51.5099 -51.2948 -51.5834 -51.8102 -52.0633 -52.469 -52.8628 -53.2435 -53.658 -54.0525 -54.6447 -54.9882 -54.9765 -54.8862 -54.5607 -54.0658 -53.6854 -52.9183 -51.9188 -50.7096 -48.995 -47.2352 -45.4254 -44.0417 -42.9731 -42.2237 -41.8433 -42.3296 -43.4801 -45.7118 -48.2209 -51.4989 -53.7588 -55.5325 -55.8512 -55.7011 -54.8099 -53.6508 -52.2589 -51.1253 -49.601 -48.867 -48.9711 -49.8305 -51.4503 -53.2549 -54.3724 -54.1535 -52.4189 -49.2496 -45.0915 -40.6216 -35.7742 -31.6644 -28.3105 -25.9354 -24.4899 -24.4412 -25.3822 -27.1504 -30.2275 -33.4848 -37.1802 -40.9546 -44.4082 -47.298 -49.4401 -50.6371 -50.9434 -50.6284 -49.8353 -48.9073 -47.4504 -46.0757 -44.4791 -43.22 -41.9307 -41.1184 -41.2707 -42.0084 -43.8439 -45.8239 -48.1283 -50.0469 -51.9906 -53.4531 -54.5238 -55.3505 -55.797 -55.8715 -54.2639 -51.5757 -48.0735 -43.9351 -39.9174 -36.3988 -33.813 -32.7653 -33.0514 -33.8681 -35.9653 -38.5582 -41.46 -44.442 -46.9771 -48.9238 -50.4433 -51.346 -52.0736 -52.5528 -53.1994 -53.6139 -54.0344 -54.4251 -55.0378 -55.4369 -56.0675 -56.818 -57.3664 -58.2231 -58.9556 -59.827 -60.4933 -61.156 -61.3062 -61.3963 -61.7403 -61.7522 -62.0231 -62.3011 -62.4234 -62.4279 -61.5347 -59.7232 -57.1305 -54.2795 -51.465 -49.6577 -48.7416 -48.8428 -49.9237 -51.2904 -52.0128 -53.1697 -53.7272 -53.8419 -54.0902 -54.0162 -53.9918 -54.1494 -54.3453 -54.7503 -54.9317 -55.125 -55.0447 -54.9192 -54.9304 -54.7432 -54.7588 -54.8099 -55.0342 -55.2503 -55.5874 -56.028 -56.6281 -57.1443 -57.5194 -57.5022 -57.3497 -57.3365 -57.4139 -57.5156 -57.6542 -57.4713 -57.1851 -56.9881 -56.8137 -57.0498 -57.8609 -58.794 -60.0777 -60.9599 -62.1183 -62.7237 -63.3444 -63.759 -64.3822 -64.6563 -64.8772 -65.228 -65.583 -65.5784 -65.1691 -63.6593 -60.9686 -57.4353 -53.2087 -48.8299 -44.7731 -41.8519 -40.1826 -39.9599 -41.0806 -43.44 -46.1914 -49.0645 -51.5646 -53.5909 -54.6103 -54.9813 -54.9523 -54.8652 -55.0004 -54.7898 -53.9782 -52.3824 -50.1909 -47.0012 -43.9802 -40.8603 -38.6424 -37.0307 -36.5015 -37.2995 -39.1852 -41.1622 -43.9172 -46.5326 -49.1807 -51.1231 -52.3614 -53.055 -53.2136 -52.6077 -50.8404 -48.2354 -45.0257 -41.5907 -38.4301 -35.7351 -34.0886 -33.3604 -33.5891 -34.5197 -36.4711 -39.2805 -42.7276 -46.4109 -49.8856 -53.1643 -55.9333 -58.3887 -60.4887 -62.3163 -63.9381 -64.7793 -64.8303 -64.2541 -62.9376 -61.2656 -59.4707 -57.8489 -56.4001 -55.6766 -54.8846 -54.5828 -54.4568 -54.6081 -54.8962 -55.0 -55.625 -55.7195 -55.9167 -55.7471 -55.0703 -53.5341 -51.0025 -48.0159 -44.2986 -40.8982 -37.8642 -35.8632 -34.5485 -34.5177 -34.882 -36.4049 -38.7615 -42.1272 -45.6795 -49.3325 -52.5197 -54.9865 -56.6456 -57.4975 -57.897 -58.1937 -58.3585 -58.3658 -58.4071 -58.5422 -58.035 -57.0987 -56.0786 -54.5299 -53.3468 -52.2659 -51.8501 
-51.5614 -51.3561 -51.316 -51.1695 -51.1816 -51.1321 -51.2418 -51.7899 -52.3966 -52.9581 -53.6497 -54.1659 -54.521 -54.7281 -55.0137 -55.3572 -55.657 -56.0264 -56.1486 -56.2259 -56.0546 -55.9967 -55.9341 -56.3303 -57.0042 -58.1098 -59.3776 -60.4726 -61.5719 -62.5054 -63.1062 -63.6433 -63.908 -63.453 -62.8063 -61.6191 -60.5302 -58.9229 -57.485 -56.175 -54.6777 -53.2007 -51.7122 -50.091 -48.7556 -47.7025 -47.491 -47.8729 -49.0906 -50.5453 -52.6181 -54.6586 -56.6504 -57.6225 -57.7101 -56.5869 -54.5469 -52.3279 -50.2275 -48.6587 -47.8444 -48.0267 -48.8303 -49.7203 -50.5585 -51.4116 -52.4182 -53.1338 -53.794 -54.4073 -55.3185 -55.8702 -56.5198 -57.342 -58.0028 -58.8345 -59.2792 -59.7364 -60.0513 -60.5723 -60.6001 -60.883 -60.7014 -60.8248 -60.6193 -60.1356 -59.8052 -59.5416 -58.9815 -58.5918 -57.2903 -55.8011 -54.1456 -52.85 -51.5337 -50.7358 -49.9931 -49.4168 -48.8266 -47.9962 -47.4167 -47.2562 -47.6559 -48.5259 -49.2216 -50.4419 -51.1584 -51.8222 -52.2247 -52.3894 -52.6833 -52.9397 -52.8959 -52.3191 -51.4609 -50.2485 -49.1656 -48.2485 -47.6047 -48.0576 -48.5536 -49.7476 -50.7265 -51.9306 -52.5746 -53.4208 -54.4312 -55.6344 -56.8607 -58.5111 -59.6574 -61.2356 -61.7315 -60.7834 -58.2629 -54.8401 -50.6086 -46.4966 -43.1245 -40.8124 -40.3437 -41.3993 -43.6816 -46.665 -50.3157 -53.5681 -56.8525 -59.026 -60.6199 -61.4769 -61.8013 -61.5006 -61.1519 -59.9855 -57.9275 -55.0722 -51.5359 -47.7978 -44.0504 -41.3125 -39.3362 -39.4121 -41.3199 -44.014 -47.2857 -50.5358 -53.1956 -55.1376 -55.776 -55.6483 -55.4595 -55.4412 -55.3047 -55.2511 -55.0132 -54.9054 -54.6829 -54.6181 -54.6704 -54.8187 -55.2817 -55.6673 -56.1152 -56.4388 -56.568 -56.8718 -57.1929 -57.1363 -57.2247 -57.2901 -57.3488 -57.3085 -57.1573 -56.8765 -56.8703 -56.7376 -56.9468 -56.9718 -56.7717 -56.4913 -56.1294 -55.366 -54.8655 -54.5165 -54.4163 -54.6013 -54.9664 -55.4273 -55.9092 -56.3938 -56.6978 -57.1472 -57.6191 -58.6002 -59.733 -60.9404 -62.0384 -62.8909 -63.3447 -63.3834 -62.8754 -61.9748 -61.0003 -59.7731 -58.5309 -57.3948 -56.7235 -56.3307 -56.3658 -56.6962 -57.2448 -58.0092 -58.7378 -60.2359 -61.518 -62.8348 -64.0097 -64.7331 -64.1569 -62.516 -59.8347 -56.9793 -54.2914 -52.0298 -50.995 -51.2948 -52.1311 -53.3942 -54.5107 -55.0984 -55.7654 -56.0172 -56.4213 -56.6943 -57.5899 -58.6255 -59.2345 -60.0633 -60.7069 -61.2063 -61.6607 -62.2874 -62.8686 -63.4821 -63.8893 -63.9449 -63.8927 -63.6529 -63.6181 -63.2137 -62.9831 -62.612 -62.0757 -61.3636 -60.3138 -59.7095 -58.7009 -57.9403 -57.4234 -56.6559 -55.9314 -55.078 -54.3263 -53.1862 -52.5538 -52.2239 -51.7708 -51.6128 -51.5016 -51.7431 -51.7307 -51.6323 -51.6253 -51.6765 -51.767 -52.1765 -52.6049 -52.7226 -53.1701 -53.5453 -54.0216 -54.4346 -54.8774 -55.7359 -56.3095 -56.8389 -57.4892 -58.0471 -58.2742 -58.6718 -58.4772 -58.0469 -57.0121 -55.0568 -52.2358 -48.6444 -44.5469 -40.7607 -37.4769 -34.5609 -32.8741 -32.5426 -32.9214 -33.4079 -34.6524 -36.0812 -38.262 -40.7481 -43.4456 -45.8805 -48.2749 -49.8878 -51.0916 -51.7051 -51.9193 -52.1373 -52.5762 -52.8778 -53.1205 -53.5796 -53.7292 -53.7602 -54.0104 -54.2716 -54.6402 -55.1589 -55.4236 -55.402 -55.2397 -54.9229 -54.7654 -54.5639 -54.845 -54.7571 -54.8556 -54.8199 -55.0649 -55.4418 -56.1539 -56.8644 -57.546 -57.8786 -58.0945 -58.1512 -57.6878 -57.0408 -56.2802 -55.0198 -53.3516 -51.5658 -49.7493 -48.2327 -47.1068 -46.0439 -46.2031 -46.493 -47.5938 -48.8328 -50.248 -51.0855 -50.6725 -48.9324 -46.6354 -43.5592 -40.7044 -38.1929 -37.0032 -36.9778 -38.111 -39.9498 -41.9678 -44.1463 -46.1945 -48.3682 -50.2697 -52.2975 -54.429 -56.8557 
-59.5435 -61.3536 -62.04 -60.8112 -58.078 -54.271 -49.8943 -45.6963 -42.5027 -40.9105 -41.1036 -42.8971 -45.49 -48.1536 -50.8744 -53.2403 -54.7711 -55.995 -56.8565 -57.9875 -59.1835 -60.8283 -62.2531 -63.9254 -65.1644 -65.6717 -65.239 -64.141 -62.459 -60.7076 -58.8448 -57.6308 -56.2851 -55.8842 -55.5214 -55.5354 -55.8897 -56.3076 -56.4898 -56.6586 -56.9905 -56.9241 -56.9779 -56.7031 -56.3304 -55.9013 -55.5397 -55.206 -54.8169 -55.007 -54.9519 -55.2707 -56.0108 -56.8105 -57.7124 -58.7417 -59.438 -60.4944 -60.9867 -61.3012 -62.1575 -63.3083 -64.4453 -65.3065 -64.9755 -63.5677 -61.2154 -57.8985 -54.7697 -52.2337 -50.7669 -51.1013 -52.7375 -54.8696 -56.8823 -58.804 -59.9304 -60.3773 -60.2375 -60.0467 -59.6054 -59.729 -59.6103 -59.3272 -59.8628 -60.4506 -61.1379 -61.9604 -62.4697 -62.4033 -62.0295 -60.7822 -59.1116 -57.1757 -55.5771 -54.174 -53.1695 -52.9678 -52.8965 -52.9055 -53.0407 -53.1038 -53.6195 -53.8938 -54.1358 -54.4763 -54.5692 -54.8464 -55.0986 -55.4985 -56.335 -56.9512 -57.1818 -57.3733 -57.2049 -57.0771 -57.0164 -57.1571 -57.2989 -57.6574 -58.2173 -58.7093 -59.5687 -60.2595 -61.0891 -61.7054 -61.9668 -61.883 -61.6153 -60.6578 -59.5632 -57.8293 -56.1649 -54.4601 -52.9718 -51.3933 -50.1988 -49.0142 -47.8517 -47.0863 -46.1366 -45.7347 -45.9517 -46.5653 -47.9095 -49.4919 -51.4855 -53.6288 -55.4249 -56.3695 -56.54 -55.8343 -54.6982 -53.0121 -51.3978 -49.823 -49.522 -49.4156 -49.738 -50.2445 -50.9557 -51.907 -52.8206 -53.4003 -53.7726 -52.7515 -51.0489 -48.1907 -45.1814 -41.9071 -38.9888 -37.0943 -36.1025 -36.3655 -37.9163 -40.4852 -43.6531 -47.0969 -50.2442 -53.1898 -55.2611 -56.387 -57.2393 -57.9015 -58.1019 -58.1431 -57.7745 -57.0473 -55.9508 -54.571 -52.8739 -51.1932 -49.8279 -48.9055 -49.3358 -50.5245 -52.3023 -54.309 -56.3085 -57.8751 -59.2286 -60.0683 -60.6382 -61.2601 -61.6132 -62.0753 -62.5783 -62.9434 -63.436 -63.7204 -63.6759 -63.2724 -62.0027 -60.88 -59.6997 -58.7199 -57.5354 -56.6557 -56.4015 -56.2701 -56.3242 -56.7133 -56.9828 -57.2147 -56.9664 -56.5436 -55.6854 -54.072 -52.2646 -50.0786 -47.9574 -45.765 -44.0912 -43.0962 -43.1512 -43.912 -46.1607 -48.4271 -50.9561 -53.2298 -55.1137 -56.0126 -56.5162 -56.8504 -57.2032 -57.2312 -56.9104 -55.5167 -53.0526 -49.6673 -45.7396 -41.7484 -38.0602 -35.6816 -34.8337 -35.4675 -37.0303 -40.0452 -43.0455 -45.8308 -48.4178 -50.1398 -51.37 -51.8925 -52.2343 -52.5939 -52.6987 -52.8966 -53.0731 -53.4773 -53.4683 -53.7525 -53.7941 -53.9811 -54.2414 -54.904 -55.33 -55.673 -55.7748 -55.8951 -55.8315 -55.7637 -55.5645 -55.7832 -56.5569 -57.2789 -57.8969 -58.7411 -59.4869 -59.8353 -60.3256 -60.1718 -59.9215 -59.0398 -58.2128 -57.2889 -56.3322 -54.9643 -54.0408 -52.8452 -51.3652 -50.1805 -48.9791 -48.1386 -47.8069 -47.506 -47.7429 -48.6013 -49.5275 -50.7865 -51.8116 -52.2792 -51.7453 -50.6962 -48.3314 -46.9748 -45.418 -45.6646 -46.2281 -47.946 -49.6231 -51.5873 -53.1234 -54.8156 -56.0919 -57.5885 -58.5036 -59.1684 -59.0979 -57.8596 -55.4102 -51.5616 -47.3233 -43.1279 -39.7216 -37.3191 -36.2519 -36.6228 -38.5001 -41.3766 -44.8359 -48.3519 -51.616 -54.2987 -56.197 -57.467 -58.6996 -59.6086 -60.3238 -60.7988 -61.1134 -60.9714 -60.6623 -59.8744 -59.2081 -58.2246 -57.4565 -56.4523 -55.3604 -54.4978 -53.6718 -53.2848 -52.9836 -52.8108 -52.9556 -52.7724 -52.6359 -52.3537 -51.3364 -49.6514 -47.3189 -44.23 -40.9077 -37.639 -35.0373 -33.025 -32.0462 -31.9066 -32.6732 -34.7237 -37.4247 -40.8832 -44.6881 -48.6076 -51.9506 -54.7493 -56.638 -57.6918 -58.1788 -58.3187 -58.1385 -57.833 -57.5078 -57.2558 -57.4579 -57.4928 -58.02 -58.3201 -58.519 -58.3154 -57.3557 
-56.3762 -54.9666 -53.6749 -52.8482 -52.3463 -52.4322 -52.3461 -52.3558 -51.8823 -50.5426 -48.3233 -45.7857 -42.786 -39.7096 -37.1909 -35.1563 -33.9106 -33.7222 -34.3258 -35.8997 -37.8928 -40.4424 -42.911 -45.3397 -47.2316 -48.513 -49.7457 -50.3412 -50.8805 -51.3338 -51.6405 -52.0243 -52.1334 -52.5683 -52.7874 -52.9108 -53.2811 -53.6983 -54.198 -54.7308 -55.3895 -56.5074 -57.5426 -58.4356 -59.1262 -59.5714 -59.852 -59.8617 -59.9732 -60.0404 -60.2729 -60.2813 -59.8092 -58.8672 -56.339 -52.8771 -48.9533 -45.1498 -41.7664 -40.0729 -39.7151 -41.0144 -43.0497 -45.8341 -48.1827 -50.4916 -52.1278 -53.3412 -54.6247 -55.8026 -57.0187 -58.4381 -60.0033 -61.194 -61.6525 -61.4705 -60.2408 -58.5602 -56.6462 -54.8504 -53.3944 -52.3725 -51.7855 -51.3523 -51.2477 -51.5352 -52.1057 -52.4962 -53.2381 -53.537 -53.9116 -53.996 -53.5321 -52.4545 -50.7454 -48.3565 -45.8377 -43.3062 -41.0326 -39.7995 -39.4329 -40.6198 -42.4834 -44.5084 -46.7269 -48.457 -49.5555 -50.4302 -50.6262 -50.9229 -51.6156 -52.0725 -52.5112 -53.1058 -53.6616 -53.9583 -54.7833 -55.2769 -55.9174 -56.4355 -57.1838 -57.5443 -58.294 -59.1448 -60.144 -61.5844 -62.5482 -63.629 -64.5796 -65.4576 -65.6691 -65.8749 -65.5611 -64.0585 -61.7198 -58.2191 -54.3456 -50.1418 -46.6352 -44.1371 -42.9468 -43.7451 -45.3931 -47.4015 -49.6343 -51.4754 -52.8954 -53.3467 -53.785 -54.0607 -54.2295 -54.3361 -54.3865 -54.6475 -54.7732 -54.8406 -54.9193 -54.9735 -54.9739 -54.8972 -54.8977 -54.9541 -55.2546 -55.5283 -56.0637 -56.1888 -56.4901 -56.8039 -57.1249 -57.8229 -58.436 -59.0022 -59.6889 -60.4314 -60.5043 -59.7847 -58.2333 -56.2024 -53.7841 -51.6058 -49.44 -48.4356 -48.4658 -49.08 -49.6243 -50.5829 -51.2966 -51.9704 -52.2782 -52.543 -52.9608 -53.1015 -53.0724 -53.3736 -53.4343 -53.4876 -53.7427 -54.0722 -54.2581 -54.5536 -54.672 -55.0657 -55.2337 -55.3773 -55.2739 -55.1717 -54.6118 -54.2858 -53.9629 -53.7905 -53.7513 -53.7036 -53.6748 -53.9739 -54.5328 -55.417 -56.2902 -56.7459 -57.1474 -57.5113 -57.5402 -57.201 -56.5786 -55.8044 -54.8788 -53.3653 -51.8646 -50.1486 -48.8459 -47.5222 -46.7203 -45.9061 -45.843 -46.1067 -47.2867 -49.4154 -51.8773 -54.1238 -55.4219 -56.1363 -55.995 -55.1086 -53.9848 -52.6253 -51.8606 -50.8686 -50.3916 -49.6423 -49.7263 -50.2347 -51.4607 -53.1063 -54.9673 -56.4623 -57.4671 -57.0684 -54.9045 -51.3994 -46.9972 -42.2298 -38.002 -34.5655 -32.1591 -31.0223 -31.1792 -32.7392 -35.082 -38.1036 -41.6361 -44.8579 -48.2348 -50.8261 -53.0527 -54.8068 -55.923 -56.5629 -56.7104 -56.3984 -55.6733 -55.2128 -55.0115 -55.216 -55.579 -55.9867 -55.7982 -55.4803 -54.7144 -53.8868 -53.4398 -53.06 -53.1134 -53.1839 -53.7684 -54.1871 -54.668 -55.3586 -55.79 -55.3116 -53.3889 -50.7518 -46.8901 -43.1168 -39.6265 -37.4066 -36.4921 -37.4958 -39.1614 -41.9818 -45.0145 -47.9801 -50.6134 -53.0422 -54.3877 -55.3341 -55.6367 -55.8552 -55.8795 -55.9639 -55.9865 -55.9393 -56.2069 -56.8696 -57.3745 -58.4834 -59.443 -60.4879 -61.2499 -61.9706 -62.3021 -62.6167 -62.7133 -62.8964 -63.0305 -63.4342 -64.1224 -64.8639 -65.3299 -65.5952 -65.1969 -63.955 -61.5084 -58.5149 -55.0004 -51.7325 -49.5948 -48.619 -48.9022 -50.2499 -51.7213 -53.0724 -53.8654 -54.3146 -54.4221 -54.7001 -54.9915 -55.6928 -56.4078 -57.2029 -58.1574 -59.116 -59.5337 -59.8308 -60.265 -60.5673 -60.7266 -60.631 -60.5248 -60.4537 -60.394 -60.5655 -61.0071 -61.4893 -62.3132 -63.6108 -64.5735 -65.5888 -66.8051 -67.5765 -68.4333 -68.9058 -69.1765 -68.4176 -66.7172 -64.5107 -61.8407 -59.4011 -57.2074 -56.0723 -54.7072 -53.8277 -52.5949 -51.0945 -49.5882 -48.0439 -47.4256 -47.4353 -48.0942 -49.5816 -50.9888 
-51.843 -51.6528 -50.7389 -48.6045 -45.8971 -42.8846 -40.3906 -39.4338 -39.1401 -39.6865 -41.2155 -43.1199 -45.5288 -47.7327 -49.4586 -51.1503 -52.2147 -52.939 -53.5218 -54.2079 -54.3796 -54.1808 -53.0941 -51.0123 -48.1675 -44.7883 -41.5232 -38.5045 -36.4751 -35.4083 -35.6766 -37.556 -40.4556 -44.1304 -48.1161 -51.8229 -55.0874 -57.5771 -58.9432 -59.687 -59.7674 -59.2099 -57.3934 -54.0619 -50.165 -45.8136 -41.8949 -38.5571 -36.4979 -35.7887 -36.5292 -38.5278 -40.8815 -43.5984 -46.3517 -49.1338 -51.6989 -54.1667 -56.0133 -58.1382 -59.7785 -61.0899 -61.4032 -60.2949 -57.9032 -54.1781 -49.3897 -44.6289 -40.6896 -38.6311 -38.8918 -41.3292 -44.5683 -47.8845 -51.1172 -53.6537 -55.2249 -55.8055 -55.9022 -56.0047 -56.4196 -56.7526 -57.027 -57.1049 -56.9877 -56.4911 -55.7016 -54.3512 -52.9042 -51.3532 -50.2542 -49.5642 -49.4914 -49.7305 -50.6015 -51.5225 -52.3709 -53.2565 -53.7251 -54.2581 -54.4776 -54.5302 -54.6635 -54.676 -54.6198 -54.1846 -53.3253 -52.0921 -50.6006 -48.9874 -47.9316 -47.807 -48.6271 -50.0055 -51.4702 -52.6974 -53.7902 -54.3899 -54.3521 -54.1448 -53.8503 -53.8508 -53.9064 -53.9908 -54.4025 -54.7624 -55.0236 -55.125 -55.0951 -55.2283 -55.3638 -55.3607 -55.5249 -55.4571 -55.6659 -55.6674 -55.5365 -55.6941 -55.8778 -56.1608 -56.3537 -56.6574 -56.9125 -57.135 -57.2648 -57.4922 -57.8209 -57.9789 -58.0064 -57.9291 -57.6465 -57.2438 -56.9503 -56.4324 -55.6236 -54.6289 -53.1302 -51.655 -49.6622 -48.2662 -47.0937 -46.6646 -46.9927 -48.1406 -49.7363 -51.6774 -53.7482 -54.1121 -53.2985 -51.6076 -48.5902 -45.562 -42.9217 -41.5568 -41.4517 -42.058 -42.7833 -43.4224 -44.1154 -44.9779 -46.3619 -48.4506 -51.7093 -54.9874 -58.3072 -60.3336 -61.0363 -60.1032 -58.2702 -55.0816 -51.9576 -48.7546 -46.4283 -45.4681 -45.6301 -46.6167 -47.8325 -49.4283 -50.7014 -51.6128 -52.0372 -52.1848 -52.1293 -52.3212 -52.5984 -53.0854 -53.5409 -54.2745 -55.0742 -55.9 -56.4576 -56.8011 -57.4339 -57.8215 -58.3635 -58.4045 -58.4971 -58.3328 -58.328 -58.2717 -58.0799 -57.9506 -57.6835 -57.2736 -56.5652 -55.7255 -54.042 -51.8903 -48.9842 -45.6091 -42.5172 -39.7111 -37.6624 -37.3108 -37.861 -39.9117 -42.7803 -46.1314 -49.4743 -52.6249 -55.6468 -57.7377 -59.25 -60.0118 -60.3244 -60.5193 -59.9111 -58.2228 -55.4446 -51.9301 -48.3543 -44.4634 -41.3728 -39.2981 -38.7104 -39.0369 -40.4523 -43.0146 -45.88 -48.8383 -51.6267 -53.7094 -55.331 -56.1266 -56.2523 -55.9696 -55.1701 -53.4206 -50.819 -47.8364 -44.4148 -41.0656 -38.2614 -36.0157 -35.0206 -34.6404 -35.4875 -37.0314 -39.5047 -42.4309 -45.0975 -47.7723 -49.6456 -50.9493 -51.5084 -51.9689 -52.419 -53.1228 -53.1913 -53.7652 -54.0586 -54.4247 -54.7548 -55.1704 -55.6111 -56.1501 -56.5147 -56.9984 -57.3239 -57.822 -58.0378 -58.5355 -58.7579 -59.0592 -59.2087 -59.4847 -59.3338 -59.8555 -59.8932 -60.276 -60.2136 -59.6809 -57.3737 -54.3062 -50.2418 -46.1413 -42.663 -40.7329 -40.752 -42.548 -44.9207 -48.0961 -50.9574 -53.0123 -54.0829 -54.473 -54.6842 -54.9786 -56.0108 -57.2901 -58.666 -59.7927 -61.0365 -61.7583 -62.2754 -62.2094 -61.3701 -60.206 -58.9172 -57.4568 -56.2224 -55.2753 -54.9672 -54.9962 -55.2429 -55.7207 -56.0188 -55.9614 -56.0514 -55.664 -54.9626 -53.7909 -52.0346 -49.227 -46.2258 -43.12 -40.3374 -38.4738 -38.0091 -39.0741 -41.2145 -43.6449 -46.5819 -49.4971 -52.2241 -54.2798 -56.0049 -57.1707 -57.7659 -58.422 -58.7518 -58.8267 -58.6428 -58.3774 -57.9518 -57.2746 -56.835 -56.0767 -55.3134 -54.8094 -54.5449 -54.054 -54.33 -54.3472 -54.3473 -54.2974 -54.044 -53.9998 -54.1681 -54.2623 -54.3889 -54.237 -53.6919 -52.372 -50.8857 -49.2577 -47.6389 -46.8774 -46.5652 -46.6116 
-47.2842 -47.7457 -48.2349 -48.6631 -49.0944 -49.5978 -50.2066 -50.8077 -51.4358 -52.2462 -52.775 -53.3115 -53.597 -53.8473 -53.7414 -53.8839 -54.0165 -54.1135 -54.3442 -54.5676 -54.9268 -54.9938 -55.0037 -54.9699 -54.9939 -55.359 -55.8432 -56.8415 -57.567 -58.5863 -58.8409 -58.5636 -57.5394 -56.2 -55.2985 -54.9015 -55.0334 -55.6233 -56.3428 -57.1614 -57.3983 -57.3273 -56.8816 -56.374 -55.9525 -55.6356 -55.6529 -55.6936 -55.6079 -55.6549 -55.4208 -54.9835 -54.6741 -54.2635 -54.091 -54.0096 -54.0223 -54.1494 -54.2372 -54.3687 -54.4162 -54.5132 -54.8194 -55.1977 -55.7601 -56.3509 -56.9401 -57.3328 -57.5666 -57.7586 -57.9817 -58.257 -58.5956 -58.7188 -58.8901 -58.9637 -58.8013 -58.4734 -58.2401 -57.8773 -57.6545 -57.2849 -56.7751 -56.3876 -55.9515 -55.6422 -55.4768 -55.3166 -55.42 -55.318 -55.3487 -55.3948 -55.4665 -55.4398 -55.3632 -55.3064 -55.0427 -54.7965 -53.9757 -53.621 -52.8888 -52.1783 -51.7898 -51.715 -51.5331 -51.3567 -51.2181 -50.9727 -50.7112 -50.4832 -50.3915 -50.4019 -50.6685 -50.6977 -50.981 -51.1751 -51.0788 -50.923 -50.8932 -50.7189 -50.8014 -50.8807 -50.7933 -50.6981 -50.4293 -50.0837 -49.5839 -49.3484 -49.4788 -49.8383 -50.3654 -50.8254 -51.1619 -51.268 -50.9058 -50.455 -49.5902 -48.9962 -48.5006 -47.9206 -48.0923 -48.4651 -49.3463 -49.9995 -50.688 -50.8925 -50.7476 -50.6321 -49.9508 -49.1575 -48.7692 -48.3379 -48.2622 -48.4614 -48.6336 -48.9016 -49.2919 -49.6084 -49.5204 -49.4833 -49.3415 -48.9034 -48.9969 -48.9261 -48.8515 -48.9546 -49.0383 -49.0896 -49.3094 -49.5229 -49.838 -50.1113 -50.4209 -50.5184 -50.6113 -50.4997 -50.4706 -50.6603 -51.0895 -51.5584 -51.9686 -52.4412 -52.3815 -52.4458 -52.207 -52.247 -52.1714 -52.4491 -52.6606 -53.0267 -53.2011 -53.2779 -53.351 -53.3672 -53.4873 -53.5959 -54.215 -54.4777 -54.905 -55.2716 -55.6538 -56.054 -56.5531 -56.8537 -57.034 -57.025 -56.6461 -56.5278 -56.4216 -56.3989 -56.8238 -56.9804 -57.3234 -57.135 -57.0194 -56.707 -56.2126 -56.1115 -56.2268 -56.5145 -57.2831 -58.0687 -59.0748 -59.7353 -60.637 -61.2471 -61.6158 -61.7028 -61.8696 -61.7831 -61.7789 -61.5846 -61.6646 -61.5089 -61.5956 -61.4969 -61.1889 -61.2447 -61.0066 -61.1056 -61.3651 -62.0222 -62.8645 -63.5603 -63.8672 -63.5788 -62.401 -60.6794 -58.8697 -57.8725 -58.371 -60.4845 -63.4884 -66.3566 -68.3918 -69.4232 -69.2363 -68.9419 -68.5798 -68.9876 -69.9407 -71.3424 -72.6919 -74.1247 -74.7286 -74.5501 -72.9719 -69.6215", "breathiness_timestep": "0.011609977324263039" }, { @@ -218,9 +218,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "201.5 201.9 202.1 202.2 202.0 202.3 201.9 202.2 202.4 202.5 202.2 202.4 202.4 202.5 202.3 201.9 201.8 201.7 201.3 201.3 200.9 201.2 200.9 200.9 201.1 200.9 201.1 201.0 201.1 200.9 201.0 200.8 200.8 200.6 200.6 200.9 200.6 200.9 200.8 200.4 200.4 199.9 200.5 199.5 199.3 199.2 198.6 197.8 197.9 197.5 197.5 197.4 196.5 197.8 202.4 210.1 219.2 227.2 235.2 242.7 248.6 250.4 248.4 244.1 239.9 235.3 232.7 230.3 228.0 226.1 224.7 222.2 224.3 220.1 218.3 215.8 216.0 217.3 219.6 221.6 221.6 221.4 222.4 222.0 219.6 217.3 207.6 217.0 225.0 233.3 242.5 252.9 262.1 257.0 254.0 251.8 250.2 248.0 245.4 242.3 238.3 235.2 235.4 236.3 239.7 243.7 247.0 249.6 252.7 256.1 257.4 255.5 249.4 244.1 239.7 237.1 236.6 238.8 243.1 247.0 249.3 250.1 249.9 249.6 248.0 245.3 238.8 229.9 226.3 233.8 240.8 247.1 254.3 258.1 252.8 249.8 248.1 247.2 247.1 246.9 247.4 246.6 245.9 246.0 245.4 243.4 238.7 232.9 230.2 228.6 227.0 226.6 225.9 225.6 223.2 218.8 218.1 218.5 219.3 219.8 219.4 219.9 221.5 222.7 222.3 221.3 220.0 212.4 212.3 216.7 218.2 220.9 224.8 228.9 237.1 
242.2 246.0 248.4 251.0 252.5 252.7 250.4 247.7 244.9 242.3 240.8 240.3 241.3 243.8 246.6 247.5 248.9 248.8 246.9 246.1 246.1 246.1 246.0 245.4 245.2 244.2 245.1 247.1 247.8 246.7 244.2 239.7 233.1 224.0 210.5 199.1 191.7 187.1 186.3 187.3 188.2 191.3 192.9 194.6 194.9 195.4 193.3 190.7 188.6 186.8 185.4 184.6 184.3 184.2 185.2 190.3 192.4 196.9 198.3 197.9 197.9 198.1 197.7 195.9 194.1 193.8 193.6 194.2 193.7 192.0 189.1 183.0 178.4 175.8 173.5 171.5 170.0 169.1 167.9 166.6 165.8 164.6 162.7 165.5 167.2 170.6 172.0 172.0 170.7 167.7 164.5 161.0 159.3 159.0 159.9 161.9 164.5 167.3 169.9 171.1 171.3 170.7 170.0 168.6 165.7 162.2 158.7 156.9 156.5 157.7 158.7 160.9 162.6 166.9 171.1 174.1 175.4 175.5 175.3 174.4 172.5 166.3 160.6 153.5 149.9 150.6 150.5 150.7 150.9 150.9 151.0 151.5 151.6 151.8 151.6 151.4 152.0 151.9 152.1 151.8 152.2 152.2 152.2 152.1 152.1 152.2 152.2 151.9 152.1 152.1 152.2 152.1 152.3 152.1 152.6 152.2 152.4 152.3", "f0_timestep": "0.011609977324263039", - "energy": "0.0009 0.0007 0.0003 0.0 0.0 0.0 0.0 0.0002 0.0003 0.0 0.0004 0.0006 0.0005 0.0007 0.0007 0.001 0.0019 0.002 0.0026 0.0028 0.0024 0.0031 0.0028 0.0034 0.0036 0.0036 0.0033 0.0033 0.0032 0.0034 0.0037 0.0031 0.0032 0.0037 0.0032 0.0036 0.0027 0.0032 0.0027 0.0021 0.0021 0.0018 0.0023 0.0021 0.0011 0.0019 0.0026 0.006 0.0068 0.0135 0.0416 0.0612 0.0793 0.0909 0.0964 0.0997 0.0864 0.8695 0.0903 0.0874 0.0884 0.0879 0.0837 0.0796 0.0683 0.0523 0.0389 0.0243 0.0219 0.0205 0.0171 0.0361 0.0564 0.0711 0.087 0.094 0.0936 0.0925 0.0892 0.0847 0.0819 0.0784 0.0767 0.0756 0.0705 0.0602 0.0476 0.0316 0.0213 0.0188 0.0177 0.0297 0.0528 0.0705 0.0858 0.0959 0.097 0.0973 0.0948 0.0898 0.0853 0.0807 0.0781 0.0771 0.0776 0.0789 0.0793 0.0806 0.0801 0.0808 0.0812 0.0815 0.0823 0.0802 0.0778 0.0764 0.0758 0.0756 0.0766 0.0769 0.0764 0.0761 0.0775 0.0777 0.0782 0.0757 0.0668 0.0549 0.0381 0.0219 0.0128 0.0109 0.0222 0.0468 0.0631 0.0765 0.0854 0.0869 0.0874 0.0869 0.0852 0.0827 0.0798 0.0782 0.0752 0.0695 0.0586 0.0464 0.0305 0.0153 0.0134 0.0126 0.0113 0.0217 0.0379 0.051 0.061 0.0662 0.0687 0.068 0.0678 0.069 0.0678 0.0687 0.0664 0.0594 0.0491 0.036 0.0222 0.0108 0.0098 0.0107 0.0097 0.0263 0.0417 0.0582 0.0722 0.0823 0.0876 0.0894 0.0895 0.0876 0.0848 0.0825 0.08 0.0775 0.0768 0.0769 0.0781 0.0798 0.0811 0.0824 0.0826 0.0829 0.0815 0.0811 0.0779 0.0764 0.0739 0.0724 0.0719 0.0706 0.0712 0.0698 0.0685 0.0686 0.0668 0.0656 0.0641 0.061 0.0591 0.057 0.0575 0.0602 0.0621 0.0643 0.0644 0.065 0.0644 0.0654 0.0674 0.0671 0.0684 0.0673 0.0664 0.0661 0.0651 0.0648 0.0636 0.0629 0.0612 0.0608 0.0619 0.0618 0.0626 0.0649 0.0669 0.068 0.0677 0.0677 0.0649 0.0619 0.0589 0.0536 0.046 0.0369 0.0282 0.0176 0.0093 0.0064 0.0061 0.0059 0.0054 0.004 0.0055 0.0228 0.0368 0.0489 0.0612 0.0674 0.0724 0.0743 0.0728 0.0722 0.0685 0.0625 0.0577 0.0529 0.049 0.0482 0.049 0.0493 0.051 0.0532 0.0535 0.0531 0.0526 0.0499 0.0481 0.0459 0.0441 0.0405 0.0379 0.0353 0.0331 0.0304 0.0294 0.0277 0.0262 0.0252 0.0247 0.0234 0.0233 0.0234 0.0219 0.0208 0.0193 0.0165 0.0137 0.0104 0.0069 0.0039 0.0014 0.0009 0.0005 0.0009 0.0005 0.0004 0.001 0.0007 0.0003 0.0004 0.0004 0.0007 0.0005 0.0009 0.0006 0.0006 0.0006 0.0006 0.0003 0.001 0.0003 0.0003 0.0004 0.0012 0.0006 0.0008 0.0013 0.0011 0.0007 0.001 0.0005", + "energy": "-84.7352 -79.8672 -76.9259 -74.3279 -72.6848 -71.6481 -71.832 -72.0578 -71.9213 -71.3753 -70.5525 -69.1435 -67.2656 -64.7839 -62.4082 -60.1304 -58.1232 -56.2407 -54.348 -52.7843 -51.5178 -50.4847 -49.7667 -49.0566 -48.5893 -48.0153 -47.7907 
-47.8097 -47.8982 -47.8828 -48.3452 -48.721 -49.1732 -50.0205 -51.0167 -52.1942 -53.4129 -54.5532 -55.271 -55.6153 -56.0106 -56.1224 -56.2356 -55.5307 -54.7165 -53.048 -49.8197 -45.5118 -40.2561 -34.7711 -30.2984 -26.4422 -23.7321 -22.1378 -21.3318 -20.9819 -20.7975 -20.617 -20.4349 -20.2809 -20.2469 -20.3634 -21.1412 -22.1028 -23.8361 -26.0036 -28.3613 -30.3702 -31.6017 -31.529 -30.328 -28.2048 -25.8082 -23.5626 -21.8648 -20.929 -20.6945 -20.9283 -21.1971 -21.5132 -21.9465 -22.2739 -22.6563 -23.379 -24.2977 -25.6751 -27.8817 -29.9259 -31.2916 -31.5332 -31.0575 -29.2528 -26.8362 -24.3459 -22.0056 -20.8812 -20.1878 -20.1209 -20.2813 -20.8038 -21.1047 -21.4863 -21.8386 -22.0804 -22.1849 -22.1056 -22.2467 -22.0759 -21.8817 -21.6671 -21.7616 -21.6644 -21.7135 -21.8769 -21.7619 -21.9202 -22.0151 -22.1401 -22.0962 -22.0856 -21.9162 -22.0732 -21.9159 -22.0018 -22.5623 -23.8584 -25.7387 -28.2692 -30.4974 -31.8417 -31.9793 -31.3842 -29.5158 -27.2089 -24.6908 -22.8075 -22.0394 -21.4496 -21.2549 -21.1619 -21.1211 -21.5389 -21.7218 -22.2774 -23.4144 -25.3192 -27.5924 -30.0088 -32.3715 -33.9393 -34.2465 -33.4648 -31.9032 -29.5269 -27.4926 -25.5921 -24.1803 -23.3013 -22.8977 -22.5579 -22.2412 -22.3109 -22.5847 -22.8439 -23.6506 -24.6284 -26.3527 -28.3216 -30.6756 -32.6651 -34.0579 -33.8576 -32.8646 -31.0432 -28.4169 -25.8869 -23.8762 -22.4292 -21.7915 -21.7346 -21.5987 -21.8586 -21.9975 -22.3653 -22.5604 -22.7493 -22.5109 -22.4846 -22.3576 -22.1145 -21.8895 -21.7822 -21.7554 -21.7775 -21.8612 -22.1534 -22.2173 -22.3252 -22.6143 -22.6941 -22.6958 -22.9424 -23.063 -23.2527 -23.4739 -23.7431 -24.0815 -24.2241 -24.6855 -24.7578 -24.6764 -24.4105 -24.2367 -23.7808 -23.2608 -22.886 -22.4996 -22.03 -22.1866 -22.1772 -22.0944 -22.3072 -22.34 -22.6182 -22.9866 -23.1069 -23.378 -23.6287 -23.7008 -23.7493 -23.6413 -23.869 -23.7113 -23.5569 -23.4802 -23.4291 -23.2908 -23.4851 -23.6303 -23.7106 -24.2068 -24.6009 -25.479 -26.6054 -28.727 -31.6709 -35.2305 -38.7899 -42.3053 -45.4179 -47.6664 -48.0639 -46.3594 -43.4571 -39.5869 -35.6352 -31.6699 -28.4035 -26.1402 -25.1043 -24.4962 -24.4065 -24.3134 -24.6352 -25.1902 -26.0289 -26.7239 -27.5507 -28.3142 -28.8368 -29.364 -29.6205 -29.7165 -29.8438 -30.2605 -30.641 -30.973 -31.7734 -32.3849 -33.1748 -34.0021 -34.6717 -35.3761 -35.9701 -36.548 -36.9388 -37.3682 -37.5807 -37.6706 -37.836 -38.0775 -38.6269 -38.8694 -39.5202 -40.374 -41.2726 -42.2172 -43.8913 -45.8412 -48.8779 -52.7601 -57.2105 -61.617 -65.6751 -68.8343 -71.0341 -72.2272 -72.433 -72.4991 -72.3857 -72.0681 -71.7085 -71.5307 -71.3873 -71.1517 -70.7657 -70.584 -70.1089 -69.8691 -69.5993 -69.2945 -69.4385 -69.916 -70.4948 -71.649 -72.8413 -74.333 -75.6898 -76.7141 -76.7074 -76.1863 -73.9876 -74.2594", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0002 0.0002 0.0001 0.0001 0.0001 0.0 0.0001 0.0002 0.0001 0.0003 0.0003 0.0003 0.0005 0.0008 0.0012 0.0012 0.0017 0.0018 0.0021 0.0024 0.0026 0.0031 0.0034 0.0037 0.0038 0.004 0.004 0.0035 0.0035 0.0034 0.0034 0.0037 0.0036 0.0037 0.0034 0.0031 0.0031 0.0024 0.002 0.0015 0.0013 0.001 0.001 0.0009 0.0009 0.0016 0.0024 0.0036 0.0052 0.0059 0.006 0.0054 0.0035 0.003 0.0026 0.0032 0.0028 0.1227 0.0016 0.0031 0.0024 0.0018 0.002 0.0041 0.0088 0.0159 0.0184 0.021 0.0218 0.02 0.0174 0.0135 0.0092 0.0046 0.003 0.0028 0.0024 0.002 0.002 0.0021 0.0019 0.0016 0.0012 0.0009 0.0014 0.0049 0.0102 0.0142 0.0176 0.0189 0.018 0.0149 0.0106 0.0068 0.0035 0.0022 0.0017 0.0016 0.0015 0.0013 0.0011 0.0012 0.0012 0.0013 0.0014 0.0012 0.0009 0.0009 0.0008 0.0008 0.0009 0.0008 
0.0008 0.001 0.0011 0.0007 0.0008 0.0007 0.0005 0.0009 0.0006 0.0004 0.0006 0.0006 0.0007 0.0006 0.0005 0.002 0.0051 0.0081 0.0101 0.0107 0.0097 0.0075 0.005 0.0022 0.0016 0.0012 0.0009 0.001 0.001 0.001 0.0009 0.0007 0.0007 0.0013 0.0042 0.0076 0.0098 0.0119 0.0126 0.0128 0.012 0.0098 0.0077 0.0039 0.0016 0.0015 0.0012 0.0011 0.0012 0.0009 0.001 0.001 0.0014 0.0011 0.0014 0.0024 0.0053 0.0075 0.0084 0.0089 0.0082 0.0065 0.0045 0.0033 0.0025 0.0024 0.0022 0.0021 0.0021 0.0023 0.0018 0.0019 0.0019 0.0016 0.002 0.0021 0.0019 0.0018 0.0015 0.0016 0.0015 0.0016 0.0018 0.0017 0.0017 0.0017 0.0019 0.0018 0.0018 0.0017 0.0016 0.0015 0.0015 0.0016 0.0016 0.0016 0.0017 0.0018 0.0016 0.0018 0.0017 0.0016 0.0013 0.001 0.001 0.001 0.001 0.001 0.0009 0.0009 0.0012 0.0013 0.0012 0.0012 0.0013 0.0013 0.0015 0.0015 0.0017 0.0014 0.0016 0.0015 0.0016 0.0014 0.0015 0.0016 0.0014 0.0015 0.0014 0.0013 0.0012 0.0012 0.0009 0.0014 0.0034 0.005 0.0061 0.0066 0.0065 0.0061 0.0053 0.0047 0.0042 0.0032 0.0024 0.0017 0.0015 0.0013 0.0011 0.0013 0.0014 0.0016 0.0019 0.0019 0.0018 0.0012 0.0011 0.001 0.0011 0.001 0.0011 0.0014 0.001 0.0012 0.0011 0.0011 0.001 0.0009 0.0012 0.0009 0.0011 0.0008 0.0007 0.0008 0.0008 0.0005 0.0004 0.0004 0.0005 0.0005 0.0003 0.0004 0.0003 0.0002 0.0003 0.0001 0.0016 0.0025 0.0026 0.0028 0.0021 0.001 0.0004 0.0002 0.0002 0.0003 0.0001 0.0003 0.0003 0.0003 0.0002 0.0001 0.0002 0.0002 0.0001 0.0001 0.0001 0.0 0.0002 0.0001 0.0001 0.0 0.0 0.0001 0.0002 0.0001 0.0002 0.0001 0.0001 0.0003 0.0006", + "breathiness": "-87.4023 -83.7471 -79.9261 -76.2726 -73.5368 -72.2088 -72.0842 -72.5934 -72.6872 -72.1092 -70.8632 -69.0234 -66.9089 -64.7018 -62.5399 -60.3556 -58.4561 -56.9375 -54.9487 -53.4693 -52.0216 -50.9446 -49.8708 -49.036 -48.4183 -47.893 -47.4594 -47.324 -47.3168 -47.5042 -47.7785 -48.0658 -48.4877 -49.2262 -50.3533 -51.7097 -54.018 -56.2916 -58.5401 -60.1935 -61.6591 -62.0668 -61.8966 -60.8765 -59.0071 -56.7219 -54.122 -51.8587 -49.7105 -48.7321 -48.4699 -49.2776 -50.6332 -52.3337 -54.2728 -55.8789 -57.162 -57.8999 -57.99 -57.6075 -56.4915 -54.5738 -52.0173 -48.6569 -45.2853 -41.9198 -38.9513 -37.3169 -36.5815 -37.0911 -38.728 -41.1663 -44.052 -46.8065 -49.1067 -51.422 -53.4577 -54.8972 -56.1798 -57.2354 -58.2462 -58.2166 -56.8219 -54.0866 -50.584 -46.6198 -42.9339 -39.6841 -38.0512 -37.7267 -38.9943 -41.1771 -44.0515 -47.2732 -50.4864 -53.4759 -55.618 -57.1267 -57.5657 -58.0058 -58.0789 -58.5307 -58.9957 -59.5983 -60.3975 -61.3491 -62.1231 -62.759 -63.2378 -63.5315 -63.9271 -64.1704 -64.4451 -64.6687 -65.0088 -65.4177 -65.4004 -65.5347 -65.8061 -66.0828 -66.619 -66.7735 -66.7937 -65.5988 -63.0213 -59.3153 -54.8859 -49.7751 -45.2391 -41.8679 -40.1275 -40.2778 -41.8414 -44.4408 -47.5868 -50.6879 -53.5847 -55.9504 -57.4868 -58.6542 -58.7943 -58.846 -58.253 -56.9382 -54.4742 -51.4223 -47.7654 -44.2699 -41.3172 -38.9073 -38.1136 -38.1926 -39.7047 -41.7692 -44.4515 -47.424 -50.118 -52.206 -53.8979 -54.8476 -55.3634 -55.8374 -56.1478 -56.1625 -55.6567 -54.5991 -52.9109 -50.8082 -48.3667 -46.5937 -45.5597 -45.2195 -45.6503 -46.5005 -48.0453 -49.6811 -51.3976 -52.6017 -53.665 -54.1887 -54.4018 -54.6095 -55.1119 -55.526 -55.9484 -56.5101 -56.87 -57.0664 -56.9921 -57.1697 -56.7349 -56.5491 -56.416 -56.1916 -56.1511 -56.0221 -56.0511 -56.1662 -56.4109 -56.8951 -57.0248 -57.2598 -57.3073 -57.07 -56.7883 -56.7131 -56.325 -56.4446 -56.1632 -56.3563 -56.5727 -56.934 -57.3434 -57.5882 -58.0005 -58.0956 -58.3511 -58.3292 -58.3733 -58.2889 -57.9853 -57.9878 -57.6698 -57.6713 -57.5185 -57.784 -57.787 
-57.5808 -57.5089 -57.4747 -57.6651 -57.4475 -57.3287 -57.3054 -57.3586 -57.3701 -57.5755 -57.8936 -58.0096 -58.489 -58.806 -59.0545 -58.9442 -58.3993 -57.2803 -55.8161 -53.8555 -52.0915 -50.5284 -49.7101 -49.441 -49.8015 -50.7025 -51.9292 -53.8028 -55.5588 -57.2823 -58.4974 -59.206 -59.7814 -59.4996 -59.4433 -59.039 -58.5735 -58.2238 -57.8131 -57.6765 -57.6879 -57.7562 -57.9724 -58.3917 -58.5699 -59.1574 -59.5084 -60.0912 -60.3682 -61.0857 -61.3984 -62.1441 -63.1493 -63.9927 -64.7196 -65.4197 -65.9989 -66.3302 -66.5874 -67.2024 -67.5098 -67.8022 -68.2645 -68.5451 -68.8121 -69.3739 -70.1533 -70.3638 -69.8249 -68.204 -66.2832 -64.1649 -62.3676 -62.2465 -63.1775 -66.0067 -69.2764 -72.6114 -76.0793 -78.6537 -80.3202 -81.308 -81.8311 -82.0872 -81.9502 -81.6785 -81.1367 -80.6834 -80.7442 -80.5727 -80.5496 -80.3233 -80.003 -79.9658 -79.8149 -79.848 -80.566 -81.5197 -82.8134 -84.1416 -85.3906 -86.0478 -86.3451 -85.1485 -83.9271 -82.0553", "breathiness_timestep": "0.011609977324263039" }, { @@ -234,9 +234,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0", "f0_seq": "129.1 129.1 129.0 128.8 129.0 129.3 129.0 129.0 128.9 128.9 128.9 128.5 128.8 128.5 128.4 128.7 128.7 129.0 128.8 128.9 128.9 129.2 129.2 129.5 129.3 129.4 129.3 129.0 128.6 128.3 125.1 125.5 128.2 130.7 135.6 140.7 145.9 151.8 157.6 161.6 164.3 166.1 166.8 167.0 165.7 164.2 160.7 155.9 151.8 154.1 157.5 160.7 164.0 166.6 166.5 163.6 163.7 163.5 164.0 164.3 164.2 164.4 165.0 164.1 163.4 163.2 160.8 157.7 153.5 161.5 168.0 175.2 181.8 183.4 184.7 184.3 183.6 183.8 185.3 185.9 186.3 186.2 186.3 186.6 186.7 186.5 186.6 185.8 185.2 183.2 182.8 184.3 188.3 192.9 196.2 198.5 200.2 199.1 197.7 196.0 195.9 196.1 195.4 195.9 195.9 195.6 196.1 195.7 192.4 183.4 185.0 189.2 194.5 199.0 203.1 195.9 190.1 190.6 188.6 187.2 184.7 183.9 184.1 184.6 185.9 187.3 187.7 185.9 180.5 174.3 166.3 164.2 160.7 162.2 165.3 162.0 162.5 162.4 163.4 164.7 165.7 167.0 166.9 166.0 164.4 162.0 159.0 153.6 148.6 147.0 146.7 147.1 147.6 147.7 148.0 147.3 144.1 143.4 143.7 144.2 144.8 146.2 147.1 147.2 146.3 146.2 145.7 145.6 145.1 144.9 145.3 146.5 146.3 146.0 146.5 147.4 147.9 147.8 147.3 147.4 147.1 147.4 147.7 147.5 147.1 147.5 147.7 147.2 144.6 139.9 143.0 146.2 149.8 153.7 158.4 162.6 165.4 167.7 167.5 167.5 167.3 166.3 165.3 164.6 163.0 161.7 161.0 159.8 159.0 160.3 161.4 163.5 164.9 166.5 166.9 166.7 165.9 165.0 164.6 163.9 163.2 163.0 163.0 161.8 161.3 161.0 162.1 163.0 164.4 165.6 166.9 167.3 167.7 167.7 166.5 165.2 164.0 163.1 162.3 162.1 162.1 162.1 161.9 161.7 162.1 163.3 163.8 164.9 166.6 167.9 168.8 169.2 169.2 169.2 168.3 167.9 164.5 162.2 160.9 160.1 159.7 160.7 163.6 166.6 169.3 171.7 173.4 174.8 175.0 174.4 171.5 166.5 162.0 160.0 158.7 158.4 158.8 160.8 164.7 168.7 172.7 175.0 176.3 176.7 175.4 172.3 167.1 160.3 156.1 153.4 153.4 154.6 157.1 160.0 165.2 171.5 176.4 180.4 180.4 177.7 173.0 168.4 162.3 156.2 152.6 151.3 152.0 154.1 157.7 163.4 169.6 171.3 170.1 171.1 171.8 172.3 170.1 167.4 165.9 163.6 163.7 164.0 163.1 162.7 163.0 162.9 163.0 163.7", "f0_timestep": "0.011609977324263039", - "energy": "0.001 0.0004 0.0013 0.0017 0.0026 0.0029 0.0036 0.0045 0.0044 0.0053 0.0059 0.0062 0.0064 0.007 0.0078 0.0073 0.0072 0.0065 0.0054 0.005 0.0051 0.0036 0.0033 0.0025 0.0017 0.0015 0.0041 0.0164 0.0264 0.0317 0.0389 0.0437 0.047 0.0499 0.0542 0.0585 0.0605 0.0619 0.0616 0.059 0.0585 0.0586 0.0581 0.0591 0.0592 0.0549 0.0499 0.0412 0.0283 0.0182 0.0145 0.0142 0.024 0.041 0.0542 0.0657 0.0735 0.0755 0.0758 0.0757 0.0735 0.073 0.0728 0.0715 0.0702 0.0638 0.0541 0.0414 
0.0249 0.0101 0.008 0.0078 0.0323 0.0536 0.0699 0.0808 0.0854 0.084 0.0816 0.0803 0.079 0.0771 0.078 0.0771 0.0755 0.0752 0.0723 0.0698 0.0653 0.0591 0.0547 0.0505 0.05 0.0586 0.0667 0.0734 0.0768 0.0752 0.0726 0.0701 0.0704 0.0685 0.0678 0.0678 0.066 0.066 0.0624 0.0543 0.0433 0.031 0.0184 0.0116 0.0115 0.0215 0.0413 0.0546 0.0643 0.0694 0.0695 0.0682 0.0663 0.0653 0.0623 0.0612 0.0602 0.0603 0.0606 0.0592 0.0544 0.0458 0.0351 0.0221 0.0167 0.0203 0.0363 0.0477 0.0571 0.0642 0.0643 0.0642 0.0627 0.06 0.0595 0.0583 0.0565 0.0512 0.0446 0.0354 0.0251 0.0173 0.0092 0.0075 0.0073 0.0069 0.023 0.0386 0.0509 0.0595 0.0644 0.0639 0.0637 0.0644 0.0641 0.0627 0.0613 0.0595 0.0579 0.0575 0.0563 0.0567 0.056 0.0548 0.0543 0.0536 0.0526 0.0527 0.0531 0.0532 0.0536 0.0533 0.0537 0.0541 0.0534 0.054 0.0532 0.0532 0.0503 0.0449 0.037 0.0282 0.0187 0.0123 0.0101 0.008 0.0094 0.0261 0.0414 0.0537 0.0628 0.0661 0.0643 0.0626 0.0629 0.0624 0.0618 0.061 0.0585 0.0575 0.0562 0.0561 0.0576 0.0587 0.061 0.0612 0.0615 0.0617 0.0616 0.0619 0.062 0.0624 0.0624 0.0632 0.0629 0.063 0.0624 0.0629 0.0636 0.0636 0.065 0.0647 0.0655 0.0669 0.0675 0.0695 0.0703 0.0702 0.07 0.0689 0.0688 0.0695 0.0704 0.0714 0.0712 0.0718 0.0715 0.0711 0.0713 0.0711 0.071 0.0716 0.072 0.0716 0.0719 0.0714 0.0719 0.0724 0.0724 0.0726 0.0705 0.0692 0.0673 0.0653 0.0632 0.0626 0.0611 0.0597 0.0591 0.0584 0.0577 0.0571 0.0564 0.0564 0.0549 0.0532 0.0515 0.0488 0.0472 0.0456 0.0449 0.0439 0.0437 0.0429 0.042 0.0414 0.0399 0.0395 0.0376 0.0367 0.0357 0.0344 0.0327 0.0309 0.0298 0.0291 0.0279 0.0263 0.0254 0.0247 0.0243 0.0248 0.0249 0.0249 0.0242 0.0232 0.0217 0.0198 0.0181 0.0171 0.0163 0.0163 0.0162 0.016 0.0156 0.0161 0.0163 0.0153 0.0142 0.0116 0.0094 0.0059 0.0024 0.0009 0.0001 0.0 0.0 0.0002 0.0001 0.0", + "energy": "-64.1018 -61.3184 -58.9542 -56.4449 -54.2023 -52.8365 -51.6178 -50.7199 -50.0199 -48.9616 -48.1479 -47.4129 -47.053 -46.9205 -46.8455 -47.2298 -47.8541 -48.7469 -49.8334 -51.0681 -52.4373 -53.5948 -54.1032 -53.0972 -50.8545 -46.9282 -43.1534 -38.5575 -34.6455 -31.2666 -28.8533 -27.2909 -26.2728 -25.5908 -25.2447 -24.8699 -24.7928 -24.8413 -24.9618 -24.8939 -25.0956 -25.1676 -25.5976 -26.1298 -26.6562 -27.8278 -29.2328 -30.5347 -31.73 -32.0966 -31.8551 -30.5627 -29.0935 -27.3723 -25.5218 -24.2052 -23.4166 -23.0206 -22.8561 -22.9469 -22.9143 -22.9606 -23.4271 -24.017 -25.1995 -27.0661 -29.8301 -32.397 -34.3888 -35.0999 -34.3229 -32.4491 -29.5738 -27.0529 -24.2577 -23.5166 -22.7934 -22.9076 -22.9279 -22.8686 -22.9977 -22.7463 -22.9192 -23.0439 -23.0631 -23.2007 -23.4307 -23.7052 -24.1363 -24.1041 -24.5374 -24.4393 -24.4992 -24.2288 -23.7761 -23.5321 -23.2661 -23.3382 -23.3931 -23.3997 -23.6182 -23.8246 -24.3092 -24.9916 -25.8931 -27.0743 -28.9498 -31.3746 -33.7415 -35.836 -36.4801 -35.6294 -33.7786 -31.2671 -28.6964 -26.2253 -24.634 -24.3737 -24.3943 -24.5373 -24.6418 -24.8779 -25.1 -25.0547 -25.1876 -25.608 -26.6188 -27.9355 -29.8404 -31.6699 -32.9513 -33.6968 -32.8821 -31.7697 -30.0102 -27.7846 -25.9478 -24.5666 -24.0071 -23.9044 -24.1252 -24.4845 -24.8521 -25.6086 -26.6042 -28.2569 -30.3077 -32.8674 -35.4691 -37.8677 -39.2658 -39.1958 -37.9697 -35.6217 -32.5562 -29.74 -27.0365 -25.0704 -24.1087 -23.5522 -23.2099 -22.9972 -23.0715 -23.258 -23.2903 -23.4073 -23.8235 -23.9656 -24.0478 -24.2054 -24.1983 -24.3916 -24.4044 -24.3876 -24.3581 -24.3208 -24.1787 -24.392 -24.5515 -24.5372 -24.7124 -24.7598 -24.8726 -25.0365 -25.4248 -25.9938 -27.1671 -28.9816 -31.4202 -33.965 -36.3761 -37.6611 -37.7128 -36.5157 -34.1589 -31.2656 
-28.4597 -26.1319 -24.3774 -23.6019 -23.2963 -23.2357 -22.9692 -23.075 -23.0069 -22.9702 -22.9875 -23.0596 -22.9744 -22.8753 -22.8405 -22.7638 -22.7954 -22.6349 -22.4099 -22.3773 -22.2519 -22.1923 -22.2912 -22.3701 -22.4146 -22.3961 -22.3991 -22.3903 -22.3797 -22.1407 -22.3959 -22.2798 -22.2652 -22.0933 -22.1594 -22.0619 -22.076 -22.044 -21.9406 -21.9718 -22.1448 -22.0523 -22.144 -22.188 -22.2844 -22.3299 -22.538 -22.6869 -22.6456 -22.679 -22.5463 -22.809 -22.6829 -22.6972 -22.7431 -22.8189 -22.7588 -22.8588 -22.8015 -23.1431 -23.38 -23.5744 -24.0022 -24.3374 -24.4493 -24.5934 -24.8206 -24.6224 -25.1048 -24.8279 -24.8096 -24.767 -24.801 -25.0546 -25.3021 -25.3914 -25.8477 -26.2274 -26.4811 -26.9874 -27.3616 -27.5613 -27.8961 -28.07 -28.2265 -28.0501 -28.2195 -28.3183 -28.4786 -28.7998 -29.022 -29.3519 -29.8475 -30.0093 -30.5059 -30.9516 -31.0931 -31.3736 -31.4184 -31.6111 -31.6996 -31.7501 -31.4111 -31.3188 -30.9852 -30.8067 -31.0688 -31.4356 -32.3187 -33.1853 -34.1299 -35.1754 -35.4358 -35.6689 -35.8314 -35.4837 -35.1172 -35.0359 -35.177 -35.3696 -36.032 -36.8225 -38.5304 -41.2609 -44.6406 -48.4311 -52.2015 -55.3428 -57.4402 -57.8112 -56.0323 -52.387", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0004 0.0008 0.0012 0.0018 0.0028 0.0034 0.0044 0.0049 0.0052 0.0061 0.0068 0.0073 0.0073 0.0073 0.0071 0.007 0.0069 0.0067 0.0062 0.0051 0.0044 0.0034 0.0027 0.0022 0.0009 0.0016 0.0041 0.0062 0.0064 0.0057 0.0038 0.0006 0.0005 0.0004 0.0009 0.0009 0.0009 0.0008 0.0007 0.0005 0.0006 0.0006 0.0006 0.0005 0.0005 0.0006 0.0005 0.0028 0.0065 0.0093 0.0132 0.0153 0.0145 0.0126 0.0082 0.0032 0.0018 0.0017 0.0018 0.0016 0.0016 0.0017 0.0016 0.0017 0.0014 0.0011 0.0011 0.0011 0.001 0.0018 0.0039 0.0042 0.0047 0.0046 0.0035 0.0029 0.0025 0.0022 0.0021 0.0018 0.0017 0.0015 0.0013 0.0012 0.001 0.0009 0.0008 0.0006 0.0007 0.0005 0.0005 0.0005 0.0005 0.0004 0.0006 0.0006 0.0005 0.0005 0.0005 0.0006 0.0005 0.0005 0.0003 0.0005 0.0005 0.0006 0.0007 0.0008 0.0018 0.0043 0.0068 0.0088 0.0104 0.0103 0.0088 0.0066 0.003 0.0015 0.001 0.0007 0.0008 0.0007 0.0006 0.0005 0.0005 0.0004 0.0002 0.0004 0.0002 0.0001 0.0 0.0001 0.0002 0.0006 0.001 0.0012 0.0009 0.0009 0.0007 0.0006 0.0006 0.0006 0.0005 0.0005 0.0006 0.0006 0.0006 0.001 0.0035 0.0048 0.0063 0.0071 0.0067 0.0062 0.0047 0.0038 0.0027 0.0016 0.0015 0.0012 0.0012 0.0013 0.001 0.0009 0.0009 0.001 0.0012 0.0013 0.0011 0.0014 0.0012 0.001 0.0009 0.0008 0.0009 0.0007 0.0007 0.0005 0.0006 0.0006 0.0005 0.0002 0.0004 0.0005 0.0005 0.0005 0.0004 0.0006 0.0011 0.0037 0.007 0.0084 0.0095 0.0094 0.0076 0.0061 0.0044 0.0031 0.0025 0.0022 0.002 0.0021 0.0015 0.0016 0.0012 0.0013 0.0014 0.0014 0.0014 0.0014 0.0015 0.0017 0.0015 0.0018 0.0017 0.0017 0.0018 0.0016 0.0014 0.0015 0.0011 0.0012 0.0012 0.0013 0.0013 0.0013 0.0014 0.0013 0.0016 0.0015 0.0014 0.0015 0.0015 0.0014 0.0014 0.0015 0.0014 0.0015 0.0015 0.0015 0.0013 0.0014 0.0013 0.0013 0.0012 0.0012 0.0013 0.0013 0.0017 0.0016 0.0018 0.0017 0.0016 0.0016 0.0018 0.002 0.002 0.0019 0.0017 0.0017 0.0016 0.0016 0.0018 0.0018 0.0017 0.0018 0.0017 0.0017 0.0015 0.0017 0.0018 0.002 0.0018 0.0019 0.0015 0.0016 0.0014 0.0012 0.0014 0.0012 0.001 0.0011 0.0008 0.0009 0.0009 0.0008 0.0009 0.001 0.0008 0.0007 0.0008 0.0007 0.0006 0.0006 0.0006 0.0007 0.0007 0.0006 0.0004 0.0004 0.0004 0.0005 0.0004 0.0005 0.0005 0.0004 0.0002 0.0001 0.0 0.0001 0.0003 0.0002 0.0004 0.0003 0.0002 0.0004 0.0005 0.0011 0.0015 0.0015 0.0013 0.001 0.0006 0.0003 0.0001 0.0002 0.0003 0.0005", + "breathiness": "-66.8125 -64.5206 -61.2219 
-58.5055 -55.6692 -53.5151 -51.8447 -50.3154 -49.102 -48.3448 -47.7998 -47.0586 -46.6891 -46.6378 -46.7404 -47.1582 -47.9117 -48.9888 -50.0356 -51.5782 -53.4637 -55.1155 -57.0541 -58.7987 -60.4425 -61.662 -62.434 -63.2906 -63.6391 -63.7348 -63.3814 -62.6415 -61.8641 -60.8648 -60.1829 -59.8885 -59.8846 -60.509 -60.7642 -61.6211 -62.1721 -62.8918 -63.3023 -63.0537 -61.0665 -57.9331 -53.5448 -48.6039 -43.6498 -39.9478 -37.831 -38.0416 -40.6044 -44.2983 -48.119 -51.541 -54.1855 -56.1656 -56.8771 -57.1047 -57.2041 -57.4085 -57.6143 -57.488 -57.1491 -56.3656 -55.0242 -53.2973 -51.4503 -49.6077 -48.0511 -47.2743 -47.4613 -48.0945 -49.6898 -51.2559 -52.8357 -54.0737 -55.173 -55.7691 -56.4743 -57.085 -57.9055 -58.6217 -59.0793 -59.4392 -59.7936 -60.1016 -60.2229 -60.9657 -61.587 -62.4695 -63.2966 -64.0036 -64.5724 -64.8749 -64.7914 -65.1302 -65.0953 -65.1892 -65.3994 -65.7544 -65.8551 -66.1323 -66.4106 -66.2617 -64.8882 -62.8313 -59.9801 -56.5217 -53.4219 -51.2682 -50.3248 -50.7862 -52.4574 -54.558 -56.5149 -58.0851 -59.3394 -60.1869 -60.8038 -61.5748 -62.2678 -63.4309 -64.3482 -65.4351 -66.9459 -67.9846 -68.2404 -67.7635 -66.879 -65.3481 -63.7497 -62.8446 -62.1397 -62.6201 -63.4417 -64.3485 -65.1789 -65.6382 -65.899 -66.0133 -65.8897 -65.0477 -63.2465 -60.7667 -57.3462 -53.8785 -50.4185 -47.7562 -45.9126 -45.5439 -46.2641 -47.7186 -49.1768 -51.1915 -52.9817 -54.8568 -56.1607 -56.9937 -57.3984 -57.4015 -57.4615 -57.4842 -57.6171 -57.7148 -58.2104 -58.8211 -59.6197 -60.5597 -61.5992 -62.7798 -63.7448 -64.6424 -65.2396 -65.6845 -66.2292 -66.7915 -67.0666 -67.6299 -68.1316 -68.6542 -69.0425 -69.2195 -68.5747 -66.6025 -63.3355 -59.1185 -54.3307 -50.0337 -46.3594 -44.1944 -43.5124 -44.1816 -45.5327 -47.414 -49.4329 -51.5387 -53.2061 -54.1771 -54.8302 -55.2949 -55.7727 -56.0312 -56.3276 -56.6107 -56.8594 -56.8986 -56.6671 -56.3622 -55.8318 -55.6553 -55.2366 -54.9577 -54.9219 -54.8318 -54.8039 -54.813 -54.684 -54.4854 -54.3294 -54.2733 -54.6501 -54.7408 -55.2324 -55.3045 -55.4379 -55.5174 -55.5128 -55.8482 -55.9199 -55.9174 -55.9959 -56.1138 -55.9355 -55.8426 -55.5849 -55.6885 -55.5803 -55.5874 -55.7086 -55.7929 -55.7558 -55.6705 -55.6201 -55.5214 -55.611 -55.5222 -55.6332 -55.7691 -55.8022 -55.5141 -55.0863 -54.7803 -54.5436 -54.2038 -54.0469 -54.3348 -54.7788 -54.9453 -55.1037 -55.0829 -55.0725 -55.0619 -54.772 -54.9349 -54.8639 -54.7605 -54.569 -54.6894 -54.4673 -54.6637 -55.143 -55.6648 -56.421 -56.8893 -57.288 -57.4047 -57.6364 -57.7527 -57.8257 -58.2024 -58.4812 -58.9212 -59.049 -59.1178 -59.5012 -59.8524 -60.5657 -61.2639 -62.2255 -63.194 -63.9324 -64.574 -65.0983 -65.0984 -65.1495 -64.9699 -64.7088 -64.2561 -63.8105 -63.6704 -64.0639 -64.6886 -65.6934 -67.6461 -68.975 -70.298 -70.9714 -71.3237 -70.8986 -70.3763 -69.7112 -69.0447 -68.5711 -68.5597 -67.9454 -67.2013 -65.7152 -63.9033 -61.7938 -60.4696 -60.1279 -60.7527 -61.7846 -62.6734 -62.9628 -62.1819", "breathiness_timestep": "0.011609977324263039" }, { @@ -250,9 +250,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "171.5 172.2 172.1 171.6 171.4 171.7 171.5 171.7 171.7 171.6 171.5 172.0 171.6 171.8 171.5 171.4 171.5 171.4 171.3 171.2 171.3 171.3 171.1 171.2 171.4 171.2 171.2 171.5 171.1 171.4 171.1 171.0 170.8 171.1 171.0 171.0 171.2 170.8 171.0 170.3 170.9 170.5 169.9 169.4 169.0 168.5 168.0 167.7 165.7 165.7 169.5 173.2 179.6 188.9 199.0 208.4 220.1 230.7 240.7 246.1 247.9 246.7 245.0 242.6 239.3 236.0 233.4 230.2 227.8 228.6 228.5 229.3 230.6 224.8 220.3 219.2 217.9 218.6 219.2 219.6 220.8 221.0 220.1 219.9 219.5 
218.3 210.6 218.0 226.4 233.6 244.3 254.5 252.2 253.8 250.8 252.3 252.3 251.9 248.8 244.7 240.8 237.6 235.9 236.5 237.8 240.7 245.2 251.0 254.2 255.8 253.3 249.4 243.1 238.3 235.6 235.6 236.1 237.5 240.4 243.3 245.7 247.7 248.4 248.1 246.0 244.0 240.2 236.1 233.6 232.5 231.5 235.2 236.5 240.6 243.9 245.6 246.1 247.2 248.0 247.9 248.1 247.4 247.5 248.4 246.8 244.4 240.6 232.3 229.8 230.3 229.7 229.6 228.8 227.8 227.0 218.3 218.5 218.6 219.5 220.9 222.3 223.4 224.0 223.3 221.2 219.2 215.8 208.6 212.7 217.9 223.7 229.7 236.2 243.4 250.4 258.4 251.7 253.0 252.3 252.6 249.6 244.7 240.2 238.5 240.0 242.2 244.3 246.3 248.7 249.3 249.0 247.0 247.2 245.9 244.8 244.1 243.1 243.2 243.4 245.7 246.4 247.8 248.8 248.9 249.0 247.7 246.9 248.3 250.5 250.6 250.6 251.4 255.6 263.4 273.0 285.1 297.6 302.1 303.0 302.8 300.2 296.1 293.2 292.3 292.6 293.9 294.2 296.0 298.2 300.2 298.8 295.9 289.7 281.1 268.8 255.1 245.0 243.1 242.1 241.2 243.1 246.0 248.5 250.4 252.1 252.6 251.9 250.1 246.0 242.1 233.8 232.5 237.6 241.3 245.1 249.1 253.7 255.8 253.6 253.6 253.0 251.7 248.8 245.8 242.3 239.0 234.5 232.1 232.7 236.1 240.1 246.2 252.2 257.8 260.6 261.9 259.6 254.8 246.6 237.0 227.7 224.2 223.6 227.9 233.5 241.8 250.1 259.0 266.9 270.1 268.8 260.9 250.3 235.7 221.2 215.6 212.0 210.1 208.1 205.2 202.8 199.9 196.9 194.1 191.1 188.7 185.9 182.7 180.3 177.5 174.7 172.1 169.2 165.8 162.8 160.7 158.0 156.2 153.1 157.9 163.6 175.7 187.3 200.3 214.7 230.4 243.1 250.6 250.3 249.1 245.6 241.6 235.6 228.9 225.0 227.6 231.4 235.4 237.6 232.1 226.0 223.6 221.1 221.4 221.0 219.3 218.1 218.5 219.0 218.6 218.1 218.1 215.1 209.9 205.5 199.2 193.4 189.9 187.5 186.8 189.3 192.1 194.2 195.5 196.6 197.5 197.5 197.3 196.9 195.2 195.0 195.4 190.7 193.3 194.5 196.7 197.6 200.8 203.0 204.2 201.9 199.5 198.0 196.5 195.9 196.5 196.4 196.3 196.7 196.1 194.4 191.6 187.7 181.0 178.6 174.7 172.8 168.3 167.1 167.9 163.5 163.1 161.9 162.0 163.8 166.0 167.0 166.8 165.8 165.6 164.9 164.7 164.9 164.9 162.2 159.3 164.8 173.1 180.9 188.3 196.7 200.1 197.0 196.1 195.5 195.5 194.8 195.3 194.7 194.8 196.3 196.9 196.2 194.1 189.1 194.3 200.4 206.0 212.4 218.8 225.1 231.4 228.2 223.5 221.9 221.3 220.9 219.4 219.1 219.5 220.0 220.1 219.7 217.9 215.0 207.9 197.5 192.8 194.8 195.6 197.4 199.7 197.1 195.8 194.6 195.0 194.7 195.3 195.9 196.1 196.5 196.7 197.6 198.2 199.2 198.2 197.0 195.8 196.5 197.4 199.7 202.7 208.2 211.7 215.2 217.3 219.3 220.9 220.5 220.2 220.8 221.0 220.4 221.0 220.8 221.0 221.9 222.4 222.1 222.6 223.8 226.9 232.4 239.0 244.3 247.9 250.3 250.6 251.6 251.4 249.7 247.3 244.5 242.5 240.7 240.6 240.9 242.4 244.0 246.9 250.1 254.2 257.7 258.6 257.6 254.1 250.8 244.9 239.5 236.5 235.5 236.5 238.5 241.7 244.1 246.1 248.0 250.0 251.3 251.2 248.9 246.9 245.0 242.7 241.0 240.0 241.3 245.1 248.0 248.2 247.4 247.7 248.8 249.4 249.5 247.5 245.4 244.6 244.5 245.7 248.5 251.6 251.1 250.7 251.2 252.7 252.7 251.1 248.0 246.2 243.7 243.9 244.8 247.9 251.4 254.0 256.0 258.2 257.5 254.5 250.3 246.1 241.7 235.5 229.0 223.0 210.8 211.1 210.8 210.8 210.8 210.2 209.8 209.4 209.1 208.4 208.4 207.2 208.0 207.8 207.9 208.6", "f0_timestep": "0.011609977324263039", - "energy": "0.0 0.0004 0.0005 0.0006 0.0009 0.0004 0.0003 0.0 0.0006 0.0008 0.0003 0.001 0.0012 0.0009 0.001 0.0011 0.0017 0.0014 0.0016 0.002 0.002 0.0017 0.0023 0.0023 0.0023 0.0023 0.0027 0.0021 0.0019 0.0018 0.0018 0.002 0.0021 0.0023 0.0023 0.0014 0.0012 0.0012 0.0004 0.0006 0.0006 0.0005 0.0001 0.0003 0.0006 0.0005 0.002 0.0176 0.0289 0.0371 0.0466 0.0517 0.0552 0.0568 0.0573 0.0566 0.0529 0.0503 
0.0495 0.0509 0.053 0.0545 0.0553 0.0553 0.0542 0.0489 0.0418 0.033 0.0215 0.0181 0.0166 0.0226 0.0395 0.0552 0.0682 0.077 0.081 0.0833 0.0836 0.0842 0.0809 0.0792 0.0775 0.0754 0.069 0.0576 0.0433 0.0239 0.0089 0.0051 0.0084 0.0377 0.0593 0.0779 0.0912 0.0958 0.0958 0.0919 0.0895 0.0869 0.083 0.0811 0.0777 0.0766 0.077 0.0755 0.0766 0.0776 0.0803 0.0808 0.0826 0.0815 0.0802 0.0801 0.0773 0.0766 0.075 0.0728 0.0731 0.0747 0.0756 0.0773 0.078 0.0763 0.0757 0.0724 0.0705 0.0681 0.0654 0.0629 0.0624 0.0636 0.0638 0.0665 0.0682 0.068 0.0682 0.0687 0.0683 0.0694 0.0694 0.0692 0.0703 0.0689 0.07 0.0681 0.0608 0.0496 0.0353 0.0201 0.0156 0.0158 0.0146 0.0284 0.0457 0.0574 0.0711 0.0763 0.0802 0.0812 0.0804 0.0807 0.0791 0.0774 0.075 0.0712 0.065 0.0539 0.042 0.0277 0.0208 0.0197 0.0186 0.0188 0.0373 0.0555 0.0677 0.0808 0.0867 0.0889 0.0902 0.0895 0.0874 0.0849 0.0848 0.0853 0.0874 0.0911 0.0916 0.0941 0.0951 0.0961 0.0948 0.0921 0.0895 0.0843 0.0819 0.0828 0.0839 0.0862 0.089 0.0888 0.0877 0.0866 0.0836 0.08 0.0782 0.0752 0.0731 0.0723 0.0719 0.0724 0.072 0.0751 0.0784 0.0823 0.0872 0.092 0.0981 0.1028 0.1057 0.1059 0.106 0.1046 0.1037 0.104 0.101 0.0987 0.0942 0.0882 0.0839 0.0804 0.0797 0.0807 0.0799 0.0796 0.0802 0.0802 0.0804 0.0798 0.0785 0.0764 0.0753 0.0736 0.0729 0.0741 0.0751 0.0758 0.0717 0.0611 0.0485 0.0344 0.0259 0.0252 0.025 0.028 0.0407 0.0545 0.0689 0.0812 0.0876 0.0911 0.0913 0.0889 0.0863 0.0822 0.0793 0.0757 0.0749 0.073 0.0726 0.0731 0.0739 0.0754 0.0761 0.0765 0.0756 0.0733 0.0709 0.0673 0.0643 0.0587 0.054 0.0509 0.0486 0.0469 0.0476 0.0485 0.0501 0.0525 0.0533 0.052 0.0483 0.0408 0.0321 0.0212 0.0104 0.0039 0.0026 0.0033 0.0036 0.0041 0.0042 0.0052 0.0052 0.0045 0.0032 0.003 0.0017 0.0015 0.0012 0.0012 0.0012 0.0006 0.0021 0.0018 0.0113 0.03 0.048 0.0629 0.0706 0.0737 0.0703 0.0654 0.0617 0.0591 0.0592 0.0601 0.0646 0.0661 0.0679 0.0663 0.0586 0.0496 0.0352 0.02 0.01 0.0217 0.042 0.0584 0.0704 0.0777 0.0792 0.0783 0.0765 0.075 0.0727 0.0726 0.0717 0.07 0.0692 0.0668 0.066 0.0634 0.059 0.0533 0.0466 0.0472 0.0531 0.0622 0.0686 0.0742 0.0765 0.0769 0.0761 0.0766 0.0763 0.0747 0.0746 0.0703 0.0621 0.0522 0.0384 0.0194 0.0065 0.0036 0.0058 0.017 0.039 0.0571 0.0713 0.0795 0.0822 0.0806 0.076 0.0753 0.0718 0.0689 0.068 0.065 0.063 0.0591 0.0509 0.0405 0.0283 0.0158 0.007 0.007 0.018 0.0359 0.0491 0.0596 0.067 0.0676 0.0667 0.0653 0.0637 0.0633 0.0615 0.0592 0.0567 0.0526 0.0487 0.0408 0.0335 0.0248 0.0155 0.0102 0.0089 0.0182 0.0353 0.0481 0.0606 0.0668 0.0683 0.0677 0.0644 0.0638 0.0607 0.0589 0.0577 0.0552 0.0517 0.0448 0.0341 0.0237 0.0151 0.0133 0.0127 0.0115 0.027 0.0454 0.0573 0.0671 0.0701 0.0696 0.0679 0.0686 0.0699 0.0696 0.0701 0.0699 0.0694 0.0697 0.0665 0.0598 0.0491 0.037 0.026 0.0168 0.0279 0.0429 0.0551 0.0639 0.0697 0.0704 0.0684 0.0681 0.0671 0.0656 0.0645 0.0635 0.0642 0.0653 0.0667 0.0683 0.0668 0.0669 0.0657 0.0662 0.0667 0.068 0.0726 0.0811 0.0891 0.0959 0.1024 0.1052 0.1078 0.1087 0.1104 0.1086 0.1064 0.1038 0.1007 0.0985 0.0947 0.0894 0.0823 0.0745 0.0696 0.0655 0.064 0.0651 0.0668 0.0721 0.0779 0.082 0.0869 0.0882 0.0892 0.0889 0.0861 0.0839 0.0793 0.0754 0.072 0.0699 0.0703 0.0721 0.0764 0.0797 0.0844 0.088 0.0908 0.0907 0.0896 0.0861 0.0823 0.078 0.0732 0.07 0.0677 0.0675 0.0713 0.0749 0.0772 0.0795 0.0787 0.0786 0.0795 0.0776 0.0744 0.0704 0.0638 0.0604 0.0594 0.0597 0.0608 0.0618 0.064 0.063 0.064 0.0639 0.0632 0.0628 0.0609 0.0579 0.0552 0.0538 0.0538 0.0548 0.0554 0.0558 0.0555 0.0563 0.0568 0.0566 0.0553 0.0514 0.0468 0.0426 0.039 0.0366 0.0363 
0.0369 0.0369 0.0369 0.0361 0.0351 0.0338 0.0315 0.0283 0.0247 0.0204 0.0153 0.0108 0.0072 0.0041 0.0019 0.0005 0.0008 0.0011 0.0007 0.0006 0.0004 0.0 0.0001 0.0 0.0 0.0 0.0", + "energy": "-90.9529 -89.6454 -88.42 -85.7444 -82.5876 -78.7135 -74.7747 -71.0291 -67.7053 -64.6335 -62.1973 -59.8473 -58.1208 -56.2134 -54.6743 -53.6257 -52.578 -51.7287 -50.9983 -50.4395 -50.0846 -49.8311 -49.828 -49.8124 -50.2029 -50.5329 -50.8437 -51.1678 -51.4531 -51.7011 -52.1456 -52.6008 -53.1781 -53.9966 -55.0175 -56.0783 -57.3363 -58.6552 -59.1347 -59.5472 -59.1969 -57.963 -55.8957 -52.5667 -49.0487 -45.1648 -41.4668 -37.8422 -34.6565 -32.2944 -30.3378 -28.9897 -27.7203 -26.2646 -25.4319 -24.8796 -24.2928 -24.2306 -23.9714 -23.8444 -23.6907 -23.7747 -24.2145 -25.0059 -26.5129 -28.2891 -30.3495 -32.3391 -33.5046 -33.9634 -33.2679 -31.7175 -29.9289 -27.6727 -25.8205 -24.3182 -23.3191 -22.8202 -22.526 -22.6349 -22.3927 -22.8969 -23.4621 -24.6129 -26.945 -29.5577 -32.3505 -34.5665 -34.9294 -34.2281 -32.2346 -29.5152 -26.2869 -23.5172 -22.1579 -21.358 -20.9995 -21.1255 -21.4504 -21.6643 -21.9517 -22.2979 -22.4923 -22.7847 -22.7166 -22.5849 -22.3351 -22.225 -22.114 -22.0298 -22.2945 -22.254 -22.5126 -22.5812 -22.9569 -23.0247 -23.0243 -23.1294 -23.0468 -22.8819 -22.8854 -22.7206 -22.7828 -23.0257 -23.1335 -23.4376 -23.9765 -24.498 -24.925 -25.2368 -25.2912 -25.3833 -25.4568 -25.0653 -24.9734 -24.6391 -24.6886 -24.4564 -24.5381 -24.3535 -24.2525 -24.3042 -24.1934 -24.5457 -24.7467 -25.6448 -26.7289 -28.4961 -30.3569 -32.2563 -33.5951 -33.8488 -32.9083 -31.1093 -28.8009 -26.4049 -24.5881 -23.3934 -23.0277 -22.8835 -23.2056 -23.606 -24.0214 -24.6381 -25.8742 -27.2813 -28.963 -30.4818 -32.1212 -33.0807 -33.7138 -33.597 -32.5175 -30.8729 -28.7218 -26.49 -24.7567 -23.405 -22.5074 -22.3326 -22.247 -22.1854 -22.2162 -22.299 -22.3547 -22.3108 -22.3154 -22.3207 -22.3002 -22.3329 -22.4161 -22.6028 -22.3984 -22.3393 -22.4359 -22.2805 -22.482 -22.3998 -22.5567 -22.9657 -23.093 -23.2325 -23.3228 -23.6756 -24.1101 -24.2382 -24.3256 -24.4252 -24.5729 -24.4237 -24.5123 -24.4961 -24.4175 -24.3451 -23.8776 -23.5 -22.4851 -21.7445 -20.7916 -19.9112 -19.1336 -18.6676 -18.433 -18.394 -18.4597 -18.4311 -18.7533 -18.7738 -19.1196 -19.5207 -19.9321 -20.3295 -20.8263 -21.2101 -21.7269 -22.1798 -22.7288 -22.8915 -23.1378 -23.1205 -23.1986 -23.2556 -23.1967 -23.1452 -23.1666 -23.1558 -23.447 -23.8526 -25.0089 -26.5346 -28.2304 -29.8283 -31.2409 -31.6054 -31.0765 -30.0713 -28.2938 -26.7608 -24.8738 -23.5541 -22.8005 -22.3367 -22.6624 -22.8296 -23.151 -23.3535 -23.8189 -23.9345 -24.3511 -24.3232 -24.5697 -24.7993 -24.8659 -25.0012 -24.9773 -24.9437 -25.0248 -25.4339 -25.8975 -26.4703 -27.1468 -27.9843 -28.8067 -29.2596 -29.4368 -29.5365 -29.5974 -29.1421 -29.1551 -29.1838 -29.8408 -31.0266 -33.7978 -36.9957 -41.1996 -44.7268 -47.8915 -50.0065 -51.1816 -51.2418 -50.4129 -49.9428 -49.7499 -49.8098 -50.097 -50.7303 -51.5417 -52.4292 -53.4294 -54.3302 -55.1797 -54.8838 -53.6512 -50.5819 -46.3746 -40.9514 -35.5642 -30.3559 -26.9243 -24.278 -23.3739 -23.1132 -23.5492 -24.0565 -24.7153 -25.042 -25.3181 -25.6466 -25.8657 -25.9405 -26.0193 -26.9712 -28.5664 -29.9976 -31.2483 -31.5081 -30.8761 -29.6039 -27.5346 -25.1495 -23.4024 -22.3768 -21.8805 -21.874 -21.8122 -21.9526 -22.2022 -22.2738 -22.4932 -22.5992 -22.7983 -23.0945 -23.5714 -23.9909 -24.4082 -24.7959 -25.0826 -25.4699 -25.4684 -25.3271 -25.1657 -25.1505 -24.9836 -24.9642 -25.032 -25.0773 -25.218 -25.4957 -25.4421 -26.1606 -27.5164 -30.148 -32.9538 -35.6861 -37.1761 -37.3762 -35.7185 -33.3895 
-30.1258 -27.2017 -25.3003 -24.383 -24.3402 -24.3055 -24.4496 -24.4534 -24.1235 -23.9437 -23.9671 -24.0124 -24.3811 -25.4512 -27.5673 -30.0944 -32.5522 -34.0355 -34.5926 -33.87 -32.288 -30.2784 -28.1931 -26.4425 -25.7684 -25.4578 -25.2384 -25.32 -25.6348 -25.5373 -25.7926 -25.992 -26.5394 -27.1822 -28.4072 -30.4259 -32.8107 -34.8773 -35.9876 -35.8047 -34.9246 -32.792 -30.2134 -27.6098 -25.8094 -24.966 -24.6846 -24.8705 -24.8852 -25.1504 -25.4382 -25.9377 -26.7961 -27.7568 -29.4339 -31.2151 -33.3269 -34.9764 -36.3997 -36.6129 -35.9623 -34.1203 -31.9709 -29.9668 -27.7384 -26.3388 -25.5518 -25.2848 -25.0819 -25.1437 -25.3858 -25.3152 -25.234 -25.6477 -26.0012 -26.8998 -27.9323 -29.409 -30.8607 -31.8534 -32.1579 -31.4539 -30.3126 -28.8347 -27.1158 -25.9125 -25.2262 -24.8585 -24.8321 -24.8062 -24.9907 -24.9072 -24.9074 -24.9878 -24.7402 -24.7335 -24.5321 -24.4192 -24.2267 -24.0978 -23.744 -23.6441 -23.2317 -22.7156 -22.631 -22.1858 -22.0152 -21.8924 -21.4514 -21.4211 -21.1957 -21.1246 -20.8307 -20.7199 -20.7099 -20.536 -20.6279 -20.6439 -21.1911 -21.6113 -22.2638 -22.6377 -23.3356 -23.5977 -23.5228 -23.2233 -22.7948 -22.4782 -21.9143 -21.5693 -21.4114 -21.2835 -21.2918 -21.5135 -21.3802 -21.5992 -21.7043 -21.7316 -21.7788 -21.8539 -21.7999 -21.6805 -21.5178 -21.2189 -21.0968 -20.7268 -20.6179 -20.8652 -20.8725 -21.5366 -21.8993 -22.3221 -22.6983 -22.8615 -22.9291 -22.7637 -22.4493 -22.2861 -21.7542 -21.3643 -21.3369 -21.2397 -21.41 -21.4067 -21.4843 -21.7885 -21.8507 -22.0141 -21.7239 -21.6727 -21.5842 -21.5503 -21.4313 -21.6819 -21.7942 -22.1057 -22.4895 -22.7979 -23.0623 -23.4377 -23.8099 -24.0222 -24.1777 -24.3483 -24.5442 -24.6434 -24.875 -25.0522 -25.2692 -25.3655 -25.6164 -25.7397 -25.9181 -26.2473 -26.5898 -26.8339 -27.1611 -27.8154 -28.3141 -28.9942 -29.3233 -29.74 -30.1634 -30.7403 -31.6723 -33.1903 -35.3596 -38.1686 -42.0086 -45.8714 -50.0957 -53.8016 -56.9736 -59.6309 -61.9868 -63.8101 -64.9416 -66.1395 -66.6931 -67.0286 -66.805 -64.484 -60.7412 -56.6917", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0 0.0 0.0001 0.0001 0.0001 0.0002 0.0001 0.0003 0.0003 0.0004 0.0005 0.0007 0.0009 0.0011 0.0011 0.0011 0.0014 0.0015 0.0017 0.0017 0.0019 0.0022 0.0024 0.0027 0.0027 0.0026 0.0025 0.0023 0.0022 0.002 0.002 0.0021 0.0021 0.0019 0.0019 0.0018 0.0015 0.0014 0.0014 0.0012 0.0009 0.0009 0.0007 0.0004 0.0005 0.0002 0.0003 0.0001 0.0003 0.0004 0.0005 0.001 0.0017 0.0022 0.0022 0.002 0.0016 0.0014 0.001 0.0006 0.0003 0.0005 0.0008 0.001 0.0016 0.0067 0.0108 0.0138 0.0165 0.0177 0.0171 0.0155 0.0129 0.0093 0.0056 0.0017 0.0012 0.0012 0.0012 0.0013 0.0012 0.0011 0.001 0.0012 0.0011 0.0011 0.0017 0.0026 0.0032 0.0037 0.0045 0.0049 0.0044 0.004 0.0029 0.0024 0.0021 0.002 0.0019 0.002 0.0019 0.0016 0.0015 0.0012 0.0013 0.0013 0.0014 0.0015 0.0014 0.0017 0.0014 0.0015 0.0014 0.0012 0.0011 0.0008 0.0008 0.0009 0.0009 0.001 0.0009 0.0009 0.0009 0.0009 0.0007 0.0006 0.0007 0.0005 0.0005 0.0005 0.0005 0.0004 0.0004 0.0003 0.0004 0.0005 0.0004 0.0002 0.0005 0.0004 0.0005 0.0006 0.0006 0.0009 0.0008 0.0013 0.0034 0.0067 0.0107 0.0136 0.0154 0.0155 0.0141 0.0114 0.0081 0.0056 0.003 0.0026 0.0024 0.0019 0.0019 0.0017 0.0017 0.0019 0.002 0.002 0.0025 0.0068 0.0132 0.0191 0.0225 0.0234 0.0221 0.0175 0.0138 0.0099 0.0048 0.0037 0.003 0.0024 0.0023 0.0015 0.0019 0.0018 0.0019 0.002 0.002 0.0021 0.0021 0.0018 0.0018 0.0017 0.0015 0.001 0.0012 0.001 0.0009 0.0011 0.0012 0.001 0.0012 0.001 0.0012 0.001 0.001 0.0009 0.0009 0.0008 0.0006 0.0005 0.0003 0.0006 0.0009 0.0013 0.0015 0.0019 0.002 0.002 
0.0021 0.0018 0.0016 0.0017 0.0016 0.0015 0.0015 0.0016 0.0021 0.0028 0.0026 0.0027 0.0024 0.0022 0.0018 0.002 0.0019 0.0018 0.0024 0.0023 0.002 0.0019 0.0014 0.0013 0.001 0.0009 0.0006 0.001 0.0009 0.0012 0.0018 0.0064 0.0112 0.016 0.0212 0.0244 0.0254 0.0235 0.019 0.0139 0.0081 0.0046 0.0036 0.0027 0.0021 0.0023 0.0022 0.0025 0.0025 0.0026 0.0025 0.0025 0.0025 0.0022 0.0022 0.0022 0.0019 0.0018 0.0018 0.002 0.0019 0.0021 0.0021 0.002 0.0019 0.0014 0.0009 0.001 0.0009 0.0009 0.0009 0.0009 0.0012 0.001 0.0008 0.0012 0.0013 0.0018 0.0018 0.0016 0.0021 0.0026 0.0033 0.0039 0.0045 0.0054 0.0053 0.0049 0.0041 0.0027 0.002 0.0016 0.0012 0.0008 0.0009 0.0006 0.0016 0.0036 0.0107 0.0211 0.0227 0.0229 0.0207 0.0065 0.0033 0.0028 0.0026 0.002 0.0017 0.0008 0.0006 0.0006 0.0005 0.0007 0.0017 0.0033 0.0046 0.0074 0.0076 0.0062 0.0053 0.0028 0.0017 0.0019 0.0021 0.002 0.0019 0.0016 0.0016 0.0015 0.0015 0.0014 0.0014 0.0014 0.0014 0.0013 0.0013 0.0013 0.0012 0.0009 0.0011 0.001 0.0013 0.0011 0.0012 0.0013 0.0013 0.0013 0.0011 0.001 0.0011 0.0009 0.001 0.0007 0.0013 0.0022 0.0023 0.0038 0.0055 0.0063 0.0061 0.0054 0.0041 0.0026 0.0025 0.002 0.0021 0.0019 0.002 0.002 0.002 0.002 0.0015 0.0016 0.0013 0.001 0.0012 0.0014 0.0017 0.0019 0.002 0.0021 0.002 0.002 0.002 0.0013 0.0014 0.0014 0.0016 0.0013 0.0014 0.0014 0.0011 0.0009 0.0009 0.0008 0.0028 0.0062 0.0086 0.0097 0.0097 0.0085 0.0062 0.004 0.0025 0.0016 0.0016 0.0013 0.001 0.001 0.0008 0.0009 0.0008 0.0009 0.0008 0.0031 0.0056 0.0083 0.0108 0.0116 0.0121 0.0111 0.0094 0.0072 0.0037 0.0018 0.0014 0.0011 0.001 0.0009 0.0006 0.0005 0.0005 0.0004 0.0005 0.0002 0.0004 0.0007 0.004 0.008 0.0097 0.0101 0.0091 0.0052 0.0025 0.0012 0.0011 0.0011 0.0009 0.0007 0.0006 0.0005 0.0005 0.0003 0.0003 0.0003 0.0007 0.0001 0.0006 0.0006 0.0005 0.0004 0.0005 0.0007 0.001 0.0014 0.0015 0.0018 0.0018 0.0018 0.0017 0.0017 0.0015 0.0014 0.0012 0.0011 0.001 0.0011 0.0011 0.0012 0.0013 0.0013 0.0015 0.0018 0.0021 0.002 0.002 0.0017 0.0015 0.0015 0.0015 0.0015 0.0013 0.0014 0.0014 0.0013 0.0013 0.0011 0.0012 0.001 0.0011 0.0011 0.001 0.0011 0.001 0.0011 0.0013 0.0013 0.0015 0.0014 0.0016 0.0016 0.0009 0.0011 0.001 0.001 0.0011 0.0013 0.0012 0.0011 0.0012 0.0011 0.0011 0.0012 0.001 0.0009 0.0008 0.0008 0.0007 0.0007 0.0007 0.0007 0.0007 0.0007 0.0008 0.0009 0.0006 0.0006 0.0006 0.0008 0.0007 0.0007 0.0005 0.0006 0.0006 0.0006 0.0006 0.0008 0.0007 0.0007 0.0007 0.0008 0.0009 0.0005 0.0008 0.0005 0.0006 0.0005 0.0005 0.0005 0.0005 0.0004 0.0005 0.0006 0.0005 0.0005 0.0003 0.0005 0.001 0.0013 0.0013 0.0012 0.0006 0.0004 0.0 0.0002 0.0002 0.0001 0.0002 0.0002 0.0004 0.0006 0.0003 0.0007", + "breathiness": "-94.0044 -93.3422 -94.3739 -92.4355 -88.387 -83.6519 -78.1495 -72.8589 -68.1653 -64.4488 -61.6988 -59.4729 -57.4532 -55.4376 -53.7904 -52.4882 -51.6508 -51.3525 -51.115 -50.8434 -50.8318 -50.8087 -50.8125 -50.9703 -51.0636 -51.4867 -51.4759 -51.8153 -52.0834 -52.2178 -52.5983 -52.7535 -53.1083 -53.9999 -54.71 -55.5868 -56.6675 -57.9713 -58.8962 -59.7518 -60.2536 -59.854 -58.191 -55.0557 -51.4417 -47.1589 -43.2611 -39.3933 -36.5854 -34.3709 -33.3306 -31.8271 -30.6575 -30.176 -31.1723 -34.4657 -39.3193 -44.714 -50.1213 -54.3681 -57.8592 -59.1574 -58.6782 -56.6311 -54.0775 -50.6899 -47.0402 -43.865 -41.2713 -39.9147 -40.0099 -41.7172 -44.2196 -47.4029 -50.8113 -53.8091 -56.2896 -58.0997 -58.8057 -58.8599 -58.6075 -58.5276 -57.9458 -56.9021 -55.2467 -53.0822 -51.0167 -49.0523 -47.7515 -46.9391 -47.0615 -47.6303 -48.4276 -49.296 -50.3211 -51.2173 -52.1492 -52.7426 -53.0918 -53.6054 
-54.1464 -54.78 -55.1003 -55.5363 -55.7206 -56.0204 -55.9924 -55.9991 -56.4071 -56.611 -56.8788 -57.1027 -57.503 -58.2552 -58.7717 -59.6819 -60.5046 -61.2959 -61.9316 -62.2075 -62.1485 -62.3274 -62.365 -62.441 -62.611 -62.833 -63.1111 -63.5371 -63.8718 -64.2477 -64.6917 -65.3181 -65.7402 -66.1369 -66.7283 -66.578 -66.7938 -66.8946 -66.5413 -66.2677 -65.5409 -64.636 -63.3425 -61.4885 -58.4076 -54.6909 -50.7216 -46.51 -42.7264 -39.9202 -38.1678 -37.9009 -38.6481 -40.2612 -42.6502 -45.3351 -48.1958 -51.019 -53.4827 -55.0718 -55.8869 -56.0339 -55.6078 -54.5273 -52.9576 -50.3552 -47.4737 -44.5027 -41.5674 -39.0987 -37.3351 -36.8422 -37.6535 -39.4753 -42.4306 -45.3457 -48.2655 -50.578 -52.2779 -53.2609 -53.6489 -53.9721 -54.254 -54.6357 -54.9255 -55.0303 -55.132 -55.0771 -55.1183 -55.075 -55.3565 -55.4327 -55.5971 -55.9086 -56.2385 -56.6887 -57.021 -57.514 -57.7471 -58.0179 -58.5728 -58.9799 -59.5068 -60.2262 -61.3394 -62.231 -63.2949 -63.984 -64.1601 -63.9742 -63.1628 -61.8576 -60.2479 -58.4503 -57.2114 -55.8294 -54.6324 -53.6856 -53.0576 -52.6858 -52.7589 -52.9453 -52.9163 -53.0813 -52.7113 -53.0569 -53.1808 -53.6628 -53.9596 -54.5469 -54.8728 -55.3685 -55.5734 -55.9097 -56.6853 -57.387 -58.2492 -58.9071 -59.2539 -59.389 -59.1152 -58.8996 -58.5409 -58.3124 -58.1829 -57.3578 -55.4948 -52.8863 -49.315 -45.5943 -41.5888 -38.3846 -36.1877 -34.9503 -34.9778 -36.6815 -38.7871 -41.7289 -44.8824 -47.6196 -49.9092 -51.748 -52.5139 -53.0174 -53.6882 -54.2189 -54.8469 -55.4965 -55.9857 -56.4033 -56.9498 -56.9602 -57.2117 -57.3109 -57.4369 -57.3239 -57.1772 -56.914 -56.8644 -56.9904 -57.6745 -58.362 -59.6095 -60.8055 -62.3669 -63.4374 -64.6614 -65.6451 -66.2915 -66.2188 -65.6005 -64.3957 -63.2467 -61.923 -60.6688 -59.7494 -58.4993 -57.3058 -55.5875 -53.7806 -52.0929 -50.8044 -49.9619 -49.717 -50.1533 -51.1986 -52.6493 -54.3861 -56.4274 -58.7927 -60.5706 -60.3977 -58.5451 -55.1849 -50.8197 -45.9809 -42.1702 -40.0128 -39.8857 -41.2354 -43.7694 -46.3115 -49.3949 -52.7153 -55.5956 -58.3615 -60.747 -62.9665 -64.1057 -63.8023 -61.6891 -58.6743 -54.8818 -51.3457 -48.4595 -46.5008 -46.3951 -47.0077 -48.383 -49.8881 -51.2258 -52.0983 -52.8446 -53.6339 -54.0647 -54.6852 -55.1949 -55.6619 -55.9006 -56.2714 -56.2138 -55.8999 -55.8625 -55.9616 -56.5752 -56.9403 -57.4136 -58.1339 -59.0402 -59.5097 -60.032 -60.2803 -60.4102 -60.5717 -60.5511 -60.3393 -60.3908 -60.3163 -59.5685 -58.5274 -57.1656 -56.0159 -55.0173 -54.1434 -53.3789 -52.6452 -52.2376 -51.7374 -51.5886 -51.5051 -51.7527 -52.6397 -53.5059 -54.1382 -54.6013 -55.1247 -55.5557 -55.7645 -56.0495 -56.4876 -56.5442 -57.2125 -58.142 -58.6636 -59.1905 -59.1691 -58.9579 -58.4787 -57.9733 -57.1948 -56.7758 -56.4013 -56.5767 -56.7103 -57.0829 -57.7005 -58.2691 -58.9351 -59.6186 -60.0529 -59.3839 -58.0756 -55.628 -52.7757 -49.5943 -46.5094 -44.307 -43.4995 -44.1396 -46.0532 -48.2262 -50.8912 -53.4137 -55.4242 -57.1477 -58.4019 -59.5081 -60.5035 -61.7616 -62.0419 -61.385 -59.0561 -55.8737 -51.8852 -47.8771 -44.1279 -41.5889 -40.7655 -41.452 -43.4967 -46.4535 -50.2376 -53.8444 -57.402 -60.6192 -63.009 -64.6217 -65.6865 -66.3478 -67.0185 -67.2753 -66.828 -65.3496 -62.4542 -58.5028 -54.0347 -49.6003 -45.8823 -44.0583 -44.4899 -46.4902 -49.6124 -53.1677 -56.6353 -59.6481 -62.3316 -63.9959 -65.4286 -66.8981 -67.9123 -68.7479 -69.344 -69.3119 -69.4156 -69.4044 -68.9632 -68.4979 -67.6834 -66.6385 -65.4171 -63.8801 -62.052 -60.3205 -58.9601 -57.6472 -56.7111 -56.1772 -55.914 -55.9148 -55.9952 -56.2013 -56.7689 -57.2709 -57.6134 -57.7355 -57.8922 -57.632 -57.382 -57.1397 -56.6989 
-56.4531 -56.3789 -56.1121 -56.2518 -56.6656 -56.797 -57.311 -57.6864 -58.0723 -58.1712 -58.506 -58.6538 -58.8921 -59.038 -58.9135 -58.5828 -58.3725 -57.9171 -57.5255 -57.4556 -57.3162 -57.1871 -56.904 -56.4031 -55.7898 -55.275 -54.9481 -54.8382 -55.3683 -55.9336 -56.7005 -57.4908 -57.9705 -58.2183 -58.3198 -58.0444 -57.6348 -57.3896 -57.2607 -57.0522 -56.9757 -57.3399 -57.4812 -57.6938 -57.7966 -58.0728 -58.2428 -58.2166 -58.429 -58.462 -58.3217 -58.1551 -58.231 -58.1584 -58.364 -58.5483 -59.029 -59.3934 -59.9412 -60.5019 -60.8131 -61.0823 -61.6208 -61.6447 -62.0424 -62.199 -62.6567 -63.1448 -63.5571 -63.9748 -64.3379 -64.7388 -65.3218 -66.0235 -66.9534 -67.887 -68.72 -69.3899 -69.804 -69.6972 -69.7245 -69.1292 -67.8354 -65.9487 -63.4855 -60.9418 -58.4978 -56.8918 -56.1224 -56.8498 -58.5928 -60.9974 -63.9987 -66.6823 -69.1334 -70.8621 -72.2773 -72.6452 -72.6765 -71.0776 -69.1668 -65.3783", "breathiness_timestep": "0.011609977324263039" }, { @@ -266,9 +266,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "184.5 184.9 185.0 185.0 184.9 185.0 185.2 185.2 184.9 184.8 184.6 184.6 184.2 184.2 184.3 184.0 183.7 183.9 183.8 184.2 183.7 183.7 183.9 183.6 183.5 183.2 183.2 181.5 181.7 179.3 174.4 174.9 182.3 188.1 194.9 203.9 211.9 223.0 230.2 240.2 246.6 250.2 251.6 249.7 245.9 243.8 241.6 238.4 240.5 255.1 266.6 278.9 290.6 302.6 294.2 295.2 296.4 296.1 296.0 294.1 293.2 292.8 293.1 294.3 293.4 292.0 288.3 279.7 270.3 274.8 282.4 296.0 311.0 325.5 325.1 327.4 333.0 335.6 336.5 335.5 332.4 330.1 326.4 322.6 320.5 317.7 315.2 315.2 318.8 320.7 323.2 326.6 328.7 329.4 329.9 328.2 326.2 324.8 325.1 325.7 326.9 327.4 327.2 325.8 326.0 328.4 329.3 329.4 329.9 328.3 326.8 327.9 328.2 327.5 327.6 328.8 329.4 330.2 330.9 331.1 330.8 330.2 329.3 327.8 327.0 326.1 324.4 323.9 322.9 322.3 323.4 325.3 327.1 328.5 330.2 331.6 333.1 334.8 335.8 335.8 334.4 332.8 329.2 325.8 321.7 319.5 319.6 321.1 324.6 328.9 334.1 337.3 338.8 338.7 338.8 335.8 331.8 327.6 323.4 319.0 315.1 315.8 318.4 322.8 328.0 332.0 336.8 340.1 342.4 343.4 338.6 333.8 328.1 321.9 310.5 295.1 279.6 269.9 263.5 258.7 253.8 249.1 243.9 239.5 233.9 229.3 222.8 217.3 212.0 208.0 204.9 200.8 197.4 193.6 189.3 185.4 180.4 178.8 184.5 200.6 217.2 234.0 249.4 267.9 282.3 294.8 300.8 301.1 295.7 287.5 270.0 273.6 284.1 294.7 305.8 315.7 326.5 339.9 335.5 335.3 333.8 334.0 333.3 332.4 333.1 333.5 332.6 330.5 325.2 315.9 307.0 292.0 274.1 260.2 247.3 235.6 227.6 221.7 216.3 216.1 218.8 221.3 224.2 224.3 224.3 221.8 218.0 215.4 214.4 213.9 214.1 216.0 219.3 223.3 226.4 229.2 230.7 231.2 229.3 223.3 216.9 210.7 206.1 207.0 209.8 215.5 219.1 223.6 227.6 228.8 229.0 226.9 223.7 219.3 214.6 209.8 208.1 208.0 209.7 212.6 217.5 224.3 229.5 232.0 232.9 232.5 228.7 222.7 214.9 207.1 202.5 202.2 203.2 207.0 213.4 222.2 230.3 235.4 239.2 240.0 238.5 233.8 225.1 213.7 203.0 200.9 200.2 203.8 211.9 222.5 231.7 238.8 242.7 242.4 237.8 232.4 221.4 208.1 196.3 188.5 187.7 187.0 185.6 184.6 183.1 181.7 180.8 179.8 178.9 178.0 177.3 177.2 176.4 174.5 172.9 171.6 171.1 170.1 169.3 168.8 169.6 170.3 174.4 179.3 184.1 194.5 206.5 220.5 232.4 242.7 248.2 248.6 245.5 241.2 236.0 231.9 227.6 224.3 222.5 220.5 220.9 219.4 219.1 218.1 219.2 220.9 222.1 221.5 219.0 217.0 216.3 216.1 216.8 218.3 217.8 214.7 207.5 215.3 224.2 233.7 242.5 253.3 248.5 243.9 243.2 244.6 245.3 245.5 245.9 245.4 245.2 245.2 246.7 247.7 249.8 248.5 245.6 243.4 245.0 248.4 252.3 252.8 254.6 257.7 258.6 259.8 260.1 259.8 260.3 260.1 259.6 260.1 259.9 259.7 261.6 264.8 268.6 
275.9 283.3 291.0 297.1 301.5 302.2 301.9 301.0 297.6 294.0 291.8 289.4 287.3 286.5 288.1 290.9 292.8 294.9 297.0 298.8 298.7 297.9 296.5 293.0 290.4 288.9 288.5 289.0 289.9 290.1 291.0 293.2 296.7 297.9 297.0 294.6 291.7 288.7 286.5 272.0 260.7 257.6 258.2 257.0 256.4 256.4 250.7 248.7 247.0 245.6 246.5 248.0 248.6 247.8 247.0 246.7 246.3 246.2 245.0 239.4 222.3 221.2 221.1 221.2 221.5 222.5 221.7 221.3 219.3 221.4 222.5 220.8 220.0 217.9 216.0 215.8 216.3 215.8 216.0 216.2 217.4 219.4 220.6 220.5 219.2 218.8 217.6 217.2 217.8 218.7 219.1 218.6 218.6 218.7 219.2 220.2 221.3 222.4 221.2 218.5 213.2 211.9 221.0 229.7 238.8 247.5 258.2 268.7 256.4 251.3 248.1 246.4 247.5 248.0 247.9 248.1 248.5 247.2 248.3 246.6 242.2 235.1 227.9 217.4 208.3 200.9 193.5 186.0 184.6 187.2 189.7 192.7 194.6 196.4 197.6 197.7 195.2 193.8 192.6 193.7 194.4 194.3 194.9 196.0 196.5 196.3 197.2 197.1 196.8 197.5 197.0 197.0 195.9 195.2 195.8 194.8 192.7 191.2 190.4 189.4 189.2 190.0 191.9 194.9 198.9 202.0 204.7 205.6 206.1 204.0 199.5 195.0 190.5 186.9 185.2 185.3 188.1 191.7 197.5 204.3 208.7 211.4 212.0 210.2 204.4 198.2 192.3 187.1 182.5 180.8 182.5 187.1 193.6 199.9 205.6 209.7 211.6 210.7 204.4 197.1 188.6 179.2 176.2 175.2 178.3 183.5 191.0 200.2 207.8 211.9 213.7 213.1 209.6 201.3 189.9 179.2 173.0 172.7 174.9 179.0 187.4 200.6 210.7 215.9 217.1 213.1 204.3 190.1 177.2 176.0 174.3 172.7 172.4 170.9 169.6 168.5 167.1 166.4 165.6 164.7 163.2 161.8 160.8 159.3 158.2 157.3 156.2 155.7 154.6 154.6 154.0 154.4 155.1 155.2 156.1 157.6 163.4 172.1 181.3 188.7 194.7 198.3 197.5 193.8 188.9 185.0 181.5 177.9 174.8 172.5 169.6 166.9 163.8 162.1 162.5 164.6 166.6 165.2 164.8 164.2 163.9 164.0 164.1 165.1 165.6 166.9 167.8 166.3 164.9 165.3 166.0 171.5 177.3 184.1 189.2 192.0 194.5 194.1 194.2 192.6 191.6 190.3 190.2 190.1 191.2 191.4 192.9 193.9 194.0 193.4 193.7 195.8 197.1 195.0 195.8 195.3 195.6 195.5 195.4 194.0 193.5 192.5 193.5 194.8 195.3 197.4 201.1 206.6 212.0 217.9 220.5 221.4 219.9 218.3 217.2 215.4 216.3 216.9 219.3 220.1 220.4 220.3 220.4 220.8 219.6 218.7 216.9 215.7 215.2 215.1 217.2 220.2 229.3 238.6 246.1 246.4 245.6 246.7 247.1 245.3 244.8 245.0 244.8 244.0 243.2 242.9 240.5 237.1 232.3 224.2 217.2 207.8 207.6 218.9 209.6 205.1 200.0 198.5 199.2 199.5 199.8 200.7 201.1 201.5 202.0 202.4 202.6 202.8 202.1 202.0 202.1 202.4 203.6 206.1 212.2 219.7 226.4 231.0 231.7 230.2 226.8 221.9 217.1 213.1 210.7 210.8 212.8 216.2 220.4 225.8 229.9 232.0 232.3 230.6 227.1 221.5 214.5 209.6 206.7 206.3 209.0 213.0 216.3 218.2 220.3 221.9 221.6 221.0 219.1 218.0 217.0 215.7 215.7 218.4 222.5 224.0 222.9 221.9 220.4 218.2 216.0 214.3 214.3 214.5 214.4 214.7 216.5 219.8 223.8 227.4 229.1 229.1 227.7 224.5 221.3 217.9 214.8 212.6 212.1 213.3 215.0 217.4 220.3 222.8 225.9 227.7 228.6 227.0 224.4 221.6 218.5 215.8 213.7 212.5 212.6 214.3 216.0 218.3 221.2 224.6 227.4 229.0 229.9 229.0 227.2 223.7 221.2 217.8 214.6 213.2 212.8 212.6 214.6 218.5 223.8 228.1 231.2 232.3 230.8 228.0 223.4 218.6 214.0 211.3 210.2 210.6 213.1 219.2 223.2 228.4 232.6 230.9 232.1 233.1 231.7 231.4 230.1 227.4 222.0 219.3 216.4 213.4 211.7 209.9 208.6 207.3 207.0", "f0_timestep": "0.011609977324263039", - "energy": "0.0008 0.001 0.0014 0.0024 0.0029 0.0031 0.0043 0.0045 0.005 0.0055 0.006 0.0063 0.0072 0.0073 0.0077 0.0073 0.0066 0.0064 0.0055 0.0044 0.0036 0.0034 0.0026 0.0014 0.0016 0.0012 0.0015 0.0048 0.0292 0.0405 0.0521 0.0639 0.0689 0.0723 0.073 0.0724 0.069 0.0667 0.0645 0.0633 0.0648 0.0681 0.0709 0.073 0.0724 0.0668 0.0564 0.0442 0.0289 
0.0183 0.0155 0.0162 0.0255 0.047 0.0636 0.0773 0.0855 0.0869 0.0873 0.0839 0.0845 0.0818 0.0791 0.0794 0.0785 0.079 0.0744 0.0643 0.0503 0.032 0.0194 0.0163 0.0194 0.0384 0.054 0.0676 0.0821 0.089 0.0952 0.0992 0.0996 0.0992 0.0968 0.0965 0.0945 0.0926 0.0937 0.0923 0.0943 0.0935 0.0931 0.0922 0.0921 0.0917 0.0922 0.0926 0.0931 0.0938 0.0944 0.0951 0.0964 0.0964 0.0966 0.0965 0.0968 0.0972 0.0986 0.0986 0.0994 0.1002 0.1004 0.1008 0.0996 0.101 0.1017 0.1014 0.1025 0.1017 0.1017 0.102 0.1017 0.1023 0.1036 0.104 0.1065 0.1067 0.1077 0.1073 0.1071 0.1073 0.1071 0.1062 0.1055 0.1053 0.1042 0.1028 0.1013 0.0984 0.0967 0.0965 0.0983 0.1014 0.1032 0.1053 0.1054 0.1028 0.1002 0.0966 0.0933 0.0924 0.0905 0.0894 0.087 0.0858 0.0851 0.0875 0.0888 0.0902 0.0886 0.0866 0.0846 0.0809 0.079 0.0764 0.0741 0.073 0.0726 0.0722 0.0717 0.0704 0.0699 0.0687 0.0676 0.0626 0.0533 0.0431 0.0292 0.0178 0.0116 0.0086 0.0062 0.0052 0.0046 0.0039 0.0036 0.0036 0.0035 0.0037 0.003 0.0031 0.002 0.0017 0.0037 0.0066 0.0104 0.0287 0.0462 0.0614 0.072 0.0789 0.0812 0.081 0.0802 0.075 0.0711 0.0709 0.0722 0.0763 0.076 0.0699 0.0579 0.0433 0.0292 0.0205 0.021 0.0197 0.023 0.0458 0.0643 0.081 0.0934 0.0974 0.0969 0.0944 0.0906 0.086 0.0819 0.079 0.0754 0.0702 0.0638 0.0536 0.0421 0.0309 0.0193 0.0146 0.033 0.0504 0.0678 0.0819 0.0882 0.0944 0.0981 0.1011 0.1032 0.1013 0.0985 0.0959 0.0954 0.0944 0.0935 0.0931 0.0926 0.0925 0.0929 0.0915 0.0906 0.0911 0.0913 0.0922 0.0926 0.0893 0.0869 0.0831 0.0794 0.0774 0.077 0.0778 0.0805 0.0822 0.0824 0.0839 0.0831 0.0819 0.0794 0.0757 0.0715 0.0695 0.0666 0.0655 0.0635 0.0644 0.0665 0.069 0.0709 0.0716 0.0716 0.0683 0.066 0.0603 0.0546 0.0508 0.0454 0.0426 0.0416 0.0412 0.0446 0.0466 0.05 0.051 0.0499 0.0484 0.0447 0.0403 0.0356 0.0316 0.028 0.0263 0.0251 0.0255 0.027 0.0285 0.0302 0.0307 0.031 0.0291 0.0261 0.0206 0.015 0.0104 0.0067 0.0048 0.0037 0.0029 0.0031 0.0027 0.0025 0.0029 0.0019 0.0026 0.0025 0.0021 0.0014 0.0018 0.0013 0.0029 0.0056 0.0089 0.0108 0.0252 0.0426 0.0581 0.071 0.0777 0.0801 0.0795 0.0766 0.0734 0.0712 0.0703 0.0715 0.0753 0.0774 0.0782 0.0787 0.0779 0.0772 0.0779 0.0783 0.0807 0.0835 0.0856 0.0875 0.0885 0.0879 0.0903 0.092 0.0918 0.0945 0.0927 0.0931 0.0922 0.088 0.0835 0.0749 0.0619 0.0476 0.0348 0.0223 0.0187 0.026 0.046 0.0621 0.0768 0.0876 0.092 0.095 0.0967 0.0977 0.0952 0.092 0.0895 0.0867 0.0858 0.0827 0.0792 0.0753 0.0737 0.0737 0.0754 0.08 0.0862 0.0904 0.0947 0.0967 0.0989 0.1014 0.1019 0.1024 0.1026 0.1032 0.1017 0.1009 0.0963 0.0913 0.0872 0.0822 0.0823 0.0848 0.0893 0.0932 0.0962 0.0975 0.099 0.1003 0.1006 0.0998 0.0963 0.0941 0.0921 0.092 0.0924 0.0937 0.0957 0.097 0.1007 0.1018 0.1054 0.1091 0.113 0.117 0.118 0.1195 0.1185 0.1173 0.1163 0.1144 0.114 0.1132 0.1128 0.1141 0.1148 0.1154 0.1089 0.0985 0.0803 0.0589 0.0362 0.0198 0.0074 0.0045 0.0262 0.0449 0.0591 0.073 0.0788 0.0804 0.0813 0.0822 0.0845 0.0852 0.0851 0.0826 0.0805 0.0763 0.0697 0.0586 0.0472 0.0329 0.0234 0.0206 0.0185 0.0178 0.0391 0.0567 0.0732 0.0895 0.094 0.0965 0.0972 0.0965 0.0979 0.0969 0.0943 0.0917 0.0904 0.0869 0.085 0.085 0.0822 0.0816 0.0823 0.08 0.0785 0.078 0.0773 0.0785 0.0778 0.0781 0.0751 0.0734 0.0749 0.0748 0.0756 0.0751 0.0703 0.0605 0.0478 0.0346 0.0219 0.0183 0.0175 0.0142 0.0247 0.0445 0.0598 0.0738 0.0823 0.0823 0.0823 0.0794 0.0773 0.0744 0.0728 0.071 0.0707 0.0708 0.0703 0.0663 0.0579 0.0467 0.0332 0.0193 0.0129 0.0285 0.0405 0.053 0.0638 0.071 0.0736 0.0771 0.0802 0.0815 0.0835 0.0822 0.0815 0.0823 0.0825 0.0857 0.0907 0.0941 0.097 0.0975 0.0977 0.097 
0.0966 0.097 0.0958 0.0968 0.0965 0.0965 0.0964 0.0955 0.094 0.0925 0.0906 0.0879 0.087 0.0854 0.084 0.0818 0.081 0.0823 0.0838 0.0852 0.0845 0.0842 0.0848 0.083 0.0803 0.0758 0.0698 0.0653 0.0614 0.0596 0.0588 0.0603 0.0637 0.0663 0.0686 0.0688 0.0686 0.0658 0.0607 0.0577 0.0525 0.0493 0.0469 0.0444 0.0441 0.0456 0.0492 0.0507 0.0534 0.0533 0.0517 0.0495 0.0451 0.0424 0.0384 0.035 0.0333 0.0307 0.03 0.0315 0.0331 0.0363 0.0383 0.0395 0.0388 0.0367 0.0329 0.0288 0.0245 0.0217 0.0194 0.0186 0.0187 0.0189 0.0211 0.0218 0.0235 0.0222 0.0197 0.0161 0.0114 0.0069 0.0037 0.0019 0.0022 0.0019 0.0026 0.0023 0.0024 0.0022 0.0017 0.0016 0.0014 0.0019 0.002 0.0023 0.0012 0.0017 0.0036 0.0065 0.0078 0.0107 0.0244 0.0392 0.0493 0.0578 0.0615 0.0625 0.0628 0.0622 0.0624 0.0621 0.0653 0.0663 0.0695 0.0682 0.0663 0.0665 0.0646 0.0641 0.064 0.0635 0.0645 0.0658 0.0683 0.0695 0.0716 0.0715 0.0733 0.0728 0.0736 0.0723 0.0731 0.0745 0.0749 0.0753 0.0725 0.0714 0.0681 0.0642 0.0593 0.0536 0.0496 0.0527 0.0593 0.067 0.0716 0.0735 0.0734 0.0727 0.0713 0.0697 0.0667 0.0665 0.0652 0.0646 0.0656 0.0649 0.0648 0.0647 0.065 0.0659 0.0678 0.0687 0.0709 0.0745 0.0761 0.0776 0.0794 0.0775 0.0776 0.0781 0.076 0.0748 0.073 0.0696 0.0658 0.0637 0.0623 0.0638 0.0661 0.0688 0.0702 0.0724 0.0733 0.0731 0.0734 0.074 0.0727 0.0742 0.0755 0.0756 0.0788 0.0809 0.0818 0.0819 0.0798 0.077 0.0756 0.0728 0.0694 0.066 0.0636 0.0611 0.0648 0.0722 0.0813 0.0913 0.0968 0.0995 0.0978 0.0943 0.0907 0.0876 0.0839 0.0802 0.0786 0.0748 0.0724 0.0677 0.0587 0.0473 0.0327 0.0238 0.038 0.0572 0.0725 0.0853 0.0925 0.0922 0.0903 0.0882 0.0844 0.0852 0.0887 0.0903 0.0933 0.0947 0.0956 0.0951 0.0924 0.0881 0.0802 0.0734 0.0678 0.0621 0.0621 0.0621 0.0646 0.0696 0.0722 0.0775 0.0803 0.08 0.0799 0.074 0.0681 0.0627 0.0593 0.0588 0.0609 0.0643 0.0684 0.0727 0.0751 0.0783 0.0787 0.0762 0.0724 0.0675 0.0637 0.0618 0.0627 0.0641 0.0669 0.0699 0.0715 0.0723 0.0722 0.0702 0.0707 0.0711 0.0693 0.0692 0.0666 0.0663 0.0673 0.0697 0.0713 0.0718 0.0735 0.0723 0.0721 0.0735 0.073 0.0724 0.0718 0.0687 0.0671 0.0677 0.0668 0.0691 0.0704 0.0708 0.0713 0.0699 0.0684 0.066 0.0647 0.064 0.0619 0.0619 0.0618 0.0612 0.0627 0.0636 0.0647 0.0658 0.0663 0.0659 0.0645 0.0632 0.0623 0.0588 0.0576 0.0575 0.0559 0.0556 0.055 0.0547 0.0556 0.0569 0.0579 0.0591 0.06 0.06 0.0581 0.0567 0.0554 0.0523 0.0502 0.047 0.0445 0.044 0.0422 0.0414 0.0401 0.0393 0.0397 0.0393 0.0391 0.0393 0.0377 0.036 0.0329 0.029 0.0272 0.0253 0.0235 0.0226 0.0221 0.0213 0.0215 0.0217 0.0222 0.0219 0.0212 0.0193 0.0166 0.0127 0.009 0.0052 0.0032 0.0017 0.0012 0.0005 0.0004 0.0 0.0", + "energy": "-65.2388 -63.0897 -61.1362 -58.4144 -55.3125 -52.6419 -50.3775 -48.8933 -47.6301 -47.0614 -46.5109 -46.5257 -46.5443 -46.7109 -47.3192 -47.813 -48.8029 -50.0414 -51.4686 -52.6686 -52.6137 -50.7894 -47.7774 -43.4155 -38.2736 -33.3564 -29.1082 -25.8654 -23.937 -23.0193 -22.5575 -22.4753 -22.3438 -22.0843 -21.6578 -21.6722 -21.2329 -21.3312 -21.1339 -20.9768 -21.0442 -21.1766 -21.3339 -21.6006 -22.2805 -23.1892 -24.9285 -26.7655 -28.5219 -29.4698 -29.4421 -28.1784 -26.4845 -24.3082 -21.9676 -20.5061 -19.5351 -19.4043 -19.22 -19.404 -19.5239 -19.4645 -19.6025 -19.7658 -20.0766 -20.7777 -21.9959 -23.9621 -25.9025 -27.8711 -28.9933 -29.4424 -28.1958 -26.3267 -23.8506 -21.7154 -19.8986 -18.6744 -18.2856 -17.8862 -17.7358 -17.8858 -17.7395 -17.8184 -17.8737 -17.8224 -18.0448 -17.9561 -18.0154 -17.9941 -18.0091 -17.8197 -17.8078 -17.7552 -17.7194 -17.6535 -17.432 -17.1885 -17.2915 -16.9866 -17.0617 -16.9521 -16.974 
-16.9886 -16.6965 -16.5752 -16.3103 -16.4071 -16.1268 -16.3408 -16.0038 -15.923 -15.9839 -15.9661 -15.7639 -15.8731 -15.867 -15.6374 -15.8731 -15.9852 -15.8813 -16.0582 -15.976 -16.0223 -15.8664 -15.8376 -15.8255 -15.8619 -15.9398 -16.0782 -16.0536 -16.1536 -16.1376 -16.4659 -16.4469 -16.5974 -16.6841 -16.5852 -16.9678 -16.8249 -17.0122 -17.1752 -17.0204 -17.2579 -17.1698 -17.1597 -17.2481 -17.6189 -17.8882 -18.1115 -18.4175 -18.6896 -18.7573 -19.1921 -19.1801 -19.4879 -19.5881 -19.6342 -19.7954 -19.8976 -20.2764 -20.7237 -21.0152 -21.3945 -21.8932 -21.982 -22.1016 -22.2589 -22.4549 -22.7294 -23.0735 -23.8014 -24.8283 -26.2554 -29.3109 -32.672 -36.4997 -39.7469 -42.5978 -44.2488 -45.0809 -45.253 -45.3834 -45.3393 -46.057 -47.2941 -48.5347 -50.0807 -51.2946 -51.2566 -50.2886 -48.0385 -44.6265 -39.9953 -35.0614 -30.2181 -26.0589 -22.9641 -20.9595 -19.7998 -19.5056 -19.3567 -19.7268 -19.8511 -19.9846 -20.0938 -20.2167 -20.4797 -21.2341 -22.4867 -24.0396 -25.8805 -27.8993 -29.2064 -29.5285 -28.7014 -26.7117 -24.3878 -22.091 -19.7974 -18.3985 -17.9528 -18.0594 -18.2726 -18.609 -18.7931 -19.084 -19.4432 -19.7827 -20.6535 -22.3792 -24.6533 -26.9 -28.662 -29.1708 -28.597 -27.0928 -24.9466 -22.683 -21.0344 -20.2276 -19.6266 -19.2699 -19.2083 -19.0403 -19.1699 -19.03 -19.0684 -19.1077 -19.0901 -19.1447 -19.2078 -18.9794 -19.0645 -19.0111 -18.9021 -18.9728 -18.928 -19.1195 -19.2989 -19.5688 -19.7961 -19.7828 -20.0155 -20.0434 -19.935 -20.1272 -20.0506 -20.0068 -19.8062 -19.7211 -19.8876 -19.8162 -19.933 -20.0739 -20.3995 -20.6945 -20.7707 -21.1501 -21.3223 -21.2518 -21.3739 -21.3749 -21.5997 -21.7014 -21.7938 -21.7791 -22.0012 -22.0186 -22.3859 -22.7293 -23.2834 -23.845 -24.3695 -24.3331 -24.8374 -25.1865 -25.138 -24.9395 -24.841 -24.9342 -24.9708 -24.9297 -25.4494 -25.8674 -26.6038 -27.5085 -28.147 -28.6421 -29.2755 -29.5237 -29.9366 -30.2521 -30.8813 -31.6515 -32.7914 -34.7328 -37.2676 -39.6701 -42.261 -44.372 -45.8405 -46.692 -47.0035 -47.1956 -47.1498 -47.4633 -48.064 -48.563 -49.137 -49.7787 -50.502 -50.8232 -50.0469 -48.676 -46.0987 -43.0073 -39.0264 -35.0487 -30.9896 -27.531 -24.628 -22.506 -21.1868 -20.8011 -20.8424 -20.8764 -21.2257 -21.4426 -21.5101 -21.376 -20.9795 -20.9701 -20.5128 -20.2975 -20.4398 -20.1679 -19.9035 -19.8602 -19.2638 -18.9386 -18.3778 -18.332 -18.0936 -18.1078 -17.9849 -18.4414 -18.778 -19.0887 -19.3256 -19.9021 -20.6085 -21.5098 -22.895 -24.3632 -26.3157 -28.3375 -29.5327 -29.8646 -29.3152 -27.6112 -25.5013 -23.0884 -21.0291 -19.6366 -18.9455 -18.6099 -18.4697 -18.6277 -18.8021 -18.8094 -19.0508 -19.0899 -19.4227 -19.7884 -20.1053 -20.2507 -20.5836 -20.255 -19.9257 -19.3803 -18.8309 -18.4236 -17.9882 -17.3604 -17.1857 -17.2224 -16.9812 -17.0876 -17.1795 -17.1591 -17.4639 -18.017 -18.4121 -19.0332 -19.5733 -19.7589 -19.8136 -19.573 -19.8757 -19.52 -19.2179 -19.1989 -18.7103 -18.5048 -18.2298 -17.8958 -17.8486 -17.6019 -17.3952 -17.3476 -17.3429 -17.1819 -17.0717 -16.8365 -16.7661 -16.4805 -16.3726 -16.2944 -16.148 -16.1062 -16.3222 -16.5016 -16.6582 -16.9671 -17.1512 -17.3837 -17.5753 -17.4952 -17.4508 -17.5739 -17.715 -18.1994 -19.2674 -21.2201 -24.5602 -28.827 -33.1321 -36.1218 -37.2647 -36.2316 -33.2024 -28.9186 -24.6423 -21.7381 -19.9851 -19.3661 -19.2706 -19.2024 -19.3878 -19.5254 -19.9462 -20.212 -20.6079 -21.8089 -22.8966 -24.9713 -26.9836 -28.9639 -30.494 -30.974 -30.3885 -28.844 -26.6688 -24.3408 -22.2678 -20.4281 -19.5471 -19.217 -18.9563 -19.0045 -19.0677 -19.0928 -19.5218 -19.6718 -19.8104 -20.1526 -20.3125 -20.3078 -20.3571 -20.4991 -20.5729 -20.6783 
-20.8668 -20.7981 -21.0498 -21.1558 -21.4407 -21.4682 -21.3296 -21.4253 -21.398 -21.4869 -21.8901 -22.5257 -23.929 -25.2402 -27.4722 -29.7592 -31.9786 -33.3116 -33.6311 -32.3863 -30.5937 -27.8264 -25.3727 -22.9003 -21.6935 -20.6722 -20.4977 -20.4369 -20.3962 -20.6159 -20.8736 -20.9877 -21.5009 -22.3466 -23.4453 -25.441 -28.1311 -30.4958 -32.1894 -32.6133 -31.9898 -30.2258 -28.0195 -25.4762 -23.4557 -22.196 -21.2693 -20.8379 -20.5181 -20.6163 -20.4884 -20.5654 -20.4689 -20.373 -20.3653 -20.2399 -20.0816 -20.0626 -19.7893 -19.9053 -19.7391 -19.5339 -19.5926 -19.6555 -19.4432 -19.3803 -19.3323 -19.404 -19.2749 -19.2857 -19.3632 -19.5393 -19.7906 -20.1063 -20.3852 -20.7661 -20.9059 -20.8733 -21.0713 -20.9088 -20.8847 -20.9463 -21.086 -21.2118 -21.5072 -21.5215 -21.9472 -22.1565 -22.5395 -22.9009 -23.0594 -23.1968 -23.1621 -23.109 -23.0885 -23.0922 -23.2377 -23.1145 -23.2442 -23.1321 -23.4839 -23.7325 -24.1264 -24.9662 -25.2436 -25.6232 -25.7441 -26.04 -26.032 -25.9218 -25.8496 -25.8237 -25.968 -26.087 -26.5812 -27.0799 -27.7569 -28.5008 -28.9747 -29.2644 -29.2105 -29.0679 -28.9282 -28.9356 -28.7441 -28.7482 -28.7991 -29.2363 -29.9584 -30.693 -31.9486 -33.4163 -34.9043 -35.9586 -36.6719 -36.9801 -36.3029 -35.7568 -35.4942 -35.9747 -37.1998 -39.4965 -42.1066 -44.8359 -47.3994 -48.9983 -50.194 -50.4582 -50.3061 -49.9254 -49.699 -49.7482 -49.9631 -50.0421 -50.2902 -50.739 -50.8136 -50.245 -49.315 -47.2372 -44.7412 -41.4832 -37.858 -33.7842 -30.4201 -27.2315 -24.8786 -23.1147 -22.2728 -21.8837 -22.0464 -22.247 -22.6253 -22.689 -22.4185 -22.1634 -22.0734 -22.0006 -21.7504 -21.6269 -21.6749 -21.5845 -21.6673 -21.7077 -21.5246 -21.4794 -21.6249 -21.5514 -21.5639 -21.4334 -21.5067 -21.5575 -21.4493 -21.4512 -21.4251 -21.3322 -21.376 -21.4009 -21.1856 -21.3106 -21.5013 -21.826 -21.9586 -22.2774 -22.2213 -21.9088 -21.5637 -21.0588 -20.4747 -20.285 -20.181 -20.3208 -20.2821 -20.3418 -20.5429 -20.7667 -20.9009 -21.1207 -21.0812 -21.3346 -21.2842 -21.4077 -21.207 -20.9818 -20.7955 -20.6414 -20.3953 -20.5213 -20.4496 -20.454 -20.7204 -20.6515 -20.721 -20.9234 -21.1847 -21.3456 -21.6856 -22.0139 -22.1999 -22.4341 -22.4144 -22.3045 -22.0234 -21.8133 -21.452 -21.3434 -21.1441 -21.0788 -21.2122 -21.082 -21.1593 -21.1441 -20.9501 -20.7804 -20.6549 -20.5391 -20.4546 -20.4711 -20.5736 -20.7558 -21.2305 -21.6713 -22.1204 -22.4298 -22.6307 -22.3942 -21.6998 -20.8419 -19.7848 -19.0944 -18.5132 -18.155 -18.0781 -18.324 -18.8217 -19.3135 -20.0796 -20.7446 -21.6215 -22.143 -22.8749 -23.7867 -24.3356 -25.2431 -25.5635 -25.7091 -25.0927 -24.4116 -23.4368 -22.6621 -21.9369 -21.8114 -21.4777 -21.2496 -20.8292 -20.69 -20.4151 -20.2816 -20.082 -20.1696 -20.1719 -20.2726 -20.4082 -20.7075 -21.28 -21.7864 -22.2435 -22.4308 -22.656 -22.6038 -22.3328 -21.9582 -21.5166 -21.2096 -21.286 -21.311 -21.4761 -21.5364 -21.7076 -21.7615 -21.895 -21.741 -21.6526 -21.4816 -21.1443 -20.9183 -20.6999 -20.4456 -20.4242 -20.4174 -20.5797 -20.6534 -20.9552 -20.9309 -21.08 -21.1398 -21.0968 -21.0486 -21.0312 -20.8202 -20.5187 -20.6199 -20.6141 -20.4411 -20.321 -20.3129 -20.1803 -20.0972 -20.0932 -19.9943 -19.9964 -19.9566 -19.9678 -20.0794 -20.0835 -20.1805 -20.346 -20.3949 -20.6436 -20.6866 -20.7896 -20.8667 -20.7429 -20.5051 -20.5035 -20.6839 -20.6665 -20.7482 -20.8718 -21.0392 -21.2641 -21.5568 -21.8482 -22.0525 -22.027 -22.2593 -22.0215 -22.0744 -21.8227 -21.5784 -21.263 -21.0377 -20.9031 -20.9327 -20.9155 -20.9993 -21.0773 -21.1016 -21.1567 -21.2098 -21.2481 -21.2573 -21.2369 -21.1894 -21.2435 -21.3161 -21.2993 -21.5318 -21.598 -21.6852 
-22.0055 -22.4011 -22.797 -23.2345 -23.8766 -24.2539 -24.7308 -25.115 -25.3794 -25.7408 -25.9098 -25.9792 -26.2075 -26.3749 -26.8279 -27.1235 -27.789 -28.1803 -28.7913 -29.4987 -29.9878 -30.0143 -30.1027 -29.8451 -29.827 -29.994 -30.0037 -30.1083 -30.4993 -31.1445 -32.2212 -33.764 -36.1569 -39.1013 -42.2465 -45.2205 -47.2965 -48.2544 -48.0003 -46.3999 -45.3925", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0003 0.0008 0.0014 0.002 0.0027 0.0032 0.004 0.0044 0.0055 0.0062 0.0069 0.0075 0.008 0.0078 0.0072 0.007 0.0062 0.006 0.0053 0.0047 0.0042 0.0032 0.0025 0.0019 0.0012 0.0008 0.0007 0.0007 0.0008 0.0012 0.0012 0.0012 0.0016 0.0019 0.0019 0.002 0.0021 0.0024 0.0023 0.0018 0.0017 0.0017 0.0019 0.002 0.0022 0.003 0.006 0.0099 0.0126 0.0151 0.0163 0.016 0.0145 0.0117 0.0086 0.0049 0.0024 0.0018 0.0016 0.0014 0.0013 0.0013 0.0012 0.001 0.0008 0.0008 0.001 0.0016 0.0032 0.0057 0.0103 0.0138 0.0152 0.0151 0.0124 0.0077 0.0033 0.0025 0.0022 0.0025 0.0025 0.0025 0.0022 0.0022 0.0021 0.002 0.0022 0.0024 0.0022 0.0023 0.0024 0.002 0.0022 0.0022 0.0023 0.0023 0.0023 0.002 0.002 0.002 0.0021 0.0023 0.0021 0.0022 0.0022 0.0025 0.0026 0.0027 0.0028 0.0029 0.0029 0.0027 0.0029 0.0025 0.0024 0.0025 0.0024 0.0027 0.0026 0.0026 0.0024 0.0024 0.0024 0.0025 0.0025 0.0028 0.0026 0.0025 0.0026 0.0023 0.0025 0.0026 0.0029 0.0028 0.0029 0.0026 0.0024 0.0024 0.0025 0.0023 0.0027 0.0023 0.0024 0.0019 0.0017 0.0019 0.0022 0.0021 0.0026 0.0027 0.0025 0.0023 0.0018 0.0014 0.0012 0.0013 0.0017 0.002 0.002 0.002 0.002 0.0017 0.0017 0.0017 0.0016 0.0017 0.0018 0.0016 0.0018 0.0016 0.0016 0.0014 0.0015 0.0014 0.0012 0.0015 0.0013 0.0018 0.0026 0.0031 0.004 0.0042 0.0043 0.0041 0.0035 0.0033 0.003 0.0028 0.0031 0.0028 0.0022 0.0012 0.0032 0.0062 0.0072 0.0071 0.006 0.0046 0.0038 0.0044 0.0053 0.0056 0.0055 0.0049 0.004 0.0025 0.0016 0.0014 0.0019 0.0036 0.0079 0.0125 0.0156 0.0188 0.0211 0.0205 0.0185 0.015 0.0103 0.0045 0.0024 0.0019 0.0016 0.0016 0.0013 0.0014 0.0012 0.0013 0.0012 0.0012 0.0013 0.0021 0.0031 0.005 0.008 0.0086 0.0084 0.0076 0.0048 0.0016 0.0015 0.0016 0.0017 0.0018 0.0018 0.0018 0.0019 0.002 0.002 0.0018 0.0018 0.0022 0.0024 0.0025 0.0023 0.0025 0.002 0.0018 0.0019 0.002 0.0023 0.0024 0.0021 0.002 0.002 0.0021 0.0023 0.0025 0.0023 0.0022 0.0025 0.0021 0.0021 0.0025 0.0024 0.0023 0.0024 0.0021 0.002 0.0022 0.0023 0.0023 0.0023 0.0021 0.0018 0.0017 0.0017 0.0021 0.0023 0.0019 0.0018 0.0017 0.0013 0.0013 0.0014 0.0012 0.0013 0.0013 0.0013 0.0011 0.0014 0.0015 0.0015 0.0015 0.0012 0.001 0.001 0.0007 0.0006 0.0007 0.0007 0.0006 0.0008 0.0009 0.0011 0.0011 0.0011 0.0011 0.0009 0.0009 0.0011 0.0017 0.0023 0.0029 0.0031 0.0035 0.0039 0.0035 0.0036 0.0033 0.0025 0.0019 0.0012 0.0008 0.0013 0.0039 0.0068 0.0091 0.0106 0.0114 0.0106 0.009 0.0067 0.0027 0.0024 0.0023 0.0025 0.0022 0.0015 0.0013 0.001 0.0012 0.0011 0.001 0.0009 0.0007 0.001 0.0009 0.0009 0.0013 0.0014 0.0015 0.0016 0.0019 0.002 0.002 0.002 0.002 0.002 0.0019 0.0019 0.0017 0.0013 0.0013 0.0019 0.0057 0.0115 0.0168 0.0188 0.0183 0.0152 0.0087 0.0047 0.0028 0.0025 0.0021 0.0017 0.0018 0.0018 0.0016 0.0015 0.0012 0.0013 0.0014 0.0014 0.0015 0.0013 0.0015 0.0015 0.0018 0.002 0.0019 0.0021 0.0019 0.002 0.0022 0.0023 0.0023 0.0022 0.0022 0.0023 0.0025 0.0025 0.0027 0.0026 0.0027 0.0028 0.003 0.0031 0.0027 0.0028 0.0025 0.0025 0.0023 0.0023 0.0025 0.0024 0.0024 0.0022 0.002 0.0021 0.002 0.0018 0.0018 0.002 0.0021 0.0023 0.0024 0.0027 0.0026 0.0025 0.0024 0.0021 0.0021 0.0018 0.0018 0.002 0.0022 0.0023 0.0024 0.0023 0.0025 0.0027 0.0024 
0.0023 0.0025 0.0021 0.0029 0.0026 0.0023 0.0021 0.0033 0.0037 0.004 0.004 0.003 0.002 0.0016 0.0017 0.0016 0.0015 0.0016 0.002 0.0022 0.0028 0.0027 0.0051 0.0117 0.016 0.0198 0.0216 0.0204 0.0173 0.0125 0.008 0.0045 0.0035 0.0035 0.0031 0.0028 0.0025 0.0025 0.0024 0.0024 0.0026 0.0024 0.002 0.0016 0.0013 0.0011 0.0011 0.001 0.0008 0.0009 0.0008 0.0008 0.0009 0.0008 0.0005 0.0005 0.0005 0.0006 0.0008 0.001 0.001 0.0013 0.0043 0.0096 0.0132 0.0168 0.0183 0.0181 0.0159 0.0129 0.0095 0.0057 0.0034 0.0024 0.0017 0.0014 0.0014 0.0013 0.0014 0.0013 0.001 0.0007 0.0006 0.0005 0.0003 0.0011 0.0028 0.0044 0.0052 0.005 0.0045 0.0036 0.0024 0.0021 0.0024 0.0028 0.0028 0.0025 0.0023 0.0021 0.0019 0.0018 0.0016 0.0016 0.0018 0.002 0.002 0.002 0.0021 0.0019 0.0017 0.0018 0.0017 0.002 0.002 0.0019 0.0021 0.0025 0.0026 0.0026 0.0024 0.0023 0.0023 0.0024 0.0026 0.0026 0.0028 0.0028 0.0026 0.0023 0.0024 0.0026 0.0026 0.0024 0.0026 0.0025 0.0023 0.0022 0.002 0.0018 0.0017 0.0017 0.0018 0.0021 0.002 0.0021 0.0022 0.0019 0.0021 0.0021 0.0022 0.0018 0.0019 0.0013 0.0013 0.0014 0.0015 0.0015 0.0013 0.0014 0.0013 0.0013 0.0014 0.0014 0.0015 0.0011 0.0009 0.0006 0.0006 0.0007 0.0006 0.0007 0.0007 0.0008 0.0009 0.0009 0.0008 0.0005 0.0008 0.0007 0.0004 0.0005 0.0003 0.0005 0.0004 0.0005 0.0003 0.0007 0.0003 0.0005 0.001 0.0013 0.0011 0.0012 0.0013 0.0011 0.0017 0.0016 0.002 0.0025 0.0024 0.002 0.0021 0.0016 0.0015 0.0013 0.001 0.0005 0.0017 0.0045 0.0067 0.0081 0.0087 0.0084 0.007 0.0054 0.004 0.0026 0.0023 0.0021 0.0019 0.0017 0.0016 0.0015 0.0014 0.0013 0.0015 0.0012 0.0011 0.0014 0.0012 0.0014 0.0013 0.0016 0.0016 0.0016 0.0016 0.0017 0.0015 0.0014 0.001 0.001 0.001 0.001 0.0009 0.001 0.0011 0.0009 0.0014 0.0014 0.0015 0.0013 0.0015 0.0014 0.0017 0.0016 0.0017 0.0017 0.0016 0.0016 0.0017 0.0016 0.0013 0.001 0.0009 0.0006 0.0006 0.0004 0.0005 0.0004 0.0005 0.0004 0.0005 0.0007 0.0008 0.0013 0.0017 0.0017 0.0017 0.0017 0.0016 0.0018 0.0016 0.0019 0.0019 0.002 0.0017 0.0017 0.0016 0.0016 0.0018 0.0018 0.002 0.0019 0.0021 0.0017 0.0019 0.0016 0.0014 0.0014 0.0012 0.0012 0.0012 0.0014 0.0015 0.0016 0.0017 0.0014 0.002 0.0021 0.0019 0.0019 0.0019 0.0016 0.0015 0.0013 0.0017 0.002 0.0022 0.0019 0.0018 0.0017 0.0017 0.0015 0.0011 0.0012 0.0008 0.0005 0.0005 0.0003 0.0005 0.0004 0.0002 0.0002 0.0002 0.0005 0.001 0.0011 0.0012 0.0016 0.0018 0.002 0.0023 0.0021 0.0015 0.0016 0.0013 0.0016 0.0016 0.0016 0.0014 0.0017 0.0016 0.0017 0.0018 0.002 0.0021 0.0019 0.0018 0.0019 0.0019 0.0017 0.0015 0.0016 0.0016 0.0018 0.0017 0.0016 0.0015 0.0015 0.0013 0.0013 0.0014 0.0013 0.0016 0.0015 0.0017 0.0016 0.0017 0.0016 0.0013 0.0013 0.0011 0.0011 0.0012 0.0012 0.0016 0.0017 0.002 0.0019 0.0019 0.0016 0.0016 0.0018 0.0016 0.0018 0.0018 0.0017 0.0016 0.0015 0.0015 0.0017 0.0017 0.0017 0.0018 0.0016 0.0016 0.0014 0.0014 0.0015 0.0018 0.0018 0.002 0.0017 0.0016 0.0017 0.0016 0.0018 0.0017 0.0014 0.0017 0.0015 0.0012 0.0013 0.0014 0.0011 0.0011 0.0016 0.0012 0.0012 0.0012 0.0012 0.0011 0.0011 0.0009 0.0011 0.0008 0.0009 0.0008 0.0008 0.0011 0.0011 0.001 0.001 0.001 0.001 0.0008 0.0009 0.001 0.001 0.0011 0.001 0.0008 0.0008 0.0009 0.001 0.001 0.0009 0.0008 0.0009 0.0008 0.0007 0.001 0.0007 0.0009 0.0007 0.0006 0.0004 0.0006 0.0003 0.0002 0.0001 0.0002 0.0002 0.0003 0.0003 0.0008 0.0003 0.0004 0.0009 0.0014 0.0014 0.0018 0.0018 0.0012 0.001 0.0006 0.0002 0.0 0.0", + "breathiness": "-66.7848 -64.2435 -60.8567 -57.8247 -55.5168 -53.118 -50.7932 -48.6901 -47.0064 -45.8911 -45.0254 -45.0045 -45.4111 -46.0348 -46.7097 -47.4031 -48.1749 -49.3713 
-51.3565 -54.1684 -57.2756 -60.0176 -63.1449 -65.4114 -66.911 -67.5003 -67.1171 -66.1391 -64.5572 -62.3808 -60.1784 -57.9894 -56.1014 -54.3039 -53.1409 -52.5557 -52.4431 -52.8982 -53.6618 -54.6097 -55.6508 -56.4422 -56.378 -55.3603 -53.0463 -49.9534 -46.4512 -42.6672 -39.5089 -37.2178 -36.1054 -36.2576 -37.7813 -40.3362 -43.3492 -46.6903 -50.0844 -53.0516 -55.3714 -57.1861 -58.3152 -59.0654 -59.6817 -59.7216 -59.3868 -58.2694 -55.6158 -51.9683 -47.9408 -43.8854 -40.7758 -38.7795 -38.8548 -40.1027 -42.598 -45.1785 -47.7502 -49.4082 -50.9428 -51.2949 -51.9089 -52.5164 -53.2761 -54.0576 -54.8662 -55.5717 -55.611 -55.705 -55.4959 -55.0139 -54.6124 -53.8919 -53.0651 -52.5215 -51.8347 -51.1312 -50.5785 -50.2131 -49.9834 -49.7394 -49.9264 -50.1275 -50.5117 -51.3839 -51.8029 -52.2281 -52.6755 -52.6458 -52.4328 -51.7704 -51.4005 -50.9631 -50.6072 -50.4047 -50.114 -50.1238 -50.1387 -49.8958 -50.0656 -50.0883 -50.415 -50.4963 -50.5366 -50.9562 -51.5448 -51.7501 -52.0666 -52.2759 -52.3256 -52.2056 -51.7596 -51.1973 -50.3957 -49.5922 -48.9494 -48.694 -48.7493 -49.1654 -49.9235 -50.6143 -51.1966 -51.9617 -52.3724 -52.6812 -52.9378 -52.8866 -52.5339 -52.447 -52.3769 -52.7405 -53.3439 -54.085 -54.7864 -54.9827 -54.9381 -54.6404 -54.163 -53.8516 -53.8484 -54.4927 -55.1513 -56.0754 -57.24 -57.9838 -58.5792 -58.8701 -58.7915 -58.593 -58.4981 -58.7173 -58.9024 -59.0765 -59.0981 -58.4882 -57.6828 -56.1832 -54.1573 -52.3437 -50.3117 -48.8849 -47.7436 -46.9873 -46.6098 -46.6675 -47.1247 -48.1491 -49.4775 -51.3759 -52.5228 -53.2847 -52.4328 -50.5878 -47.8546 -44.9332 -42.6566 -41.4565 -41.6764 -42.3431 -43.3154 -44.3331 -45.4182 -46.4705 -48.1059 -49.7834 -51.9497 -53.187 -52.8368 -50.5843 -47.0059 -43.0812 -38.8091 -35.3367 -33.2089 -32.3955 -32.8736 -34.3504 -36.7904 -39.8232 -42.8718 -45.8823 -48.6835 -50.7305 -52.4382 -53.5454 -54.8455 -56.2128 -57.3128 -57.7484 -56.6296 -54.7511 -52.0358 -48.9614 -46.1369 -44.0312 -43.2186 -43.7522 -45.4028 -47.6807 -50.0927 -52.0759 -53.2362 -53.7754 -53.8931 -53.6763 -53.5912 -53.6398 -53.6957 -53.9131 -53.8602 -53.8227 -53.848 -53.6208 -53.5961 -53.6169 -53.7327 -53.8244 -53.8871 -53.5794 -53.2397 -53.0364 -52.7896 -53.0421 -53.0322 -53.3548 -53.3997 -53.2943 -53.1105 -53.1442 -53.1394 -53.2418 -53.2986 -53.4309 -53.4754 -53.6435 -53.7019 -53.9287 -53.9818 -54.1437 -54.0508 -53.9693 -53.9492 -53.8419 -53.8557 -53.6093 -53.3982 -53.0351 -52.751 -52.3255 -52.5431 -52.4701 -53.0645 -53.1128 -53.5472 -54.061 -54.2697 -54.7907 -55.2602 -55.6089 -56.0171 -56.0107 -55.9509 -55.5599 -55.093 -55.1233 -55.2838 -56.0205 -56.6632 -57.6686 -58.4957 -59.4989 -60.438 -61.2504 -61.6425 -62.0346 -61.6544 -61.0005 -59.8985 -58.8775 -57.6026 -56.0741 -54.5252 -53.1181 -51.6522 -50.1306 -48.8741 -48.3326 -47.8664 -47.9743 -48.7574 -50.2556 -51.8475 -53.4435 -53.8178 -52.9608 -50.9216 -48.0258 -44.6964 -42.056 -40.5991 -40.4133 -41.3418 -42.95 -44.6201 -46.4874 -48.1211 -49.5667 -51.4059 -53.0097 -54.7517 -56.3442 -57.6042 -58.8634 -59.7628 -60.1909 -60.5689 -60.7571 -60.8283 -60.3827 -59.6139 -58.2851 -56.8236 -55.4506 -54.4529 -54.0234 -53.7214 -54.0133 -54.3337 -54.9064 -55.6946 -56.4825 -57.4323 -57.1916 -55.5248 -52.8885 -49.132 -44.9056 -40.8834 -38.0709 -37.207 -38.3447 -40.8282 -43.5774 -46.3306 -48.748 -50.7354 -51.8758 -52.7037 -53.5317 -54.549 -55.5121 -56.4825 -57.1618 -57.6462 -57.8703 -57.9042 -57.4934 -56.9878 -56.4209 -55.7808 -55.07 -54.6194 -54.4574 -54.0495 -53.8463 -53.5354 -53.0981 -52.7704 -52.3585 -51.9055 -51.6231 -51.5102 -51.1876 -51.3346 -51.5241 -51.6941 -51.94 
-52.1978 -52.4321 -52.563 -52.5974 -52.6475 -52.8128 -52.847 -53.1144 -53.2754 -53.4655 -53.7089 -53.9087 -53.9382 -54.1368 -54.0931 -54.0478 -54.126 -53.9178 -53.697 -53.2198 -52.9308 -52.6226 -52.4747 -52.1928 -51.9547 -52.108 -52.145 -52.3752 -52.7003 -53.0368 -53.3352 -53.5169 -53.5574 -53.376 -53.3776 -53.401 -53.6043 -53.6182 -53.6816 -53.9221 -53.4948 -52.7027 -51.3927 -49.7368 -48.391 -47.2878 -46.9929 -47.2858 -48.6236 -50.034 -51.3874 -52.6205 -53.7062 -54.2162 -54.4465 -54.5247 -54.0434 -52.728 -50.8409 -48.076 -44.7151 -41.3788 -38.3594 -36.4402 -35.786 -36.4541 -38.2609 -40.7843 -43.7703 -46.5099 -48.5793 -50.4027 -51.2611 -51.4843 -51.6379 -51.4899 -51.4924 -51.4586 -51.7018 -51.836 -52.2563 -53.1847 -53.9658 -54.9625 -56.0071 -57.1898 -58.3597 -59.3221 -60.0347 -60.6156 -61.093 -61.0559 -61.0491 -60.6935 -60.3225 -59.4736 -57.8729 -55.3519 -52.151 -48.4972 -44.7885 -41.528 -38.904 -37.566 -37.7383 -38.7333 -40.4477 -42.8938 -45.854 -48.4314 -51.3517 -53.5662 -55.18 -56.5851 -57.5577 -58.5938 -59.722 -61.3836 -62.6362 -63.5277 -63.3075 -61.7971 -59.1811 -55.9024 -52.7797 -49.9279 -48.5216 -48.0427 -48.5216 -49.6469 -50.8663 -51.8303 -52.3962 -52.5441 -52.7985 -53.0215 -53.5721 -54.5327 -55.3337 -55.8301 -56.1791 -56.2854 -56.3919 -56.4747 -56.5933 -56.8725 -56.9469 -56.8768 -57.0702 -56.8378 -56.7079 -56.6046 -56.2487 -55.7628 -55.5753 -55.3164 -55.0424 -55.2754 -55.6049 -56.0584 -56.1202 -56.3674 -56.4932 -56.3019 -55.9391 -55.7135 -55.2572 -54.9557 -54.759 -54.8047 -54.8088 -54.7626 -54.878 -55.1274 -55.2626 -55.4808 -55.5483 -55.5972 -55.5167 -55.6928 -55.9154 -56.101 -56.4365 -56.5288 -56.6357 -56.2097 -56.3132 -56.3488 -56.2533 -56.4398 -56.7243 -57.1343 -57.2627 -57.5496 -57.9237 -58.4985 -58.7854 -58.9994 -58.7745 -58.2636 -57.9255 -57.6623 -57.5137 -58.136 -58.8928 -59.5099 -60.164 -60.7883 -61.5077 -61.7167 -62.186 -62.5393 -62.8901 -62.7902 -62.9101 -63.0655 -63.3876 -64.2035 -65.664 -67.2986 -68.7273 -70.0635 -70.4243 -70.4597 -69.5923 -68.4213 -66.644 -64.6603 -62.9668 -60.8774 -59.2028 -57.7303 -56.5673 -55.2554 -53.9231 -52.5296 -51.3172 -50.6341 -50.3895 -50.8627 -51.7874 -53.3603 -54.7625 -55.841 -55.5085 -54.0374 -51.4301 -48.53 -45.666 -43.4331 -42.8036 -43.1314 -43.8708 -45.1276 -46.3854 -47.7234 -48.8994 -50.2646 -51.5732 -53.1766 -54.6724 -55.9026 -56.6496 -57.3193 -57.5072 -57.9166 -57.9274 -58.196 -57.8699 -57.5104 -57.3728 -56.9285 -56.4836 -56.475 -56.5724 -56.6734 -56.9517 -57.4809 -57.8442 -58.3159 -58.9679 -59.4687 -59.9898 -60.4732 -60.5663 -60.4754 -60.6208 -60.3584 -60.0119 -59.6161 -58.8517 -58.1477 -57.4058 -56.6517 -55.8584 -55.5633 -55.1816 -55.5831 -56.6028 -57.7244 -59.1319 -60.749 -62.0836 -63.0381 -63.5234 -64.2459 -64.0108 -63.5699 -62.599 -61.3386 -59.919 -58.2697 -57.0631 -56.4205 -55.9176 -55.775 -55.7775 -55.9427 -55.8801 -55.8667 -55.8938 -55.4322 -55.118 -55.0378 -54.864 -54.8354 -55.0445 -55.3701 -55.6707 -55.9309 -56.1664 -56.6194 -56.9517 -57.5531 -58.095 -58.6228 -59.1018 -59.0216 -58.7996 -58.3572 -57.8952 -57.504 -57.4157 -57.2762 -57.1523 -57.5533 -57.564 -57.9507 -58.3017 -59.1873 -59.8267 -59.9319 -59.7763 -59.0094 -57.9308 -56.2489 -55.0433 -53.9427 -53.4072 -53.1618 -53.6216 -54.4294 -55.249 -56.7857 -58.1759 -59.595 -61.2727 -62.9646 -64.4235 -65.6282 -66.4374 -66.2028 -65.4738 -64.0589 -62.0379 -60.3716 -58.8935 -58.0094 -57.5597 -57.6662 -57.989 -57.975 -58.2247 -57.9844 -57.9237 -57.4188 -57.2479 -57.0217 -56.6905 -56.329 -55.6925 -55.3973 -55.1437 -54.5606 -54.5139 -54.54 -54.5758 -54.7399 -54.9352 -55.1447 
-55.1087 -54.8991 -54.6588 -54.4051 -54.6302 -54.7431 -54.7707 -54.6498 -54.8482 -54.6695 -54.8712 -54.743 -54.5144 -54.4767 -54.1721 -54.2041 -53.9223 -53.904 -54.0728 -54.5166 -54.939 -55.4183 -55.8832 -56.3712 -56.6893 -57.1 -56.9967 -56.8562 -56.7717 -56.383 -56.1487 -55.6379 -55.4887 -55.0827 -54.9245 -54.4815 -53.9962 -53.5505 -53.4864 -53.5511 -53.5754 -54.0506 -54.2811 -54.4585 -54.7531 -54.7639 -54.5391 -54.2248 -54.1175 -54.2222 -54.3803 -54.7367 -54.9731 -55.2838 -55.3251 -55.4119 -55.4408 -55.5667 -55.6922 -55.9285 -56.2053 -56.4119 -56.296 -56.3397 -56.3863 -56.2761 -56.4146 -56.1825 -55.7748 -55.4752 -55.0587 -55.0357 -55.3478 -55.7613 -56.136 -56.7382 -57.1474 -56.9805 -56.8908 -56.5791 -56.4037 -56.0829 -56.0337 -55.7395 -56.0053 -56.206 -56.5465 -56.716 -57.0055 -57.4894 -57.7596 -57.9638 -58.0873 -58.2906 -58.3994 -58.4015 -58.755 -59.1973 -59.5467 -59.871 -60.2737 -60.3051 -60.6605 -61.0898 -61.9404 -62.9028 -64.2987 -65.5579 -66.6882 -67.391 -67.721 -67.9983 -68.0067 -67.9151 -67.5927 -66.9343 -65.3644 -62.8845 -59.759 -56.6079 -53.7258 -52.4853 -52.4817 -53.5844 -55.0146 -56.1611 -56.9879 -58.3155", "breathiness_timestep": "0.011609977324263039" }, { @@ -282,9 +282,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0", "f0_seq": "167.2 167.4 167.2 167.1 167.3 167.4 167.2 167.5 166.9 166.9 167.1 167.2 167.2 166.7 167.1 167.0 167.1 167.2 167.0 167.0 167.1 167.0 167.1 167.0 166.9 167.1 167.1 167.0 166.9 167.0 167.2 167.1 167.1 167.0 167.0 167.1 167.2 166.7 166.9 166.6 166.7 166.6 166.3 166.3 166.5 166.2 166.0 165.7 165.6 165.0 163.7 161.0 161.2 164.4 170.1 175.3 182.7 188.2 192.4 196.0 196.8 194.9 192.6 187.7 182.4 177.7 173.7 170.6 169.2 168.3 164.9 163.9 163.9 164.4 165.7 167.4 169.0 168.5 167.1 165.1 163.9 162.2 159.4 156.4 155.4 160.9 165.3 171.2 178.1 185.7 193.5 193.7 195.6 196.4 196.6 196.0 195.1 195.6 196.1 195.3 194.6 194.9 194.6 192.5 186.3 191.3 197.0 203.8 211.4 219.0 226.6 223.2 221.9 221.9 221.3 220.0 218.5 215.5 213.1 211.5 211.1 213.0 215.3 219.4 221.5 224.6 226.4 224.8 223.3 221.6 219.6 218.2 217.1 216.5 218.0 220.5 221.7 223.2 224.7 224.5 223.5 221.9 220.3 212.6 205.8 206.0 218.7 229.5 239.5 249.9 258.8 255.5 247.8 246.2 246.2 245.0 244.1 245.4 245.8 247.0 247.8 248.3 248.1 247.4 242.1 237.0 237.5 238.2 238.8 240.1 240.4 241.8 242.5 233.2 226.8 224.6 223.3 224.3 224.4 224.8 225.4 225.1 225.0 226.6 226.3 226.2 227.7 228.3 230.5 235.9 241.6 246.8 250.7 251.0 251.1 250.2 248.6 246.9 244.9 244.6 244.7 244.5 245.6 245.5 245.3 245.1 246.1 246.5 246.4 245.5 245.2 246.3 246.8 247.9 248.4 249.2 249.8 250.0 249.9 248.8 248.1 247.6 246.2 243.1 240.4 239.1 238.9 241.1 242.4 245.9 249.7 254.3 258.3 260.8 262.0 260.5 257.9 252.0 246.4 240.9 236.8 235.7 236.4 238.2 241.2 245.7 250.2 253.4 255.7 257.9 258.7 256.6 253.6 249.8 245.7 242.3 240.0 238.6 240.0 241.7 245.2 247.4 249.3 250.0 251.9 253.0 251.5 251.8 248.5 247.2 245.2 243.8 242.0 240.1 240.5 241.5 243.1 245.5 247.2 249.0 250.9 252.3 253.1 253.1 251.8 249.4 247.5 244.0 241.4 239.6 238.8 239.0 239.8 242.1 245.1 248.4 252.1 255.7 257.9 258.0 256.5 255.0 252.5 249.1 244.4 240.9 239.7 240.2 242.7 245.7 248.3 251.9 255.9 258.7 259.6 259.1 256.6 253.3 251.4 247.8 245.9 246.0 245.1 244.9 245.1 245.3 246.1 247.7 249.1 250.6 252.4 252.7 253.5 252.1 251.6 250.4 248.9 247.5 240.9 239.1 240.4 238.7 237.9 235.8 233.5 232.0 229.9 229.2 227.5", "f0_timestep": "0.011609977324263039", - "energy": "0.0001 0.0003 0.0003 0.0001 0.0 0.0 0.0 0.0 0.0002 0.0003 0.0006 0.001 0.0009 0.0013 0.0011 0.0011 0.001 0.0011 0.0014 0.0014 0.0018 0.0014 0.001 
0.0015 0.0023 0.0016 0.0012 0.0018 0.0019 0.0023 0.0021 0.0018 0.0016 0.0016 0.0013 0.001 0.0016 0.0008 0.0007 0.0012 0.0006 0.0021 0.0039 0.0057 0.0093 0.0102 0.0107 0.0087 0.0149 0.0316 0.0427 0.0522 0.0584 0.0586 0.0586 0.0572 0.0583 0.0586 0.0596 0.0601 0.0618 0.0636 0.0638 0.0624 0.0588 0.0531 0.0482 0.0452 0.0461 0.0497 0.0536 0.0585 0.0614 0.0628 0.065 0.0655 0.0659 0.0653 0.0634 0.0593 0.0536 0.0441 0.0317 0.0204 0.0075 0.0036 0.0023 0.0056 0.0116 0.0298 0.0431 0.0553 0.0635 0.0659 0.0664 0.0642 0.0628 0.0619 0.0601 0.0601 0.0591 0.0563 0.0488 0.0387 0.0276 0.015 0.0102 0.0104 0.0109 0.0204 0.0354 0.0494 0.0612 0.0684 0.0697 0.0686 0.0654 0.0626 0.0602 0.0583 0.0567 0.0553 0.0562 0.0556 0.0569 0.0584 0.0594 0.0604 0.061 0.0617 0.0612 0.061 0.0613 0.0609 0.0616 0.062 0.0619 0.0619 0.0616 0.0607 0.0597 0.0558 0.0479 0.0388 0.0247 0.0122 0.0055 0.0016 0.0051 0.0126 0.0357 0.0541 0.0675 0.0768 0.0783 0.0776 0.0746 0.074 0.0748 0.0739 0.0743 0.073 0.0706 0.0666 0.0553 0.0432 0.0279 0.0169 0.0168 0.0167 0.0156 0.0248 0.0417 0.0602 0.074 0.0837 0.0881 0.0903 0.0926 0.0947 0.0963 0.0957 0.0938 0.09 0.0835 0.0773 0.0716 0.0662 0.0632 0.0638 0.069 0.0756 0.0821 0.086 0.0896 0.0908 0.0927 0.0916 0.0917 0.0904 0.0894 0.0908 0.0922 0.0948 0.0978 0.1 0.1026 0.1033 0.1027 0.1023 0.1024 0.1031 0.1052 0.1088 0.1103 0.1131 0.1149 0.1147 0.1153 0.1158 0.1162 0.1166 0.1153 0.1131 0.1107 0.1089 0.1092 0.1099 0.1103 0.1105 0.1115 0.1144 0.1172 0.1198 0.1202 0.1204 0.1193 0.1164 0.1124 0.108 0.1028 0.0989 0.0961 0.0943 0.0952 0.0965 0.1008 0.1047 0.1098 0.1134 0.1146 0.1169 0.1158 0.1129 0.1087 0.1035 0.0989 0.0976 0.0972 0.0958 0.0986 0.0988 0.1002 0.1022 0.1037 0.1046 0.1061 0.1072 0.1069 0.1065 0.104 0.102 0.1004 0.0999 0.099 0.0989 0.0994 0.0987 0.1006 0.1016 0.1015 0.1018 0.1018 0.1017 0.1014 0.0998 0.0972 0.0944 0.092 0.0884 0.0854 0.082 0.0791 0.0767 0.0768 0.0777 0.0797 0.0824 0.0851 0.0874 0.0878 0.0867 0.0836 0.079 0.074 0.0683 0.0631 0.0598 0.057 0.0552 0.0565 0.0578 0.0611 0.0628 0.064 0.0638 0.0618 0.0594 0.0556 0.0512 0.0479 0.0444 0.0419 0.0403 0.0376 0.0368 0.0357 0.0349 0.0345 0.0347 0.0348 0.0335 0.0321 0.03 0.0273 0.0248 0.0214 0.0178 0.0139 0.0095 0.0042 0.002 0.0008 0.0004 0.0 0.0 0.0001 0.0 0.0", + "energy": "-91.335 -89.7543 -88.383 -85.6886 -83.0304 -79.082 -75.0048 -71.7329 -68.8001 -65.786 -63.1616 -61.2834 -59.8956 -58.3297 -57.4727 -56.43 -55.552 -54.7478 -54.0625 -53.0499 -52.6828 -51.7087 -51.4832 -51.1853 -51.1393 -50.8482 -50.885 -51.1412 -51.7173 -52.2423 -53.0803 -54.1466 -55.3765 -56.6882 -58.1499 -59.6094 -60.8225 -60.829 -59.7906 -57.1166 -53.3254 -48.859 -44.5726 -41.0405 -38.619 -36.6978 -34.6424 -33.3207 -31.5333 -29.7351 -27.6725 -26.3259 -25.256 -24.9498 -24.9649 -25.2148 -25.2955 -25.3292 -25.1965 -24.7194 -24.5882 -24.2155 -24.4308 -24.2151 -24.7828 -24.8786 -25.0314 -24.7296 -24.562 -24.1277 -23.6643 -23.1635 -22.8621 -22.5952 -22.3149 -22.3453 -22.2758 -22.1049 -23.0658 -24.19 -26.0001 -29.1427 -32.8816 -37.0152 -40.1746 -41.5083 -40.8474 -38.5198 -34.7017 -30.5636 -26.629 -24.1201 -22.8961 -22.3024 -21.9669 -21.8241 -21.8871 -21.9968 -22.1775 -22.5648 -23.4474 -24.4169 -26.5535 -28.8417 -31.591 -33.8027 -35.0644 -35.3012 -34.0371 -31.6979 -29.2189 -26.7667 -24.844 -23.8242 -23.2886 -22.9991 -23.0155 -23.2514 -23.8476 -24.1528 -24.3964 -24.6009 -24.7535 -24.6398 -24.5302 -24.3527 -24.3227 -24.0788 -24.0728 -23.8876 -23.8853 -23.8572 -23.6577 -23.5224 -23.6244 -23.3975 -23.3141 -23.4556 -23.4958 -23.8155 -24.1596 -25.5846 -27.743 -31.1623 -34.7336 -38.0026 
-39.5868 -39.6159 -37.945 -34.7655 -30.8552 -27.2423 -24.7529 -23.1203 -22.2898 -22.2181 -21.8666 -21.9792 -21.992 -22.1089 -22.3131 -22.5452 -23.3954 -24.8566 -26.5255 -28.5478 -30.7308 -32.4781 -33.4958 -33.385 -32.1026 -29.8598 -27.4884 -25.1241 -23.2855 -21.6907 -21.0167 -20.3801 -20.1976 -20.0347 -19.7342 -19.766 -20.0205 -20.1637 -20.4099 -20.8716 -21.1311 -21.2029 -21.0704 -20.8747 -20.6911 -20.3689 -19.7811 -19.7571 -19.775 -19.6828 -19.5635 -19.649 -19.6294 -19.5408 -19.3843 -19.1047 -19.0646 -18.7185 -18.6109 -18.4787 -18.2654 -18.3845 -18.2751 -18.4647 -18.3906 -18.3017 -18.1627 -18.0916 -18.0649 -18.0308 -18.0833 -17.9269 -17.9051 -17.8964 -18.0363 -18.1549 -18.3318 -18.3528 -18.5165 -18.5 -18.758 -18.7347 -18.891 -18.9377 -18.8184 -18.9846 -18.8544 -18.8639 -18.7145 -18.8683 -19.1378 -19.3065 -19.5206 -19.9062 -20.2405 -20.5881 -20.5745 -20.6458 -20.5735 -20.3369 -20.1182 -19.7785 -19.7094 -19.6029 -19.6439 -19.6976 -19.9684 -20.2255 -20.4278 -20.475 -20.4288 -20.4176 -20.2021 -20.0455 -19.9943 -19.7657 -19.6512 -19.2851 -19.1918 -19.1025 -19.02 -18.8851 -18.882 -18.8461 -18.8728 -18.8663 -18.8815 -18.7271 -18.5221 -18.5701 -18.7958 -18.4065 -18.4897 -18.5473 -18.6959 -18.8047 -18.7242 -18.9657 -19.0963 -19.112 -19.2226 -19.439 -19.4099 -19.6751 -19.7725 -20.1118 -20.3216 -20.5106 -20.8492 -20.7611 -20.912 -20.7575 -20.9414 -21.1273 -21.1093 -21.5748 -22.0827 -22.593 -22.9297 -23.6614 -23.9743 -24.215 -24.4748 -24.6898 -24.5828 -24.6231 -24.533 -24.5085 -24.4152 -24.432 -24.7476 -24.9891 -25.256 -25.9678 -26.4499 -27.2758 -28.0291 -28.6166 -29.3096 -29.4574 -30.1256 -30.0788 -30.0589 -29.8724 -29.6564 -29.6835 -30.0465 -30.4455 -31.2586 -32.7731 -35.0749 -38.433 -42.7027 -47.2208 -52.688 -57.1975 -61.3337 -63.726 -64.4405 -64.0196 -62.0945 -60.0283 -55.5607", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0001 0.0001 0.0002 0.0002 0.0003 0.0004 0.0001 0.0002 0.0005 0.0009 0.0009 0.001 0.0012 0.0014 0.0012 0.0012 0.0013 0.0014 0.0015 0.0017 0.0017 0.0018 0.0017 0.0015 0.0017 0.0018 0.002 0.002 0.002 0.0022 0.002 0.0019 0.0019 0.0015 0.0012 0.0012 0.001 0.0009 0.0008 0.0006 0.0004 0.0011 0.0036 0.0063 0.0091 0.0107 0.0114 0.011 0.0093 0.0073 0.005 0.0032 0.0023 0.0022 0.0023 0.0019 0.0019 0.0017 0.0016 0.0014 0.0015 0.0016 0.0018 0.0016 0.0012 0.0011 0.0008 0.0006 0.0006 0.0008 0.0009 0.0008 0.0011 0.0011 0.0012 0.0012 0.0013 0.0011 0.0009 0.001 0.001 0.0009 0.0007 0.0006 0.0007 0.0011 0.0013 0.0017 0.0019 0.0017 0.0015 0.0009 0.0006 0.0003 0.0002 0.0001 0.0004 0.0002 0.0004 0.0001 0.0004 0.0008 0.0014 0.0027 0.0053 0.0073 0.0093 0.0105 0.0108 0.0097 0.008 0.0059 0.0033 0.002 0.0016 0.0019 0.0016 0.0016 0.0016 0.0017 0.0018 0.0018 0.002 0.002 0.0019 0.0017 0.0016 0.0015 0.0016 0.0017 0.0016 0.0017 0.0014 0.0013 0.0017 0.0015 0.0015 0.0016 0.0014 0.0012 0.0013 0.0011 0.0011 0.0011 0.0029 0.0034 0.0036 0.0035 0.0044 0.0041 0.0039 0.0038 0.0025 0.0015 0.0009 0.0008 0.0007 0.0006 0.0004 0.0005 0.0005 0.0005 0.0008 0.0011 0.0043 0.0093 0.0125 0.0147 0.0171 0.0172 0.0166 0.0146 0.0112 0.0067 0.0039 0.0027 0.0019 0.0013 0.0014 0.0013 0.0014 0.0015 0.0011 0.0012 0.0015 0.0014 0.0017 0.0017 0.0017 0.0017 0.0013 0.0013 0.0012 0.001 0.0011 0.0011 0.0011 0.0013 0.0012 0.0011 0.0011 0.001 0.001 0.0011 0.001 0.0011 0.001 0.0011 0.0011 0.001 0.0012 0.0013 0.0014 0.0018 0.0017 0.0017 0.0016 0.0016 0.0019 0.0018 0.0019 0.0019 0.0019 0.0021 0.002 0.0021 0.002 0.002 0.002 0.0021 0.002 0.0019 0.0019 0.0018 0.002 0.002 0.0018 0.0021 0.0021 0.0021 0.0019 0.0017 0.0016 0.0013 0.0015 
0.0016 0.0016 0.0019 0.0017 0.0018 0.0018 0.0018 0.0022 0.0022 0.0021 0.0019 0.0016 0.0016 0.0015 0.0014 0.0015 0.0015 0.0019 0.002 0.0019 0.0018 0.0018 0.0018 0.0018 0.0021 0.0021 0.0021 0.0022 0.002 0.0018 0.0017 0.0016 0.0017 0.0017 0.0017 0.0018 0.0015 0.0013 0.0016 0.0015 0.0016 0.0019 0.0017 0.0016 0.0015 0.0011 0.0012 0.0013 0.0012 0.0011 0.0014 0.0015 0.0014 0.0013 0.0013 0.0012 0.0013 0.0014 0.0014 0.0014 0.0012 0.001 0.001 0.0008 0.0007 0.0008 0.0009 0.0009 0.0008 0.0008 0.0009 0.0008 0.0008 0.0007 0.0007 0.0006 0.0009 0.0006 0.0005 0.0005 0.0005 0.0004 0.0004 0.0004 0.0005 0.0004 0.0005 0.0005 0.0005 0.0007 0.0006 0.0005 0.0005 0.0011 0.0017 0.0019 0.002 0.0012 0.0006 0.0003 0.0 0.0002 0.0003 0.0002", + "breathiness": "-91.4506 -90.1157 -88.8501 -87.4963 -85.1005 -81.7114 -77.0341 -71.9005 -67.5696 -63.9775 -61.5759 -59.9055 -58.6826 -57.9638 -56.9921 -56.3148 -55.688 -55.048 -54.2957 -53.5921 -52.9275 -52.0595 -51.4595 -50.9934 -50.4482 -50.1894 -50.1263 -50.387 -50.8454 -51.5106 -52.5931 -53.55 -54.7379 -56.0512 -57.8142 -59.5694 -61.0583 -61.309 -60.3 -57.7811 -54.2937 -49.6965 -44.9632 -41.2362 -38.7549 -37.1565 -36.3884 -36.4674 -37.4484 -39.0776 -41.309 -44.0753 -46.794 -49.2714 -51.4057 -52.8255 -53.8701 -54.5647 -55.0557 -55.2875 -55.6701 -56.0918 -56.7625 -57.435 -58.2199 -58.9139 -59.3829 -59.8602 -59.5196 -59.0599 -58.7559 -58.2717 -57.5982 -57.0313 -56.6807 -56.6577 -56.7508 -57.0468 -57.8454 -58.8863 -60.2433 -61.5505 -62.2862 -62.5521 -61.7602 -60.3191 -58.6327 -57.2705 -56.794 -57.3061 -58.3643 -59.7856 -61.1052 -62.6188 -63.6501 -64.3588 -64.7033 -64.5811 -63.7731 -62.6302 -60.8494 -58.3765 -55.3677 -52.1503 -48.3491 -45.2268 -42.6441 -41.2948 -41.5974 -43.2214 -45.5138 -48.1696 -50.6605 -52.8539 -54.5646 -55.4319 -55.9871 -56.4299 -56.9448 -57.4803 -58.0676 -58.2697 -58.4898 -58.4508 -58.6699 -58.6426 -58.7091 -58.6493 -58.5662 -58.427 -58.0434 -57.6344 -57.3217 -57.2047 -56.9445 -57.0641 -57.1678 -57.6331 -57.9999 -58.4716 -58.5309 -57.798 -56.717 -55.0553 -53.2044 -51.0144 -49.9399 -49.4156 -49.7431 -50.4906 -51.8649 -53.4825 -55.2215 -57.0463 -58.8675 -59.9455 -61.171 -61.8945 -62.2905 -62.1527 -60.7325 -58.4387 -55.0258 -51.0895 -46.982 -42.8362 -39.9017 -37.7569 -36.951 -37.4428 -38.6105 -40.3107 -43.0435 -45.8638 -48.3732 -50.4827 -51.8333 -52.7979 -52.9308 -53.5211 -54.086 -54.352 -54.4021 -54.2738 -53.9167 -53.5351 -53.0061 -52.5849 -52.38 -52.3305 -52.6245 -52.9711 -53.4946 -54.2031 -54.7181 -54.8706 -54.8118 -54.5896 -54.3829 -53.9516 -54.171 -54.1209 -54.1279 -53.7489 -53.2765 -52.697 -52.0048 -51.4054 -51.0868 -50.9399 -50.9891 -50.8442 -50.6682 -50.5554 -50.6963 -50.603 -51.1129 -51.4707 -52.0396 -52.3511 -52.7225 -52.9325 -53.2517 -53.5449 -53.9338 -54.0466 -54.1494 -54.0211 -53.7837 -53.5455 -53.6271 -53.6112 -54.1915 -54.3056 -54.5276 -54.3996 -54.3434 -54.0966 -54.0748 -54.2998 -54.7916 -55.0851 -55.6091 -55.6287 -55.7001 -55.5833 -55.5473 -55.5289 -55.6792 -55.8085 -55.88 -56.0499 -56.008 -56.1647 -56.3334 -56.7932 -57.1288 -57.3062 -57.5066 -57.681 -57.1525 -56.6616 -55.8536 -55.2161 -54.7599 -54.6594 -54.8912 -55.2172 -55.5299 -55.8076 -55.8265 -55.569 -55.4166 -55.1297 -54.5609 -54.1666 -53.8349 -53.1668 -52.7585 -52.5277 -52.4776 -52.6168 -52.933 -53.1127 -53.0476 -53.0559 -52.9545 -52.5556 -52.4327 -52.7055 -53.1331 -53.6208 -54.2086 -54.5479 -54.9458 -55.2688 -55.4071 -55.7373 -55.7135 -55.8083 -55.8324 -56.1051 -56.2392 -56.8158 -57.5633 -58.284 -58.8299 -59.6824 -59.905 -60.4559 -60.9205 -61.2763 -61.8462 -62.3827 -62.7677 -63.4652 
-63.8572 -64.1901 -64.6398 -65.3576 -66.0776 -67.1031 -68.1056 -68.9111 -69.8609 -70.6216 -71.3396 -71.895 -72.4216 -72.61 -72.4874 -72.2865 -72.0855 -71.1525 -69.7042 -67.5992 -64.7788 -62.1597 -60.1182 -59.2179 -59.8052 -61.7633 -64.5258 -67.3592 -70.1343 -71.981 -72.1588 -71.8469 -70.8648", "breathiness_timestep": "0.011609977324263039" }, { @@ -298,9 +298,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "199.9 200.4 200.4 200.8 201.1 200.8 201.0 201.2 201.4 200.7 201.4 201.6 201.2 201.4 200.9 201.3 201.4 201.4 201.6 201.5 201.5 201.9 202.3 202.3 202.6 202.4 202.3 202.5 202.9 202.9 202.9 203.2 203.0 203.2 203.5 203.2 203.4 203.8 203.8 203.3 203.8 203.7 203.5 204.3 204.0 204.0 204.0 203.7 204.0 204.0 204.2 203.9 203.5 203.7 204.2 206.2 208.1 210.9 222.3 231.8 238.5 244.2 246.0 246.9 244.4 241.9 237.6 230.9 227.1 221.2 213.0 220.3 225.6 221.6 219.0 218.3 219.3 220.7 221.6 221.8 221.7 221.6 221.8 222.1 220.5 216.2 207.3 213.4 221.2 229.5 238.6 247.4 257.9 257.2 257.2 257.6 256.2 252.9 248.7 243.4 238.0 235.1 235.5 236.0 238.7 242.5 246.5 251.2 255.2 257.6 257.0 254.8 248.1 243.5 237.5 232.8 233.0 235.7 240.5 248.1 251.8 254.9 253.8 253.4 249.5 243.6 233.4 222.4 225.1 231.4 239.4 247.1 254.8 262.2 255.1 250.8 250.5 250.6 250.3 249.5 248.4 246.9 246.1 246.0 245.5 243.3 236.6 231.9 229.5 228.9 228.6 229.2 228.1 228.6 225.7 220.8 220.0 219.6 219.6 220.2 221.1 221.4 221.6 221.7 222.1 221.7 222.0 221.4 219.2 216.6 214.2 215.4 226.6 241.4 250.5 256.3 256.8 257.0 254.3 252.2 247.1 244.7 243.2 242.6 243.1 242.8 244.1 247.7 252.3 255.7 257.9 255.8 252.4 249.6 246.8 243.6 241.0 239.5 238.8 238.6 243.3 250.2 257.1 261.4 260.5 255.8 245.3 226.1 220.3 217.1 215.0 213.4 211.0 208.5 207.4 204.2 197.8 191.8 191.6 190.4 190.7 189.5 189.9 191.1 192.3 192.1 192.6 194.6 195.9 197.0 196.8 195.9 195.0 195.1 194.4 194.1 194.6 195.6 196.2 197.0 196.4 195.6 195.8 196.4 196.6 196.9 195.1 192.5 185.9 178.9 174.1 172.1 170.2 168.9 167.1 165.7 161.7 159.1 161.6 164.3 166.6 167.2 166.8 165.9 164.2 161.2 157.5 155.1 153.3 153.7 155.9 159.9 165.7 169.8 173.9 176.0 176.4 174.7 170.8 166.6 161.6 156.5 154.1 154.9 158.0 160.7 164.6 168.0 171.2 172.6 174.0 174.4 172.2 168.6 163.3 157.7 156.0 156.2 157.2 159.2 159.1 159.3 159.3 159.4 160.2 160.5 160.6 161.1 161.4 161.4 161.6 161.6 162.0 162.2 162.2 162.0 161.9 161.5 160.9 161.1 160.7 160.2 160.6 160.2 160.8 160.9 159.2", "f0_timestep": "0.011609977324263039", - "energy": "0.0007 0.0006 0.0005 0.0004 0.0004 0.0004 0.0004 0.001 0.0006 0.0006 0.0002 0.0006 0.0005 0.0006 0.0013 0.0015 0.0015 0.0017 0.0018 0.0018 0.0024 0.0023 0.0021 0.0025 0.0022 0.0031 0.0028 0.0026 0.0025 0.0026 0.0022 0.0021 0.0024 0.0023 0.0018 0.0019 0.0011 0.001 0.001 0.0005 0.0006 0.0017 0.0025 0.0046 0.0073 0.0093 0.0113 0.0122 0.0122 0.0117 0.0196 0.0411 0.0562 0.0686 0.0765 0.0785 0.0759 0.0704 0.0663 0.0622 0.0621 0.0639 0.0664 0.069 0.0697 0.0703 0.0694 0.0645 0.0547 0.0434 0.0359 0.0479 0.0613 0.0724 0.0796 0.0814 0.0813 0.0806 0.081 0.0796 0.0793 0.0787 0.0777 0.0767 0.0715 0.0604 0.0477 0.0316 0.0203 0.0173 0.0163 0.0175 0.0393 0.0609 0.0743 0.0873 0.0901 0.0894 0.0886 0.0849 0.0826 0.0779 0.0756 0.0732 0.0724 0.0729 0.0721 0.0734 0.0741 0.0741 0.0762 0.0763 0.0757 0.073 0.0686 0.0663 0.0642 0.0627 0.0636 0.0633 0.0634 0.0648 0.0654 0.0659 0.0659 0.0581 0.0484 0.0362 0.0227 0.0106 0.0054 0.0063 0.0144 0.0417 0.0594 0.0779 0.0892 0.0931 0.0943 0.092 0.0889 0.0862 0.0838 0.081 0.077 0.0667 0.0548 0.039 0.025 0.0159 0.016 0.0155 0.0142 0.0176 0.0385 0.0536 0.0656 0.075 0.0773 0.0778 0.0783 0.0773 
0.0752 0.0734 0.0721 0.0693 0.0681 0.0673 0.0665 0.0643 0.0612 0.0577 0.0594 0.0681 0.077 0.0861 0.0931 0.0947 0.097 0.0946 0.0893 0.0858 0.0801 0.0777 0.0768 0.0749 0.0759 0.0762 0.0773 0.0779 0.0796 0.0818 0.0825 0.084 0.0812 0.079 0.0768 0.0761 0.0769 0.0763 0.0761 0.0743 0.074 0.0743 0.0737 0.0677 0.0559 0.0421 0.0224 0.009 0.0082 0.009 0.0105 0.0113 0.0267 0.0475 0.0611 0.0722 0.0778 0.0767 0.0755 0.074 0.0742 0.0746 0.0742 0.0746 0.073 0.0715 0.0698 0.0673 0.0652 0.0642 0.0626 0.0624 0.0623 0.0616 0.0621 0.0616 0.0608 0.0618 0.061 0.0605 0.0588 0.057 0.0561 0.055 0.0537 0.0475 0.0392 0.0283 0.016 0.007 0.0037 0.0046 0.015 0.0313 0.044 0.0548 0.0623 0.0664 0.0674 0.0677 0.068 0.0661 0.0637 0.061 0.0565 0.0502 0.0457 0.0415 0.04 0.0403 0.0406 0.0422 0.0448 0.0454 0.0469 0.0445 0.0417 0.0383 0.0341 0.0317 0.0289 0.0266 0.0253 0.0237 0.0236 0.0232 0.0226 0.0224 0.0218 0.0211 0.0203 0.0182 0.0175 0.0168 0.0146 0.0132 0.0093 0.006 0.0039 0.0024 0.0011 0.0005 0.0008 0.0 0.0011 0.0002 0.0002 0.001 0.0013 0.0007 0.0005 0.0013 0.0014 0.0035 0.0057 0.0 0.8704 0.0184 0.0099 0.0047 0.0027 0.0021 0.0005 0.0003 0.0", + "energy": "-85.1906 -85.6317 -82.9888 -79.9285 -76.7469 -73.8136 -71.0893 -68.088 -65.654 -63.3308 -61.3078 -58.9783 -56.9385 -54.5962 -52.7045 -50.9216 -49.646 -48.3092 -47.4336 -46.6461 -46.0314 -45.6278 -45.5431 -45.4692 -45.4194 -45.6897 -45.6981 -46.0117 -46.3126 -46.5807 -46.8516 -47.6508 -48.7553 -49.9729 -51.8796 -53.7776 -55.4411 -56.5255 -56.3205 -55.116 -52.7314 -49.4349 -45.9827 -42.5203 -39.7879 -37.8649 -36.2416 -34.91 -32.8876 -30.8782 -28.7769 -26.6949 -24.7159 -23.4766 -23.0592 -23.1638 -23.3149 -23.8427 -24.2302 -24.3863 -24.4607 -24.4082 -24.245 -24.0971 -24.1211 -24.6683 -25.3246 -26.242 -26.9153 -27.1329 -27.0845 -26.481 -25.2792 -24.173 -23.3012 -22.6892 -22.625 -22.1858 -22.2609 -22.3146 -22.2885 -22.391 -22.7971 -23.1916 -24.1433 -25.8283 -27.8228 -29.732 -31.3677 -31.9263 -31.5719 -30.3463 -28.102 -25.9171 -23.8483 -22.3261 -21.68 -21.2943 -21.4494 -21.8489 -22.0846 -22.608 -23.007 -23.2338 -23.1403 -23.1837 -23.0648 -23.0259 -22.7505 -22.9081 -23.0176 -22.9893 -23.2819 -23.5091 -23.7765 -23.9134 -24.1812 -24.0989 -23.9187 -23.8336 -23.5799 -23.8151 -24.0041 -24.7038 -26.0268 -27.8831 -30.4826 -33.562 -36.0886 -37.5049 -37.5815 -35.8679 -33.3141 -30.1218 -26.816 -24.2562 -22.6257 -22.1096 -21.6274 -21.5535 -21.4793 -21.6593 -22.017 -22.9796 -24.4648 -26.4459 -28.8302 -31.2299 -33.4399 -35.1552 -35.8341 -35.214 -33.7432 -31.6713 -29.3472 -26.9537 -25.1092 -24.2187 -23.8319 -23.5331 -23.5249 -23.6516 -23.7785 -23.7963 -23.8958 -23.9159 -24.3854 -24.7009 -25.0838 -25.3484 -25.4251 -25.1872 -24.463 -23.6795 -23.0651 -22.2067 -21.7442 -21.613 -21.4387 -21.729 -21.6316 -21.9893 -22.3791 -22.5701 -22.6584 -22.8774 -22.7518 -22.8251 -22.6917 -22.4455 -22.355 -22.5476 -22.6129 -22.7377 -23.0973 -23.0616 -23.1796 -23.0553 -22.9499 -23.0691 -22.8649 -22.8406 -23.0655 -23.3493 -24.0728 -25.3382 -27.4959 -30.0381 -32.6679 -35.0179 -36.5082 -36.527 -35.3126 -33.328 -30.5993 -28.5699 -26.7168 -25.505 -24.7267 -24.3631 -23.9437 -24.0692 -23.9255 -24.1175 -24.1467 -24.4522 -24.6397 -24.8738 -24.8675 -25.0203 -25.1611 -25.4667 -25.4381 -25.5856 -25.5736 -25.5434 -25.4977 -25.2478 -25.1182 -25.2998 -25.3286 -25.1945 -25.4953 -25.6153 -25.8425 -26.9023 -28.2999 -30.7219 -34.2091 -38.2032 -41.5198 -43.1593 -42.7515 -40.5914 -37.1568 -32.9113 -29.5851 -27.2548 -26.0113 -25.3583 -25.1034 -25.3862 -25.5728 -25.76 -26.1986 -26.726 -27.1656 -27.7919 -28.299 -28.5319 -28.7398 -28.9064 
-29.0147 -29.0387 -29.2369 -29.2617 -29.4134 -29.7861 -30.3193 -30.9963 -31.7453 -32.4319 -33.2718 -34.2449 -34.7511 -35.1009 -35.5035 -35.8031 -35.85 -35.7635 -36.2705 -36.4652 -36.7235 -37.122 -37.4521 -38.2079 -39.05 -40.5626 -43.0648 -45.9249 -50.1502 -54.257 -58.3027 -61.8517 -65.0837 -67.1987 -68.6946 -69.6178 -70.6507 -71.5864 -72.5023 -73.5392 -74.6852 -74.4268 -73.9682 -73.1884 -71.9524 -71.2119 -71.1089 -71.2447 -72.1851 -73.3176 -74.7946 -75.6588 -76.7723 -78.8739 -85.6579", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0004 0.0 0.0001 0.0004 0.0003 0.0003 0.0004 0.0003 0.0006 0.0006 0.0005 0.0006 0.0008 0.001 0.0012 0.0015 0.0019 0.0019 0.002 0.002 0.002 0.0023 0.0024 0.0025 0.0027 0.0029 0.0028 0.0029 0.0028 0.0028 0.0025 0.0024 0.0023 0.0021 0.002 0.0013 0.0013 0.0011 0.0009 0.0007 0.0006 0.0006 0.0022 0.0045 0.0074 0.0099 0.012 0.0129 0.0129 0.0121 0.0097 0.0074 0.0043 0.0023 0.0022 0.0023 0.0022 0.0022 0.0019 0.0017 0.0012 0.0012 0.0005 0.0007 0.0006 0.0005 0.0004 0.0002 0.0002 0.0003 0.0008 0.0013 0.0013 0.0012 0.0009 0.0009 0.0009 0.0008 0.0009 0.0007 0.0006 0.0007 0.0007 0.0009 0.0014 0.0048 0.0101 0.0135 0.0158 0.0175 0.0172 0.0153 0.0128 0.009 0.0038 0.0033 0.0031 0.0028 0.0024 0.0024 0.0021 0.0018 0.0017 0.0016 0.0018 0.0018 0.0017 0.0017 0.0014 0.0013 0.0014 0.0012 0.0013 0.0018 0.0016 0.0018 0.0017 0.0014 0.0016 0.0015 0.0014 0.0013 0.0012 0.0013 0.0017 0.0016 0.0015 0.0028 0.0052 0.0062 0.0069 0.0068 0.0052 0.0043 0.003 0.0019 0.0021 0.0019 0.0014 0.0016 0.0015 0.0015 0.0015 0.0014 0.0013 0.0025 0.0056 0.0086 0.0114 0.014 0.0157 0.0158 0.0152 0.0129 0.0098 0.0068 0.0035 0.0024 0.0021 0.0017 0.0018 0.0016 0.0016 0.0014 0.0011 0.0009 0.0009 0.0008 0.0007 0.0004 0.0007 0.001 0.001 0.001 0.0012 0.0013 0.0013 0.0013 0.0016 0.0015 0.0018 0.0016 0.0012 0.0014 0.0014 0.0015 0.0015 0.0015 0.0015 0.0015 0.0013 0.0014 0.0015 0.0016 0.0017 0.0019 0.0017 0.0016 0.0019 0.0017 0.0018 0.0015 0.0013 0.0011 0.0011 0.0012 0.0015 0.0024 0.0031 0.0057 0.0101 0.0127 0.0133 0.0126 0.0094 0.0048 0.0023 0.0012 0.0011 0.0012 0.0013 0.001 0.001 0.0009 0.0009 0.0008 0.0008 0.0008 0.0008 0.0006 0.0006 0.0006 0.0006 0.0006 0.0007 0.0005 0.0007 0.0005 0.0005 0.0004 0.0003 0.0004 0.0004 0.0003 0.0004 0.0004 0.0005 0.0004 0.0003 0.0006 0.0011 0.001 0.0009 0.0022 0.0033 0.0034 0.0033 0.0029 0.0019 0.0019 0.0019 0.0017 0.0018 0.0016 0.0017 0.0016 0.0015 0.0013 0.0013 0.0009 0.0013 0.0009 0.0012 0.0012 0.0014 0.0011 0.0009 0.001 0.001 0.0012 0.0011 0.001 0.001 0.0008 0.0007 0.0006 0.0004 0.0004 0.0003 0.0004 0.0003 0.0004 0.0005 0.0005 0.0007 0.0003 0.0004 0.0005 0.001 0.0015 0.0016 0.0016 0.0011 0.0009 0.0009 0.0005 0.0004 0.0005 0.0007 0.0005 0.0008 0.001 0.0009 0.001 0.0018 0.002 0.0023 0.0011 0.1417 0.0015 0.0023 0.0011 0.0005 0.0006 0.0005 0.0004 0.0", + "breathiness": "-92.9279 -96.0 -94.5353 -90.5159 -84.7393 -78.7702 -73.511 -68.9892 -65.4334 -62.6755 -60.1789 -58.1462 -56.1338 -54.1671 -52.2832 -50.7421 -49.3583 -48.4691 -47.639 -46.8312 -46.014 -45.6843 -45.6123 -45.3278 -45.3346 -45.2948 -45.4379 -45.7025 -45.9093 -46.1885 -46.9005 -47.6146 -48.7151 -50.3644 -52.4826 -54.6585 -56.7936 -58.0911 -58.1277 -56.61 -54.0495 -50.4322 -46.4512 -42.6178 -39.5294 -37.4142 -36.4485 -36.1741 -37.03 -38.9346 -41.3845 -44.2864 -46.9251 -49.2328 -51.015 -52.0694 -53.3781 -54.5355 -56.5028 -58.7868 -61.1578 -63.2914 -64.6592 -65.6658 -66.1447 -65.6448 -64.7229 -63.551 -62.1196 -60.6125 -59.6015 -58.7128 -58.4363 -58.7086 -59.2789 -60.0849 -60.7834 -61.4494 -61.7286 -61.9814 -62.0615 
-61.4651 -59.4901 -56.3487 -52.4737 -48.4101 -44.3275 -40.7692 -38.0882 -37.0374 -36.9896 -38.3433 -40.3514 -43.1283 -46.1735 -49.0062 -51.2868 -53.0777 -53.9255 -54.1793 -54.3366 -54.7198 -54.9795 -54.9812 -54.8648 -54.797 -54.701 -54.7048 -54.9065 -55.1702 -55.4077 -55.8285 -56.0273 -56.4662 -56.6698 -57.055 -57.3444 -57.7293 -58.1646 -58.5001 -58.7691 -58.9279 -59.0834 -58.9803 -57.731 -56.1291 -53.7918 -51.5238 -49.0399 -47.2649 -46.4053 -47.0134 -48.3966 -49.9776 -51.9001 -53.6729 -55.0935 -56.1015 -56.8957 -57.5107 -58.2387 -58.8915 -58.7855 -57.7386 -55.3747 -52.0507 -48.0898 -44.2447 -41.1681 -38.7247 -38.2562 -38.4407 -39.7642 -41.7899 -44.7061 -47.5376 -50.4365 -53.1811 -55.1433 -56.4813 -56.8934 -57.2318 -57.457 -57.916 -58.6871 -59.7205 -61.049 -62.4169 -63.7158 -64.6119 -64.9328 -64.4442 -63.2894 -61.6551 -59.8818 -58.2565 -57.0501 -56.5719 -56.1706 -55.9769 -56.0503 -55.9982 -56.0739 -55.8165 -55.7607 -55.5452 -55.0249 -54.8858 -54.7558 -54.9169 -55.1937 -55.4759 -55.8114 -56.2676 -56.8397 -57.2494 -57.4259 -57.8285 -58.3504 -58.5143 -58.8161 -59.3579 -59.8038 -59.434 -58.4598 -56.4038 -53.63 -50.279 -46.8859 -43.9026 -41.6866 -40.6132 -40.7727 -42.0243 -44.4489 -47.1577 -50.1772 -52.8892 -54.9828 -56.3278 -56.9938 -57.5264 -58.0551 -58.6705 -59.3954 -60.0704 -60.7383 -61.5133 -62.1663 -62.9735 -63.8926 -64.6639 -65.3472 -65.663 -65.8545 -66.2001 -66.461 -66.6914 -67.1156 -67.7193 -68.36 -69.0208 -69.5589 -69.9258 -70.4466 -70.0625 -69.197 -66.5167 -63.9414 -61.243 -58.5694 -56.3471 -55.1136 -54.5463 -54.1495 -54.3555 -54.6568 -55.0118 -55.6231 -56.3884 -56.5988 -56.7512 -57.2014 -57.5172 -57.8299 -58.428 -58.9368 -59.482 -59.8539 -59.7665 -59.5831 -59.2461 -58.9696 -59.1909 -59.3876 -59.7677 -60.3903 -60.965 -61.794 -62.1755 -62.5919 -62.8763 -63.1571 -63.7132 -64.2127 -64.9055 -65.9878 -66.9097 -67.9212 -68.5394 -69.2739 -69.5486 -69.4984 -69.9651 -70.136 -69.934 -69.1344 -67.9339 -66.159 -64.7766 -64.3462 -65.1982 -67.2393 -69.8583 -72.3681 -74.1677 -75.0337 -74.9738 -74.5411 -74.3141 -75.0042 -76.5418 -78.054 -79.5664 -80.399 -80.6381 -80.3189 -79.9258 -79.5699 -79.515 -79.6878 -79.5307 -78.9624 -77.8953 -76.569 -74.8216 -74.2901", "breathiness_timestep": "0.011609977324263039" }, { @@ -314,9 +314,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0", "f0_seq": "123.7 124.0 123.8 123.5 123.8 123.8 123.7 123.5 123.5 123.4 123.5 123.3 123.4 123.2 123.3 123.2 123.2 123.0 123.1 123.2 123.3 123.2 123.2 123.2 123.2 123.2 123.4 123.5 123.3 123.6 123.2 123.6 123.7 124.1 123.7 127.3 130.9 136.0 143.8 150.8 154.8 158.3 161.2 163.7 164.9 166.3 165.4 163.9 162.4 162.2 159.1 156.0 157.9 157.2 162.1 161.3 162.6 163.0 164.3 164.6 164.9 164.7 164.8 165.1 165.5 165.5 165.0 165.1 163.8 164.5 165.1 168.0 176.0 180.1 182.5 183.7 185.0 185.1 185.2 184.8 184.8 185.0 184.9 184.5 184.8 184.6 183.8 181.4 174.4 166.6 174.4 182.5 191.5 201.4 210.7 204.1 201.0 199.6 197.5 197.1 196.4 195.7 195.2 195.5 196.7 196.7 195.4 191.9 185.0 179.3 183.7 186.7 191.4 196.6 200.3 192.7 187.8 186.0 185.2 185.0 183.6 182.9 183.9 184.3 185.5 186.5 185.5 181.7 175.7 173.8 174.5 174.8 175.8 175.3 177.0 175.1 170.0 165.6 166.0 165.2 165.4 165.5 165.7 165.7 165.4 164.5 163.3 161.0 156.4 151.2 150.7 149.1 148.4 148.3 147.8 150.1 146.5 144.6 145.1 145.5 146.0 145.7 145.7 145.7 146.2 146.6 147.0 147.5 147.1 146.7 147.3 147.3 147.7 148.0 148.6 148.6 148.5 148.1 147.8 147.6 147.6 148.1 148.2 148.1 146.7 144.4 138.9 136.8 140.0 143.0 146.2 150.1 154.2 158.0 162.4 166.1 169.8 168.3 167.5 166.2 165.6 164.3 162.7 160.7 159.4 157.8 157.1 156.1 
157.0 158.2 160.3 162.6 166.0 168.2 170.1 171.1 170.6 169.5 167.6 165.2 163.6 162.2 161.0 161.1 161.2 162.7 164.7 167.4 170.0 172.3 173.3 172.7 170.7 167.1 163.3 159.3 156.2 154.8 155.7 158.4 161.7 165.4 168.7 172.0 174.9 175.5 174.3 170.8 164.6 159.1 155.6 153.4 153.0 155.6 158.2 162.1 166.0 168.6 171.0 172.9 172.7 170.6 165.8 161.2 156.6 154.3 154.0 154.5 156.4 156.9 156.7 161.2 167.4 172.6 174.8 174.9 173.8 170.0 165.6 160.4 158.3 156.7 157.5 157.0 157.1 156.0 155.4", "f0_timestep": "0.011609977324263039", - "energy": "0.0009 0.001 0.0015 0.0018 0.0026 0.0028 0.0033 0.0038 0.0045 0.0053 0.005 0.0059 0.0061 0.0059 0.0062 0.0061 0.0053 0.0052 0.0051 0.0044 0.0043 0.0033 0.003 0.002 0.0015 0.0022 0.0024 0.0048 0.0069 0.0093 0.0108 0.0169 0.0281 0.0375 0.0461 0.0514 0.0528 0.0544 0.0542 0.0532 0.0526 0.0527 0.0534 0.0539 0.0552 0.0561 0.0572 0.0569 0.0557 0.0515 0.0445 0.0398 0.0361 0.0405 0.0484 0.0555 0.0591 0.0603 0.0643 0.0632 0.0641 0.0654 0.0646 0.0653 0.0652 0.0665 0.0658 0.0625 0.0572 0.0488 0.041 0.0422 0.0512 0.0597 0.0654 0.0684 0.068 0.0669 0.0671 0.0653 0.0652 0.0649 0.0638 0.0636 0.0626 0.061 0.0575 0.0499 0.0391 0.027 0.0167 0.011 0.0102 0.0228 0.0442 0.0566 0.0667 0.0719 0.0703 0.0684 0.0653 0.0632 0.0612 0.0583 0.057 0.0557 0.0517 0.0455 0.0377 0.0257 0.0162 0.0068 0.0036 0.0166 0.0374 0.0502 0.061 0.0659 0.064 0.0617 0.0574 0.0562 0.0552 0.0535 0.0541 0.0517 0.0465 0.039 0.028 0.0172 0.0111 0.0102 0.0101 0.0084 0.0237 0.0374 0.0487 0.0582 0.0617 0.0634 0.0634 0.0637 0.0646 0.0636 0.0629 0.0606 0.0546 0.0452 0.0338 0.0206 0.0108 0.0081 0.0075 0.0078 0.0165 0.0328 0.0434 0.0513 0.056 0.055 0.0556 0.0557 0.0549 0.0554 0.0528 0.0525 0.0533 0.0535 0.0552 0.0552 0.0571 0.0569 0.0574 0.0586 0.0581 0.0585 0.0588 0.0582 0.0594 0.059 0.0594 0.0583 0.0556 0.0525 0.0457 0.0367 0.0279 0.0176 0.0096 0.0054 0.0046 0.0049 0.0043 0.0045 0.0055 0.0228 0.0375 0.0487 0.0592 0.0637 0.0636 0.0651 0.0643 0.0642 0.0643 0.0622 0.061 0.058 0.0565 0.0547 0.0541 0.0544 0.0531 0.0545 0.056 0.0566 0.0586 0.0585 0.0583 0.0579 0.0565 0.056 0.0531 0.0509 0.0492 0.0486 0.0488 0.0486 0.0501 0.0508 0.0514 0.051 0.0503 0.0489 0.0471 0.0451 0.0425 0.041 0.0382 0.0372 0.0372 0.0376 0.0392 0.0418 0.0432 0.0428 0.0422 0.0395 0.0357 0.0337 0.0306 0.0284 0.027 0.0256 0.0246 0.0244 0.0245 0.0243 0.0245 0.0246 0.0242 0.0233 0.0221 0.0202 0.0179 0.0168 0.0163 0.0156 0.0158 0.0157 0.0151 0.0142 0.0126 0.0122 0.0114 0.0119 0.0115 0.0108 0.0128 0.0073 0.0036 0.002 0.0012 0.0009 0.0015 0.0007 0.0005", + "energy": "-65.3696 -60.4417 -58.081 -56.0796 -54.2833 -53.1243 -51.9912 -50.9192 -50.0225 -48.7493 -47.699 -46.7565 -46.3044 -45.9734 -46.1649 -46.5033 -47.246 -48.5306 -49.8078 -51.2256 -52.7499 -54.5727 -55.7045 -56.1027 -55.6121 -54.331 -51.9249 -48.9045 -45.2837 -41.7533 -38.0742 -34.8365 -32.2163 -29.8955 -28.2061 -27.0054 -26.3076 -26.0197 -25.7624 -25.5255 -24.993 -24.8438 -24.4179 -24.2005 -24.276 -24.2679 -24.7294 -25.5039 -26.6528 -27.5673 -28.7515 -28.9156 -28.8245 -28.1784 -27.1132 -26.0805 -25.2743 -24.8833 -24.6933 -24.6615 -24.8671 -24.9219 -24.8813 -24.844 -24.7153 -24.9961 -25.1874 -25.6161 -25.7562 -25.8505 -25.8921 -25.5156 -25.4403 -25.048 -24.7298 -24.6702 -24.7833 -24.7322 -24.8378 -25.0216 -24.8145 -24.594 -24.938 -25.0661 -25.502 -26.5285 -28.0245 -29.8775 -32.2952 -34.3134 -35.4214 -35.4246 -34.0379 -31.8441 -29.1871 -26.7086 -25.2116 -24.7051 -24.2977 -24.6462 -24.8316 -24.8775 -25.5158 -25.5405 -26.0867 -27.0291 -28.3454 -30.4942 -33.137 -35.3243 -36.6124 -36.3977 -35.098 -32.8427 -29.9882 
-27.4304 -25.78 -25.0504 -24.9732 -25.1429 -25.4891 -25.7606 -25.9467 -26.2133 -26.7003 -27.3653 -28.7263 -30.3233 -32.5251 -34.7201 -36.5257 -37.2024 -36.9323 -35.4538 -33.4634 -31.0603 -28.8619 -26.7983 -25.7655 -25.3922 -25.178 -24.988 -25.0 -25.3043 -25.8613 -26.8325 -28.3433 -30.553 -32.8965 -35.5263 -37.4811 -38.277 -37.826 -36.2024 -34.1403 -31.8052 -29.6522 -27.9104 -26.9968 -26.5928 -26.3621 -26.6575 -26.5111 -26.745 -26.994 -26.9114 -27.3487 -27.1398 -27.135 -27.1222 -27.3078 -27.0696 -26.8704 -26.9133 -26.8198 -26.7668 -26.6993 -26.7677 -26.9896 -26.906 -27.1056 -27.5359 -27.9338 -29.0499 -30.3771 -32.6627 -35.2761 -38.1146 -40.9012 -43.707 -45.6351 -46.5182 -45.8682 -43.662 -40.7127 -37.1377 -33.3124 -29.5077 -27.1522 -25.7014 -25.1477 -24.8428 -24.8119 -24.843 -25.1969 -25.5358 -25.8128 -26.1141 -26.365 -26.5525 -26.5591 -26.3811 -25.8423 -25.7196 -25.4845 -25.2341 -25.4033 -25.4589 -25.7382 -25.7215 -25.9175 -26.132 -26.397 -26.4317 -26.8506 -26.9685 -26.9303 -27.1206 -27.3438 -27.5683 -27.5631 -27.7726 -27.7398 -27.9218 -28.345 -28.4865 -28.7753 -29.1239 -29.5762 -29.5947 -29.9761 -30.0578 -30.1552 -30.1516 -29.9787 -30.0127 -30.0318 -30.1367 -30.5582 -31.1093 -31.6348 -32.1441 -32.9015 -33.276 -33.5283 -33.8706 -34.0622 -34.1937 -34.2432 -34.1303 -34.2577 -34.2249 -34.1449 -34.2285 -34.4346 -34.7283 -35.2903 -35.9572 -36.9875 -37.8271 -38.3466 -38.7485 -38.7742 -39.0935 -39.2486 -39.439 -40.2162 -41.3944 -43.5349 -46.4891 -50.8323 -55.9014 -60.7214 -65.0072 -67.5447 -70.0406 -67.4427", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0004 0.0006 0.0009 0.0013 0.0023 0.0027 0.0036 0.0046 0.0052 0.006 0.0066 0.0066 0.0069 0.0067 0.0064 0.0065 0.0064 0.0062 0.0058 0.005 0.0041 0.0029 0.0018 0.0012 0.0008 0.0012 0.0025 0.0046 0.0073 0.0103 0.012 0.0126 0.0128 0.0108 0.0085 0.0063 0.0029 0.0023 0.0022 0.0022 0.0018 0.0016 0.0016 0.0016 0.0017 0.0019 0.0019 0.0021 0.0018 0.0017 0.0014 0.0009 0.0012 0.0013 0.0015 0.0018 0.0017 0.0017 0.0016 0.0015 0.0015 0.0015 0.0014 0.0015 0.0014 0.0016 0.0016 0.0017 0.0016 0.0018 0.0016 0.0016 0.0018 0.0015 0.0019 0.0021 0.0018 0.0018 0.0018 0.0014 0.0016 0.0013 0.0015 0.0014 0.0016 0.0018 0.0017 0.0017 0.0028 0.0051 0.0078 0.0101 0.0108 0.0102 0.0086 0.0051 0.0028 0.0023 0.0015 0.0015 0.0013 0.001 0.0007 0.0007 0.0007 0.0005 0.0006 0.0004 0.0005 0.0011 0.0018 0.0025 0.0032 0.0034 0.0029 0.0021 0.0013 0.0011 0.0011 0.001 0.0009 0.0008 0.0006 0.0003 0.0002 0.0002 0.0009 0.0023 0.0056 0.0079 0.01 0.0107 0.0099 0.0078 0.0054 0.0032 0.0015 0.0008 0.0006 0.0004 0.0005 0.0006 0.0006 0.0008 0.0006 0.0007 0.0011 0.0021 0.0036 0.006 0.0076 0.0088 0.0089 0.0079 0.0064 0.0042 0.0026 0.0017 0.001 0.0006 0.0005 0.0005 0.0005 0.0005 0.0006 0.0006 0.0006 0.0003 0.0001 0.0003 0.0005 0.0001 0.0003 0.0004 0.0005 0.0005 0.0005 0.0004 0.0005 0.0004 0.0004 0.0003 0.0003 0.0002 0.0002 0.0 0.0009 0.0019 0.0026 0.0033 0.0039 0.0042 0.0043 0.0047 0.0043 0.0034 0.0026 0.0014 0.0008 0.0013 0.0014 0.0016 0.0015 0.0015 0.0013 0.0015 0.0013 0.0015 0.0016 0.0016 0.0013 0.0017 0.0014 0.0014 0.0016 0.0015 0.0015 0.0016 0.0015 0.0017 0.0014 0.0015 0.0013 0.0014 0.0016 0.0016 0.0018 0.0017 0.0017 0.0017 0.0017 0.0019 0.0021 0.0016 0.0016 0.0016 0.0015 0.0015 0.0017 0.0012 0.0013 0.0014 0.0013 0.0013 0.0013 0.0013 0.0015 0.0014 0.0014 0.0011 0.0012 0.0013 0.0009 0.001 0.0009 0.0008 0.0008 0.0008 0.0009 0.0009 0.0008 0.0009 0.0008 0.0008 0.0008 0.0007 0.0005 0.0004 0.0005 0.0003 0.0003 0.0004 0.0004 0.0004 0.0005 0.0004 0.0002 0.0006 0.0031 0.0028 0.0029 0.0026 
0.0018 0.0006 0.0005 0.0004 0.0", + "breathiness": "-71.7401 -65.6534 -61.8305 -58.6497 -56.3412 -54.482 -53.1664 -51.7437 -50.1797 -48.7255 -47.4327 -46.5417 -46.1668 -45.8958 -45.7783 -45.7907 -46.191 -47.1965 -48.3996 -50.331 -52.749 -55.1291 -57.4461 -58.8591 -58.9702 -57.5584 -54.852 -51.6978 -48.0569 -45.1951 -43.883 -43.6924 -44.2059 -45.7938 -47.472 -49.4316 -50.8994 -52.4163 -52.8074 -53.0902 -53.3528 -53.599 -53.7829 -54.0995 -54.3778 -55.0274 -55.7719 -56.7786 -57.5112 -58.1407 -58.5411 -58.6507 -58.4241 -57.8816 -57.4889 -56.858 -56.8024 -56.7638 -56.6012 -56.5151 -56.3802 -56.1239 -56.022 -55.746 -55.6744 -55.2947 -54.9368 -54.6375 -54.3437 -54.2601 -54.3093 -54.5226 -54.9368 -55.4579 -55.8484 -56.2054 -56.7447 -57.2002 -57.5278 -57.7514 -58.0546 -58.2911 -58.3462 -58.4393 -58.3074 -57.4997 -56.1156 -53.919 -51.0268 -48.0597 -45.7084 -44.3891 -44.1234 -45.5328 -47.5814 -50.0446 -52.5336 -54.5439 -56.4511 -57.9119 -59.4496 -60.8362 -62.2605 -64.0357 -65.5853 -66.1745 -65.5146 -63.4918 -60.9293 -57.9753 -54.9748 -52.9442 -52.2491 -53.3015 -54.8638 -56.9851 -59.0176 -61.0855 -62.6689 -63.625 -64.8506 -65.9171 -66.9872 -67.4859 -66.5291 -63.8349 -59.9592 -55.3693 -50.5484 -46.3131 -43.3736 -42.4946 -43.632 -46.2536 -49.5229 -53.1476 -56.5098 -59.6496 -61.9039 -63.3676 -63.6988 -63.7284 -63.6185 -63.0476 -61.7861 -59.858 -57.3858 -54.3169 -51.1769 -48.0438 -45.457 -43.7127 -43.2891 -43.9811 -45.7076 -48.2101 -50.8372 -53.5214 -55.8727 -57.9296 -59.083 -60.112 -61.1435 -62.3431 -63.5251 -64.7729 -66.3391 -67.636 -68.9865 -69.9875 -70.4726 -71.0221 -70.8981 -70.8087 -70.7722 -70.716 -70.565 -70.5452 -70.3337 -70.0315 -69.8648 -69.4223 -68.557 -66.6756 -64.1788 -60.726 -56.8203 -53.2265 -50.0045 -48.0818 -47.7965 -48.2547 -49.3248 -50.7037 -52.5236 -54.4954 -56.2413 -57.7054 -58.5991 -59.2946 -59.2617 -59.2511 -58.9291 -58.6962 -58.3463 -57.9751 -57.6021 -57.0873 -56.6077 -56.0102 -55.754 -55.5663 -55.4858 -55.5234 -55.8042 -55.7922 -55.793 -55.7825 -55.7807 -55.9427 -55.876 -56.1273 -56.3658 -56.7958 -57.2729 -57.7253 -57.9981 -58.4489 -58.7652 -58.5676 -58.4751 -58.3022 -58.0108 -57.9343 -58.0253 -58.4828 -58.8518 -59.4961 -60.113 -60.3827 -60.587 -60.8426 -60.9907 -60.9679 -60.685 -60.8002 -60.8083 -60.7844 -61.254 -61.8946 -62.95 -64.4322 -65.7897 -66.9582 -68.1739 -68.7166 -68.9226 -68.7015 -68.2914 -67.4931 -66.7914 -66.2666 -65.9021 -65.9485 -66.034 -66.7465 -67.7548 -68.5372 -69.5835 -70.4122 -71.082 -71.7645 -72.0596 -72.6835 -73.1541 -73.0867 -72.4507 -70.7924 -68.3689 -65.9618 -64.3982 -64.4468 -66.543 -70.0909 -74.0018 -76.8143 -76.405", "breathiness_timestep": "0.011609977324263039" }, { @@ -330,9 +330,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "184.9 186.0 186.0 185.9 186.0 185.8 186.2 186.5 186.4 186.7 186.1 187.0 186.5 186.9 186.8 186.5 186.5 186.5 186.6 186.6 186.4 186.8 186.8 186.8 186.6 186.9 186.7 187.0 187.1 187.0 187.5 187.5 187.3 187.4 187.5 187.1 187.2 187.3 187.6 187.4 187.4 187.3 187.1 187.1 187.2 187.0 186.9 187.3 186.9 187.6 188.9 190.3 191.9 195.3 199.8 203.9 210.1 215.2 227.3 235.3 241.6 245.8 247.8 248.2 248.6 247.6 244.6 241.2 237.2 233.8 230.4 228.0 226.1 221.4 218.7 218.5 220.0 220.7 221.4 221.9 221.7 220.9 219.5 218.9 219.0 218.6 218.2 217.9 217.8 218.9 218.8 218.9 220.6 220.8 221.8 221.8 221.9 220.7 219.2 217.5 217.0 216.6 217.4 219.7 224.2 232.1 239.2 244.6 249.1 249.7 249.6 249.2 247.7 246.8 245.6 245.3 247.5 248.5 248.7 248.0 249.1 249.1 248.2 245.0 240.9 233.9 234.1 240.5 245.9 252.4 258.8 266.7 274.5 285.0 272.1 
267.2 265.5 262.5 256.2 252.3 249.5 248.0 246.8 247.8 249.6 250.8 251.0 249.2 245.0 238.1 226.6 216.8 214.4 215.5 218.9 218.7 218.5 218.9 220.0 221.1 221.4 222.1 222.1 222.0 222.3 222.2 221.1 217.9 213.3 213.0 223.6 234.4 246.0 257.6 271.7 266.3 259.7 257.3 254.3 251.5 248.7 245.3 242.2 237.2 234.2 233.0 236.4 241.6 247.3 251.6 255.0 256.4 255.8 254.3 251.6 248.3 247.6 246.7 248.5 248.6 249.0 249.6 250.4 250.8 249.5 246.2 250.3 254.2 261.3 267.8 274.8 282.0 290.2 300.6 310.7 325.2 325.8 316.7 312.5 310.6 306.1 301.8 296.6 293.1 290.3 289.8 290.2 291.6 292.1 293.0 292.5 292.9 288.3 283.8 275.4 263.7 255.1 248.7 243.8 241.8 242.2 243.6 245.4 248.9 249.9 250.6 250.0 248.9 248.0 247.9 248.2 247.2 245.2 245.6 247.2 247.7 248.3 249.6 250.2 249.4 249.5 249.7 248.9 246.8 244.2 243.2 243.4 243.5 244.1 246.0 246.9 249.5 250.5 252.0 253.1 253.6 253.1 251.3 247.4 243.8 240.4 237.2 236.3 237.4 240.3 243.8 246.3 249.7 254.5 258.6 261.8 263.3 261.2 256.6 251.8 244.3 242.7 240.6 239.8 243.3 244.9 246.9 251.6 255.1 257.7 260.1 261.7 259.5 255.1 249.0 238.9 241.1 242.9 247.0 251.7 256.5 260.8 264.9 268.2 264.2 256.5 253.6 251.3 247.9 245.6 244.4 243.6 244.3 245.0 245.8 247.7 249.4 249.1 245.3 239.1 233.6 227.9 223.2 219.8 219.2 219.5 220.0 220.7 220.1 220.6 219.7 219.1 219.1 218.3 216.8 216.7 218.0 215.7 213.2 208.1 203.2 196.3 192.8 189.9 190.1 192.2 193.9 195.6 196.1 196.3 196.7 196.5 195.3 193.1 194.2 194.9 192.0 184.0 186.3 190.8 194.4 198.6 202.4 207.0 209.3 211.0 203.8 199.3 197.6 196.0 195.8 195.1 194.9 195.6 197.5 198.6 196.9 194.0 189.4 183.8 178.4 174.5 172.2 170.5 170.4 168.6 164.3 162.8 162.7 164.2 164.8 164.9 165.2 165.5 165.4 165.8 165.8 164.5 165.1 165.8 165.8 167.9 170.2 173.2 177.8 183.8 190.6 193.5 196.3 197.8 197.7 197.2 195.8 194.8 195.5 196.4 196.3 195.1 193.8 188.6 190.0 189.2 195.6 206.0 215.6 224.9 231.9 226.1 224.7 223.6 221.7 220.8 220.2 218.8 219.0 219.7 219.5 218.7 214.5 210.1 203.8 203.0 202.3 202.4 202.4 201.7 203.6 195.1 193.8 191.5 191.2 191.8 193.3 195.1 195.8 196.2 197.3 198.5 199.3 200.7 201.5 200.1 198.8 198.2 199.9 201.7 204.7 210.1 214.1 217.7 219.6 220.9 221.0 220.7 220.6 220.4 220.1 220.8 221.3 221.8 222.1 222.6 222.5 221.8 222.9 226.4 234.5 244.6 252.4 257.5 258.2 257.4 254.5 250.0 245.5 242.2 239.7 240.0 242.6 246.9 251.8 255.5 257.5 258.9 257.8 254.5 250.3 245.3 241.1 236.9 236.0 236.1 239.9 244.5 248.5 250.0 251.9 253.3 253.9 254.1 252.6 250.6 248.2 244.7 241.3 238.6 237.2 239.1 240.9 242.9 245.5 248.4 252.3 255.8 257.0 256.9 255.0 252.2 248.0 243.8 239.8 237.2 235.6 236.8 239.7 244.4 248.7 253.1 256.4 258.7 259.0 256.7 253.9 249.6 245.4 240.8 237.4 235.8 238.7 242.2 245.4 248.9 255.9 260.5 260.2 257.4 251.0 248.7 247.8 245.8 240.2 234.1 232.8 232.9 232.8 233.4 233.6 234.3 235.0 235.4 235.2 234.5 234.0 233.6 233.4 233.2 233.2 234.2 234.0 234.2 233.9 234.2 232.9 233.6 234.4", "f0_timestep": "0.011609977324263039", - "energy": "0.0001 0.0004 0.0009 0.0007 0.0007 0.0006 0.0004 0.0003 0.0004 0.0008 0.0003 0.0006 0.0006 0.0007 0.0007 0.0009 0.001 0.0007 0.0017 0.0016 0.0009 0.0015 0.0017 0.0016 0.0014 0.0021 0.0018 0.0017 0.0024 0.0021 0.002 0.0025 0.0026 0.0028 0.0026 0.0023 0.0026 0.0017 0.0019 0.0016 0.0013 0.0 0.0007 0.0002 0.0007 0.0007 0.0021 0.0083 0.0229 0.0365 0.0467 0.0564 0.0617 0.0649 0.0662 0.0635 0.0608 0.0564 0.053 0.0534 0.0555 0.0599 0.0631 0.0672 0.07 0.0737 0.0761 0.0766 0.0762 0.0743 0.0738 0.0756 0.0791 0.0835 0.0872 0.0881 0.0862 0.0853 0.085 0.0836 0.0834 0.0823 0.0807 0.079 0.0756 0.0741 0.0731 0.0742 0.0743 0.0723 0.0705 0.0714 0.0806 0.0933 
0.1019 0.108 0.1073 0.1006 0.0937 0.0844 0.0758 0.0687 0.0646 0.0593 0.0573 0.0582 0.0582 0.0604 0.0621 0.0628 0.0645 0.0655 0.0655 0.0653 0.0648 0.0654 0.0678 0.0682 0.0673 0.0676 0.0669 0.0667 0.0672 0.066 0.0603 0.0509 0.0389 0.0251 0.016 0.0163 0.0149 0.0127 0.0302 0.0478 0.0616 0.0733 0.0761 0.0739 0.0702 0.0657 0.063 0.0607 0.0593 0.0589 0.0599 0.0605 0.0627 0.0636 0.0628 0.0553 0.0454 0.0338 0.0189 0.0302 0.044 0.058 0.0697 0.0746 0.0748 0.0738 0.0726 0.0728 0.0737 0.0734 0.0734 0.0724 0.0689 0.0605 0.0494 0.0356 0.0235 0.0181 0.0173 0.0158 0.0393 0.0569 0.0719 0.0864 0.0913 0.0956 0.0957 0.0939 0.0907 0.0858 0.0821 0.0803 0.0785 0.0793 0.0803 0.0818 0.0843 0.0856 0.0881 0.0888 0.0854 0.0841 0.0826 0.0801 0.0822 0.081 0.0795 0.0776 0.0733 0.0678 0.0603 0.0509 0.0393 0.028 0.0204 0.0195 0.0208 0.0188 0.0189 0.0158 0.0147 0.036 0.0507 0.064 0.0769 0.0819 0.0835 0.0855 0.0857 0.0849 0.0851 0.0853 0.0862 0.0853 0.0864 0.0866 0.0874 0.0904 0.0904 0.0936 0.0931 0.0941 0.0942 0.0916 0.0905 0.0867 0.0854 0.0852 0.0836 0.0843 0.0843 0.0841 0.0849 0.0838 0.0844 0.0836 0.0811 0.0795 0.079 0.0781 0.0816 0.0869 0.0925 0.0998 0.1028 0.1057 0.104 0.1029 0.1017 0.1002 0.1002 0.0986 0.0951 0.0948 0.0935 0.0925 0.0943 0.0954 0.0949 0.0996 0.1008 0.1025 0.1041 0.1006 0.1011 0.1004 0.1016 0.101 0.0988 0.0956 0.091 0.0902 0.0891 0.0897 0.0913 0.0928 0.0936 0.0925 0.0896 0.088 0.0881 0.0881 0.0841 0.0815 0.0736 0.0702 0.068 0.0684 0.0706 0.0717 0.0723 0.0725 0.0707 0.0698 0.0639 0.0537 0.0441 0.0296 0.0172 0.0107 0.0101 0.0111 0.0114 0.0242 0.0428 0.0586 0.0703 0.078 0.0783 0.0742 0.0682 0.0615 0.0575 0.0564 0.0555 0.0566 0.0586 0.0593 0.0621 0.0636 0.0634 0.0612 0.0577 0.0553 0.055 0.058 0.0618 0.0678 0.0713 0.0669 0.1703 0.0658 0.0747 0.075 0.0713 0.0691 0.0687 0.0673 0.0669 0.0666 0.0653 0.0648 0.0625 0.0634 0.0681 0.0721 0.0767 0.0808 0.0822 0.0849 0.0865 0.0859 0.0875 0.0847 0.0829 0.0826 0.0761 0.0676 0.0552 0.0386 0.0255 0.0162 0.0146 0.0137 0.0157 0.042 0.0565 0.0706 0.0784 0.0788 0.0779 0.0744 0.0723 0.0699 0.0673 0.0673 0.0648 0.0626 0.0599 0.055 0.0511 0.044 0.035 0.0272 0.019 0.0295 0.0415 0.0539 0.063 0.0681 0.0695 0.0708 0.0693 0.071 0.0711 0.0683 0.0681 0.0656 0.0646 0.0649 0.0618 0.0575 0.0519 0.0485 0.046 0.0496 0.0545 0.0585 0.0617 0.0635 0.0652 0.0669 0.0668 0.0662 0.0647 0.0637 0.0629 0.0618 0.0581 0.0503 0.0405 0.0274 0.0141 0.0084 0.0084 0.0188 0.0421 0.0583 0.0724 0.0806 0.0818 0.0805 0.0748 0.0723 0.0696 0.0667 0.0673 0.0664 0.0658 0.0635 0.0548 0.0444 0.032 0.0201 0.0153 0.0127 0.023 0.0425 0.0579 0.0713 0.0804 0.0817 0.0841 0.0819 0.0795 0.0775 0.0712 0.0685 0.0668 0.0661 0.0685 0.0695 0.0706 0.0719 0.0749 0.0771 0.0759 0.076 0.0815 0.0861 0.0939 0.0996 0.1002 0.1021 0.1035 0.1037 0.1057 0.1055 0.1041 0.102 0.0982 0.0936 0.0863 0.0767 0.0672 0.0591 0.0546 0.0527 0.055 0.0586 0.0634 0.0675 0.0722 0.075 0.0755 0.0748 0.0712 0.0679 0.0648 0.0636 0.0624 0.0639 0.0669 0.0692 0.0724 0.0731 0.071 0.0706 0.069 0.0686 0.0677 0.0677 0.0667 0.0667 0.0669 0.0669 0.0684 0.0677 0.069 0.0685 0.0692 0.0694 0.0689 0.0679 0.0663 0.0641 0.0619 0.0598 0.0593 0.0584 0.0585 0.0576 0.0573 0.0567 0.0562 0.0574 0.058 0.0582 0.0572 0.0558 0.053 0.0493 0.0461 0.0433 0.0413 0.0407 0.0408 0.0401 0.0387 0.0375 0.0356 0.0363 0.035 0.0352 0.0334 0.0313 0.0297 0.0275 0.0252 0.023 0.0208 0.0189 0.0183 0.0181 0.0177 0.0183 0.0188 0.0191 0.0189 0.0187 0.0173 0.0154 0.0118 0.0083 0.0045 0.0021 0.0005 0.0004 0.0007 0.0007 0.0004 0.0013 0.0009 0.0006 0.0009 0.0005 0.0007 0.0005 0.0003 0.0004 0.0 0.0 0.0007 0.0004 
0.0006 0.0", + "energy": "-76.4803 -74.1881 -71.9449 -69.7725 -68.0731 -65.8974 -64.108 -61.7049 -59.3436 -56.7672 -54.0908 -51.9865 -50.2804 -48.9903 -48.3801 -47.8988 -47.5232 -47.3454 -47.281 -47.1253 -47.0304 -46.9543 -47.1495 -47.1563 -47.1126 -47.3549 -47.4221 -47.6069 -48.1689 -48.3781 -49.0648 -49.5202 -50.1657 -50.883 -51.6702 -52.0739 -53.0044 -53.8405 -54.6326 -55.3228 -56.1229 -56.3964 -56.3624 -54.6938 -51.7983 -47.6572 -42.7577 -37.4548 -32.9558 -28.8818 -25.9969 -24.983 -24.7743 -24.4005 -24.6627 -24.8791 -24.9547 -24.9938 -25.0233 -24.979 -24.6522 -24.7093 -24.1299 -23.8499 -23.3681 -22.9971 -22.6691 -22.5467 -22.1732 -21.8149 -21.5914 -21.2087 -20.7313 -20.3434 -20.0235 -19.6846 -19.531 -19.6412 -19.7507 -19.7601 -19.8783 -19.6979 -20.0207 -20.0873 -20.4403 -20.7209 -20.8992 -21.545 -21.716 -21.9278 -21.7292 -21.1271 -20.5626 -20.2719 -19.7563 -19.5211 -19.5135 -19.72 -20.3437 -20.8593 -21.7062 -22.4718 -23.2958 -23.9644 -24.5355 -24.4708 -24.5362 -23.9118 -23.487 -22.865 -22.7874 -22.4138 -22.3944 -22.4643 -22.3717 -22.1103 -21.8761 -22.1798 -21.735 -21.7306 -21.5833 -21.8729 -22.5187 -23.6383 -25.4967 -27.4617 -29.9148 -32.1059 -33.4689 -33.7025 -32.6601 -30.4412 -27.8534 -25.0664 -22.8213 -20.9854 -20.4302 -20.2074 -20.2051 -20.4312 -20.4113 -20.8379 -20.7877 -21.1019 -21.2269 -21.4679 -21.6728 -22.575 -23.3292 -24.6015 -25.8857 -26.8433 -27.1245 -27.169 -26.2395 -24.945 -23.5481 -22.2126 -21.5969 -21.0673 -20.9705 -20.8153 -20.9743 -21.1327 -21.5478 -22.1359 -23.3113 -24.9239 -27.0372 -29.1008 -30.7629 -31.2494 -30.8672 -28.9856 -26.6138 -24.0412 -22.04 -20.5559 -20.0317 -19.809 -19.9851 -20.3103 -20.576 -21.3791 -21.4818 -21.9677 -21.8857 -21.6033 -21.5382 -21.2198 -20.841 -20.7643 -20.3861 -20.2474 -20.1821 -20.1847 -20.2871 -20.2149 -20.2928 -20.4522 -20.8652 -21.3222 -22.0802 -23.3418 -24.9267 -26.8709 -28.5725 -30.4175 -31.8761 -33.1745 -33.8073 -33.6453 -32.2171 -30.6885 -28.3136 -25.7754 -23.3279 -21.081 -19.7529 -19.0396 -18.6202 -18.4822 -18.6392 -18.6834 -18.6603 -18.7964 -18.6739 -18.6724 -18.4645 -18.4301 -18.4638 -18.4982 -18.537 -18.4764 -18.4334 -18.3356 -18.2803 -18.1033 -18.1371 -17.9306 -17.8419 -17.8384 -17.8646 -17.9726 -17.915 -17.9138 -17.8775 -18.1386 -18.3985 -18.6352 -18.9428 -19.1726 -19.324 -19.5563 -19.4053 -19.3697 -18.8496 -18.617 -18.4033 -18.3703 -18.224 -18.3203 -18.4476 -18.7866 -18.8123 -18.8551 -18.9144 -18.7922 -18.7558 -18.8131 -18.7339 -18.6672 -18.6933 -18.528 -18.3945 -18.4258 -18.5426 -18.5772 -18.7546 -18.7901 -18.8186 -18.743 -18.7993 -18.9322 -18.917 -19.15 -18.9699 -19.1364 -19.0904 -18.7602 -18.5996 -18.7627 -18.7542 -18.7103 -19.0671 -19.0397 -19.1341 -19.2884 -19.4387 -19.547 -19.5723 -19.3804 -19.5543 -19.376 -19.3259 -19.5443 -19.973 -20.9044 -22.9178 -25.3868 -28.0636 -30.6888 -33.2259 -34.799 -34.647 -33.2665 -31.2328 -28.391 -25.5966 -23.5244 -21.8741 -20.9703 -20.8434 -21.2805 -21.9999 -22.4968 -23.1702 -23.3677 -23.8069 -23.7784 -23.7074 -23.4813 -23.5779 -23.5823 -23.6 -23.64 -23.737 -23.5533 -23.3895 -23.1692 -22.8545 -22.5987 -22.5429 -22.413 -22.2649 -22.4692 -22.3146 -22.1592 -22.062 -21.828 -21.9074 -21.8632 -22.117 -22.3493 -22.4544 -22.6188 -22.5779 -22.7035 -22.474 -22.3258 -22.1479 -22.0248 -21.9836 -21.726 -21.7228 -21.7994 -21.9933 -22.1707 -22.4221 -23.1519 -24.2328 -25.5323 -27.353 -29.3506 -31.2187 -32.2485 -32.1045 -31.3439 -29.6054 -27.4999 -25.6585 -23.7323 -22.6122 -22.1936 -22.1089 -22.3038 -22.455 -22.7959 -22.8677 -23.1914 -23.2225 -23.4443 -23.989 -24.8338 -26.4939 -28.1768 -29.6186 -30.4915 
-30.1597 -29.3202 -27.8018 -25.885 -24.2312 -23.2074 -22.7059 -22.504 -22.5099 -22.4315 -22.4254 -22.4485 -22.4724 -22.7112 -22.792 -23.2939 -24.066 -24.6864 -25.2935 -25.6684 -25.588 -25.4886 -24.835 -24.4397 -24.1222 -23.6871 -23.4449 -23.3253 -23.3235 -23.0137 -22.9362 -23.0877 -23.4737 -24.0838 -25.4205 -27.8104 -30.2956 -32.9319 -34.1077 -34.1491 -32.6644 -30.3222 -26.932 -23.8386 -21.4279 -20.391 -19.7264 -19.9794 -20.0024 -20.2095 -20.4789 -20.6681 -20.9308 -21.1992 -21.625 -22.8026 -24.318 -26.3903 -28.5432 -30.2543 -30.7451 -30.2035 -28.6108 -26.4219 -24.2484 -22.2834 -21.0527 -20.536 -20.1404 -20.2941 -20.2676 -20.5593 -20.4646 -20.7533 -20.8644 -20.9387 -21.2692 -21.2075 -21.4959 -21.681 -21.9025 -21.8045 -21.7371 -21.551 -21.2036 -21.0698 -20.4847 -20.0346 -19.7845 -19.413 -19.1818 -19.0903 -18.8467 -18.8162 -19.0698 -19.1493 -19.6378 -19.9812 -20.9246 -21.8466 -23.0456 -23.699 -24.0048 -23.9894 -23.5209 -22.8548 -22.0444 -21.5965 -21.2899 -20.9737 -21.1697 -21.2792 -21.5315 -21.5084 -21.5421 -21.548 -21.3871 -20.9863 -20.883 -20.5553 -20.0815 -20.0597 -19.8196 -19.9315 -19.7401 -19.707 -19.4731 -19.5972 -19.4945 -19.4614 -19.4544 -19.5272 -19.3945 -19.3224 -19.2491 -18.9904 -19.0423 -19.1042 -19.3749 -19.4669 -19.6225 -19.971 -20.1303 -20.6502 -20.5865 -20.8774 -20.787 -20.8709 -20.5058 -20.4452 -20.1147 -20.029 -19.9594 -20.1875 -20.4082 -20.7499 -21.563 -22.1715 -23.0446 -23.6267 -24.1906 -24.5263 -24.6301 -24.4916 -24.3864 -24.2168 -24.3422 -24.4351 -24.7656 -25.2126 -25.8666 -26.415 -26.8021 -27.4191 -28.1313 -28.8364 -29.4004 -29.9155 -30.7535 -31.2915 -31.6659 -31.8283 -31.6425 -31.6587 -31.7789 -32.4339 -33.7483 -35.7378 -39.2081 -43.0256 -47.3689 -51.6667 -55.6772 -58.8853 -61.1114 -62.5067 -63.1716 -63.6207 -63.6917 -64.0444 -63.6742 -63.8713 -64.1035 -64.3445 -64.8273 -65.3167 -65.8874 -66.4361 -66.626 -65.5164 -64.2005 -60.0927", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0001 0.0002 0.0 0.0001 0.0003 0.0002 0.0003 0.0002 0.0003 0.0003 0.0006 0.0007 0.0011 0.0012 0.0015 0.0013 0.0013 0.0012 0.0014 0.0015 0.0016 0.0019 0.0019 0.0022 0.002 0.0019 0.0019 0.0018 0.0018 0.0021 0.0024 0.0024 0.0027 0.0025 0.0027 0.0025 0.0026 0.0026 0.0023 0.0017 0.0013 0.0006 0.0006 0.0004 0.0001 0.0002 0.0004 0.0009 0.001 0.0013 0.0012 0.001 0.0013 0.0013 0.0015 0.0013 0.0012 0.0009 0.0005 0.0004 0.0004 0.0003 0.0004 0.0006 0.0005 0.0003 0.0005 0.0005 0.0005 0.0007 0.0008 0.001 0.0014 0.0021 0.0026 0.0029 0.0027 0.0024 0.0022 0.0021 0.0021 0.002 0.0021 0.0023 0.002 0.0018 0.0015 0.0012 0.0011 0.0009 0.0011 0.0011 0.0016 0.0018 0.0018 0.0021 0.0022 0.0024 0.0023 0.0025 0.0023 0.0023 0.0018 0.0016 0.0011 0.001 0.0008 0.0008 0.0005 0.0007 0.0004 0.0005 0.0002 0.0004 0.0005 0.0004 0.0005 0.0005 0.0004 0.0003 0.0002 0.0004 0.0006 0.0012 0.0031 0.0057 0.0094 0.0127 0.0149 0.0159 0.0156 0.0132 0.0102 0.0073 0.0029 0.0013 0.0012 0.0012 0.0012 0.0014 0.001 0.0009 0.0007 0.0005 0.0006 0.0005 0.0006 0.0006 0.0007 0.001 0.002 0.0041 0.0053 0.0059 0.0053 0.0043 0.0024 0.0016 0.0016 0.0013 0.0011 0.0008 0.0005 0.0006 0.0004 0.0005 0.0008 0.0018 0.0053 0.0105 0.0143 0.0171 0.0171 0.015 0.0111 0.0071 0.0033 0.0019 0.0017 0.0016 0.0017 0.0015 0.0016 0.0017 0.0017 0.0017 0.0017 0.0015 0.0013 0.0014 0.0013 0.0014 0.0016 0.0016 0.0016 0.0013 0.0013 0.0009 0.001 0.0009 0.0008 0.0009 0.0009 0.001 0.0027 0.0061 0.0114 0.0156 0.0183 0.0204 0.0217 0.0211 0.0206 0.0187 0.0147 0.0112 0.0072 0.0033 0.0019 0.0009 0.0012 0.0011 0.0009 0.001 0.001 0.001 0.0008 0.001 0.0008 0.0009 0.001 0.0012 
0.0011 0.0013 0.0015 0.0018 0.0022 0.0022 0.0018 0.0016 0.0014 0.0011 0.0011 0.0009 0.0009 0.0008 0.0008 0.0007 0.0006 0.0006 0.0005 0.0007 0.0007 0.0005 0.0004 0.0006 0.0007 0.0009 0.0006 0.0007 0.0008 0.0011 0.0009 0.0009 0.0008 0.001 0.001 0.001 0.001 0.001 0.0011 0.001 0.001 0.0012 0.0011 0.0011 0.0011 0.0014 0.0014 0.0014 0.0014 0.0014 0.0013 0.0012 0.0011 0.0013 0.0012 0.0014 0.0014 0.0014 0.0014 0.0016 0.0016 0.0017 0.0018 0.0019 0.0018 0.0016 0.0011 0.0012 0.001 0.0008 0.0008 0.0008 0.0009 0.001 0.0012 0.0012 0.0019 0.0023 0.0031 0.0039 0.0051 0.0075 0.0095 0.0112 0.0112 0.0098 0.0075 0.0041 0.0025 0.0017 0.0017 0.0017 0.0015 0.0016 0.0014 0.0011 0.0002 0.0002 0.0002 0.0004 0.0003 0.0005 0.0006 0.0005 0.0009 0.0012 0.0011 0.0013 0.0013 0.0015 0.0022 0.0027 0.007 0.0023 0.0018 0.0019 0.0014 0.0014 0.0013 0.0013 0.0012 0.0013 0.0012 0.0012 0.0009 0.001 0.0011 0.0013 0.0014 0.0015 0.0013 0.0013 0.0015 0.0016 0.0016 0.0015 0.0017 0.0017 0.002 0.0025 0.0041 0.0071 0.0109 0.0133 0.0148 0.0139 0.0108 0.0081 0.0046 0.0029 0.0023 0.0022 0.0016 0.0016 0.0014 0.0012 0.0009 0.0009 0.0009 0.0006 0.0008 0.0006 0.0007 0.001 0.0022 0.0031 0.0039 0.004 0.0039 0.0032 0.0024 0.0024 0.0023 0.0025 0.0023 0.0022 0.002 0.0019 0.0021 0.0022 0.0022 0.0022 0.0021 0.0021 0.0017 0.0018 0.0018 0.0016 0.0018 0.0015 0.0017 0.0019 0.0018 0.0017 0.0016 0.0018 0.0016 0.0013 0.0012 0.0012 0.0013 0.0009 0.0007 0.001 0.0016 0.0019 0.0026 0.0024 0.0023 0.002 0.0016 0.0015 0.0012 0.0012 0.001 0.0009 0.0008 0.0008 0.0004 0.0003 0.0007 0.0009 0.0031 0.0073 0.0103 0.013 0.0147 0.0143 0.012 0.009 0.0064 0.0039 0.0033 0.0028 0.0025 0.0019 0.0017 0.0016 0.0016 0.0013 0.0012 0.0008 0.0009 0.0007 0.0006 0.0008 0.0006 0.0004 0.0005 0.0005 0.0006 0.0011 0.0012 0.0015 0.0017 0.0022 0.0022 0.002 0.0018 0.0015 0.0013 0.0012 0.0012 0.0014 0.0014 0.0017 0.0015 0.0015 0.0014 0.0016 0.0018 0.0018 0.0016 0.0019 0.0018 0.0016 0.0015 0.0013 0.0011 0.001 0.001 0.001 0.0011 0.0011 0.001 0.001 0.001 0.0009 0.001 0.0011 0.0011 0.0012 0.001 0.0011 0.0011 0.0013 0.0013 0.0012 0.0013 0.0012 0.0011 0.001 0.0012 0.0009 0.0008 0.0011 0.0011 0.0008 0.0009 0.0011 0.0009 0.0009 0.0008 0.0006 0.0008 0.0006 0.0008 0.0007 0.0006 0.0007 0.0009 0.0008 0.0007 0.0009 0.0009 0.0009 0.0008 0.0008 0.0006 0.0004 0.0003 0.0005 0.0005 0.0005 0.0006 0.0005 0.0005 0.0006 0.0005 0.0007 0.0003 0.0002 0.0002 0.0002 0.0003 0.0002 0.0002 0.0003 0.0004 0.0003 0.0003 0.0004 0.0002 0.0005 0.0016 0.0019 0.002 0.002 0.0013 0.001 0.0006 0.0007 0.0007 0.0007 0.0007 0.0006 0.0006 0.0007 0.0007 0.0009 0.0009 0.0006 0.0002 0.0002 0.0 0.0 0.0003 0.0002", + "breathiness": "-78.1818 -78.1244 -75.4093 -72.5085 -69.0397 -65.8042 -63.0818 -60.3167 -57.5902 -54.5568 -52.4682 -50.9081 -50.1102 -49.2192 -48.4593 -48.1081 -47.5306 -47.0774 -46.8129 -46.7442 -46.7947 -47.1323 -47.1343 -47.2998 -47.066 -47.3803 -47.4118 -47.5881 -47.8316 -48.2337 -48.7844 -49.2833 -49.9001 -50.5878 -51.3516 -51.9518 -52.8143 -53.6101 -54.5934 -55.7136 -57.3505 -58.7766 -60.3951 -61.8727 -62.8819 -63.6134 -63.7036 -63.4792 -62.7811 -62.1588 -61.1369 -59.9944 -58.6298 -57.0638 -55.7214 -54.652 -54.2268 -54.1925 -54.826 -55.9429 -56.8744 -57.6281 -58.737 -59.4037 -59.8977 -60.4118 -60.5846 -60.2415 -59.9048 -58.8133 -57.5131 -55.979 -54.3428 -52.7892 -51.7913 -51.2893 -51.0899 -51.1313 -51.4614 -51.9319 -52.6013 -53.3461 -53.9916 -54.4299 -54.8203 -55.0317 -54.8979 -54.6638 -54.4507 -54.1877 -53.9638 -53.4469 -52.7679 -52.2215 -51.4791 -51.0295 -50.5569 -50.6573 -50.7535 -51.5615 -52.3081 -53.2052 -54.0347 
-55.0982 -56.0723 -57.1725 -58.3067 -59.479 -60.5737 -61.5878 -62.398 -63.1659 -63.501 -63.6509 -63.505 -63.4217 -63.0202 -62.9463 -62.0377 -61.4273 -59.9722 -57.8671 -54.6039 -50.9267 -47.0615 -43.1992 -40.0237 -37.8205 -36.5904 -36.3394 -36.6371 -37.7803 -40.1009 -42.9083 -46.4688 -49.5826 -52.9272 -55.6476 -57.8655 -59.0042 -60.1083 -60.9384 -61.4136 -61.7124 -61.7259 -61.781 -61.5384 -59.9651 -57.4946 -54.4557 -50.9946 -47.3438 -44.6495 -43.6011 -44.2951 -46.2098 -48.6924 -51.5899 -54.5625 -56.8615 -58.6604 -59.8133 -60.522 -60.1803 -58.4805 -55.1758 -51.1387 -47.1977 -43.1682 -39.7849 -37.5887 -36.7485 -37.4808 -39.3047 -42.1586 -45.0448 -48.0409 -50.2921 -51.9821 -52.9456 -53.2873 -53.3517 -53.5213 -53.6344 -53.5928 -53.5681 -53.2527 -52.9454 -52.653 -52.6728 -52.7685 -53.1468 -53.3652 -53.5451 -53.8029 -54.0117 -54.2409 -54.6656 -54.7678 -54.7834 -53.9478 -51.8168 -48.8845 -45.2466 -41.3029 -37.6629 -35.0832 -33.5735 -33.3595 -33.5156 -33.9733 -35.0622 -35.8469 -37.7611 -40.35 -43.5329 -46.7873 -49.8078 -52.4208 -53.9703 -54.8843 -55.4116 -55.7419 -56.2163 -56.7118 -57.2601 -57.4189 -57.3785 -57.2483 -56.7607 -56.1263 -55.2687 -54.4859 -53.698 -53.0093 -52.6241 -52.2814 -52.2204 -52.9374 -53.521 -54.4388 -55.2512 -56.2003 -56.619 -57.2082 -57.5226 -57.9361 -58.3285 -58.534 -58.6804 -59.0904 -59.3678 -59.7535 -59.7398 -59.8308 -59.7591 -59.5242 -59.2385 -58.9119 -58.5273 -58.2365 -57.93 -57.9679 -57.7441 -57.6519 -57.4569 -57.0665 -56.2646 -55.4334 -54.8767 -54.3481 -54.1223 -54.1506 -54.453 -54.6246 -54.983 -55.0484 -55.1493 -54.9581 -54.6567 -54.4278 -54.1654 -53.8354 -53.9953 -54.365 -54.6342 -55.1707 -55.5209 -55.8852 -55.9933 -55.8404 -55.5439 -55.2827 -54.7358 -54.4333 -54.278 -54.3119 -54.2198 -54.3296 -54.3828 -54.4953 -54.3304 -54.2934 -54.3854 -54.4226 -54.1667 -53.6314 -52.2703 -50.5667 -48.2789 -45.5048 -42.8504 -40.347 -38.3914 -37.5867 -37.4267 -38.5421 -40.4455 -42.8235 -45.5658 -47.9663 -50.221 -51.7675 -52.9157 -54.0889 -55.317 -56.5062 -57.8564 -58.8975 -59.6164 -60.2486 -60.4546 -60.4762 -60.2135 -59.8211 -59.1954 -58.6816 -58.4619 -58.2976 -58.6103 -58.6466 -59.0158 -58.9512 -58.9854 -58.8711 -58.793 -58.8333 -58.8602 -58.9497 -59.0468 -59.0111 -58.6143 -58.0828 -57.3806 -56.5504 -55.9994 -55.5908 -55.4114 -55.2307 -55.3207 -55.3542 -55.312 -55.2368 -55.2264 -55.343 -55.4957 -55.7079 -55.6795 -55.3894 -54.1099 -51.8845 -48.979 -45.591 -42.2227 -39.4463 -37.784 -37.4725 -38.8769 -41.0073 -44.0997 -47.2428 -50.3296 -52.9358 -55.0165 -56.4888 -57.5336 -58.7864 -60.1476 -61.3791 -62.4195 -63.4402 -64.3188 -64.2399 -63.1793 -61.3914 -58.4833 -55.0799 -51.4123 -48.332 -46.6452 -46.2451 -46.8542 -48.1223 -49.4364 -50.513 -51.2638 -51.547 -51.8177 -52.1546 -52.0686 -52.5938 -53.0469 -53.7074 -54.0246 -54.5147 -54.6298 -54.4007 -54.0237 -53.1499 -53.0448 -53.0129 -52.981 -53.4219 -53.6396 -54.2015 -54.5671 -54.8821 -55.1782 -55.5116 -56.1803 -56.5694 -57.3373 -57.7849 -57.8148 -57.5688 -56.8799 -55.9119 -54.7768 -53.7355 -53.2159 -53.2719 -53.9308 -54.7914 -55.8802 -56.972 -57.8392 -58.5484 -59.183 -59.7883 -59.8595 -59.0887 -57.5693 -54.9024 -51.3949 -47.5745 -43.8534 -40.7366 -38.6519 -38.2834 -38.8065 -40.2914 -42.4788 -44.6989 -46.7535 -48.6703 -50.1569 -50.9455 -51.7563 -52.5524 -53.3445 -54.768 -55.8896 -56.8651 -58.0633 -58.9601 -60.2228 -61.0722 -61.6551 -61.7667 -60.907 -59.7314 -58.5135 -56.9594 -55.5614 -54.9111 -54.4441 -54.2624 -54.2123 -54.4566 -54.4133 -54.5771 -54.667 -54.6572 -54.6197 -54.6601 -54.252 -54.1764 -54.0497 -54.005 -53.9306 -53.8554 -53.9037 
 [per-frame numeric sequences elided below: each value string runs to thousands of floats;
  in every hunk only the "energy" and "breathiness" strings change, from linear-amplitude
  curves (values roughly 0.0-0.35) to dB-scale curves (values roughly -75 to -17 dB);
  all "f0_seq" context and every *_timestep value (0.011609977324263039) are unchanged]

 [tail of the previous hunk: remainder of its new dB-scale "breathiness" curve]
         "breathiness_timestep": "0.011609977324263039"
     },
     {
@@ -346,9 +346,9 @@
         "note_slur": "0 0 0 0 0",
         "f0_seq": "[f0 contour in Hz, unchanged]",
         "f0_timestep": "0.011609977324263039",
-        "energy": "[linear-amplitude energy curve]",
+        "energy": "[energy curve re-encoded in dB]",
         "energy_timestep": "0.011609977324263039",
-        "breathiness": "[linear-amplitude breathiness curve]",
+        "breathiness": "[breathiness curve re-encoded in dB]",
         "breathiness_timestep": "0.011609977324263039"
     },
     {
@@ -362,9 +362,9 @@
         "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "f0_seq": "[f0 contour in Hz, unchanged]",
         "f0_timestep": "0.011609977324263039",
-        "energy": "[linear-amplitude energy curve]",
+        "energy": "[energy curve re-encoded in dB]",
         "energy_timestep": "0.011609977324263039",
-        "breathiness": "[linear-amplitude breathiness curve]",
+        "breathiness": "[breathiness curve re-encoded in dB]",
         "breathiness_timestep": "0.011609977324263039"
     },
     {
@@ -378,9 +378,9 @@
         "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
         "f0_seq": "[f0 contour in Hz, unchanged]",
         "f0_timestep": "0.011609977324263039",
-        "energy": "[linear-amplitude energy curve]",
+        "energy": "[energy curve re-encoded in dB]",
         "energy_timestep": "0.011609977324263039",
-        "breathiness": "[linear-amplitude breathiness curve]",
+        "breathiness": "[breathiness curve re-encoded in dB]",
         "breathiness_timestep": "0.011609977324263039"
     },
     {
@@ -394,9 +394,9 @@
         "note_slur": "[mostly 0, with two slurred notes marked 1; elided]",
         "f0_seq": "[f0 contour in Hz, unchanged]",
         "f0_timestep": "0.011609977324263039",
-        "energy": "[linear-amplitude energy curve; its value string continues beyond this excerpt]"
0.0999 0.1008 0.0993 0.0989 0.0997 0.1 0.1002 0.1017 0.1017 0.103 0.1049 0.1039 0.1032 0.1004 0.1 0.1005 0.0996 0.0993 0.0991 0.0978 0.0973 0.0974 0.097 0.0972 0.0965 0.0957 0.0948 0.0928 0.0934 0.0923 0.0939 0.095 0.0962 0.0971 0.1 0.1006 0.1005 0.1017 0.1007 0.1013 0.1021 0.1036 0.1039 0.1039 0.1039 0.1042 0.1033 0.1026 0.1015 0.0989 0.0949 0.0875 0.0735 0.058 0.0422 0.0235 0.0124 0.0122 0.0122 0.0144 0.0148 0.0403 0.0621 0.0776 0.0913 0.0967 0.0978 0.0971 0.0956 0.0943 0.0935 0.0917 0.093 0.0922 0.0921 0.0941 0.0927 0.0934 0.0928 0.0914 0.0939 0.0942 0.0941 0.0951 0.092 0.0898 0.0907 0.0882 0.0878 0.0884 0.0877 0.0881 0.0826 0.0743 0.0606 0.045 0.0317 0.0305 0.0317 0.0365 0.0397 0.0386 0.0432 0.0616 0.0787 0.0979 0.1078 0.107 0.1049 0.099 0.097 0.095 0.0916 0.089 0.0844 0.0823 0.0793 0.0775 0.0763 0.076 0.0767 0.0759 0.0752 0.0748 0.0734 0.0716 0.0705 0.0675 0.0646 0.0621 0.0596 0.0592 0.0584 0.0588 0.0588 0.0576 0.0566 0.0549 0.0523 0.0488 0.0433 0.0345 0.0244 0.0153 0.0086 0.0053 0.0053 0.0066 0.0068 0.0074 0.0086 0.0085 0.0089 0.0079 0.0069 0.0076 0.0064 0.0053 0.0044 0.0038 0.0126 0.0247 0.041 0.063 0.0808 0.0962 0.1049 0.1032 0.0947 0.0818 0.0699 0.0645 0.0704 0.0743 0.0781 0.0784 0.073 0.0682 0.0649 0.0679 0.0762 0.0834 0.0823 0.0752 0.0623 0.0578 0.0659 0.0784 0.0929 0.0992 0.1025 0.1026 0.1011 0.1004 0.0986 0.0985 0.0977 0.0932 0.0833 0.0681 0.0491 0.0264 0.0163 0.0165 0.0184 0.0419 0.0618 0.0699 0.0756 0.0735 0.0673 0.0679 0.0715 0.0719 0.0734 0.0758 0.0766 0.077 0.0732 0.0641 0.0525 0.0373 0.0298 0.0301 0.0299 0.0405 0.0586 0.0724 0.0824 0.0906 0.09 0.0921 0.0923 0.0894 0.0911 0.0879 0.0888 0.0883 0.0881 0.0884 0.0878 0.0893 0.088 0.0879 0.0878 0.0857 0.0863 0.0846 0.078 0.0751 0.07 0.0695 0.0714 0.0741 0.0775 0.0781 0.0791 0.0797 0.076 0.0689 0.0562 0.0399 0.0248 0.0094 0.0047 0.0081 0.0343 0.0565 0.0761 0.0914 0.0991 0.1002 0.097 0.0943 0.0919 0.0871 0.0854 0.0814 0.0767 0.0768 0.0755 0.0761 0.0789 0.0805 0.0802 0.0794 0.0778 0.0735 0.0698 0.0647 0.0616 0.0585 0.0573 0.0574 0.0566 0.0569 0.0569 0.0565 0.057 0.0574 0.0584 0.0582 0.0567 0.0543 0.048 0.0395 0.0292 0.0185 0.0103 0.0044 0.0031 0.0039 0.0045 0.0045 0.0042 0.0034 0.0035 0.003 0.0014 0.0017 0.0009 0.0003 0.0006 0.0015 0.0021 0.0067 0.0116 0.0154 0.0275 0.0503 0.0706 0.0843 0.0891 0.0866 0.0787 0.0753 0.0769 0.0816 0.0863 0.086 0.0848 0.0788 0.0703 0.0595 0.0441 0.0314 0.0214 0.0221 0.0219 0.039 0.0567 0.0709 0.0831 0.0884 0.0895 0.0899 0.09 0.0883 0.0889 0.086 0.0804 0.074 0.0613 0.0483 0.035 0.0234 0.0208 0.0213 0.0214 0.0347 0.0523 0.0672 0.0797 0.0838 0.0848 0.0844 0.0819 0.0834 0.0822 0.0806 0.0817 0.0812 0.0813 0.0812 0.0814 0.0811 0.0813 0.0826 0.0839 0.0882 0.0923 0.0913 0.0897 0.0847 0.0818 0.0817 0.0807 0.082 0.0812 0.0809 0.0808 0.0778 0.0701 0.0576 0.043 0.0266 0.0222 0.0249 0.0259 0.0256 0.0294 0.0511 0.0678 0.0806 0.0901 0.088 0.086 0.0842 0.0832 0.0804 0.0804 0.079 0.0722 0.0623 0.0474 0.0288 0.0105 0.0075 0.0035 0.0186 0.0403 0.0561 0.0688 0.0756 0.0763 0.075 0.074 0.073 0.0691 0.068 0.0652 0.0624 0.0608 0.0573 0.0547 0.0534 0.0541 0.0528 0.0528 0.0536 0.0522 0.0529 0.0505 0.0474 0.0455 0.0418 0.0386 0.0342 0.0304 0.0277 0.026 0.0263 0.0267 0.0275 0.0292 0.0301 0.0292 0.0268 0.0228 0.0189 0.0127 0.0085 0.0046 0.0032 0.0032 0.0028 0.0033 0.0038 0.0041 0.0041 0.0036 0.0029 0.0024 0.0018 0.0012 0.0011 0.0003 0.0001 0.0 0.001 0.0039 0.0181 0.0307 0.0425 0.0526 0.0585 0.0607 0.0615 0.0623 0.061 0.0605 0.0584 0.0554 0.0559 0.0559 0.0566 0.057 0.0571 0.056 0.0557 0.0548 0.0543 0.0558 0.0596 0.0643 0.0677 
0.07 0.0728 0.0727 0.0735 0.0711 0.0667 0.0612 0.0515 0.0399 0.0274 0.0113 0.0022 0.0055 0.0068 0.0117 0.0137 0.0127 0.0215 0.0375 0.0489 0.0581 0.0632 0.0642 0.0641 0.0642 0.0656 0.0643 0.0655 0.0616 0.0585 0.0567 0.0481 0.0405 0.0305 0.0175 0.0088 0.0077 0.0214 0.0345 0.0453 0.055 0.0587 0.0593 0.06 0.0591 0.0585 0.0565 0.0551 0.0536 0.0534 0.0517 0.0481 0.0406 0.0317 0.0218 0.0139 0.0119 0.0126 0.0246 0.04 0.0528 0.0631 0.0701 0.0708 0.0714 0.071 0.0717 0.071 0.0704 0.0709 0.0681 0.0625 0.0522 0.0394 0.0248 0.0141 0.0158 0.017 0.0251 0.0445 0.0633 0.0775 0.0884 0.0903 0.0876 0.0828 0.0782 0.0745 0.0711 0.0693 0.0659 0.0639 0.064 0.0663 0.0687 0.072 0.0743 0.0739 0.0751 0.0736 0.072 0.0712 0.0701 0.0687 0.0694 0.0684 0.0687 0.0685 0.0681 0.0682 0.0689 0.0702 0.0713 0.0715 0.0717 0.0725 0.0726 0.0733 0.0741 0.0754 0.0765 0.0774 0.0782 0.0802 0.0816 0.0826 0.0825 0.0824 0.0799 0.0786 0.0772 0.0755 0.0749 0.0745 0.0735 0.073 0.0719 0.0712 0.0702 0.0715 0.0764 0.0826 0.0888 0.0922 0.0919 0.0908 0.0901 0.0872 0.0841 0.082 0.076 0.0734 0.0714 0.0696 0.0686 0.0659 0.0595 0.0525 0.0448 0.0445 0.0599 0.0752 0.088 0.0957 0.096 0.0935 0.0885 0.0868 0.0853 0.0839 0.0849 0.0836 0.0847 0.0851 0.0867 0.0875 0.0881 0.0887 0.0888 0.0896 0.0893 0.0896 0.0884 0.0876 0.0879 0.0873 0.0868 0.0868 0.0854 0.0847 0.0829 0.0816 0.0796 0.0778 0.076 0.072 0.0689 0.0656 0.0627 0.0645 0.0715 0.0797 0.0906 0.1006 0.1055 0.1128 0.1146 0.1155 0.1161 0.1126 0.1091 0.0969 0.1397 0.0872 0.0833 0.0768 0.0727 0.0725 0.0751 0.0795 0.0857 0.0903 0.0932 0.0943 0.0945 0.0931 0.0921 0.0886 0.0857 0.0832 0.0824 0.0792 0.0694 0.0575 0.041 0.0248 0.0201 0.0215 0.0215 0.0213 0.0204 0.0182 0.0229 0.0422 0.0599 0.0762 0.0883 0.0926 0.0933 0.0921 0.0893 0.0866 0.0831 0.0785 0.075 0.0727 0.0708 0.0699 0.0685 0.0676 0.069 0.0693 0.0699 0.0695 0.067 0.0668 0.066 0.0639 0.0613 0.0574 0.0551 0.0524 0.0499 0.0493 0.0489 0.048 0.0476 0.047 0.0459 0.0449 0.0424 0.0384 0.0309 0.0224 0.0147 0.0065 0.0045 0.0043 0.0043 0.0042 0.0035 0.0034 0.0024 0.0025 0.0016 0.0021 0.0019 0.003 0.0045 0.0066 0.01 0.0123 0.0146 0.0148 0.0282 0.0464 0.0604 0.0721 0.0788 0.0778 0.075 0.0721 0.0668 0.0666 0.0674 0.0689 0.073 0.0738 0.0732 0.0677 0.0572 0.0442 0.0297 0.0173 0.0225 0.0455 0.0624 0.0794 0.0905 0.0913 0.0917 0.0855 0.0833 0.0802 0.0762 0.0754 0.0737 0.0742 0.0742 0.0684 0.0589 0.0457 0.0283 0.0147 0.0299 0.0542 0.0689 0.0811 0.0879 0.0861 0.0857 0.0838 0.0821 0.0805 0.0805 0.0806 0.0804 0.0814 0.0811 0.0806 0.0803 0.0758 0.0722 0.0686 0.0636 0.0662 0.0682 0.071 0.0749 0.0737 0.0736 0.074 0.0726 0.0742 0.074 0.0719 0.071 0.0694 0.0682 0.0633 0.0547 0.0428 0.0283 0.0159 0.0098 0.0111 0.0299 0.0521 0.0691 0.0819 0.0889 0.0901 0.0909 0.0905 0.0908 0.0897 0.0876 0.0876 0.0855 0.0835 0.0825 0.0809 0.0796 0.081 0.0816 0.0827 0.0864 0.0925 0.1007 0.1087 0.1141 0.1184 0.1165 0.115 0.1136 0.1078 0.1042 0.0979 0.0903 0.0855 0.081 0.0808 0.0823 0.0862 0.0893 0.0905 0.0922 0.0905 0.0871 0.085 0.0809 0.0772 0.0724 0.0651 0.0589 0.0532 0.0502 0.0492 0.052 0.0548 0.0553 0.0563 0.0549 0.0518 0.047 0.0381 0.0269 0.0137 0.0045 0.0038 0.0045 0.0048 0.005 0.0056 0.0058 0.006 0.0057 0.0049 0.0041 0.0037 0.0033 0.0023 0.0022 0.0022 0.0018 0.0015 0.0032 0.0083 0.0363 0.0563 0.0727 0.0845 0.0876 0.0868 0.0831 0.0813 0.0789 0.083 0.0843 0.0853 0.0828 0.0758 0.0655 0.0514 0.0365 0.0249 0.0221 0.0245 0.0282 0.0475 0.0657 0.0775 0.0881 0.0898 0.0886 0.0878 0.0852 0.0829 0.0812 0.0797 0.0815 0.08 0.0724 0.0616 0.0436 0.0245 0.0119 0.0067 0.0144 0.0412 0.0576 0.073 0.0845 0.0846 
0.0842 0.0795 0.0755 0.0727 0.0721 0.0717 0.073 0.0733 0.0722 0.0713 0.0678 0.0652 0.0624 0.0606 0.0624 0.0658 0.072 0.0784 0.0823 0.0865 0.0881 0.088 0.0883 0.0855 0.0841 0.0828 0.0826 0.0819 0.0776 0.0682 0.0545 0.0372 0.0202 0.013 0.0109 0.0108 0.0262 0.0465 0.0653 0.0791 0.0898 0.095 0.0951 0.0954 0.0942 0.0902 0.0879 0.0847 0.0817 0.0797 0.073 0.0608 0.0469 0.0308 0.0187 0.017 0.0207 0.0398 0.0589 0.0775 0.0938 0.1035 0.1096 0.1108 0.1122 0.1113 0.109 0.1056 0.0992 0.0919 0.0887 0.0873 0.0877 0.0894 0.0923 0.0951 0.0978 0.0971 0.0961 0.0953 0.093 0.09 0.084 0.0764 0.0691 0.0644 0.0627 0.0629 0.0646 0.065 0.065 0.0639 0.0623 0.0598 0.0544 0.047 0.0357 0.0205 0.0082 0.0035 0.0036 0.0041 0.0046 0.0037 0.0044 0.0041 0.0037 0.0029 0.0025 0.0015 0.0014 0.0011 0.0009 0.0001 0.0 0.006 0.0255 0.0424 0.0605 0.0762 0.084 0.0871 0.0862 0.083 0.0816 0.0825 0.0822 0.0857 0.0883 0.0872 0.0854 0.0778 0.0638 0.047 0.0279 0.0177 0.0193 0.0203 0.0343 0.0625 0.0811 0.0939 0.1006 0.0961 0.0924 0.0896 0.0905 0.0898 0.09 0.0911 0.0918 0.0925 0.0935 0.0928 0.0919 0.0899 0.0877 0.0855 0.0846 0.0872 0.0911 0.093 0.0967 0.0979 0.0967 0.0972 0.0951 0.0953 0.0947 0.0939 0.0914 0.0833 0.0721 0.0564 0.0404 0.0291 0.0273 0.0274 0.0274 0.0274 0.046 0.0657 0.0803 0.092 0.0966 0.0969 0.0968 0.0959 0.0956 0.0949 0.0956 0.0971 0.0967 0.0969 0.0957 0.0948 0.0937 0.0914 0.0905 0.093 0.0978 0.1031 0.1073 0.1086 0.1091 0.1102 0.113 0.1151 0.1166 0.1148 0.1131 0.1105 0.1102 0.108 0.1041 0.1011 0.095 0.0905 0.0893 0.0903 0.0935 0.1012 0.1065 0.1112 0.116 0.1172 0.1183 0.1156 0.1115 0.1077 0.1044 0.1021 0.1004 0.0998 0.099 0.0989 0.0991 0.1 0.1011 0.1033 0.1041 0.1037 0.1027 0.101 0.0976 0.0945 0.0922 0.0906 0.0884 0.0874 0.0874 0.087 0.0904 0.0937 0.0924 0.0834 0.0683 0.0469 0.0274 0.0202 0.0202 0.0305 0.0549 0.0724 0.0884 0.1002 0.1044 0.1066 0.1086 0.1049 0.0988 0.0944 0.0881 0.086 0.0874 0.0869 0.0843 0.0762 0.0614 0.044 0.0224 0.0126 0.0354 0.0589 0.0803 0.0984 0.1076 0.1107 0.1082 0.103 0.0972 0.0921 0.0874 0.0866 0.0871 0.0853 0.0784 0.065 0.0466 0.0272 0.0162 0.0158 0.0278 0.0498 0.0685 0.0841 0.0978 0.1023 0.1032 0.1047 0.1026 0.1011 0.099 0.097 0.0934 0.0917 0.0883 0.0841 0.0824 0.0778 0.0772 0.0756 0.0742 0.0755 0.0746 0.0745 0.0756 0.0744 0.0733 0.0712 0.0701 0.0693 0.069 0.0683 0.0656 0.0588 0.0498 0.0373 0.024 0.0139 0.01 0.0111 0.0138 0.0281 0.0398 0.0534 0.0656 0.0726 0.0792 0.0831 0.0845 0.0842 0.0828 0.0811 0.0786 0.0777 0.0779 0.0787 0.0789 0.0808 0.0798 0.0826 0.0839 0.0815 0.0816 0.0786 0.0773 0.078 0.0766 0.0756 0.0771 0.0759 0.0749 0.0745 0.0727 0.0701 0.0659 0.0592 0.0475 0.0357 0.0211 0.0129 0.0098 0.01 0.024 0.0452 0.0653 0.0831 0.0964 0.1048 0.1076 0.1068 0.1034 0.0973 0.0881 0.0817 0.0762 0.073 0.0729 0.0721 0.0745 0.0773 0.0804 0.0811 0.0797 0.0764 0.0727 0.0708 0.0707 0.0701 0.0716 0.0691 0.066 0.0618 0.0571 0.0571 0.0592 0.0597 0.0617 0.0631 0.0614 0.0605 0.0546 0.046 0.0345 0.0201 0.0078 0.0046 0.0051 0.0054 0.0054 0.0054 0.0055 0.0053 0.0054 0.0049 0.0044 0.0038 0.0024 0.0028 0.0025 0.0023 0.0049 0.0166 0.0275 0.042 0.0592 0.072 0.0834 0.0881 0.0859 0.0801 0.0716 0.0661 0.0615 0.0633 0.0673 0.0711 0.0729 0.0702 0.0626 0.052 0.0433 0.0365 0.0353 0.0373 0.0366 0.0367 0.0451 0.0561 0.0729 0.0863 0.091 0.0925 0.0899 0.0882 0.0866 0.0856 0.0861 0.0857 0.0847 0.0855 0.0836 0.0809 0.0816 0.0798 0.0796 0.0824 0.0848 0.0863 0.0863 0.0827 0.0796 0.077 0.0761 0.0772 0.0774 0.0775 0.0774 0.0752 0.0715 0.0638 0.0535 0.0404 0.0294 0.0242 0.0243 0.0232 0.0277 0.0493 0.066 0.0792 0.0908 0.092 0.0929 0.092 0.09 
0.0881 0.0869 0.0888 0.0866 0.0871 0.0846 0.0837 0.084 0.0818 0.0818 0.0804 0.0791 0.0803 0.0767 0.0725 0.0682 0.0655 0.0665 0.0663 0.0691 0.0692 0.0693 0.0711 0.0687 0.0656 0.0557 0.0422 0.0268 0.0114 0.0051 0.005 0.0045 0.0319 0.0551 0.0756 0.0922 0.1007 0.1029 0.1013 0.0967 0.0918 0.0861 0.0821 0.0781 0.0746 0.0732 0.0726 0.0734 0.0762 0.0776 0.0778 0.0771 0.0739 0.0688 0.0651 0.0603 0.0559 0.0535 0.0516 0.0522 0.0523 0.0526 0.053 0.0536 0.0534 0.0526 0.0505 0.0479 0.0461 0.0446 0.0411 0.036 0.0271 0.0185 0.0094 0.0044 0.0037 0.0043 0.0046 0.0045 0.0045 0.0035 0.003 0.0027 0.0022 0.0018 0.0036 0.0047 0.0061 0.009 0.0116 0.0138 0.0131 0.0249 0.0403 0.0527 0.0632 0.0706 0.0711 0.0705 0.0711 0.0677 0.0687 0.0709 0.0714 0.0718 0.0669 0.0573 0.0456 0.0317 0.0184 0.0139 0.0151 0.0155 0.0306 0.0518 0.067 0.0799 0.0851 0.0855 0.0835 0.0832 0.0829 0.081 0.0811 0.079 0.0785 0.0751 0.0658 0.0534 0.0404 0.0275 0.0203 0.0192 0.0325 0.0554 0.0741 0.0886 0.0981 0.0993 0.0978 0.0959 0.0908 0.0864 0.0834 0.0788 0.0767 0.0775 0.0738 0.064 0.0521 0.0361 0.0216 0.0182 0.0259 0.0494 0.0658 0.0803 0.0903 0.0903 0.091 0.0886 0.0872 0.0869 0.0852 0.085 0.0828 0.075 0.0626 0.0471 0.0251 0.0073 0.0044 0.0066 0.0299 0.0561 0.0747 0.091 0.0999 0.1012 0.1002 0.0962 0.0949 0.0923 0.0911 0.0907 0.0895 0.0885 0.0827 0.0748 0.0615 0.0459 0.0263 0.0109 0.0054 0.0042 0.0061 0.0412 0.0676 0.0856 0.1008 0.1055 0.1045 0.1061 0.1065 0.1037 0.1007 0.0948 0.0891 0.0869 0.0884 0.092 0.0969 0.1004 0.1017 0.1032 0.1021 0.1009 0.101 0.0995 0.0973 0.0953 0.0908 0.0863 0.0829 0.0805 0.0794 0.0787 0.0787 0.0767 0.0742 0.0712 0.0695 0.0683 0.0637 0.0547 0.0419 0.0261 0.0098 0.0041 0.0038 0.0042 0.0038 0.0039 0.0039 0.0036 0.0032 0.0025 0.0021 0.0016 0.0014 0.0011 0.0012 0.0024 0.0068 0.0096 0.0135 0.0262 0.041 0.0514 0.0608 0.0663 0.0712 0.075 0.0737 0.0761 0.0728 0.0705 0.0686 0.0662 0.0651 0.0642 0.0639 0.0588 0.0496 0.0384 0.0204 0.011 0.0356 0.0601 0.0828 0.0963 0.1017 0.1012 0.0978 0.0962 0.0942 0.0917 0.0906 0.0882 0.0881 0.0908 0.0915 0.0914 0.0881 0.084 0.0812 0.0814 0.0888 0.0958 0.0979 0.7742 0.0994 0.1001 0.0944 0.0897 0.0881 0.0832 0.0836 0.0829 0.0793 0.0745 0.0634 0.0481 0.0338 0.0232 0.0221 0.0232 0.0221 0.027 0.0456 0.0606 0.0735 0.0838 0.0838 0.0823 0.0798 0.0775 0.0753 0.0754 0.0749 0.0731 0.0705 0.062 0.0511 0.0377 0.0266 0.018 0.0163 0.0288 0.0491 0.0676 0.0812 0.0902 0.0906 0.0909 0.0919 0.0918 0.0906 0.0898 0.0882 0.0842 0.0806 0.0692 0.0529 0.0375 0.0243 0.0227 0.0259 0.0254 0.0254 0.0498 0.0685 0.0855 0.1018 0.1065 0.1104 0.1107 0.1116 0.1098 0.1084 0.1086 0.1084 0.1106 0.1094 0.1079 0.1063 0.1044 0.1028 0.102 0.0996 0.0969 0.0937 0.0895 0.0856 0.0848 0.084 0.0829 0.0821 0.0802 0.08 0.0816 0.0824 0.0851 0.0862 0.0814 0.0716 0.0563 0.0353 0.0206 0.017 0.03 0.0507 0.0705 0.0889 0.102 0.1075 0.109 0.1057 0.1 0.0948 0.0883 0.0867 0.087 0.0887 0.0895 0.0885 0.087 0.0852 0.0866 0.0875 0.0917 0.0967 0.0997 0.1059 0.1102 0.1144 0.1153 0.1164 0.1129 0.1088 0.1037 0.0976 0.0942 0.0923 0.0889 0.0783 0.0645 0.0471 0.0288 0.0212 0.0199 0.0365 0.0585 0.0732 0.0871 0.0953 0.098 0.0994 0.0993 0.0968 0.094 0.091 0.0906 0.0903 0.0899 0.0897 0.0884 0.087 0.0852 0.0838 0.084 0.0858 0.0894 0.0909 0.0919 0.0918 0.0916 0.0924 0.0931 0.0935 0.0953 0.097 0.0988 0.0995 0.0947 0.0833 0.067 0.0476 0.0243 0.0119 0.0052 0.0088 0.0386 0.0717 0.0968 0.1145 0.1237 0.1232 0.1205 0.117 0.115 0.1113 0.1076 0.104 0.1023 0.1022 0.1024 0.1047 0.1036 0.1036 0.1028 0.1015 0.1021 0.102 0.1034 0.1038 0.1046 0.1036 0.102 0.1017 0.1009 0.1001 0.0992 
0.0982 0.0939 0.0842 0.0683 0.0478 0.0229 0.008 0.0011 0.0018 0.015 0.0496 0.0788 0.103 0.1238 0.1307 0.1317 0.1277 0.1237 0.1207 0.1192 0.1193 0.1188 0.1204 0.1209 0.1212 0.1215 0.1211 0.1189 0.1184 0.1182 0.1171 0.1176 0.1178 0.1175 0.1183 0.1195 0.122 0.1236 0.1259 0.1307 0.1335 0.1341 0.1341 0.1323 0.1293 0.1287 0.1297 0.1292 0.1304 0.1311 0.1292 0.1261 0.1257 0.1262 0.1289 0.1323 0.133 0.132 0.1289 0.1266 0.1231 0.1179 0.1107 0.1033 0.0955 0.0884 0.0827 0.0779 0.0747 0.0655 0.053 0.0381 0.0171 0.0047 0.0035 0.0059 0.0064 0.0055 0.0056 0.0051 0.0042 0.0041 0.0033 0.0025 0.0021 0.0016 0.0011 0.001 0.0015 0.001 0.0018 0.0048 0.0321 0.0567 0.0766 0.0936 0.1009 0.1027 0.1024 0.099 0.0957 0.0895 0.0819 0.0762 0.0727 0.0686 0.0693 0.0682 0.0708 0.0739 0.0775 0.0798 0.0807 0.0812 0.0819 0.0822 0.0821 0.0818 0.0805 0.08 0.0794 0.0789 0.079 0.0766 0.0707 0.0601 0.0461 0.0315 0.0195 0.0218 0.0238 0.0239 0.0211 0.0151 0.0334 0.0534 0.0704 0.0847 0.0921 0.0927 0.0967 0.0981 0.0977 0.098 0.0966 0.0951 0.0953 0.0941 0.0939 0.0947 0.095 0.0949 0.0965 0.0962 0.0959 0.0952 0.094 0.0942 0.0944 0.0951 0.095 0.0913 0.0866 0.0791 0.0665 0.0529 0.0381 0.0279 0.0294 0.0301 0.0318 0.0327 0.0303 0.0285 0.0256 0.0426 0.0587 0.0713 0.0847 0.0882 0.0899 0.0928 0.0955 0.0977 0.0995 0.0996 0.0983 0.0972 0.0951 0.0929 0.0891 0.0861 0.0833 0.0815 0.08 0.0805 0.0826 0.0848 0.0867 0.0863 0.0883 0.0898 0.0936 0.0957 0.096 0.0961 0.0948 0.0936 0.0931 0.0936 0.0945 0.0953 0.0967 0.099 0.1017 0.1028 0.1036 0.1031 0.1006 0.0982 0.0942 0.0919 0.0894 0.0894 0.0899 0.0911 0.0916 0.0907 0.0909 0.0902 0.0899 0.0907 0.0913 0.0904 0.0947 0.096 0.0986 0.1025 0.101 0.101 0.0994 0.0966 0.0958 0.0952 0.0942 0.0938 0.093 0.0923 0.0912 0.084 0.0716 0.055 0.0362 0.0257 0.0254 0.0243 0.0267 0.048 0.0657 0.0844 0.0984 0.1027 0.1046 0.1014 0.1005 0.1 0.0983 0.0982 0.0958 0.0931 0.0867 0.0748 0.0598 0.0416 0.0306 0.0307 0.0514 0.0719 0.0882 0.0988 0.1034 0.1023 0.0997 0.0973 0.0933 0.0906 0.0858 0.0838 0.0829 0.0782 0.0679 0.0545 0.0372 0.0215 0.0181 0.0191 0.0188 0.0297 0.0514 0.0676 0.0845 0.0944 0.0985 0.0989 0.0958 0.0927 0.0876 0.0848 0.0839 0.08 0.0737 0.0625 0.0478 0.0326 0.021 0.0172 0.0163 0.0267 0.052 0.0718 0.0877 0.0985 0.1009 0.1004 0.0982 0.0957 0.0921 0.0883 0.0846 0.0817 0.0807 0.0792 0.0725 0.0623 0.0477 0.0284 0.015 0.0136 0.0228 0.045 0.0666 0.0838 0.0964 0.101 0.1036 0.105 0.107 0.1081 0.1064 0.1054 0.105 0.106 0.1075 0.1108 0.1115 0.1116 0.1111 0.1106 0.1109 0.1104 0.1104 0.1099 0.1115 0.1139 0.1172 0.1176 0.1171 0.1155 0.1118 0.1111 0.1129 0.1132 0.1139 0.1141 0.1128 0.1135 0.1137 0.1163 0.1179 0.1184 0.1198 0.1175 0.1168 0.1153 0.1115 0.1093 0.1051 0.1026 0.1002 0.0981 0.0947 0.0925 0.0909 0.0916 0.0928 0.0923 0.0906 0.0858 0.0835 0.0837 0.0901 0.0995 0.1048 0.1084 0.108 0.1036 0.1016 0.0998 0.0979 0.0961 0.0929 0.0898 0.0856 0.0792 0.0685 0.0548 0.0401 0.0245 0.0141 0.0118 0.0274 0.0527 0.0735 0.0898 0.1002 0.1019 0.1018 0.098 0.0934 0.0881 0.0813 0.0796 0.0794 0.0812 0.0791 0.0711 0.0593 0.0413 0.0222 0.0249 0.0532 0.0763 0.0974 0.1112 0.1138 0.1134 0.112 0.1096 0.1063 0.1044 0.1011 0.0991 0.0982 0.0958 0.0941 0.0861 0.0725 0.0549 0.0343 0.0206 0.0176 0.0238 0.0468 0.067 0.0844 0.0986 0.1024 0.1024 0.1005 0.0962 0.0911 0.0865 0.0836 0.0823 0.0838 0.0846 0.0763 0.0644 0.0501 0.0296 0.0168 0.0221 0.0453 0.0651 0.0817 0.0958 0.1028 0.106 0.1067 0.1064 0.1032 0.0993 0.098 0.0941 0.0888 0.0796 0.0637 0.0461 0.024 0.0138 0.0118 0.0135 0.0276 0.0513 0.071 0.0887 0.103 0.1092 0.1144 0.1164 0.1153 0.1125 0.108 0.104 0.1008 
0.0976 0.0971 0.0936 0.0922 0.093 0.0905 0.0921 0.0927 0.0927 0.0927 0.0917 0.0925 0.0931 0.0942 0.0933 0.0927 0.0915 0.0895 0.0883 0.0875 0.085 0.0779 0.0653 0.0488 0.0285 0.0171 0.0241 0.0261 0.0342 0.0565 0.0728 0.0904 0.1029 0.1083 0.1128 0.1133 0.113 0.112 0.1089 0.106 0.1051 0.1034 0.1045 0.1039 0.1018 0.1002 0.0972 0.095 0.095 0.0954 0.0957 0.0958 0.0948 0.093 0.0906 0.0889 0.0885 0.0896 0.0907 0.0932 0.0943 0.0956 0.0958 0.0956 0.0952 0.0929 0.0925 0.0903 0.0907 0.0936 0.0988 0.1074 0.1131 0.1185 0.1217 0.1239 0.1245 0.1238 0.1222 0.12 0.1212 0.1219 0.1243 0.1249 0.1248 0.1263 0.1259 0.1275 0.1287 0.1296 0.1295 0.1283 0.1252 0.1219 0.1207 0.1192 0.1171 0.114 0.1091 0.1003 0.0887 0.0716 0.0517 0.0314 0.021 0.0213 0.0207 0.0201 0.0194 0.0175 0.0162 0.0455 0.0651 0.0801 0.0956 0.0988 0.1024 0.1071 0.1078 0.1061 0.1062 0.1046 0.1018 0.1038 0.1009 0.0983 0.0999 0.0986 0.0988 0.1018 0.1005 0.1001 0.1007 0.101 0.101 0.1023 0.0995 0.095 0.0917 0.0869 0.0845 0.0785 0.0684 0.0541 0.0386 0.0264 0.0246 0.0256 0.025 0.0246 0.0214 0.026 0.046 0.0638 0.083 0.0965 0.1023 0.1039 0.1043 0.1029 0.1021 0.1004 0.096 0.0927 0.0899 0.0882 0.0877 0.0888 0.0904 0.0938 0.0971 0.0986 0.0998 0.0997 0.0985 0.0961 0.0902 0.0867 0.0799 0.0745 0.0722 0.07 0.0692 0.0686 0.0687 0.0676 0.0651 0.0626 0.0601 0.0571 0.0536 0.0483 0.0422 0.0343 0.0259 0.0166 0.0093 0.006 0.0051 0.0053 0.0067 0.0064 0.0067 0.0062 0.0051 0.0044 0.0032 0.0031 0.0023 0.0023 0.0027 0.0059 0.0375 0.0602 0.0777 0.0893 0.0865 0.0789 0.0681 0.0598 0.055 0.0536 0.0562 0.0614 0.0666 0.0719 0.0743 0.0765 0.0789 0.0784 0.0795 0.0808 0.0795 0.083 0.086 0.0904 0.0948 0.0965 0.0948 0.094 0.0925 0.092 0.0931 0.0923 0.0916 0.0929 0.0902 0.0863 0.0779 0.0626 0.047 0.0316 0.0307 0.048 0.0628 0.0755 0.0835 0.0854 0.0867 0.0851 0.0858 0.0841 0.0832 0.0832 0.0815 0.0788 0.073 0.0616 0.0466 0.0325 0.0212 0.0191 0.0207 0.0182 0.033 0.052 0.0671 0.0805 0.089 0.0902 0.092 0.0917 0.0892 0.0859 0.0824 0.0804 0.0795 0.0794 0.0778 0.0776 0.0754 0.0747 0.0745 0.0718 0.0743 0.0754 0.0764 0.0764 0.0762 0.0749 0.0741 0.0732 0.0714 0.0705 0.0709 0.0712 0.0727 0.0722 0.0729 0.0724 0.0718 0.0716 0.0685 0.0667 0.0649 0.0708 0.0801 0.0893 0.0977 0.1017 0.1047 0.1044 0.1032 0.1003 0.0949 0.0895 0.0851 0.0818 0.0816 0.082 0.0824 0.0855 0.0873 0.0887 0.0907 0.0897 0.0872 0.0854 0.0828 0.0789 0.0776 0.0749 0.0722 0.0703 0.0679 0.067 0.0667 0.0669 0.0677 0.0661 0.0648 0.0639 0.0612 0.0582 0.0511 0.0409 0.0297 0.0164 0.0065 0.0048 0.0045 0.0035 0.0037 0.0037 0.0035 0.0031 0.0028 0.0018 0.0018 0.0014 0.002 0.0017 0.0029 0.0046 0.0069 0.01 0.0265 0.0439 0.0583 0.0699 0.0768 0.0781 0.0764 0.0752 0.0723 0.0734 0.0732 0.0739 0.0724 0.071 0.0709 0.0711 0.0698 0.0631 0.0519 0.0386 0.0211 0.0239 0.0416 0.0587 0.0727 0.0828 0.0873 0.087 0.086 0.0839 0.0807 0.078 0.0749 0.0703 0.0626 0.051 0.0373 0.0241 0.0166 0.0166 0.017 0.0262 0.0452 0.0577 0.0675 0.0742 0.0757 0.0764 0.0762 0.0742 0.0706 0.0663 0.063 0.0622 0.0634 0.0644 0.0611 0.053 0.0424 0.0291 0.0184 0.0168 0.0249 0.0472 0.0645 0.0788 0.0908 0.0931 0.0954 0.0972 0.0962 0.0958 0.0936 0.091 0.0885 0.0867 0.0832 0.0796 0.0754 0.0706 0.0679 0.0674 0.0707 0.0737 0.0799 0.085 0.0877 0.0913 0.0906 0.0884 0.0853 0.0811 0.0781 0.0758 0.0738 0.0714 0.0641 0.0536 0.0403 0.0254 0.0136 0.012 0.013 0.0268 0.0417 0.0556 0.0678 0.076 0.0816 0.0837 0.0835 0.0827 0.0801 0.0756 0.0708 0.0655 0.0604 0.0594 0.0568 0.0551 0.0555 0.0541 0.0547 0.0543 0.0523 0.0506 0.0486 0.0454 0.0429 0.0391 0.0351 0.0323 0.0292 0.0277 0.0273 0.0276 0.0283 0.0284 0.0277 
0.0271 0.0244 0.0213 0.0168 0.0117 0.0073 0.0033 0.0029 0.0027 0.0027 0.0042 0.0039 0.0047 0.0045 0.0049 0.0041 0.0038 0.0038 0.0036 0.0039 0.0031 0.0032 0.0026 0.003 0.0077 0.0169 0.0307 0.0444 0.0549 0.0616 0.0647 0.0637 0.061 0.0593 0.0577 0.056 0.0591 0.0603 0.0624 0.0651 0.0657 0.0657 0.0659 0.0676 0.0691 0.0716 0.073 0.0725 0.0737 0.0745 0.0754 0.0766 0.0775 0.0784 0.0785 0.0792 0.0774 0.0765 0.0749 0.0697 0.0599 0.0476 0.0326 0.0193 0.0159 0.0168 0.0172 0.024 0.0424 0.0534 0.0638 0.069 0.0679 0.0681 0.0645 0.063 0.0609 0.0596 0.0603 0.0617 0.0635 0.0632 0.0631 0.0616 0.0589 0.0588 0.0575 0.0588 0.0649 0.0674 0.0717 0.0751 0.0746 0.0755 0.0759 0.073 0.0715 0.0695 0.0678 0.0661 0.0613 0.0517 0.0402 0.0256 0.0141 0.0134 0.015 0.0144 0.0248 0.0451 0.0585 0.0688 0.0762 0.0777 0.078 0.0774 0.0756 0.072 0.0698 0.0673 0.0632 0.0592 0.0491 0.0382 0.0251 0.014 0.0085 0.0167 0.041 0.058 0.0731 0.0844 0.0871 0.0893 0.0899 0.0895 0.0897 0.0855 0.0812 0.0745 0.0666 0.0611 0.0578 0.0578 0.0612 0.0668 0.0717 0.0755 0.078 0.0777 0.0782 0.0794 0.0788 0.0806 0.081 0.0829 0.0849 0.0847 0.0848 0.0837 0.0816 0.0783 0.0693 0.0563 0.0408 0.0231 0.0138 0.0135 0.0145 0.0262 0.0469 0.064 0.0773 0.0866 0.0888 0.0895 0.0891 0.0869 0.082 0.0779 0.0752 0.0744 0.0755 0.0773 0.0754 0.0673 0.0551 0.0389 0.0205 0.0192 0.0455 0.0634 0.0753 0.087 0.0891 0.0883 0.0883 0.0862 0.0859 0.0854 0.0839 0.0825 0.0782 0.0694 0.0573 0.0426 0.0245 0.0106 0.0088 0.0165 0.0488 0.0697 0.0866 0.1004 0.1032 0.1053 0.1047 0.1043 0.1032 0.1004 0.099 0.0938 0.0888 0.0836 0.0733 0.0583 0.0413 0.0222 0.0123 0.0144 0.0164 0.0321 0.059 0.0778 0.0913 0.0999 0.0986 0.0939 0.0912 0.0867 0.084 0.0829 0.0812 0.0809 0.0799 0.0788 0.0765 0.0744 0.0729 0.0723 0.0754 0.0828 0.091 0.0974 0.1018 0.104 0.1043 0.1043 0.1031 0.1009 0.0977 0.0944 0.0925 0.0895 0.0856 0.0747 0.0606 0.0435 0.0301 0.0249 0.0253 0.0253 0.0287 0.0501 0.0659 0.0843 0.0963 0.0984 0.1003 0.0991 0.1004 0.0989 0.0998 0.0967 0.0946 0.0935 0.0887 0.0908 0.0919 0.0865 1.0 0.0799 0.0873 0.0911 0.098 0.1002 0.1047 0.1068 0.104 0.1069 0.1052 0.1059 0.1046 0.1022 0.0987 0.0924 0.0901 0.0868 0.0858 0.0866 0.0878 0.0902 0.0905 0.0914 0.0898 0.0898 0.0887 0.0845 0.0785 0.0682 0.058 0.0495 0.045 0.0437 0.0438 0.0452 0.0455 0.0447 0.0437 0.0417 0.0397 0.0375 0.0338 0.0297 0.0242 0.0184 0.0134 0.008 0.0039 0.0005 0.0011 0.0007 0.0009 0.0006 0.001 0.0004 0.0007 0.0008 0.0004 0.0002 0.0001 0.0003 0.0 0.0006 0.0005 0.0018", + "energy": "-74.2206 -71.0224 -68.4589 -66.2722 -64.9809 -64.2851 -63.7253 -62.4551 -61.3279 -59.7553 -58.7354 -57.3656 -56.4995 -55.2416 -54.4269 -53.2257 -52.2402 -51.4073 -50.2108 -49.7776 -49.042 -48.8501 -48.8406 -48.9131 -49.0304 -49.2074 -49.4136 -49.6386 -49.9166 -50.1808 -50.7379 -50.8993 -51.6198 -52.4751 -53.6283 -55.3858 -56.9495 -58.2487 -58.6172 -57.4522 -55.0175 -51.6719 -47.9491 -44.6147 -41.8682 -40.7055 -39.537 -38.2951 -36.7145 -34.9701 -32.9534 -31.0802 -29.1639 -27.399 -26.5553 -25.6153 -25.1022 -24.5266 -24.0971 -23.9256 -23.6075 -23.3799 -23.3706 -23.5236 -24.2367 -25.6559 -27.7623 -29.8598 -31.3011 -31.6734 -31.1082 -29.8474 -27.72 -25.1 -23.237 -21.9099 -21.3315 -21.4543 -21.6826 -22.1624 -22.4462 -22.8811 -22.9807 -23.1876 -23.146 -23.4293 -22.9874 -22.8893 -22.8989 -22.5563 -22.5085 -22.5248 -22.3715 -22.4259 -22.5676 -22.8144 -22.8617 -23.1382 -23.024 -22.8515 -22.9502 -22.9721 -23.0082 -22.7943 -23.1336 -23.6263 -25.0475 -26.6222 -28.5175 -29.8087 -30.1343 -29.813 -28.3668 -26.3037 -24.1352 -22.4482 -21.493 -21.2747 -21.0606 -21.2474 -21.3003 -21.895 
-22.6236 -23.6046 -25.2215 -27.2328 -30.3243 -33.5473 -35.8034 -36.9307 -36.4451 -34.2649 -31.3232 -27.6877 -24.3919 -22.4809 -21.1762 -20.9023 -20.5477 -20.2399 -20.2948 -20.0923 -20.7803 -21.5777 -23.2607 -25.6499 -27.9869 -31.1674 -33.7629 -35.4897 -36.0821 -35.1215 -32.6975 -29.697 -26.4193 -23.3955 -20.967 -19.6448 -19.292 -19.2111 -19.4017 -19.8228 -20.5795 -21.146 -21.6981 -22.1436 -22.3063 -22.5179 -22.36 -22.2452 -22.1105 -22.1807 -22.0718 -22.2232 -22.7001 -23.1438 -23.7893 -24.5009 -24.7988 -25.5128 -26.3679 -27.1331 -27.5141 -28.1093 -28.3467 -28.2941 -28.4755 -28.5741 -28.9917 -29.6696 -30.7769 -32.8606 -35.8398 -38.7703 -41.6263 -44.1522 -46.282 -47.297 -47.9733 -47.9471 -47.6825 -47.5408 -47.5465 -47.839 -48.0932 -49.0751 -49.9778 -50.8759 -51.9207 -52.0876 -50.6752 -47.804 -43.3024 -38.1545 -33.0047 -28.5087 -25.0356 -23.2043 -22.5502 -22.3126 -22.0074 -22.0279 -22.2299 -21.9573 -22.1159 -22.2112 -23.0081 -24.693 -26.7794 -28.755 -30.8732 -32.1367 -32.5823 -31.871 -29.9987 -27.7429 -25.4157 -23.8714 -22.407 -21.7381 -21.5051 -21.633 -21.6289 -21.828 -22.0438 -22.1878 -22.7342 -23.5123 -25.0741 -27.6845 -30.7719 -33.057 -34.3193 -34.2033 -32.6946 -30.3951 -27.3442 -24.5469 -22.7546 -22.4515 -21.9385 -22.0188 -22.025 -22.2062 -22.1978 -22.3888 -22.3291 -22.3362 -22.252 -22.2067 -22.1885 -22.6653 -23.0535 -23.3872 -23.5953 -23.2902 -23.2094 -22.8828 -22.4682 -21.8859 -21.5506 -21.4876 -21.2417 -21.3624 -21.2578 -21.0669 -21.0886 -21.1853 -21.6643 -22.2887 -23.4819 -25.2428 -27.1553 -28.7389 -29.686 -29.4084 -28.4121 -26.7746 -24.7531 -23.1755 -21.9601 -21.5026 -21.1973 -21.4377 -21.4233 -21.5077 -21.6741 -21.7132 -21.5157 -21.4862 -21.3434 -21.1846 -21.0712 -21.1376 -20.9068 -20.9372 -20.4825 -20.341 -19.913 -19.6552 -19.3478 -19.2383 -18.964 -18.9863 -19.0845 -19.0373 -19.1148 -19.432 -19.4804 -19.9108 -20.1022 -20.1881 -20.4743 -20.462 -20.5142 -20.3125 -20.1675 -20.3739 -20.2778 -20.3932 -20.463 -20.928 -21.4316 -21.8108 -22.4934 -22.7957 -23.132 -23.3729 -23.3888 -23.4662 -23.4907 -23.7646 -24.1518 -24.9435 -26.2572 -28.2556 -31.2318 -34.8436 -38.6767 -41.8664 -44.5801 -46.2148 -46.8065 -46.4681 -46.2001 -46.1686 -46.4399 -47.1651 -49.0503 -51.083 -53.6693 -56.3417 -58.1089 -58.1922 -55.6639 -51.4024 -45.0306 -37.9533 -31.466 -26.0829 -22.4503 -20.6385 -19.8901 -19.9184 -20.0599 -19.8368 -20.0285 -19.9428 -20.0024 -20.2064 -20.8759 -22.1798 -24.6293 -27.4974 -30.1485 -31.9098 -32.3567 -31.652 -29.2946 -26.2233 -23.3811 -21.0877 -20.1497 -19.512 -19.3612 -19.3273 -19.4075 -19.3221 -19.4412 -19.5947 -19.7418 -20.1575 -20.7541 -21.8502 -23.6019 -25.5402 -27.2366 -27.9517 -27.7351 -27.0064 -25.1216 -23.6196 -22.2264 -21.1254 -21.2089 -21.3359 -21.3812 -21.6848 -21.9208 -22.0177 -22.4558 -23.1222 -23.8326 -24.8658 -25.8217 -26.9615 -27.147 -27.4434 -27.1138 -26.5146 -25.1762 -23.9983 -22.6977 -21.4417 -20.5839 -20.0822 -19.975 -19.7648 -19.5554 -19.8358 -19.7525 -19.9443 -19.9688 -19.9232 -20.195 -20.1295 -20.262 -20.4441 -20.4095 -20.084 -19.7787 -19.4961 -19.1506 -18.7373 -18.4231 -18.3846 -18.337 -18.1864 -18.2689 -18.6023 -18.6601 -19.0459 -19.6656 -21.3252 -23.3637 -26.6846 -30.2468 -33.224 -34.8127 -34.6241 -32.4549 -28.9187 -24.8904 -21.6959 -19.6443 -18.6739 -18.6243 -18.4747 -18.6673 -18.9802 -19.1148 -19.1982 -19.6744 -19.9484 -20.4128 -20.6935 -20.8352 -20.8833 -21.0337 -21.0989 -21.0265 -20.841 -20.7132 -20.6288 -20.4317 -20.3363 -20.0926 -20.128 -19.8818 -19.4821 -19.3102 -19.202 -19.2845 -19.2782 -19.5371 -19.6853 -19.7868 -19.6893 -19.7532 -19.8981 -19.8691 
-19.7091 -19.5994 -19.2751 -19.1834 -19.1992 -19.1809 -18.963 -19.013 -18.7268 -18.7991 -18.7449 -18.5433 -18.9126 -19.0186 -19.261 -19.5014 -19.6235 -19.5584 -19.5991 -19.6243 -19.7914 -19.8448 -20.2209 -19.9774 -20.2206 -19.9738 -19.9535 -19.6423 -19.6718 -19.6275 -19.6696 -19.6417 -19.5612 -19.6504 -19.5496 -19.3866 -19.6689 -19.6952 -19.7135 -19.7935 -20.0273 -19.8793 -19.7374 -19.7264 -19.8867 -19.8268 -19.516 -19.5534 -19.4724 -19.3803 -19.2221 -18.9296 -18.9016 -18.6837 -18.4439 -18.4496 -18.3458 -18.533 -18.5063 -18.9731 -19.3451 -19.5202 -19.3447 -19.3875 -19.4652 -19.269 -19.2432 -19.0268 -19.037 -18.9922 -19.3255 -19.8208 -20.8148 -22.4845 -24.8016 -27.8128 -30.7476 -33.4222 -35.485 -36.2209 -35.4442 -33.2698 -30.3717 -27.5499 -24.7302 -22.8069 -21.3856 -20.7135 -20.193 -20.0101 -19.9561 -19.6777 -19.7028 -19.6577 -19.6971 -19.7025 -19.6955 -19.8717 -19.5256 -19.5452 -19.5658 -19.5086 -19.2687 -19.3009 -19.3641 -19.5387 -19.4669 -19.5201 -19.3435 -19.5016 -19.437 -19.1131 -19.7199 -20.2466 -21.4128 -23.0851 -24.5845 -26.3444 -27.7945 -28.8103 -29.3682 -29.077 -28.2515 -26.7871 -25.184 -23.2927 -21.765 -20.5137 -19.8159 -19.5642 -19.5604 -19.7266 -19.9452 -20.3362 -20.7311 -21.17 -21.4279 -21.5786 -21.7766 -21.7046 -21.5287 -21.6229 -21.5347 -21.7526 -21.8691 -22.1529 -22.4755 -22.7914 -23.2449 -23.7326 -23.9936 -24.4704 -24.8905 -24.8703 -24.7438 -24.8841 -24.7698 -24.9088 -25.1698 -25.7576 -26.2298 -27.1733 -28.6745 -30.9977 -33.6948 -36.8287 -39.8163 -42.2246 -44.0287 -44.5346 -44.6656 -44.033 -43.4765 -43.2793 -43.5216 -44.2621 -45.0188 -45.6584 -45.5654 -43.9758 -41.0564 -37.1821 -33.077 -28.8948 -25.5259 -23.3646 -22.2178 -21.7243 -21.6822 -22.1248 -22.5413 -22.6598 -22.6295 -22.6208 -22.6969 -22.5399 -22.8372 -23.1291 -23.7865 -24.6314 -24.8022 -25.0717 -25.0254 -25.0687 -25.0871 -25.0711 -24.7569 -24.2101 -23.1859 -22.5049 -21.4996 -20.895 -20.3668 -19.9053 -19.9573 -20.0651 -19.8444 -20.3563 -21.0406 -22.1344 -23.8455 -26.1111 -28.2674 -30.3276 -31.5132 -31.5888 -30.3896 -28.664 -26.1914 -24.3722 -23.0604 -22.4618 -22.3754 -22.3095 -22.2751 -22.5548 -22.8005 -22.6092 -22.5945 -22.7615 -23.583 -24.9063 -26.5349 -28.1245 -29.0503 -29.6664 -28.8502 -27.5732 -25.6432 -23.7982 -22.0971 -21.0597 -20.4537 -19.9858 -19.7806 -19.9103 -19.8889 -20.0917 -20.1661 -20.3168 -20.3383 -20.4049 -20.5811 -20.6856 -20.8215 -20.9474 -20.9518 -20.8466 -21.0436 -21.118 -21.1015 -21.1668 -21.28 -21.32 -21.551 -21.3956 -21.4096 -21.5895 -21.8728 -22.3245 -23.7903 -25.8607 -28.9534 -32.204 -35.3816 -36.6693 -36.6917 -34.9861 -31.8304 -28.212 -24.8361 -22.1053 -20.9327 -20.3314 -20.1128 -20.2185 -20.44 -20.713 -20.8403 -21.285 -21.4309 -21.7384 -21.7292 -21.8783 -21.8859 -21.7483 -21.8916 -22.1027 -22.3758 -22.6087 -22.8961 -23.0514 -23.4533 -23.4067 -23.5822 -23.9733 -24.2359 -24.2986 -24.3606 -24.1843 -24.1856 -24.2476 -24.3905 -25.0061 -26.0553 -27.546 -29.8803 -33.0528 -36.43 -40.1578 -42.8977 -45.6406 -47.1275 -48.2351 -48.6046 -48.7893 -49.1778 -49.851 -51.0541 -52.8178 -54.7757 -56.6213 -57.1955 -56.4098 -54.1285 -50.8364 -46.8677 -42.8423 -38.4821 -34.356 -30.7785 -28.0252 -25.3608 -23.5357 -22.2808 -21.9043 -22.0219 -21.8224 -22.0402 -21.7965 -21.9312 -22.1184 -22.5008 -23.4805 -25.1912 -27.0593 -29.1179 -30.069 -30.5308 -30.3339 -28.6722 -26.8509 -24.5958 -22.8568 -21.7534 -20.9903 -20.9418 -20.7409 -20.9282 -21.2796 -21.5875 -22.4374 -23.3118 -24.8758 -26.5376 -28.5122 -30.2908 -31.7517 -32.2411 -31.5542 -29.9405 -27.8888 -25.8688 -23.9479 -22.5217 -22.0009 -21.4956 -21.7271 -21.6832 
-21.8502 -22.1254 -22.3166 -22.2216 -22.1694 -22.2971 -22.2915 -22.3879 -22.4159 -22.077 -22.073 -21.5143 -21.3345 -21.0393 -21.1258 -21.0952 -21.0114 -21.0985 -21.1197 -21.0423 -21.2437 -21.6145 -21.9795 -22.7338 -23.9725 -25.5303 -27.4682 -29.4785 -31.4738 -32.7615 -33.2796 -32.2817 -30.5399 -28.6579 -26.5852 -24.6131 -22.8336 -21.8422 -21.4501 -21.318 -21.2423 -21.5616 -21.8017 -22.3106 -23.4853 -25.8596 -29.1878 -33.2945 -37.0092 -39.1415 -39.4815 -37.6114 -34.686 -30.5624 -26.8732 -24.5688 -23.4578 -22.8471 -22.336 -22.5074 -22.5606 -22.6775 -23.2206 -23.6606 -24.319 -24.5791 -25.0829 -25.3954 -25.5825 -25.5443 -25.6072 -25.5607 -25.396 -25.4128 -25.3443 -25.5549 -25.6759 -25.9449 -26.2293 -26.978 -27.6415 -28.3277 -29.1257 -29.67 -30.2291 -30.9416 -31.3818 -32.0376 -32.4041 -33.2422 -34.1829 -35.9762 -38.1632 -40.8001 -43.4407 -45.7023 -47.5048 -48.6783 -49.1055 -49.2072 -49.4824 -49.8002 -50.7522 -51.9729 -53.596 -54.9772 -56.0618 -56.8114 -56.25 -54.2032 -50.6809 -45.7707 -40.5457 -35.0827 -30.3007 -26.6948 -24.5786 -23.7554 -23.591 -23.6757 -24.0313 -24.3309 -24.438 -24.7493 -24.8444 -24.936 -24.7098 -24.5987 -24.5418 -24.1368 -24.0086 -24.0184 -23.8119 -23.7447 -23.8288 -23.628 -23.5074 -23.3243 -22.9633 -22.8482 -22.6148 -22.4411 -22.9457 -23.1944 -23.6563 -25.6988 -28.9182 -33.0974 -37.0074 -39.7213 -41.0942 -41.0508 -39.7325 -37.1841 -34.8491 -32.8987 -31.2714 -29.6473 -28.0927 -26.3547 -25.6249 -24.9925 -24.5358 -24.1002 -23.8046 -23.9157 -23.9496 -24.2169 -24.8658 -26.2047 -28.3084 -30.7233 -33.0411 -34.3789 -34.3721 -33.0777 -31.1101 -28.7305 -26.9057 -25.5103 -24.8052 -24.6324 -24.5232 -24.6251 -24.851 -24.7237 -24.8951 -24.991 -25.7727 -26.3981 -27.7778 -29.3682 -31.2386 -32.9771 -33.6746 -33.2045 -31.8108 -29.6338 -27.4211 -25.1425 -23.4201 -22.6933 -22.2769 -22.1863 -22.1232 -22.1081 -22.1744 -22.3393 -22.6308 -23.8195 -25.3658 -27.6463 -29.8237 -31.6311 -32.314 -32.2434 -30.4573 -28.4223 -25.8913 -23.7636 -22.2912 -21.5204 -21.35 -21.4091 -21.6721 -22.3957 -22.7823 -23.0344 -22.9846 -23.3433 -23.2386 -23.2052 -23.0156 -22.8087 -22.6899 -22.7535 -22.6954 -22.7863 -22.7177 -22.8563 -22.9521 -22.7932 -22.9691 -22.8359 -22.8123 -22.7208 -22.5771 -22.7028 -22.6346 -22.7041 -22.7125 -22.6704 -22.7757 -22.8563 -22.7769 -22.7209 -22.6464 -22.519 -22.2206 -22.0444 -21.9252 -21.8911 -21.8825 -21.7024 -21.6431 -21.8687 -21.9959 -22.2445 -22.1816 -22.2414 -22.1976 -22.1268 -22.2085 -22.4077 -22.4945 -22.2976 -22.5161 -22.5192 -22.323 -22.2544 -22.0227 -21.677 -21.3406 -21.5813 -21.5651 -21.93 -22.1505 -22.4293 -22.8904 -23.3815 -23.88 -24.4708 -25.2983 -26.4586 -27.8662 -29.2628 -29.5699 -29.5423 -28.304 -26.8776 -25.1101 -23.2903 -21.9509 -21.8055 -21.4972 -21.5428 -21.6686 -22.0081 -21.8894 -21.9508 -21.9395 -21.9648 -21.8069 -21.79 -21.8336 -21.9295 -21.7299 -21.7564 -21.6376 -21.4934 -21.6405 -21.4344 -21.2067 -21.1893 -21.1045 -21.0567 -20.9647 -20.9858 -20.9448 -21.0138 -21.1996 -21.5103 -21.5935 -21.3514 -21.8373 -22.1618 -22.3662 -22.611 -22.3256 -22.0711 -21.5745 -21.0167 -20.412 -19.9282 -19.6547 -19.4732 -19.5553 -19.5301 -19.7448 -20.0363 -20.3051 -20.4744 -20.9528 -21.442 -21.445 -21.6559 -21.8676 -21.7157 -21.6346 -21.707 -21.3216 -21.2035 -21.1015 -21.079 -20.9257 -20.9746 -20.8354 -21.0194 -21.4726 -21.7156 -22.4601 -23.6824 -25.3555 -26.7378 -28.6068 -30.1091 -31.3751 -32.5915 -32.9645 -32.4617 -31.1735 -29.3143 -27.0827 -24.8519 -22.7083 -21.2353 -20.6732 -20.0844 -20.2674 -20.3508 -20.5633 -21.0552 -21.4046 -21.9718 -22.1315 -22.2962 -22.4095 -22.2777 -22.0943 
-22.2757 -22.3081 -22.3254 -22.3289 -22.4917 -22.7661 -23.0361 -23.5435 -23.9307 -24.3096 -24.9101 -25.0951 -24.9547 -24.7469 -24.6459 -24.6064 -24.5901 -24.9336 -25.557 -26.2582 -27.8019 -29.8138 -32.9594 -35.9257 -39.4058 -42.0515 -44.0515 -45.3898 -46.2167 -46.6298 -47.2406 -47.88 -48.8678 -49.8544 -50.9084 -51.3212 -50.6605 -48.9419 -46.5841 -43.8994 -40.9142 -37.7478 -34.8563 -32.0995 -29.1815 -26.6579 -24.7023 -23.2379 -22.6246 -22.1454 -22.4223 -22.5492 -22.7446 -22.734 -22.6589 -22.6631 -22.7402 -23.0449 -23.7674 -25.2571 -27.1949 -28.7062 -29.7217 -29.6588 -28.6664 -26.8148 -24.6662 -22.6297 -21.1687 -20.5265 -20.4251 -20.7685 -20.9958 -21.3511 -21.5097 -21.8236 -21.8726 -22.4739 -23.3067 -24.7052 -26.4372 -27.7553 -28.145 -27.9286 -26.6379 -25.0052 -23.0768 -21.5935 -20.9698 -20.8502 -21.1704 -21.6571 -22.1102 -22.1342 -22.1537 -21.9319 -22.1458 -21.939 -21.9076 -21.8166 -21.8685 -21.9552 -21.8599 -21.7618 -21.9267 -21.8386 -22.1542 -22.3363 -22.517 -22.5167 -22.567 -22.3395 -22.4131 -22.1708 -22.6599 -22.5559 -22.9595 -23.3438 -24.2813 -25.5508 -28.171 -30.9788 -32.9956 -33.9859 -33.709 -31.9537 -28.9996 -25.6511 -22.726 -20.5415 -19.7784 -19.3213 -19.2394 -19.3649 -19.4016 -19.6947 -19.6789 -19.9202 -20.0336 -20.2058 -20.7315 -21.007 -21.4139 -21.6184 -21.871 -21.8244 -21.5529 -21.1414 -20.4964 -20.1894 -19.7415 -19.5283 -19.4173 -19.4322 -19.6277 -20.0234 -20.4444 -21.0436 -21.7975 -22.3377 -22.6508 -22.8457 -22.8491 -22.5666 -22.4191 -22.182 -22.0246 -21.8089 -22.2969 -22.5384 -23.147 -23.8052 -24.6052 -25.1711 -25.9721 -26.2643 -26.5626 -26.6188 -26.5586 -26.5448 -26.418 -26.614 -27.5612 -28.9764 -31.2976 -34.5187 -38.0193 -41.5312 -44.5173 -46.6256 -47.6678 -47.8864 -47.3977 -47.0727 -46.5755 -46.4395 -46.5136 -46.7688 -47.4563 -47.9535 -48.7201 -49.7657 -50.9523 -51.1537 -50.3002 -47.8174 -43.9245 -38.9329 -33.7801 -29.1152 -25.4844 -23.3702 -22.6886 -22.4297 -22.4028 -22.5718 -22.4981 -22.6515 -22.5713 -22.7983 -23.2499 -24.5684 -26.2757 -28.378 -30.0329 -31.032 -30.9769 -30.0481 -28.3182 -26.4714 -24.4973 -22.926 -22.1177 -21.7951 -21.24 -21.0301 -21.0918 -21.1116 -21.2531 -21.4855 -21.9189 -23.0272 -24.8987 -27.4676 -30.6621 -33.2668 -34.5232 -34.4689 -32.9916 -30.8341 -27.7384 -25.0265 -23.2204 -22.5164 -22.2762 -22.3845 -22.4743 -22.5578 -22.7795 -22.8253 -22.9169 -22.5729 -22.6178 -22.5441 -22.2477 -22.5561 -22.569 -22.4764 -22.5667 -22.4484 -22.195 -22.0204 -21.7744 -21.4363 -21.5026 -21.4233 -21.1457 -21.3316 -21.2636 -21.6069 -21.9104 -22.8553 -24.1867 -26.0757 -28.9808 -31.8138 -34.392 -35.5175 -35.2431 -33.489 -30.2565 -26.8645 -24.1237 -22.1556 -21.2366 -21.1126 -21.174 -21.2004 -21.3935 -21.3167 -21.1706 -21.2109 -21.5778 -22.4181 -23.6595 -25.6358 -27.686 -29.3965 -30.5052 -30.41 -29.1694 -26.9623 -24.4211 -22.1762 -20.3212 -18.8934 -18.5276 -18.2871 -18.613 -18.9352 -19.2253 -19.6275 -19.9202 -20.1575 -20.2844 -20.5113 -20.4754 -20.443 -20.4505 -20.2338 -20.2919 -20.3555 -20.4055 -20.7279 -21.3389 -21.6466 -22.2994 -22.9289 -23.7267 -24.1917 -24.3483 -24.5597 -24.6169 -24.8535 -24.7996 -25.1178 -25.6712 -26.7458 -29.1895 -32.6218 -37.0092 -41.0489 -44.6899 -47.2333 -48.5462 -48.5417 -47.9046 -47.3843 -46.9253 -47.0153 -47.1742 -47.7556 -48.3876 -49.1642 -49.9897 -50.8281 -50.7818 -49.115 -45.9823 -41.6961 -36.7808 -31.5827 -26.7049 -23.4942 -22.0835 -21.1894 -20.8016 -20.801 -20.8996 -21.1961 -21.222 -21.3826 -21.4511 -21.6044 -22.8609 -24.4067 -26.6462 -28.6811 -30.2956 -31.2614 -30.4442 -28.9563 -26.3427 -24.1621 -22.034 -20.574 -19.9116 -19.7693 -19.8702 
-20.1109 -20.0172 -20.2082 -20.0183 -19.9728 -19.8368 -19.7904 -19.6069 -19.4944 -19.5515 -19.5689 -19.7639 -19.8727 -20.0433 -20.1329 -19.9514 -19.9718 -19.7646 -19.6555 -19.8168 -19.7967 -19.8371 -19.9088 -20.0474 -20.3902 -21.4267 -23.0667 -25.0757 -27.442 -29.5224 -30.8681 -31.1651 -30.6201 -29.1601 -27.3755 -25.6344 -24.1661 -22.5988 -21.58 -20.9253 -20.9473 -20.7042 -20.6308 -20.5993 -20.5555 -20.5184 -20.3923 -20.4291 -20.2983 -20.4332 -20.5595 -20.7015 -20.6209 -20.5959 -20.4872 -19.7523 -19.4161 -19.0663 -18.6188 -18.3592 -18.265 -18.4415 -18.6539 -18.3559 -18.6768 -18.5084 -18.4885 -18.9837 -19.3294 -19.705 -20.4622 -20.807 -21.0499 -21.0354 -20.9593 -20.5503 -20.2734 -19.9519 -19.8696 -19.7813 -19.8345 -19.9885 -20.2275 -20.4473 -20.5909 -21.0493 -21.3222 -21.5493 -21.828 -21.9517 -22.1317 -22.1846 -22.0367 -21.8317 -21.8017 -21.7321 -21.716 -21.903 -22.0467 -22.0235 -22.1038 -22.1608 -21.8203 -21.7777 -21.6204 -21.2413 -21.2208 -21.1728 -21.3295 -22.3606 -23.7919 -25.8738 -28.2184 -29.8374 -30.4827 -29.8752 -28.6665 -26.6951 -24.1599 -22.0142 -20.5663 -19.9148 -19.6203 -19.4598 -19.552 -19.7035 -19.9691 -20.2513 -20.5961 -21.094 -21.6567 -23.0686 -25.3078 -27.8451 -30.0909 -31.1345 -31.0214 -29.6565 -27.1887 -24.3605 -21.7554 -20.4498 -20.0057 -20.1495 -20.4316 -20.8337 -20.8536 -21.0577 -21.2349 -21.3997 -22.0464 -23.1415 -24.9969 -26.9459 -28.389 -28.9554 -28.5617 -27.2588 -25.2975 -23.3393 -21.2998 -20.1468 -19.6781 -19.1672 -19.3494 -19.2462 -19.1568 -19.3383 -19.4605 -19.5029 -19.7294 -19.9812 -20.302 -20.7073 -20.9735 -21.2989 -21.5046 -21.7762 -21.8992 -22.0978 -22.1661 -22.5009 -22.8841 -23.1695 -23.2333 -23.1541 -23.3585 -23.6722 -23.7666 -24.0943 -24.5962 -25.8028 -27.7091 -29.7482 -31.7591 -32.8169 -32.9266 -31.6587 -30.1515 -27.6775 -25.782 -24.0105 -23.0222 -22.1423 -21.7685 -21.494 -21.3217 -21.1924 -21.1519 -21.1683 -21.3343 -21.1755 -21.1586 -21.2718 -21.0313 -20.7064 -20.9167 -20.6136 -20.6041 -20.623 -20.4655 -20.4859 -20.4257 -20.585 -20.6693 -20.9955 -21.1937 -21.5108 -21.7775 -22.3235 -23.2066 -24.541 -26.576 -29.2238 -32.1694 -34.8789 -36.3812 -36.6387 -35.3011 -32.7853 -29.571 -26.1263 -23.0534 -21.2855 -20.4522 -20.0319 -19.983 -20.2933 -20.5514 -21.259 -21.7852 -22.1716 -22.6604 -22.7431 -22.7044 -22.4479 -22.2949 -21.9106 -21.7049 -21.8429 -21.7515 -21.822 -22.035 -22.2294 -22.4407 -22.4301 -22.7984 -23.3706 -23.8268 -24.1383 -24.4226 -24.723 -24.8896 -25.3364 -25.6016 -26.2836 -27.0834 -28.5217 -30.3793 -33.0898 -36.2046 -39.3525 -42.0798 -44.2657 -45.7241 -46.0769 -46.0541 -45.9391 -45.9871 -46.5077 -47.273 -48.3379 -49.2657 -49.2749 -48.0164 -45.2438 -41.5121 -36.7972 -32.1988 -28.0734 -25.0419 -23.1063 -22.0796 -21.4801 -21.5773 -21.8839 -22.4126 -22.7313 -23.1226 -22.9154 -22.939 -22.7761 -22.6346 -22.8409 -23.5549 -24.7165 -26.2226 -27.3799 -28.6856 -29.3979 -29.5172 -28.8529 -27.6261 -26.049 -24.2569 -22.5005 -21.2621 -20.398 -20.0856 -19.853 -19.9665 -19.7815 -19.7926 -19.7699 -19.6608 -19.8234 -19.75 -19.6261 -19.7447 -19.8102 -19.8406 -19.9845 -20.0263 -20.203 -20.3882 -20.5452 -20.8214 -21.1406 -21.3382 -21.7357 -22.1169 -22.1289 -22.4917 -22.2993 -22.9711 -23.7878 -25.1383 -26.9204 -29.0014 -30.8435 -31.9388 -32.0517 -31.0284 -29.6347 -27.397 -25.3227 -23.6431 -22.4921 -21.7592 -21.4421 -21.3952 -21.3572 -21.3738 -21.4701 -21.4495 -21.4844 -21.3894 -21.2839 -21.4734 -21.5471 -21.5079 -21.8836 -21.9318 -22.0862 -22.1022 -22.4688 -22.5156 -22.6061 -22.4103 -22.4902 -22.3797 -22.4348 -22.5163 -22.7019 -23.1855 -24.6153 -26.5195 -29.3646 
-32.9403 -36.7878 -39.2128 -40.034 -38.692 -35.8991 -31.743 -27.7128 -24.0522 -21.5917 -20.7633 -20.5816 -20.4417 -20.7704 -21.3673 -21.7939 -22.4618 -23.2147 -23.5411 -23.8069 -23.8417 -23.6293 -23.6954 -23.6753 -23.8208 -23.8022 -24.0645 -24.4956 -24.8657 -25.2858 -25.7143 -25.9809 -26.368 -26.636 -26.9371 -26.9796 -27.123 -26.954 -27.0661 -27.1779 -27.2489 -27.6849 -27.9726 -29.0598 -30.8732 -33.3205 -36.619 -39.7934 -42.7522 -45.3591 -47.1408 -47.7526 -48.3622 -48.3365 -48.7536 -49.3855 -50.1251 -50.7682 -51.3074 -51.0193 -49.6748 -47.9199 -45.5055 -42.5007 -39.8353 -36.6381 -33.9185 -31.1134 -28.54 -26.42 -25.2461 -24.412 -23.7908 -23.8336 -23.4041 -23.092 -22.9092 -23.0801 -23.2314 -23.3519 -24.1795 -25.6842 -27.5708 -29.7676 -31.6443 -32.7897 -32.5564 -31.0546 -29.1269 -26.2574 -24.2579 -22.5025 -21.7269 -21.2313 -21.2524 -21.2132 -21.0364 -21.1612 -21.2474 -21.3382 -22.0034 -22.6928 -24.0358 -25.6014 -27.3417 -28.722 -28.9955 -28.5623 -27.0848 -25.036 -23.0403 -21.4262 -20.5998 -20.3897 -20.5385 -20.8679 -21.1842 -21.5578 -22.0004 -22.2104 -22.8792 -23.6893 -25.2006 -26.735 -28.2937 -29.5541 -29.7105 -29.2044 -27.8775 -25.6114 -23.8346 -22.1759 -21.5212 -21.0095 -20.9318 -21.0337 -20.9906 -21.2154 -21.2455 -21.646 -22.6262 -24.4726 -27.6508 -31.3773 -34.5952 -36.0591 -35.9974 -34.0014 -30.5776 -26.2644 -22.1806 -19.5003 -18.4963 -17.8693 -17.8402 -17.9765 -18.048 -18.324 -18.6301 -19.0577 -19.5583 -20.3508 -21.9711 -24.5362 -27.3188 -31.2423 -35.0407 -37.2118 -37.4078 -35.8436 -32.5962 -28.3182 -24.2482 -20.9208 -19.0693 -18.0975 -17.8258 -17.6262 -17.5899 -17.839 -17.9646 -18.1481 -18.3713 -18.8537 -18.7853 -18.8962 -18.8483 -18.7595 -18.9001 -18.745 -19.1159 -19.1008 -19.4477 -19.6405 -20.1994 -20.5032 -20.8663 -20.9958 -21.6586 -21.6294 -21.8463 -21.9214 -21.8622 -21.7642 -21.8517 -22.019 -22.4684 -23.2469 -24.6782 -26.8457 -30.1774 -33.8848 -37.9651 -41.535 -44.2037 -45.9485 -46.7108 -46.7035 -46.6723 -46.8872 -47.6601 -48.6684 -49.976 -51.164 -51.5799 -50.6966 -48.9064 -46.0487 -42.0764 -37.9505 -33.6113 -29.8768 -26.7473 -23.9133 -21.8584 -21.0019 -20.7399 -21.0166 -21.4672 -22.3214 -22.8357 -23.1114 -23.067 -23.3073 -23.2979 -23.9393 -24.6495 -26.25 -27.734 -28.555 -28.9528 -28.3394 -27.0997 -25.4204 -23.2714 -21.9585 -21.0006 -20.6333 -20.4008 -20.5387 -20.7484 -20.8404 -20.7414 -20.9108 -21.003 -21.0525 -21.3961 -21.4402 -21.4562 -21.5278 -21.8901 -21.7003 -21.5035 -21.3344 -20.8983 -20.7436 -20.8314 -20.4842 -20.8259 -20.7448 -20.9925 -21.2648 -21.4964 -22.0699 -23.0146 -24.4679 -26.5179 -28.4428 -30.6571 -32.3864 -33.3 -32.9829 -31.3624 -29.4502 -27.1261 -25.0663 -23.7549 -22.9312 -22.9587 -22.8765 -22.7032 -22.8062 -22.6833 -23.1079 -23.2794 -23.9424 -24.9518 -26.5782 -28.4964 -30.4671 -31.7457 -32.3122 -31.5475 -29.9405 -27.3295 -24.9493 -22.868 -21.6119 -20.9368 -20.4755 -20.3335 -20.097 -20.1491 -20.1718 -20.7024 -21.3996 -22.6967 -24.3598 -26.3724 -28.5255 -30.2851 -31.3425 -31.4856 -30.3329 -28.918 -26.7478 -24.6705 -22.8776 -21.5127 -20.5846 -20.2444 -19.9253 -19.7475 -19.8446 -19.6221 -19.6607 -20.0173 -20.1394 -20.5818 -20.6796 -21.0616 -21.4048 -21.6271 -21.7225 -21.8197 -21.7804 -21.7328 -21.5131 -21.6205 -21.5524 -21.6299 -21.8341 -21.7966 -21.7791 -21.8763 -21.5601 -21.4245 -21.4448 -21.8091 -22.8685 -24.3823 -26.8755 -28.811 -30.2297 -30.3452 -29.3332 -27.5329 -25.3645 -22.926 -21.3954 -20.7127 -20.3332 -20.2298 -20.342 -20.5577 -20.5453 -20.6458 -20.7425 -20.7774 -20.8602 -20.9638 -21.0044 -20.8331 -20.7881 -20.5228 -20.1486 -19.7432 -19.5793 -19.3857 
-18.9624 -19.1909 -19.2567 -19.2975 -19.3271 -19.6563 -19.5688 -19.8422 -20.432 -21.3756 -22.4807 -24.7536 -27.0885 -29.5298 -30.8228 -31.1128 -30.0879 -28.7528 -26.5424 -24.6636 -23.0547 -22.2271 -21.8108 -21.7809 -21.8129 -21.7207 -21.9654 -21.6258 -21.5536 -21.6014 -21.6396 -21.6278 -21.6156 -21.516 -21.4812 -21.469 -21.3864 -21.3556 -21.3372 -21.3478 -21.0977 -21.1114 -20.9301 -20.8877 -20.6876 -20.6439 -20.7654 -20.9624 -21.2078 -21.841 -23.3162 -25.1608 -27.9105 -30.9156 -33.1954 -34.2776 -33.7245 -31.5762 -28.722 -25.081 -21.9159 -19.8628 -18.5973 -18.0246 -17.5098 -17.6121 -17.6043 -17.7372 -17.5824 -17.5699 -17.7562 -17.6849 -17.7432 -17.889 -17.9554 -18.123 -18.3651 -18.6576 -18.5929 -18.7493 -18.6488 -18.6615 -18.6959 -18.8546 -18.5706 -18.8915 -19.2307 -19.7793 -20.7332 -22.0054 -23.9248 -26.9536 -31.9766 -38.0979 -43.6772 -46.6098 -46.7468 -43.8604 -38.6269 -32.208 -26.2866 -21.8607 -19.607 -19.0046 -18.0158 -18.1521 -18.1189 -18.201 -18.0411 -17.952 -17.5495 -17.4327 -17.2134 -17.1648 -17.0455 -16.9665 -16.817 -17.1388 -17.1141 -17.0972 -17.1997 -17.3733 -17.2752 -17.2109 -17.0805 -17.0432 -16.8206 -16.9593 -16.922 -17.0779 -17.1707 -17.2845 -17.5657 -17.6874 -17.8052 -17.714 -17.6475 -17.6276 -17.5279 -17.6815 -17.7161 -17.5824 -17.9637 -18.1498 -18.3225 -18.5086 -18.9625 -19.1304 -19.4526 -19.856 -20.1132 -20.5134 -20.8348 -21.5245 -22.0361 -23.2133 -24.2365 -26.1012 -28.3748 -32.2218 -35.59 -39.377 -42.2547 -44.7369 -46.0646 -46.6227 -47.0511 -47.3802 -47.6167 -48.1686 -49.0185 -50.2017 -51.4029 -52.6203 -54.5296 -55.8849 -55.8819 -53.7581 -49.7067 -43.9582 -37.7332 -31.0295 -26.0437 -21.8827 -20.329 -19.7143 -19.8108 -20.136 -20.6937 -21.1513 -22.3183 -23.1063 -23.5225 -23.8767 -23.8632 -23.6656 -23.3996 -22.8477 -22.4853 -22.2575 -22.2148 -22.0549 -21.7272 -21.8135 -21.7322 -21.7298 -21.5344 -21.5508 -21.3435 -21.2167 -21.019 -21.0959 -21.6244 -22.7303 -24.508 -26.513 -28.5583 -30.3698 -31.6662 -32.1969 -31.6948 -30.5659 -28.6353 -26.4991 -24.3954 -22.3506 -20.6744 -19.8284 -19.3193 -19.2263 -19.0258 -18.9863 -19.1457 -19.3764 -19.2237 -19.3772 -19.4169 -19.4146 -19.3924 -19.3894 -19.2229 -19.1726 -19.166 -19.0788 -19.1731 -19.0981 -19.169 -19.4423 -19.502 -20.0235 -20.6479 -21.4407 -22.449 -24.0094 -25.906 -27.6266 -29.203 -30.3982 -31.4256 -31.852 -31.6618 -30.5302 -29.3001 -27.9542 -26.2878 -24.4934 -22.8519 -21.4019 -20.4436 -19.7594 -19.3034 -19.0939 -18.938 -18.7738 -18.8819 -18.9879 -19.2587 -19.5213 -19.7573 -20.1834 -20.1185 -20.4335 -20.5368 -20.6022 -20.5611 -20.6169 -20.4308 -20.1922 -19.9149 -20.1319 -19.8569 -19.9749 -20.2294 -20.0526 -20.423 -20.3786 -20.3134 -20.1671 -19.739 -19.5549 -19.4676 -18.9712 -18.8401 -18.7543 -18.9433 -19.0184 -19.1183 -19.2945 -19.5342 -19.9374 -20.1396 -20.2848 -20.3401 -20.5968 -20.8792 -21.2342 -21.6241 -22.1423 -22.345 -22.1033 -22.2955 -21.9878 -21.6699 -20.8734 -20.4402 -20.1877 -19.8651 -19.7458 -19.7406 -20.2015 -20.0182 -20.1577 -20.2081 -20.1459 -20.2853 -20.5319 -21.0024 -21.9926 -23.623 -24.9757 -27.1228 -29.0385 -30.1753 -30.5057 -29.6046 -28.328 -26.5998 -24.6543 -23.2316 -22.016 -21.3918 -21.2395 -21.5268 -21.4401 -21.5268 -21.0719 -21.1475 -21.1679 -21.9554 -22.8021 -24.2453 -26.2008 -27.2189 -28.0709 -27.4961 -26.0461 -24.1005 -22.1132 -20.7587 -20.0646 -19.7965 -20.1028 -20.163 -20.4728 -20.8364 -21.0494 -21.4711 -22.0094 -23.2092 -24.787 -26.9186 -29.1657 -30.7974 -31.8354 -31.4278 -30.0655 -27.7791 -25.3368 -23.1097 -21.2636 -20.2866 -19.8009 -19.7893 -19.7824 -20.1815 -20.6725 -21.1437 -21.7135 -22.5416 
-23.5896 -25.0045 -26.5534 -28.5863 -30.2087 -30.6927 -30.1403 -28.7632 -26.7739 -24.6297 -22.7059 -21.5178 -20.8818 -20.7331 -20.7284 -21.1205 -21.252 -21.4365 -21.4008 -21.8892 -22.3622 -23.3654 -24.6753 -26.7521 -28.7892 -30.8283 -31.2266 -31.086 -29.4701 -26.9978 -24.0499 -21.1459 -19.727 -18.7278 -18.0071 -17.8732 -17.7965 -18.1042 -18.2646 -18.2301 -18.4663 -18.5152 -18.5596 -18.9725 -19.3237 -19.3771 -19.6207 -19.3294 -19.264 -19.0223 -18.7991 -18.7158 -18.541 -18.3026 -18.123 -17.9525 -18.145 -18.267 -18.6745 -18.8539 -18.8401 -19.0178 -19.0236 -18.6969 -18.7609 -18.3893 -18.2403 -18.1248 -17.8228 -17.8442 -17.7489 -17.6914 -17.9836 -18.2662 -18.5957 -18.7987 -18.7733 -18.7318 -18.8442 -18.7606 -18.995 -18.866 -18.963 -19.1814 -19.375 -19.8594 -20.1396 -20.2512 -20.341 -20.2519 -19.8568 -19.9385 -19.3342 -19.3368 -19.0798 -19.0325 -19.3507 -19.5528 -19.4938 -20.0004 -20.4335 -20.8338 -22.0129 -23.6032 -25.7291 -28.4131 -30.9551 -33.0056 -33.1897 -32.5711 -30.4179 -27.5544 -24.3929 -21.529 -20.114 -19.4111 -19.2432 -19.3546 -19.9287 -20.0562 -20.5982 -21.0257 -21.2603 -21.8398 -22.784 -24.2148 -25.5413 -26.2476 -26.2436 -25.679 -24.544 -22.9335 -21.1549 -20.2421 -19.6868 -19.4435 -19.6343 -19.9117 -19.7934 -19.9895 -20.0026 -20.2648 -20.657 -21.4283 -22.6587 -24.6719 -27.5394 -30.1426 -32.2909 -33.0221 -32.4347 -30.4886 -27.8947 -25.4346 -23.0012 -21.8424 -21.2221 -21.1674 -21.1797 -21.1715 -21.2073 -21.3865 -21.475 -21.6623 -22.3763 -23.4766 -25.318 -27.2706 -28.9599 -29.78 -29.6323 -28.3288 -26.6175 -24.1402 -21.977 -20.432 -19.5468 -19.1139 -18.8277 -18.8178 -18.8064 -18.8578 -19.2328 -19.864 -21.2043 -23.1017 -25.6567 -28.5966 -31.6428 -33.2751 -34.129 -33.0122 -30.8244 -27.8653 -24.8278 -22.1633 -21.0172 -20.1793 -20.027 -19.7568 -19.8479 -20.0403 -19.9841 -20.3949 -20.7575 -21.295 -21.6231 -22.1512 -22.5514 -22.9114 -23.314 -23.5279 -23.6669 -23.5983 -23.4654 -23.3044 -23.0266 -22.5748 -22.47 -22.1736 -22.0939 -21.9601 -21.7101 -21.4169 -21.3704 -21.7984 -22.5428 -23.8954 -25.9639 -28.1126 -29.9841 -30.9355 -30.8256 -29.0541 -26.6696 -24.0698 -21.4999 -19.4876 -18.1406 -18.3696 -18.3888 -18.5397 -18.8303 -19.1209 -19.5162 -19.6631 -19.7568 -19.8765 -19.8412 -19.7759 -19.8017 -19.7265 -19.8715 -20.0446 -19.7861 -19.8982 -19.7283 -19.7611 -19.5291 -19.646 -19.6044 -19.6551 -19.5176 -19.5947 -19.5647 -19.5238 -19.6443 -19.5168 -19.6617 -19.5813 -19.7009 -19.4882 -19.5455 -19.6949 -19.4459 -18.9335 -18.7793 -18.6301 -18.1615 -18.0562 -18.0801 -18.271 -18.2259 -18.2959 -18.0164 -18.174 -18.081 -17.7226 -17.798 -17.6436 -17.4798 -17.639 -17.4745 -17.4514 -17.6052 -17.5233 -17.5753 -18.015 -18.0078 -18.1721 -18.5203 -18.5829 -18.8907 -19.5477 -20.6744 -22.2323 -24.0897 -26.1601 -28.007 -29.655 -31.2506 -32.529 -32.8382 -32.4505 -31.2271 -29.3537 -26.9014 -24.4528 -21.9953 -20.2665 -19.4596 -19.1493 -18.808 -18.6827 -18.6986 -18.9753 -18.8717 -19.1776 -19.181 -19.107 -19.1296 -19.1292 -18.8607 -19.015 -18.8612 -18.8373 -18.9558 -18.9701 -19.1191 -19.0252 -19.121 -19.3225 -19.453 -19.8711 -20.522 -21.2344 -22.6483 -24.5987 -26.6496 -29.027 -31.2827 -33.2551 -34.5163 -34.4578 -33.1515 -30.8937 -27.94 -25.1917 -22.5379 -20.3414 -19.1436 -18.4872 -18.1497 -18.3306 -18.5961 -18.923 -19.1974 -19.7578 -19.8137 -19.7934 -20.1655 -20.1691 -20.2504 -20.0084 -19.9459 -20.108 -19.9685 -20.1484 -20.4749 -20.9557 -21.2268 -21.6064 -22.0213 -22.6616 -23.2627 -23.6913 -24.0596 -24.2181 -24.4873 -24.8564 -24.9882 -25.6778 -26.816 -28.5732 -31.3123 -34.4573 -37.9553 -41.2636 -43.8539 -45.6585 -46.5806 
-46.6924 -46.5206 -46.2146 -46.4855 -46.8556 -47.5329 -48.4054 -49.2594 -50.2476 -51.0499 -51.5411 -51.1204 -48.5606 -44.8966 -39.9928 -34.6016 -29.4946 -25.266 -22.083 -21.1612 -21.2032 -21.6846 -22.493 -22.9477 -23.3928 -23.5403 -23.3975 -23.1014 -22.5853 -22.2802 -21.7332 -21.5007 -21.2363 -21.3543 -21.3843 -21.4484 -21.3648 -20.9769 -20.3804 -19.6962 -19.2946 -18.795 -18.7469 -18.8575 -19.0224 -18.9051 -19.2221 -19.5192 -19.8656 -20.5955 -21.6266 -22.8898 -24.5628 -25.9 -26.9404 -27.3382 -27.1838 -26.2555 -24.8281 -23.2128 -22.0845 -21.7329 -21.3224 -21.2081 -21.4209 -21.4255 -21.6385 -21.7678 -22.437 -23.3177 -24.7494 -26.8611 -29.1486 -31.5806 -33.3756 -34.0789 -33.6684 -32.1632 -29.8661 -27.5817 -25.3129 -23.3074 -22.3505 -21.6977 -21.4293 -21.3814 -21.2662 -21.326 -21.4997 -21.5874 -21.6653 -21.6741 -21.7117 -21.946 -21.7758 -21.7644 -21.917 -21.9627 -22.0378 -22.3632 -22.5648 -22.6166 -22.7663 -22.7115 -22.8907 -22.9711 -22.9478 -23.147 -23.264 -22.9508 -22.9261 -23.0484 -22.7185 -22.7441 -22.677 -22.7846 -22.8129 -22.6182 -22.4433 -21.9957 -21.1341 -20.1509 -19.2625 -18.8425 -18.2414 -18.3116 -18.3749 -18.673 -19.1745 -19.6585 -20.2176 -20.3107 -20.5881 -20.5907 -20.6656 -20.5882 -20.6377 -20.3593 -20.4176 -20.1571 -20.1216 -20.0975 -20.4175 -20.731 -20.9937 -21.8629 -22.4703 -22.8876 -23.4494 -23.7252 -23.7737 -23.724 -23.5752 -23.9974 -24.1826 -24.8022 -25.671 -27.1638 -29.3996 -32.2777 -35.6094 -39.1789 -42.2325 -44.603 -46.2874 -47.1766 -47.5582 -47.7753 -48.1783 -48.7794 -50.1195 -50.9688 -52.5893 -53.4512 -53.5687 -52.4898 -49.9085 -46.2038 -41.0088 -36.1339 -31.3201 -27.9681 -25.2707 -23.61 -22.8497 -22.9148 -22.9255 -23.186 -23.5846 -23.6884 -23.7245 -23.8645 -23.5322 -23.232 -23.4661 -23.6692 -24.6407 -25.9765 -27.2265 -28.007 -28.202 -27.7759 -26.5076 -24.823 -23.3286 -21.9879 -21.6843 -21.1443 -21.403 -21.4387 -21.5666 -21.8548 -22.6144 -23.2502 -25.2239 -27.3737 -29.9882 -32.8728 -34.1237 -34.8278 -34.0747 -32.1574 -29.7338 -27.0442 -24.901 -23.4937 -22.9435 -22.7624 -23.1098 -23.2626 -23.6728 -23.8419 -23.9442 -24.0473 -24.3975 -25.2328 -26.6681 -28.3491 -30.322 -31.6002 -31.806 -31.0285 -29.0799 -26.5549 -24.1037 -21.9726 -20.5892 -19.9266 -19.6503 -19.6186 -19.6761 -19.7662 -19.8395 -20.0156 -20.2702 -20.4139 -21.0403 -21.3951 -21.7757 -22.0709 -22.0593 -21.9532 -21.6363 -21.2788 -20.877 -20.3881 -20.1193 -20.06 -19.9783 -20.038 -20.3624 -20.6547 -20.9841 -21.3429 -22.2443 -23.3217 -25.2809 -27.6483 -30.721 -33.5415 -34.9831 -35.7515 -34.4817 -32.2794 -29.2419 -26.564 -24.0613 -22.8284 -22.2535 -21.8454 -21.9027 -22.2005 -22.7576 -23.3049 -23.886 -24.4872 -25.1099 -25.5407 -25.9627 -26.3374 -26.4729 -26.3338 -26.0179 -26.0788 -25.7341 -25.8124 -25.9986 -26.3723 -27.0153 -27.7267 -28.8621 -29.4399 -30.3185 -31.1558 -31.6322 -32.0178 -32.5494 -32.9467 -33.7702 -34.6464 -36.1173 -38.0773 -40.6054 -43.3228 -45.6051 -47.3188 -48.1112 -48.4934 -48.0003 -47.4845 -46.8468 -46.6664 -46.8632 -47.0948 -47.6046 -48.272 -49.2464 -50.4295 -51.4459 -50.8883 -49.7751 -46.7052 -42.8317 -38.07 -33.4125 -29.7214 -27.1165 -25.6063 -24.8163 -24.8433 -24.9918 -25.0123 -25.2707 -25.2494 -24.9958 -24.7526 -24.258 -24.0228 -23.6568 -23.4167 -23.3495 -23.6213 -23.3261 -23.2836 -23.022 -22.6739 -22.315 -22.0708 -21.9035 -21.8722 -21.9385 -21.8899 -21.9362 -21.8734 -22.0286 -22.1374 -22.3349 -23.1934 -24.5257 -26.556 -28.8076 -31.1115 -32.5599 -33.3987 -32.7133 -31.4803 -29.32 -27.1868 -25.2327 -24.2718 -23.6321 -23.7118 -23.8263 -23.9802 -24.3549 -24.3917 -24.6069 -24.4762 -24.6782 -24.6337 
-24.8144 -24.8071 -25.0671 -25.0711 -25.3226 -25.3209 -25.2968 -25.3318 -25.0553 -25.0176 -24.765 -24.5209 -24.2666 -24.1829 -23.8299 -23.943 -24.3111 -25.0477 -26.1032 -28.1467 -30.3414 -32.9762 -34.9841 -36.7095 -37.1322 -36.0177 -34.0158 -31.3713 -28.4891 -26.066 -24.0071 -22.9151 -22.5423 -22.0653 -21.9726 -21.9717 -22.4839 -22.6457 -23.5557 -24.3446 -26.3597 -29.1679 -32.0659 -34.0894 -34.6645 -34.0735 -31.7971 -28.759 -25.2579 -22.402 -20.6472 -19.9266 -19.6903 -19.8228 -19.7634 -20.1472 -20.4997 -20.826 -21.7994 -22.2799 -22.8286 -23.1154 -22.928 -22.8692 -22.7474 -22.369 -22.0495 -21.6607 -21.2601 -21.4038 -21.016 -20.8896 -20.6437 -20.6457 -20.4368 -20.2575 -20.1086 -19.8336 -20.0455 -20.5255 -21.4515 -23.3361 -25.91 -28.5078 -30.8336 -32.2478 -32.3071 -31.191 -29.0759 -26.4112 -24.0622 -22.2593 -21.5708 -21.3821 -21.6277 -21.8736 -22.2721 -22.5571 -22.6794 -22.8034 -22.8948 -22.7387 -23.3037 -23.7948 -24.7054 -25.7965 -26.4259 -26.8205 -26.3025 -25.3487 -24.1823 -23.0226 -22.0361 -21.5974 -21.4643 -21.4333 -21.4127 -21.3621 -21.1911 -21.2476 -21.7225 -22.6963 -24.7607 -27.9539 -31.3147 -33.8342 -34.5395 -33.7705 -31.583 -28.1341 -24.5294 -21.7448 -20.1189 -19.5279 -19.3349 -19.3001 -19.5453 -20.1651 -19.9807 -20.3165 -20.7037 -21.3852 -22.5638 -24.6184 -27.6119 -30.3603 -32.7188 -33.7567 -33.3937 -31.5838 -28.7473 -25.5022 -22.7798 -20.9737 -19.8991 -19.6236 -19.7401 -19.9811 -20.1457 -20.5435 -20.6209 -20.8576 -20.5531 -20.7363 -21.017 -20.8289 -21.275 -21.2096 -21.184 -21.2235 -20.9843 -20.7252 -20.6697 -20.5617 -20.4801 -20.5949 -20.6785 -20.6651 -20.6451 -20.8901 -21.1082 -21.3335 -22.0641 -23.1609 -24.7033 -26.3743 -28.3393 -29.9209 -30.6291 -30.4025 -29.2897 -27.3763 -25.3298 -23.4606 -21.8676 -21.1282 -20.8004 -20.8259 -21.1962 -21.2485 -21.4524 -21.5699 -21.6142 -21.5135 -21.6507 -21.5861 -21.5632 -21.7054 -21.7115 -22.153 -22.0669 -21.7956 -21.2647 -20.7264 -20.3444 -19.7896 -19.5179 -19.6148 -19.3964 -19.675 -20.0045 -20.1912 -20.3226 -20.2072 -20.4537 -20.6723 -20.8731 -21.3117 -21.4837 -21.6823 -21.7814 -21.8545 -22.1655 -22.3477 -22.6454 -22.9787 -23.6932 -24.5033 -25.2039 -25.9952 -27.0418 -28.0481 -28.7184 -29.1927 -29.3678 -29.5527 -29.4315 -29.1374 -28.9997 -29.1533 -29.5467 -30.4637 -31.8287 -34.3426 -38.1285 -42.9047 -48.3857 -53.9433 -59.0457 -62.8988 -65.3144 -66.2831 -66.344 -66.4382 -67.0987 -68.6142 -70.4137 -72.4929 -74.4599 -75.8168 -76.6435 -76.0909 -74.4846 -71.1057 -70.2525", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0001 0.0002 0.0002 0.0002 0.0002 0.0004 0.0005 0.0004 0.0008 0.0009 0.0014 0.0014 0.0012 0.0014 0.0016 0.0018 0.0018 0.0021 0.0024 0.0024 0.0028 0.0028 0.0029 0.0031 0.0033 0.0035 0.0035 0.0031 0.0029 0.0024 0.0021 0.002 0.002 0.0021 0.0016 0.0013 0.001 0.0009 0.0008 0.0013 0.002 0.003 0.0047 0.0065 0.0092 0.0116 0.0125 0.014 0.0149 0.0143 0.0144 0.0147 0.0144 0.0208 0.0443 0.0712 0.087 0.0895 0.0797 0.0519 0.0209 0.0022 0.0007 0.0008 0.001 0.0012 0.0009 0.0044 0.011 0.0155 0.0188 0.0222 0.0206 0.0175 0.0136 0.0055 0.002 0.0019 0.0022 0.0019 0.0019 0.0019 0.0018 0.0013 0.0013 0.0009 0.0005 0.0007 0.0006 0.0009 0.0007 0.0009 0.0012 0.0017 0.0019 0.002 0.0021 0.002 0.0019 0.0018 0.0016 0.0014 0.0015 0.0015 0.0012 0.0011 0.0013 0.0022 0.0049 0.0078 0.0127 0.0165 0.0177 0.0171 0.0132 0.008 0.0029 0.0019 0.0021 0.0022 0.0024 0.0026 0.0022 0.002 0.0016 0.0016 0.0013 0.0013 0.0018 0.0023 0.0027 0.003 0.0029 0.0025 0.0021 0.0011 0.0011 0.0012 0.0015 0.0013 0.0014 0.0014 0.0014 0.0014 0.0016 0.0015 0.0021 0.0036 0.0057 0.0083 
0.0108 0.0126 0.0124 0.0112 0.009 0.0052 0.0023 0.0019 0.002 0.0022 0.0024 0.0027 0.0025 0.0026 0.0026 0.0026 0.0021 0.002 0.0022 0.0022 0.0021 0.0022 0.0021 0.002 0.002 0.0021 0.0022 0.0022 0.0022 0.0021 0.0021 0.0021 0.0021 0.002 0.002 0.0018 0.0017 0.0016 0.0018 0.0018 0.0016 0.0015 0.0014 0.0013 0.0019 0.0023 0.0031 0.004 0.0047 0.0055 0.0059 0.0061 0.0059 0.0053 0.0045 0.0033 0.003 0.0022 0.0013 0.0009 0.0005 0.0007 0.0022 0.0083 0.0175 0.0186 0.0193 0.0168 0.0069 0.0038 0.004 0.0037 0.0031 0.0022 0.0016 0.0014 0.0012 0.0013 0.0044 0.0084 0.0165 0.0233 0.0302 0.0347 0.0331 0.0294 0.0221 0.0107 0.0035 0.0019 0.0016 0.0011 0.0009 0.0008 0.0007 0.0008 0.0006 0.0005 0.0006 0.0005 0.0012 0.0018 0.0038 0.005 0.0052 0.0049 0.0037 0.0018 0.0011 0.0011 0.0012 0.0012 0.001 0.0009 0.001 0.0011 0.001 0.0013 0.0011 0.001 0.0009 0.0008 0.0008 0.0008 0.0009 0.001 0.0014 0.0017 0.0018 0.0019 0.0016 0.0015 0.0015 0.0015 0.0017 0.0018 0.002 0.002 0.002 0.002 0.0022 0.0036 0.0085 0.0126 0.0132 0.0134 0.0116 0.0074 0.0059 0.0041 0.0036 0.0029 0.0025 0.0019 0.0011 0.0014 0.0012 0.0016 0.0018 0.0019 0.0019 0.0028 0.0034 0.0759 0.0025 0.0033 0.0029 0.0022 0.0022 0.0025 0.0029 0.0029 0.0024 0.0021 0.002 0.0019 0.0021 0.0024 0.0024 0.0025 0.0022 0.002 0.0017 0.0015 0.0016 0.0015 0.0016 0.0014 0.0014 0.0015 0.0016 0.0015 0.0016 0.0016 0.0015 0.0015 0.0014 0.0016 0.0016 0.0014 0.0015 0.0012 0.0012 0.0013 0.0011 0.0012 0.0015 0.002 0.0027 0.003 0.0033 0.0038 0.0045 0.0053 0.0054 0.0056 0.0051 0.0042 0.0033 0.0027 0.0015 0.0012 0.001 0.001 0.001 0.0032 0.0065 0.0121 0.0166 0.0171 0.0161 0.0122 0.0069 0.0039 0.0033 0.0028 0.0023 0.0017 0.0017 0.002 0.0025 0.0031 0.0039 0.0059 0.0087 0.0109 0.0147 0.0192 0.0202 0.0191 0.0159 0.0107 0.0037 0.0025 0.0021 0.0013 0.0013 0.0014 0.0013 0.0012 0.001 0.0009 0.0007 0.0007 0.0018 0.0037 0.0068 0.0109 0.0141 0.0141 0.0128 0.0088 0.0032 0.0021 0.0018 0.0014 0.0015 0.0014 0.0015 0.0016 0.002 0.0034 0.0107 0.0215 0.0297 0.037 0.0433 0.0476 0.0489 0.0475 0.0422 0.0334 0.023 0.0108 0.0036 0.0025 0.0021 0.0019 0.0017 0.0018 0.0018 0.0019 0.0018 0.0019 0.0021 0.0021 0.0016 0.0015 0.0017 0.0018 0.0022 0.0025 0.0028 0.0159 0.0023 0.0024 0.0022 0.0018 0.0018 0.0018 0.0018 0.0022 0.002 0.002 0.002 0.0032 0.0053 0.0056 0.0061 0.0061 0.005 0.0041 0.0029 0.0022 0.0017 0.002 0.0022 0.0023 0.0019 0.0019 0.0016 0.0014 0.0013 0.0014 0.0013 0.0011 0.0012 0.0011 0.0011 0.001 0.0011 0.0012 0.0013 0.0014 0.0016 0.0016 0.0017 0.0014 0.0014 0.0011 0.0013 0.0014 0.0017 0.0018 0.0017 0.0017 0.0016 0.0017 0.0017 0.0018 0.002 0.0022 0.0024 0.0025 0.0026 0.0028 0.0026 0.0025 0.0025 0.0022 0.0021 0.0021 0.002 0.0019 0.0019 0.0019 0.0017 0.002 0.0015 0.0015 0.0014 0.0014 0.0014 0.0014 0.0013 0.0015 0.0013 0.0011 0.001 0.0009 0.0009 0.001 0.0009 0.0007 0.0009 0.001 0.0012 0.0012 0.0016 0.0018 0.0024 0.0028 0.0028 0.0028 0.0027 0.0025 0.0023 0.0025 0.0024 0.0021 0.0022 0.0021 0.0023 0.0025 0.0027 0.0025 0.0025 0.0024 0.0025 0.0024 0.0023 0.0024 0.0023 0.0024 0.0028 0.0028 0.0028 0.0026 0.0022 0.002 0.0021 0.0024 0.0025 0.0024 0.0022 0.0022 0.0025 0.0039 0.0056 0.0071 0.0093 0.0113 0.012 0.0113 0.0095 0.0071 0.0042 0.0032 0.0026 0.0024 0.0022 0.0022 0.0022 0.0023 0.0023 0.0023 0.0022 0.0023 0.0023 0.0023 0.0023 0.0019 0.0021 0.002 0.0018 0.0019 0.002 0.0019 0.0017 0.0016 0.0016 0.0015 0.0015 0.0016 0.0019 0.0025 0.0079 0.0139 0.0178 0.0246 0.0307 0.0351 0.0411 0.0454 0.0456 0.0426 0.035 0.0229 0.0104 0.0043 0.0037 0.0035 0.0038 0.0034 0.0034 0.0033 0.0027 0.0024 0.0023 0.0024 0.0028 0.0025 0.0024 0.0022 
0.0018 0.0018 0.0023 0.0028 0.0028 0.0031 0.003 0.0031 0.0028 0.0026 0.0028 0.0027 0.0028 0.0025 0.0019 0.0018 0.0015 0.0018 0.0019 0.0018 0.0017 0.0021 0.002 0.0026 0.0041 0.0051 0.0061 0.0067 0.007 0.0084 0.0094 0.0097 0.0097 0.009 0.0077 0.0064 0.0052 0.0035 0.0019 0.0017 0.0018 0.002 0.0026 0.0027 0.0037 0.0054 0.0062 0.0064 0.0059 0.0044 0.002 0.0008 0.001 0.001 0.0038 0.0163 0.029 0.0423 0.0572 0.0687 0.0831 0.0894 0.0852 0.0744 0.0531 0.0319 0.0188 0.0119 0.0046 0.0028 0.0027 0.0026 0.0025 0.0027 0.0027 0.0033 0.0037 0.0038 0.0042 0.0056 0.0079 0.0119 0.0146 0.015 0.0142 0.0115 0.0073 0.0036 0.003 0.0024 0.002 0.0018 0.0017 0.0016 0.0013 0.0017 0.0017 0.0022 0.0056 0.0121 0.0192 0.0241 0.0277 0.029 0.0264 0.0221 0.0165 0.0081 0.0045 0.0036 0.0033 0.0028 0.0024 0.0024 0.0022 0.0025 0.0023 0.0024 0.0023 0.0021 0.002 0.002 0.002 0.0018 0.0019 0.0016 0.0014 0.0014 0.0014 0.0019 0.0022 0.0023 0.0019 0.001 0.0011 0.0012 0.0014 0.0014 0.0013 0.0013 0.003 0.0038 0.0047 0.005 0.0046 0.0037 0.003 0.002 0.0015 0.0015 0.0018 0.0019 0.0019 0.0018 0.0016 0.0012 0.0012 0.0012 0.0009 0.0013 0.0012 0.0009 0.0008 0.0007 0.0009 0.0008 0.0008 0.0008 0.0006 0.0008 0.0007 0.0007 0.0009 0.0009 0.0009 0.0008 0.0008 0.0006 0.0006 0.0006 0.0005 0.0008 0.0008 0.0011 0.0019 0.0029 0.0031 0.0032 0.003 0.0028 0.0031 0.0041 0.0043 0.0045 0.0044 0.0037 0.0028 0.0024 0.0019 0.0011 0.0007 0.0008 0.0014 0.0028 0.0066 0.0106 0.0129 0.0147 0.0137 0.0111 0.008 0.0044 0.0037 0.0037 0.0041 0.0039 0.0032 0.0026 0.002 0.0018 0.0023 0.0021 0.0031 0.0071 0.0113 0.0187 0.0247 0.0268 0.0262 0.0213 0.0137 0.0071 0.0023 0.0021 0.0017 0.0016 0.0017 0.002 0.0019 0.0023 0.005 0.0109 0.0168 0.0196 0.0215 0.0215 0.0201 0.0197 0.0184 0.0152 0.012 0.0067 0.003 0.0024 0.0021 0.0019 0.0015 0.0012 0.0008 0.0007 0.0007 0.0005 0.0005 0.0005 0.0007 0.0009 0.0012 0.0017 0.0019 0.0023 0.0021 0.002 0.0019 0.0018 0.0021 0.0021 0.0019 0.0017 0.0017 0.0017 0.002 0.0037 0.008 0.0123 0.0171 0.0206 0.0242 0.0271 0.0275 0.0256 0.0217 0.0147 0.0078 0.0025 0.0018 0.0015 0.0015 0.0014 0.0013 0.0013 0.0013 0.0012 0.0012 0.0013 0.0016 0.0017 0.0015 0.0015 0.0026 0.0027 0.0033 0.0041 0.0036 0.0036 0.004 0.0038 0.0039 0.0039 0.0036 0.0035 0.0032 0.0028 0.0026 0.0024 0.0023 0.0023 0.0023 0.0024 0.0021 0.0018 0.0017 0.0014 0.0016 0.0015 0.0014 0.0011 0.0011 0.0009 0.0009 0.0008 0.0007 0.0007 0.001 0.0006 0.0008 0.0008 0.0011 0.0011 0.001 0.0012 0.0014 0.0015 0.0016 0.0015 0.0021 0.0028 0.0036 0.0044 0.0043 0.0042 0.0037 0.0029 0.0023 0.0018 0.0012 0.0009 0.0001 0.0002 0.0009 0.0053 0.0116 0.0138 0.0144 0.014 0.0084 0.0044 0.0031 0.0023 0.0018 0.0014 0.0011 0.001 0.001 0.0008 0.001 0.0009 0.0008 0.0007 0.0005 0.0005 0.0005 0.0005 0.0004 0.0007 0.0007 0.001 0.001 0.001 0.001 0.0011 0.0012 0.0011 0.0013 0.0018 0.0026 0.0022 0.0018 0.0018 0.0018 0.0039 0.0051 0.0051 0.0049 0.0038 0.0032 0.0025 0.0024 0.0021 0.0021 0.0017 0.0017 0.0017 0.0016 0.0016 0.0014 0.0015 0.0015 0.0019 0.003 0.0031 0.0037 0.0036 0.0033 0.003 0.0023 0.002 0.0021 0.0019 0.0018 0.0014 0.0011 0.0011 0.0011 0.001 0.0007 0.0005 0.0004 0.0019 0.0052 0.0084 0.0107 0.012 0.0111 0.0097 0.0076 0.0057 0.0037 0.002 0.0014 0.0014 0.0014 0.0014 0.0015 0.0014 0.0011 0.0012 0.0012 0.0015 0.0027 0.0051 0.0084 0.0125 0.0151 0.0148 0.0134 0.0097 0.0042 0.002 0.0016 0.0018 0.0019 0.002 0.002 0.0018 0.0019 0.0018 0.0019 0.002 0.0018 0.0019 0.0019 0.0018 0.0016 0.0017 0.0018 0.0019 0.002 0.0019 0.002 0.0016 0.0016 0.0018 0.0016 0.0016 0.0017 0.0015 0.0015 0.0014 0.0013 0.0014 0.0014 0.0014 0.0016 0.0014 0.0013 
0.0016 0.0014 0.0014 0.0014 0.0013 0.0013 0.0014 0.0015 0.0012 0.0011 0.0012 0.0011 0.0011 0.0008 0.0008 0.0006 0.0006 0.0006 0.0007 0.0008 0.001 0.0012 0.0016 0.0018 0.002 0.0019 0.0019 0.0017 0.0017 0.0016 0.0013 0.0011 0.0008 0.0006 0.0006 0.0007 0.0005 0.0005 0.0011 0.0021 0.0025 0.0028 0.0026 0.0023 0.0021 0.0019 0.0016 0.0015 0.0015 0.0015 0.0013 0.0015 0.0011 0.0011 0.0011 0.0012 0.0012 0.0011 0.0009 0.001 0.0011 0.0011 0.0011 0.0011 0.001 0.0012 0.0012 0.001 0.0009 0.0011 0.0015 0.0012 0.0013 0.0013 0.0015 0.0015 0.0015 0.0015 0.0016 0.0019 0.0025 0.0027 0.0031 0.0027 0.0026 0.0027 0.0028 0.0031 0.0032 0.0032 0.0032 0.0031 0.004 0.0025 0.0023 0.0018 0.0016 0.0018 0.0018 0.0018 0.0017 0.0014 0.0014 0.0009 0.0009 0.0007 0.0007 0.0007 0.0006 0.0004 0.0007 0.0007 0.0023 0.0084 0.013 0.016 0.0193 0.0224 0.0228 0.0236 0.0231 0.0206 0.0179 0.0134 0.0097 0.0045 0.0036 0.0035 0.0032 0.0032 0.0035 0.0036 0.0035 0.0034 0.0028 0.0023 0.0024 0.0024 0.0024 0.0021 0.0021 0.002 0.0018 0.002 0.0021 0.0024 0.0027 0.0026 0.0026 0.0024 0.002 0.0023 0.0018 0.0019 0.0017 0.0014 0.0014 0.0013 0.0015 0.0015 0.0017 0.0016 0.0018 0.0018 0.0019 0.0024 0.0026 0.0032 0.0042 0.0045 0.0044 0.0043 0.0031 0.0022 0.0021 0.0021 0.0025 0.0038 0.0059 0.0087 0.0114 0.0133 0.0148 0.0148 0.0131 0.0109 0.0081 0.0057 0.0043 0.0041 0.0043 0.004 0.0036 0.0028 0.0015 0.0012 0.0012 0.0011 0.001 0.001 0.0025 0.0063 0.0122 0.0156 0.0175 0.0168 0.0131 0.0088 0.0029 0.0025 0.0022 0.0022 0.0018 0.0017 0.0016 0.0017 0.0014 0.0012 0.0012 0.0006 0.0004 0.0003 0.0008 0.0022 0.004 0.0045 0.0045 0.0043 0.0026 0.0022 0.0021 0.0021 0.0021 0.0016 0.0015 0.0015 0.0015 0.0017 0.0017 0.0018 0.0018 0.0019 0.0016 0.0017 0.0021 0.0023 0.0024 0.0025 0.0021 0.002 0.0019 0.0015 0.001 0.0009 0.0008 0.0006 0.0007 0.0005 0.0003 0.0003 0.0005 0.001 0.0022 0.0053 0.0072 0.0087 0.0087 0.0072 0.0055 0.002 0.0012 0.0009 0.0009 0.0012 0.0011 0.0009 0.0007 0.0008 0.001 0.0013 0.0011 0.001 0.0009 0.0009 0.0009 0.0007 0.001 0.0016 0.002 0.0023 0.0026 0.0027 0.0028 0.0028 0.0026 0.0023 0.0023 0.0022 0.0023 0.0023 0.0024 0.0024 0.0022 0.0024 0.0019 0.002 0.0019 0.0018 0.002 0.0022 0.002 0.0022 0.0021 0.0019 0.0018 0.0017 0.0017 0.0014 0.0014 0.0013 0.0012 0.0014 0.0016 0.0012 0.0015 0.0015 0.0017 0.0017 0.0018 0.0024 0.003 0.0039 0.0044 0.0046 0.0055 0.0052 0.0052 0.0049 0.0038 0.0033 0.0025 0.002 0.0015 0.0009 0.0006 0.0011 0.0019 0.005 0.007 0.0073 0.0071 0.0049 0.0037 0.0035 0.0041 0.004 0.0036 0.0024 0.0018 0.001 0.0019 0.0029 0.0067 0.0115 0.0162 0.0199 0.0232 0.0257 0.0247 0.0218 0.0171 0.0091 0.0044 0.0029 0.002 0.0016 0.0016 0.0016 0.0014 0.0014 0.0015 0.0017 0.0015 0.0013 0.0022 0.0033 0.0045 0.0054 0.0052 0.0048 0.0038 0.0023 0.0015 0.0013 0.0013 0.0012 0.0012 0.0012 0.0009 0.0009 0.0007 0.0008 0.0008 0.0005 0.0004 0.0006 0.0008 0.0007 0.001 0.0013 0.0014 0.0015 0.0017 0.0013 0.0013 0.0013 0.0015 0.0018 0.0018 0.0018 0.0017 0.0015 0.0018 0.0022 0.0046 0.008 0.0097 0.0108 0.0112 0.01 0.0083 0.0066 0.0042 0.0027 0.0023 0.0022 0.0021 0.0021 0.0016 0.0017 0.0013 0.0013 0.0011 0.0016 0.0023 0.0055 0.0095 0.0141 0.0182 0.0198 0.0187 0.0152 0.0109 0.0057 0.003 0.003 0.0025 0.0026 0.0026 0.0025 0.0025 0.0025 0.0025 0.0023 0.0022 0.0022 0.0025 0.0024 0.0024 0.002 0.0018 0.0016 0.0014 0.0015 0.0016 0.0016 0.0019 0.0019 0.0018 0.0016 0.0017 0.0016 0.0013 0.0014 0.0013 0.0013 0.0013 0.0013 0.0012 0.0011 0.0012 0.001 0.0014 0.0021 0.0035 0.0043 0.0047 0.0049 0.0045 0.0044 0.0041 0.0034 0.0031 0.0025 0.0022 0.0022 0.0015 0.0006 0.0008 0.0063 0.014 0.0158 0.0165 
0.0149 0.0075 0.0041 0.0035 0.0027 0.0024 0.0019 0.0013 0.0013 0.0014 0.0012 0.0016 0.0025 0.0048 0.0074 0.0109 0.0148 0.0187 0.0205 0.0195 0.0169 0.0112 0.0052 0.0024 0.0021 0.0019 0.0016 0.0017 0.0015 0.0011 0.0009 0.0009 0.0009 0.001 0.001 0.0011 0.0013 0.0015 0.0017 0.002 0.0018 0.0021 0.002 0.0022 0.0021 0.0024 0.0027 0.0028 0.0027 0.0028 0.0026 0.0027 0.0029 0.0092 0.0163 0.0222 0.0282 0.0306 0.0307 0.03 0.0261 0.0219 0.017 0.0099 0.0036 0.0021 0.0016 0.0016 0.0019 0.0018 0.0016 0.0017 0.0015 0.0014 0.0013 0.0013 0.0016 0.0015 0.0019 0.0019 0.0017 0.0016 0.0017 0.0018 0.002 0.0022 0.0019 0.0022 0.0023 0.0022 0.0023 0.0026 0.0028 0.003 0.0029 0.0029 0.0025 0.0025 0.0025 0.0024 0.0028 0.003 0.0033 0.0035 0.0035 0.0037 0.0037 0.0035 0.0036 0.0029 0.0029 0.0025 0.0025 0.0025 0.0025 0.0023 0.0025 0.0026 0.0023 0.0023 0.0023 0.0021 0.002 0.002 0.002 0.0018 0.002 0.0021 0.0022 0.0022 0.002 0.002 0.0018 0.0019 0.0019 0.0025 0.0027 0.0059 0.0092 0.0143 0.0182 0.0208 0.021 0.018 0.014 0.009 0.0042 0.003 0.0031 0.0026 0.0024 0.0025 0.0023 0.002 0.0015 0.0014 0.0014 0.0013 0.0009 0.0015 0.0033 0.0048 0.0055 0.005 0.0045 0.003 0.0021 0.0024 0.0021 0.0021 0.0021 0.0018 0.0015 0.0013 0.0009 0.0008 0.0008 0.0009 0.0013 0.0035 0.0064 0.0115 0.0147 0.0155 0.0147 0.0119 0.0064 0.0038 0.0032 0.0031 0.0031 0.0028 0.0027 0.0026 0.0025 0.0025 0.0025 0.0025 0.0023 0.0021 0.0016 0.0015 0.0013 0.0014 0.0014 0.0014 0.0015 0.0011 0.0011 0.001 0.0008 0.0005 0.0004 0.0007 0.001 0.0008 0.0009 0.0008 0.0014 0.0025 0.0044 0.0069 0.0081 0.0082 0.0074 0.0056 0.0037 0.0027 0.0024 0.0023 0.0021 0.0016 0.0015 0.0014 0.0014 0.0015 0.0014 0.0013 0.0013 0.0014 0.0016 0.0016 0.0015 0.0015 0.0013 0.0012 0.0007 0.001 0.001 0.0011 0.0011 0.0011 0.001 0.0011 0.0012 0.001 0.0011 0.0011 0.0012 0.0018 0.0042 0.0064 0.007 0.0083 0.0085 0.0071 0.0063 0.0047 0.0025 0.0024 0.0021 0.0022 0.0024 0.0025 0.0029 0.0027 0.0024 0.0023 0.002 0.0021 0.0019 0.0022 0.0023 0.0021 0.0019 0.002 0.002 0.0019 0.0018 0.0023 0.0023 0.0023 0.0022 0.002 0.0019 0.0021 0.002 0.0019 0.0017 0.0017 0.0016 0.0018 0.0019 0.0021 0.0019 0.0021 0.0021 0.0019 0.0033 0.0045 0.005 0.0056 0.0058 0.0056 0.0056 0.0054 0.0047 0.0043 0.0038 0.0032 0.0028 0.0021 0.0015 0.0013 0.0018 0.0019 0.0021 0.0022 0.0025 0.0037 0.0045 0.0059 0.0059 0.0059 0.0051 0.0037 0.0018 0.0013 0.001 0.0011 0.0025 0.0113 0.0206 0.0278 0.0334 0.0396 0.0412 0.0407 0.0374 0.0293 0.0205 0.0122 0.0068 0.0034 0.0025 0.0026 0.0027 0.0023 0.0021 0.0023 0.0025 0.0025 0.0025 0.0023 0.0019 0.0015 0.0012 0.001 0.0011 0.0015 0.0017 0.002 0.0022 0.0022 0.002 0.0019 0.0015 0.0015 0.0016 0.0018 0.0017 0.0019 0.0041 0.0095 0.0159 0.0197 0.0237 0.024 0.0225 0.0196 0.0148 0.0101 0.0058 0.0037 0.0029 0.0024 0.0024 0.0023 0.0019 0.0019 0.0021 0.0018 0.0015 0.0017 0.0014 0.0011 0.0014 0.0015 0.0012 0.0011 0.0011 0.0008 0.0008 0.0008 0.0009 0.0011 0.0011 0.0009 0.0007 0.0005 0.0005 0.0009 0.001 0.0011 0.0019 0.0035 0.0042 0.0049 0.0054 0.005 0.0044 0.0039 0.0028 0.0019 0.0021 0.0021 0.0023 0.0024 0.0022 0.0018 0.0015 0.0012 0.0012 0.0012 0.0013 0.0014 0.0012 0.0011 0.001 0.0012 0.0011 0.0012 0.001 0.0009 0.0007 0.0006 0.0006 0.0007 0.0008 0.0007 0.0008 0.0003 0.0004 0.0006 0.0006 0.0006 0.0005 0.0007 0.0006 0.0015 0.0023 0.0027 0.0025 0.0023 0.0026 0.0034 0.0042 0.0044 0.0044 0.0035 0.0026 0.0022 0.0016 0.0016 0.0031 0.0046 0.0071 0.0097 0.0125 0.0139 0.0143 0.0146 0.013 0.0105 0.008 0.0039 0.0028 0.0027 0.0027 0.0023 0.0021 0.0017 0.0015 0.0016 0.0014 0.0018 0.003 0.0064 0.0101 0.013 0.0157 0.0163 0.0146 0.0116 0.0067 
0.0038 0.0023 0.0021 0.0016 0.0013 0.0014 0.0013 0.0012 0.0012 0.001 0.0011 0.0023 0.0068 0.0112 0.015 0.0176 0.0177 0.0152 0.0116 0.0077 0.0034 0.0033 0.0029 0.0023 0.0021 0.0019 0.0014 0.0012 0.0012 0.0012 0.0005 0.0007 0.003 0.0073 0.0123 0.017 0.0188 0.018 0.0152 0.0092 0.0046 0.0027 0.0026 0.0025 0.0022 0.0021 0.0019 0.0019 0.0018 0.0018 0.0017 0.0015 0.0026 0.0031 0.003 0.0026 0.0034 0.0045 0.0049 0.0049 0.0038 0.0027 0.0029 0.0026 0.0024 0.0021 0.0018 0.0021 0.0019 0.0021 0.0018 0.0017 0.0016 0.0017 0.0025 0.0033 0.0037 0.0043 0.0043 0.004 0.0035 0.0028 0.0023 0.0022 0.0026 0.0025 0.0025 0.0025 0.0025 0.0021 0.002 0.0017 0.0014 0.0014 0.0014 0.0014 0.0015 0.0016 0.0015 0.0016 0.0012 0.0015 0.0017 0.0017 0.0019 0.0016 0.0017 0.0018 0.0017 0.0017 0.0015 0.0013 0.0014 0.0012 0.0011 0.001 0.0008 0.0012 0.0016 0.0024 0.0023 0.0024 0.0026 0.0028 0.0037 0.0039 0.004 0.004 0.0036 0.0032 0.0031 0.0021 0.0019 0.0018 0.0013 0.0022 0.0025 0.005 0.007 0.0082 0.0098 0.0201 0.0367 0.0497 0.0552 0.0561 0.0487 0.0356 0.0196 0.0042 0.0012 0.0011 0.0008 0.0007 0.0007 0.0009 0.0011 0.0014 0.004 0.0065 0.0094 0.0097 0.0089 0.0072 0.0034 0.0034 0.0034 0.0032 0.0029 0.0022 0.002 0.002 0.0021 0.0022 0.0023 0.0026 0.0024 0.0021 0.0012 0.0012 0.0018 0.0026 0.0038 0.0029 0.1129 0.003 0.0038 0.003 0.003 0.0023 0.0032 0.002 0.0022 0.0022 0.0023 0.0045 0.0105 0.0144 0.0192 0.0226 0.0238 0.0233 0.0197 0.0146 0.0084 0.0034 0.0025 0.0018 0.0015 0.0015 0.0015 0.0019 0.0021 0.002 0.0013 0.0012 0.0023 0.0055 0.0103 0.0128 0.0147 0.015 0.0128 0.0102 0.007 0.0036 0.0027 0.0024 0.0022 0.0019 0.0021 0.0023 0.0024 0.0023 0.0028 0.0032 0.006 0.012 0.0161 0.0204 0.0242 0.0258 0.0266 0.0251 0.0207 0.0153 0.0083 0.0036 0.0032 0.0034 0.0033 0.0033 0.0029 0.0026 0.0023 0.002 0.0019 0.0019 0.0019 0.0019 0.0018 0.0016 0.0015 0.0014 0.0013 0.0013 0.0012 0.0013 0.0011 0.0011 0.0012 0.0011 0.0009 0.0009 0.0009 0.0009 0.0008 0.0008 0.0005 0.0008 0.0052 0.0097 0.0143 0.0169 0.0168 0.015 0.0112 0.0062 0.0027 0.0027 0.0027 0.0026 0.0024 0.0024 0.0022 0.0018 0.0015 0.0015 0.0014 0.0012 0.001 0.0007 0.0007 0.0011 0.0015 0.0021 0.0024 0.0023 0.0022 0.0022 0.0025 0.0026 0.0029 0.0027 0.0024 0.0022 0.0019 0.002 0.0025 0.0028 0.0056 0.0098 0.0151 0.0192 0.0211 0.0201 0.0171 0.0126 0.0073 0.0037 0.0034 0.0032 0.0029 0.0032 0.0031 0.0027 0.002 0.0018 0.0017 0.0018 0.0017 0.0016 0.0017 0.0018 0.0021 0.0024 0.0024 0.0027 0.0025 0.0023 0.0024 0.0022 0.002 0.0021 0.002 0.002 0.0023 0.0021 0.0025 0.0025 0.0025 0.0032 0.0028 0.0026 0.0041 0.0069 0.0073 0.0074 0.0068 0.0041 0.0037 0.0033 0.0029 0.0028 0.0027 0.0021 0.0021 0.0018 0.0018 0.0016 0.0018 0.002 0.0024 0.0029 0.0027 0.0026 0.0021 0.0017 0.0018 0.0017 0.0016 0.0015 0.0016 0.0016 0.0014 0.0015 0.0016 0.0014 0.0014 0.0014 0.0017 0.0027 0.0028 0.003 0.0027 0.0019 0.0014 0.002 0.0027 0.003 0.0032 0.0035 0.0033 0.0028 0.0025 0.0018 0.0018 0.0016 0.0013 0.0018 0.0019 0.0018 0.002 0.002 0.002 0.0019 0.0019 0.002 0.0019 0.002 0.0022 0.0022 0.002 0.002 0.0018 0.0017 0.0017 0.0021 0.002 0.0023 0.0027 0.0028 0.0031 0.0031 0.0029 0.0028 0.0027 0.0023 0.0023 0.0022 0.0021 0.0021 0.0025 0.0025 0.0026 0.0028 0.0024 0.0024 0.0023 0.0022 0.0019 0.0018 0.0017 0.0014 0.0016 0.0017 0.0017 0.0018 0.0019 0.0011 0.0022 0.0031 0.0038 0.0043 0.0042 0.0042 0.004 0.0038 0.0034 0.0029 0.0022 0.0015 0.0015 0.0014 0.0012 0.0012 0.001 0.0012 0.0026 0.0041 0.0049 0.0051 0.0046 0.0044 0.0043 0.0044 0.004 0.0032 0.0029 0.0021 0.0019 0.0015 0.0015 0.0014 0.0014 0.0014 0.0013 0.0013 0.0011 0.001 0.001 0.0009 0.0009 0.0008 0.0006 
0.0008 0.0009 0.0007 0.0004 0.0007 0.0007 0.0012 0.005 0.0094 0.0138 0.0188 0.023 0.0257 0.0248 0.0221 0.0169 0.0093 0.0074 0.0037 0.0024 0.0022 0.0023 0.0023 0.0022 0.0024 0.0024 0.0023 0.0021 0.002 0.0019 0.0022 0.0021 0.0022 0.0023 0.0023 0.0022 0.0023 0.0023 0.0021 0.002 0.0021 0.0023 0.0026 0.0025 0.0029 0.0048 0.011 0.018 0.0225 0.0276 0.0301 0.0307 0.0321 0.0337 0.0328 0.0313 0.0265 0.0192 0.0132 0.0076 0.0035 0.0028 0.0023 0.002 0.002 0.0022 0.0023 0.0028 0.0029 0.0033 0.0034 0.0032 0.0035 0.0034 0.0035 0.0036 0.0035 0.0035 0.0035 0.003 0.0026 0.0024 0.0025 0.0029 0.0034 0.0031 0.0032 0.0036 0.0033 0.0033 0.0033 0.0034 0.0034 0.0032 0.0033 0.0036 0.0038 0.0039 0.0036 0.0035 0.0035 0.0036 0.0038 0.0037 0.0037 0.0032 0.0028 0.0027 0.0027 0.0024 0.0026 0.0025 0.0027 0.0033 0.0037 0.0035 0.0033 0.003 0.0024 0.0023 0.0023 0.0021 0.0018 0.0018 0.0021 0.0021 0.002 0.0019 0.0017 0.0019 0.0024 0.0055 0.0103 0.0149 0.019 0.0235 0.0257 0.0256 0.0232 0.0186 0.0121 0.0066 0.0024 0.0022 0.0018 0.0017 0.0016 0.0016 0.0013 0.0009 0.0009 0.0008 0.0006 0.0006 0.0008 0.0011 0.0013 0.0021 0.002 0.0017 0.0017 0.0015 0.0012 0.0013 0.001 0.0012 0.0011 0.0009 0.0012 0.0013 0.0018 0.0029 0.0059 0.0094 0.0118 0.0148 0.0173 0.0187 0.0189 0.0169 0.0138 0.009 0.0043 0.0029 0.0023 0.0019 0.0021 0.0022 0.0023 0.0021 0.0017 0.0011 0.0014 0.003 0.0069 0.011 0.0139 0.0161 0.0168 0.0151 0.0123 0.0089 0.0056 0.0026 0.0024 0.0021 0.002 0.0018 0.0014 0.0012 0.001 0.0012 0.001 0.0008 0.0007 0.0028 0.0064 0.0089 0.0121 0.0136 0.0128 0.0114 0.008 0.003 0.0018 0.0016 0.002 0.0022 0.0021 0.002 0.0017 0.0018 0.0019 0.002 0.0021 0.0024 0.0023 0.0021 0.002 0.0019 0.002 0.0018 0.0017 0.002 0.0021 0.0021 0.0022 0.002 0.0022 0.0022 0.0022 0.0022 0.0027 0.0026 0.0028 0.0028 0.0028 0.0026 0.0025 0.0024 0.0023 0.0022 0.0023 0.0023 0.0025 0.0022 0.0021 0.0021 0.0021 0.0022 0.0021 0.0022 0.0021 0.0019 0.0019 0.0017 0.0021 0.0021 0.002 0.0018 0.0014 0.0017 0.0022 0.0026 0.003 0.003 0.0029 0.0027 0.0025 0.0023 0.0022 0.0023 0.0024 0.0023 0.0022 0.0024 0.0025 0.0036 0.006 0.0078 0.0103 0.0109 0.0095 0.0081 0.0037 0.0021 0.0025 0.0023 0.0022 0.0019 0.0018 0.0015 0.0015 0.0012 0.0009 0.0009 0.0009 0.0006 0.0004 0.0007 0.0012 0.0018 0.0022 0.0019 0.0019 0.002 0.0021 0.0025 0.0023 0.0023 0.0021 0.0021 0.002 0.0018 0.0021 0.0022 0.0023 0.0024 0.003 0.0048 0.0086 0.0123 0.0156 0.0152 0.0138 0.0105 0.0034 0.0029 0.0032 0.0024 0.0021 0.0019 0.0015 0.0014 0.0009 0.0008 0.0005 0.0005 0.0009 0.0018 0.0053 0.0094 0.013 0.0149 0.0143 0.0126 0.0085 0.0037 0.0025 0.0019 0.0018 0.0018 0.0019 0.002 0.0018 0.0017 0.002 0.0023 0.0025 0.0029 0.0037 0.006 0.0096 0.0113 0.0114 0.0105 0.0076 0.004 0.0036 0.0037 0.0033 0.003 0.0026 0.0021 0.002 0.0019 0.0026 0.0026 0.0025 0.0022 0.0023 0.0021 0.0024 0.0023 0.002 0.002 0.0016 0.0016 0.0017 0.0017 0.0018 0.0019 0.0018 0.0017 0.0017 0.0018 0.0016 0.0014 0.0015 0.0037 0.0057 0.0095 0.0155 0.021 0.0253 0.0247 0.0218 0.0163 0.0083 0.0032 0.0026 0.0028 0.0025 0.0025 0.002 0.0015 0.0015 0.0014 0.0012 0.0011 0.0011 0.0011 0.0011 0.0011 0.001 0.001 0.0012 0.0011 0.0011 0.0009 0.0008 0.0009 0.0007 0.0007 0.0008 0.0012 0.0012 0.0013 0.0014 0.0013 0.0015 0.0015 0.0012 0.0013 0.0012 0.0012 0.0011 0.0008 0.001 0.0009 0.001 0.0013 0.0013 0.0015 0.0014 0.0016 0.0017 0.0019 0.0023 0.0024 0.0025 0.0025 0.0022 0.0019 0.0022 0.0022 0.0023 0.0018 0.0019 0.0022 0.0023 0.0026 0.0027 0.0027 0.0028 0.0024 0.0025 0.0046 0.011 0.0155 0.0184 0.0213 0.022 0.022 0.0215 0.0204 0.0183 0.0149 0.0111 0.0077 0.0046 0.0028 0.0029 0.0028 0.0029 
0.0027 0.0026 0.0025 0.0024 0.002 0.0022 0.002 0.0019 0.0017 0.0016 0.0016 0.0016 0.0016 0.0015 0.0018 0.0018 0.0017 0.0018 0.0018 0.0014 0.0015 0.0011 0.0012 0.0024 0.008 0.0151 0.0193 0.0223 0.0252 0.0259 0.0264 0.0275 0.0254 0.0201 0.0158 0.0088 0.0029 0.0025 0.0024 0.0025 0.0026 0.0023 0.0021 0.0021 0.0019 0.0016 0.0014 0.0015 0.0017 0.002 0.002 0.0019 0.002 0.0015 0.0018 0.0018 0.0017 0.002 0.0018 0.0017 0.0018 0.0016 0.0015 0.0015 0.0016 0.0018 0.0016 0.0017 0.0015 0.0015 0.0014 0.0015 0.0016 0.0018 0.0017 0.0018 0.0025 0.0033 0.0034 0.0039 0.0044 0.0047 0.0061 0.0067 0.007 0.0066 0.0059 0.0047 0.0036 0.0032 0.0027 0.0026 0.0029 0.0031 0.0035 0.0035 0.0029 0.0025 0.0021 0.0017 0.0012 0.0008 0.0005 0.0005 0.0004 0.0003 0.0002 0.0003 0.0006 0.0005 0.0007 0.0008 0.0009 0.0008 0.0011 0.0013 0.0018 0.002 0.0021 0.0021 0.0018 0.0017 0.0017 0.0016 0.0013 0.0013 0.001 0.001 0.001 0.0011 0.0012 0.0011 0.0009 0.001 0.0011 0.0013 0.0014 0.0015 0.001 0.0009 0.0007 0.0007 0.0005 0.0006 0.0006 0.0005 0.0006 0.0008 0.0012 0.0018 0.0041 0.01 0.0163 0.0187 0.0189 0.0174 0.0117 0.0075 0.0052 0.0025 0.0023 0.0022 0.0021 0.0027 0.0023 0.0016 0.0017 0.0017 0.0016 0.0017 0.0014 0.0014 0.001 0.0011 0.0009 0.0011 0.0008 0.001 0.0009 0.0007 0.0009 0.001 0.0008 0.001 0.0007 0.0006 0.0006 0.0004 0.0004 0.0005 0.0005 0.0006 0.0008 0.0008 0.0008 0.0008 0.0007 0.0011 0.0016 0.0021 0.0022 0.0024 0.0024 0.0024 0.0023 0.0023 0.0021 0.0021 0.0019 0.0017 0.0015 0.0017 0.0015 0.0017 0.0013 0.0014 0.0015 0.0016 0.0017 0.0018 0.0017 0.0017 0.0017 0.0016 0.0013 0.0013 0.0012 0.0012 0.0011 0.001 0.0008 0.0006 0.0008 0.0007 0.0009 0.0009 0.0008 0.0007 0.0011 0.0013 0.0016 0.0022 0.0027 0.003 0.0035 0.0033 0.0033 0.003 0.0025 0.0023 0.0017 0.0015 0.0013 0.0012 0.0021 0.0036 0.0055 0.0065 0.0064 0.0057 0.0044 0.004 0.0038 0.0037 0.0037 0.0034 0.0033 0.0024 0.0017 0.0013 0.0012 0.0011 0.0009 0.0008 0.0008 0.0018 0.0044 0.0081 0.0115 0.0135 0.0128 0.0112 0.0072 0.0036 0.0034 0.0029 0.0019 0.0019 0.0018 0.0018 0.0019 0.0017 0.0016 0.0032 0.0054 0.0081 0.014 0.016 0.016 0.0148 0.01 0.0058 0.003 0.0026 0.0025 0.0022 0.0016 0.0015 0.0013 0.001 0.0006 0.0007 0.0006 0.0005 0.0004 0.0011 0.0048 0.0097 0.0139 0.0172 0.0179 0.0163 0.0129 0.0077 0.0025 0.0021 0.0019 0.0019 0.0017 0.0015 0.0015 0.0019 0.0019 0.0023 0.0021 0.0015 0.0013 0.0009 0.0008 0.0008 0.0012 0.0014 0.0018 0.002 0.002 0.0019 0.002 0.0018 0.0016 0.0012 0.0011 0.001 0.001 0.0012 0.001 0.0038 0.0075 0.0093 0.0107 0.0108 0.0094 0.0078 0.006 0.0048 0.003 0.0023 0.0025 0.0025 0.0027 0.0029 0.0028 0.0029 0.0029 0.0027 0.0026 0.0023 0.0021 0.0023 0.0024 0.0025 0.0024 0.0021 0.002 0.002 0.0017 0.0016 0.0015 0.0012 0.0012 0.0011 0.001 0.0008 0.0008 0.0009 0.0011 0.0012 0.0013 0.0009 0.001 0.0011 0.0012 0.0012 0.0013 0.0014 0.002 0.0032 0.0039 0.0048 0.0052 0.0052 0.0054 0.0051 0.0049 0.0046 0.004 0.003 0.0021 0.0016 0.0011 0.0009 0.0012 0.0032 0.0065 0.0079 0.009 0.0084 0.0059 0.0037 0.0025 0.002 0.0014 0.001 0.0009 0.0006 0.0007 0.0006 0.0005 0.0004 0.0008 0.0006 0.0006 0.0005 0.0006 0.0007 0.0008 0.0008 0.0009 0.0011 0.0011 0.0012 0.0011 0.001 0.0014 0.0015 0.0017 0.0016 0.0015 0.0015 0.0024 0.0057 0.0105 0.0141 0.0167 0.0168 0.0139 0.0103 0.0049 0.0022 0.0009 0.001 0.0009 0.0007 0.0006 0.0005 0.0004 0.0004 0.0006 0.0004 0.0007 0.0007 0.0005 0.0006 0.0004 0.0004 0.0007 0.0009 0.0014 0.0014 0.0018 0.0018 0.002 0.0019 0.0021 0.0018 0.0017 0.0017 0.0016 0.0016 0.0022 0.0048 0.0081 0.0112 0.0144 0.0156 0.0153 0.0134 0.0103 0.0071 0.0043 0.002 0.0015 0.0012 0.0011 0.001 0.001 
0.0011 0.0009 0.001 0.0006 0.0008 0.0014 0.0022 0.0027 0.0055 0.0066 0.0065 0.0062 0.0043 0.0016 0.0013 0.0012 0.0012 0.0011 0.0011 0.0011 0.0014 0.0015 0.0013 0.0014 0.0012 0.0012 0.0014 0.0016 0.0014 0.0015 0.0012 0.0013 0.0014 0.0012 0.0014 0.0013 0.0012 0.0012 0.0012 0.0011 0.0011 0.0012 0.0015 0.0019 0.0019 0.0023 0.0038 0.008 0.0116 0.0143 0.0155 0.015 0.0123 0.0092 0.0059 0.0025 0.002 0.0016 0.0017 0.0015 0.0015 0.0013 0.0011 0.0006 0.0006 0.0005 0.0005 0.0004 0.0004 0.0014 0.0031 0.0054 0.0061 0.0063 0.0061 0.0041 0.0032 0.0028 0.0023 0.002 0.0022 0.0022 0.0022 0.0021 0.0019 0.0017 0.0018 0.0023 0.003 0.003 0.0036 0.0047 0.005 0.005 0.0044 0.0033 0.0029 0.0027 0.0026 0.0025 0.0024 0.0022 0.0023 0.0022 0.0022 0.0021 0.002 0.0018 0.0029 0.0045 0.01 0.0157 0.0179 0.0182 0.0164 0.011 0.0057 0.0034 0.0024 0.0019 0.0018 0.0015 0.0011 0.0011 0.001 0.001 0.0007 0.0007 0.0007 0.0006 0.0004 0.0008 0.0012 0.0016 0.0018 0.0019 0.0017 0.0016 0.0015 0.0021 0.0021 0.0022 0.0023 0.0023 0.0025 0.0025 0.0027 0.0078 0.0133 0.0173 0.0213 0.0246 0.0241 0.0232 0.0201 0.0147 0.0099 0.0052 0.0027 0.0026 0.0026 0.0022 0.002 0.0016 0.0017 0.0013 0.0013 0.0014 0.002 0.0021 0.0034 0.0029 0.2076 0.0039 0.005 0.0038 0.0026 0.0025 0.002 0.0021 0.0021 0.002 0.0018 0.0017 0.0017 0.002 0.002 0.0021 0.0019 0.0017 0.0016 0.0017 0.0015 0.0016 0.0015 0.0014 0.0013 0.0013 0.0013 0.0015 0.0018 0.0018 0.0018 0.0014 0.0012 0.0007 0.0006 0.0007 0.0008 0.0007 0.0005 0.0006 0.0003 0.0004 0.0006 0.0003 0.0003 0.0006 0.0012 0.0015 0.0013 0.0012 0.0005 0.0003 0.0 0.0 0.0002 0.0001 0.0001 0.0004 0.0003 0.0005 0.0008 0.0006 0.0004 0.0003 0.0004 0.0004", + "breathiness": "-77.9261 -78.0903 -76.2631 -74.8024 -72.7392 -70.6521 -67.7797 -65.0427 -62.2356 -60.283 -58.9152 -57.657 -56.4598 -55.4988 -54.1626 -53.0342 -51.7234 -50.6935 -49.6901 -49.2752 -48.9963 -48.3282 -48.1203 -48.0416 -48.2354 -48.6535 -49.0555 -49.6931 -50.3222 -50.7522 -51.208 -51.4784 -51.7017 -51.8718 -52.6906 -54.4327 -56.6939 -58.6859 -59.4552 -58.6372 -56.5247 -53.0762 -49.1123 -45.1933 -42.1919 -40.3061 -39.44 -39.0662 -38.1855 -36.9468 -35.4028 -33.5423 -31.3892 -30.1669 -29.2582 -29.4232 -31.1588 -34.9103 -40.1299 -45.8555 -51.1102 -55.7268 -58.8539 -59.9883 -59.2415 -56.8747 -53.4402 -49.266 -45.1146 -41.3956 -39.2154 -39.4396 -41.0482 -43.6996 -46.5231 -49.5859 -52.0246 -53.6472 -54.7872 -55.5414 -56.4191 -57.791 -59.2232 -60.5747 -62.3509 -63.5545 -64.3711 -64.4422 -63.769 -62.6182 -61.0781 -59.1721 -57.4495 -56.1941 -55.4036 -54.8388 -54.4998 -54.9707 -55.2159 -55.7066 -56.2726 -56.9703 -57.6084 -58.0207 -57.3289 -55.9763 -53.4012 -50.2001 -46.7445 -43.445 -40.8462 -39.7875 -40.2339 -41.9017 -44.4719 -46.8802 -49.2627 -51.1752 -52.4755 -53.076 -53.4337 -54.1196 -55.0037 -56.0799 -57.2087 -57.7683 -57.2377 -56.1051 -54.6576 -52.9171 -51.8284 -51.7303 -52.747 -54.5095 -56.3248 -58.0623 -58.8053 -59.5717 -59.4777 -59.2224 -58.9268 -58.6744 -58.1953 -57.6441 -56.4716 -54.5453 -51.8135 -48.9228 -46.031 -43.5381 -42.0972 -41.989 -42.9484 -44.5465 -46.4306 -48.7561 -50.5731 -52.154 -53.2181 -53.7829 -53.72 -53.8006 -53.861 -53.8514 -53.9458 -53.9402 -53.8919 -53.784 -53.6478 -53.8479 -54.0031 -54.0994 -54.2762 -54.3176 -54.4375 -54.5865 -54.9329 -55.3378 -55.8893 -56.6698 -57.3044 -57.7825 -58.3743 -58.8302 -59.2754 -59.8667 -60.3745 -60.6244 -60.387 -60.2471 -59.7963 -59.4721 -59.2325 -58.5929 -57.8719 -56.5051 -55.0353 -53.4513 -51.705 -50.2252 -49.1396 -48.3749 -48.3673 -49.04 -49.9823 -51.6421 -53.4252 -55.3029 -56.5452 -56.9545 -56.0531 -55.0589 -53.2923 
-51.5472 -50.0946 -49.4378 -49.2409 -49.7762 -50.8221 -51.7013 -52.7055 -53.627 -54.5583 -54.9562 -55.1803 -54.7141 -53.3633 -51.0827 -47.8211 -44.4101 -41.239 -38.3406 -36.4092 -36.1493 -37.3552 -40.2588 -43.7335 -47.5777 -51.2832 -54.653 -57.0313 -58.73 -59.8239 -60.6527 -61.3869 -61.9588 -62.3377 -62.195 -61.1881 -59.5415 -57.4112 -55.1505 -53.231 -52.1181 -52.0213 -52.8942 -54.2539 -55.8951 -57.6109 -58.8824 -59.6433 -60.212 -60.41 -60.9009 -61.3917 -61.6983 -61.8809 -62.2297 -62.2969 -62.416 -62.3289 -62.227 -62.0787 -61.6551 -60.8701 -60.0313 -58.9485 -57.8849 -57.2801 -56.7613 -56.5449 -56.5031 -56.5683 -56.3914 -56.1876 -55.6764 -54.9742 -54.343 -53.2269 -51.6809 -49.4848 -47.1698 -44.8115 -43.1207 -42.2828 -42.6602 -43.7726 -45.6741 -48.0121 -50.1337 -52.327 -54.2586 -55.792 -56.9707 -57.6805 -58.2343 -58.6292 -58.4155 -58.4774 -58.4001 -58.3355 -58.1054 -57.8652 -57.6058 -57.0236 -56.906 -56.4872 -55.7002 -55.2724 -54.3938 -53.9095 -53.8322 -53.8672 -54.1222 -54.4693 -54.7192 -55.1077 -55.2981 -55.7202 -55.982 -56.2427 -56.4205 -56.4814 -56.6081 -56.7813 -56.4498 -56.45 -56.4297 -56.0621 -56.0394 -55.8769 -56.2929 -56.2255 -56.8323 -57.4993 -57.9356 -58.4243 -58.7099 -59.1169 -59.6763 -60.0936 -60.5839 -61.073 -61.3599 -60.9897 -60.738 -60.2484 -59.2846 -57.9715 -55.9241 -53.6702 -51.5035 -49.2467 -47.6265 -46.3479 -45.9397 -46.2302 -47.6134 -49.7277 -51.976 -55.2113 -58.0027 -59.6374 -58.9463 -56.5144 -52.242 -47.583 -43.0217 -39.7163 -39.039 -40.0756 -42.3343 -45.4283 -48.2032 -50.95 -53.5302 -55.1137 -55.8068 -55.4584 -54.5174 -52.6173 -49.9994 -47.1192 -44.3892 -41.8521 -39.6272 -38.4197 -38.2847 -39.4756 -41.315 -44.1393 -47.0889 -50.0904 -52.966 -55.1455 -57.0333 -58.5881 -59.4848 -60.0957 -60.0443 -59.3472 -57.9176 -55.8281 -53.031 -49.6768 -46.0265 -42.9087 -40.6281 -39.9629 -40.4528 -42.6504 -45.6216 -48.8949 -51.9719 -54.4259 -55.7075 -55.8594 -54.9001 -53.0689 -50.0256 -46.08 -41.5631 -36.9622 -33.045 -30.1599 -28.3175 -27.7701 -27.6168 -28.052 -29.4024 -32.0538 -35.4794 -39.7747 -44.2099 -48.3331 -51.5751 -53.9572 -55.0552 -55.5661 -55.7829 -55.8806 -55.9431 -55.7763 -55.8035 -55.8636 -55.8882 -55.7943 -55.912 -55.9405 -56.0182 -56.3915 -56.7725 -57.2585 -57.5892 -57.7929 -57.8544 -57.6778 -57.5406 -57.3694 -57.1537 -56.7209 -55.7916 -54.5115 -52.9013 -50.9923 -48.972 -47.1427 -45.6013 -44.9333 -45.6133 -47.0119 -48.688 -50.197 -51.6876 -52.677 -53.4486 -53.9403 -54.2091 -54.4937 -55.2756 -56.1024 -56.7195 -57.0407 -57.2731 -57.0219 -57.0068 -56.6734 -56.5089 -56.3141 -56.071 -55.7356 -55.4771 -55.1918 -54.7717 -54.8187 -54.8779 -54.7286 -54.7066 -54.7074 -54.4607 -54.2758 -54.4556 -54.6992 -54.8874 -55.2552 -55.1802 -55.0318 -54.7044 -54.2231 -53.6334 -52.9065 -52.5546 -52.1822 -51.9643 -51.6933 -52.0157 -52.0873 -52.5247 -53.0965 -53.6541 -54.1325 -54.6661 -55.0493 -55.4324 -55.8447 -56.2834 -56.8062 -57.1815 -57.3325 -57.279 -57.1174 -56.6638 -56.5022 -56.692 -57.2133 -58.2307 -59.2061 -60.207 -60.9819 -61.5772 -61.7686 -61.9034 -61.6614 -61.2397 -60.6926 -59.9778 -59.137 -58.495 -57.8979 -57.4376 -57.0544 -56.5908 -56.1259 -55.826 -55.6976 -55.3793 -55.3689 -55.1443 -55.0911 -54.7188 -54.5747 -54.2038 -53.7221 -53.4644 -53.2432 -53.1507 -53.0689 -53.0316 -53.0287 -53.0281 -53.3716 -53.5805 -54.0265 -54.1903 -54.2993 -53.9858 -53.5587 -52.9057 -52.2628 -51.8576 -51.3651 -51.0941 -51.0235 -50.9377 -50.3864 -49.2946 -47.6935 -46.0426 -43.9159 -42.1514 -40.7061 -40.3149 -40.6343 -41.8856 -43.4581 -45.5725 -47.8075 -49.5127 -51.0868 -52.4023 -52.7371 -53.492 -53.7828 
-54.0977 -54.0005 -53.983 -53.5527 -53.0572 -52.772 -52.6173 -52.2943 -52.1859 -52.0156 -52.0335 -52.1385 -52.2598 -52.6446 -53.0037 -53.3088 -53.2599 -53.1233 -52.6165 -51.3749 -49.7342 -47.3827 -44.7066 -41.6934 -38.54 -35.5435 -32.9638 -31.0317 -30.1352 -29.6037 -29.8207 -30.998 -32.8318 -35.6292 -38.9798 -42.2345 -45.0336 -47.3745 -48.8155 -49.7002 -50.4047 -50.9738 -51.7992 -52.6304 -53.0443 -53.5814 -53.7897 -53.6319 -53.5987 -53.7089 -53.8932 -54.0184 -54.1012 -53.9636 -53.6736 -53.3835 -52.9764 -52.9107 -52.6627 -52.6313 -52.8419 -53.0261 -53.4053 -53.8335 -54.5016 -54.9823 -55.3855 -55.6365 -55.3996 -55.3171 -54.9891 -54.5909 -53.7837 -52.7339 -51.6875 -50.2036 -48.8224 -47.2403 -45.867 -44.5942 -43.5984 -42.8749 -42.8432 -43.6685 -45.3326 -47.7636 -50.5606 -53.5107 -55.5262 -56.272 -55.9273 -54.6968 -52.856 -50.8973 -49.5901 -48.3427 -48.2112 -49.1394 -50.6358 -52.5245 -54.2295 -55.282 -54.9634 -53.0177 -49.3607 -44.9151 -40.02 -35.202 -31.2621 -28.0321 -26.1198 -24.879 -24.6501 -25.3803 -27.3643 -29.9051 -33.4108 -37.0644 -41.4103 -44.9869 -48.0865 -50.1019 -51.3572 -51.6594 -51.6249 -51.1872 -50.677 -49.7162 -48.1479 -46.337 -44.1884 -42.012 -40.3614 -39.4749 -39.5991 -41.0636 -43.412 -46.0775 -48.7501 -51.0002 -52.8504 -54.2471 -55.0079 -55.7183 -56.1087 -56.0948 -54.6029 -52.2832 -48.9449 -45.3034 -41.5826 -38.3082 -35.8447 -35.2638 -35.473 -37.0979 -39.22 -42.0164 -44.7975 -47.3437 -49.5743 -50.8072 -51.8514 -52.5344 -52.7209 -53.2039 -53.2251 -53.159 -53.3191 -53.3871 -53.4278 -53.5876 -53.7652 -53.9746 -54.0325 -54.2752 -54.3687 -54.4382 -54.8704 -54.8615 -55.1245 -55.385 -55.7996 -56.2621 -56.4669 -56.7022 -56.9252 -56.8937 -56.2129 -54.8778 -53.2722 -51.6275 -49.7701 -48.4397 -47.9101 -48.4635 -49.7203 -51.3593 -52.5931 -53.6014 -54.2699 -54.7474 -54.9113 -55.0663 -55.9393 -56.3525 -57.232 -58.157 -58.9554 -59.4873 -60.0375 -60.3382 -60.4488 -60.5526 -60.3136 -60.0475 -59.6873 -59.4661 -59.0868 -59.0829 -59.02 -59.2034 -59.5879 -60.1012 -60.8225 -61.1265 -61.8761 -62.5739 -63.108 -63.4622 -63.4595 -62.1307 -59.7983 -57.1648 -54.4842 -51.6715 -50.0629 -48.9576 -48.6408 -48.7897 -48.7501 -48.7915 -48.5888 -48.3145 -48.244 -48.7746 -50.1017 -51.8732 -54.5117 -57.2505 -58.8298 -58.6658 -56.6033 -53.1994 -49.1073 -45.4573 -42.2612 -40.5496 -40.9421 -41.945 -44.0307 -46.0088 -48.1078 -49.6336 -50.8407 -51.5962 -52.5235 -53.1539 -53.7114 -53.7297 -53.3078 -51.9321 -49.664 -46.8872 -43.3878 -40.6588 -38.2323 -37.1636 -37.4767 -39.1193 -41.7183 -45.082 -48.6627 -52.1445 -54.688 -56.255 -56.8476 -56.7238 -55.7811 -53.5298 -50.4202 -46.5245 -42.9523 -39.6185 -37.0265 -35.5304 -35.1192 -35.6601 -36.8018 -38.6167 -41.3744 -44.2275 -47.6159 -50.9039 -53.6826 -56.324 -58.827 -60.8817 -62.9867 -64.5005 -65.3255 -65.2913 -64.5362 -63.1558 -61.3377 -59.5414 -57.6769 -56.08 -55.068 -54.4224 -53.9997 -54.0107 -54.2232 -54.613 -55.2031 -55.8676 -56.1685 -56.4367 -56.2866 -55.4501 -53.461 -50.7641 -47.2667 -43.9216 -40.465 -37.6401 -35.9086 -35.0412 -34.8338 -35.4704 -37.0468 -39.4607 -42.42 -45.8042 -49.1043 -52.0129 -54.3191 -56.1074 -57.1534 -57.8722 -57.9928 -57.7109 -57.4414 -56.8917 -56.2746 -55.5138 -54.4798 -53.5406 -52.7814 -52.3099 -51.7138 -51.6032 -51.5313 -50.9142 -51.0096 -50.8523 -50.9529 -51.1577 -51.5496 -52.1205 -52.7961 -53.4885 -54.1658 -54.8466 -55.1783 -55.5178 -55.504 -55.7352 -55.7154 -55.7396 -55.7294 -55.7036 -55.6414 -55.7303 -55.9092 -56.2173 -56.938 -57.8112 -58.7568 -59.783 -60.3853 -61.1195 -61.6076 -61.9125 -62.408 -62.4263 -62.2803 -61.7731 -61.0499 -59.7101 
-58.3497 -56.9596 -55.7975 -54.7535 -53.4834 -52.2833 -51.3536 -50.8129 -50.9429 -51.3578 -52.6778 -54.3589 -56.634 -58.7269 -60.6742 -61.7817 -61.7403 -60.4137 -58.3067 -55.7396 -53.1556 -51.0151 -49.5587 -49.0255 -49.4186 -49.9278 -50.8607 -51.9482 -52.7984 -53.9226 -54.9669 -55.7715 -56.9521 -57.7312 -58.624 -59.1876 -59.6126 -60.2451 -60.4965 -60.873 -61.2935 -61.4613 -61.4349 -61.204 -60.9927 -60.3706 -59.8811 -59.3318 -59.134 -59.0597 -58.3916 -57.4637 -55.92 -54.3325 -53.3826 -52.6218 -51.6089 -51.2004 -50.6258 -49.8976 -49.0722 -47.9472 -47.3098 -47.6843 -48.4002 -49.4147 -50.2655 -50.905 -51.547 -51.7844 -51.9461 -52.2187 -52.375 -52.4511 -52.1256 -51.791 -50.708 -49.4198 -47.9763 -46.9803 -46.5112 -46.7041 -47.3576 -48.2611 -49.6466 -50.4557 -51.505 -52.2309 -53.0422 -54.0114 -55.4134 -56.9137 -58.3251 -59.9749 -60.3209 -59.4136 -56.9438 -53.2924 -49.0824 -45.0582 -41.8753 -39.6021 -39.6617 -40.8961 -43.411 -46.5883 -50.0575 -53.4763 -56.3712 -58.7493 -60.3553 -61.2613 -61.5463 -61.1157 -60.9158 -59.5288 -57.388 -54.3755 -50.8004 -47.2499 -43.4532 -40.6987 -39.2967 -39.8833 -41.8031 -44.5321 -47.6162 -50.7069 -53.2771 -54.991 -55.614 -55.5348 -55.4527 -55.343 -55.0969 -55.1877 -55.3541 -55.3874 -55.5051 -55.5499 -55.4475 -55.5819 -55.6876 -55.2902 -55.2427 -55.1176 -55.1083 -55.2654 -55.6532 -55.9187 -56.0203 -55.9685 -55.7914 -55.4954 -55.2519 -55.2664 -55.5975 -55.8187 -56.2749 -56.2979 -56.2689 -56.3933 -55.8967 -55.5313 -55.3184 -54.9728 -55.1295 -54.9244 -55.524 -55.5814 -55.86 -56.2416 -56.7841 -57.2793 -58.0912 -58.8088 -59.8297 -60.8628 -61.751 -62.4256 -62.6496 -62.6071 -62.0631 -61.5595 -60.6919 -59.6694 -58.8888 -58.0411 -57.3365 -56.8245 -56.6847 -56.4922 -56.5887 -57.1848 -58.0955 -59.272 -60.6457 -62.434 -63.8309 -65.0548 -65.0969 -64.1538 -62.13 -59.683 -57.0361 -54.7682 -53.4407 -53.0934 -53.669 -54.7165 -55.6542 -56.2521 -56.7692 -57.079 -57.2488 -57.8712 -58.1534 -58.9047 -59.2442 -59.5853 -60.0032 -60.6461 -61.3613 -61.9223 -62.4965 -63.157 -63.3848 -63.9363 -64.1216 -64.414 -64.3592 -64.1308 -63.7386 -62.7819 -61.9252 -60.574 -59.445 -58.4608 -57.5302 -56.7 -56.0565 -55.6151 -55.3495 -54.8544 -54.2913 -53.5333 -52.9612 -52.1733 -51.6123 -51.2908 -51.1901 -51.2563 -51.244 -51.2568 -51.4928 -51.4636 -51.5733 -51.8655 -52.2841 -52.9694 -53.3125 -53.7435 -53.9744 -54.3131 -54.6018 -54.9498 -55.4149 -55.8537 -56.2819 -56.857 -57.211 -57.7614 -58.0668 -58.149 -57.3802 -55.8094 -52.8366 -49.1495 -45.1081 -40.9575 -37.3489 -34.4939 -32.8915 -32.045 -32.3118 -33.0071 -34.002 -35.5848 -38.1767 -40.9474 -43.7875 -46.4852 -48.6153 -50.0943 -50.8973 -51.017 -51.0396 -51.265 -51.3999 -51.8646 -52.3309 -52.6814 -53.021 -52.99 -53.3555 -53.4763 -54.0345 -54.4614 -54.445 -54.3721 -54.0303 -53.633 -53.2856 -53.3157 -53.4814 -53.722 -53.998 -54.4376 -54.6363 -55.1306 -55.6908 -56.5747 -57.0097 -57.389 -57.6248 -56.9548 -56.0661 -55.1612 -54.0106 -52.9357 -51.593 -50.5209 -49.4093 -48.4171 -47.6654 -47.0631 -47.1889 -47.6463 -48.7091 -50.2263 -51.1247 -51.3096 -50.7182 -48.8617 -46.4609 -43.5964 -40.6223 -38.1446 -37.0731 -36.8346 -37.6066 -39.333 -41.456 -43.4444 -45.29 -47.101 -49.0612 -51.4633 -53.7477 -56.4419 -59.0151 -61.2254 -62.4338 -61.7744 -59.3872 -55.7323 -51.6111 -47.23 -43.4477 -41.1086 -40.7608 -42.2399 -44.6557 -47.2896 -49.8793 -51.8526 -53.4225 -54.4821 -55.2357 -56.3879 -57.8021 -59.1793 -61.1592 -62.5095 -63.6317 -63.8983 -63.5188 -62.1541 -60.4591 -58.5019 -57.0434 -55.8769 -54.5604 -53.9051 -53.5165 -53.5331 -53.8947 -54.4631 -54.9837 -55.4469 -56.06 
-56.1939 -56.0157 -55.9746 -55.39 -55.2285 -54.9936 -54.6927 -54.6444 -54.7084 -54.7789 -55.366 -56.068 -57.0481 -58.3824 -59.5115 -60.7754 -61.9539 -62.9129 -63.7654 -64.6964 -65.4744 -66.2442 -66.5337 -65.6445 -63.5155 -60.8122 -57.8389 -54.5303 -52.3016 -51.2249 -51.9916 -53.6469 -55.726 -57.726 -59.4954 -60.4558 -60.8175 -60.7903 -60.3173 -59.94 -59.3143 -58.7691 -58.6789 -59.04 -59.6261 -60.5021 -61.5118 -62.2154 -62.258 -62.0168 -61.0499 -59.773 -58.2191 -56.7594 -55.6078 -54.9488 -54.8959 -54.8457 -55.0064 -55.1661 -55.1615 -55.1977 -55.0625 -54.669 -54.5438 -54.6637 -54.4981 -54.836 -55.5038 -55.9996 -56.5774 -56.6866 -56.8462 -56.7784 -56.5239 -56.4519 -56.5654 -56.7635 -57.0673 -57.2838 -57.8807 -58.2054 -58.8825 -59.568 -60.2324 -60.8173 -61.2867 -60.8132 -60.0001 -58.7721 -57.2707 -55.9372 -54.1542 -52.9069 -51.6014 -50.3378 -49.2774 -48.072 -47.3208 -46.9304 -46.8654 -47.1852 -48.3617 -49.933 -51.6645 -53.7104 -55.3583 -56.5273 -56.9183 -56.4568 -55.8947 -54.9565 -53.8648 -52.703 -51.4319 -50.8293 -50.2249 -50.2975 -50.4186 -51.2953 -51.9385 -53.0334 -53.8318 -53.7219 -52.6318 -50.7234 -47.9858 -44.7333 -41.8379 -39.1044 -37.1163 -36.1693 -36.4681 -38.1282 -40.9945 -44.3443 -47.9292 -51.0958 -53.6812 -55.3618 -56.4939 -56.9154 -57.1857 -57.4823 -57.4638 -57.4208 -56.8959 -56.3355 -55.1743 -53.4684 -51.7295 -50.4807 -49.3834 -49.9224 -50.8541 -52.7642 -54.845 -56.8999 -58.5942 -59.7144 -60.456 -60.9673 -61.5199 -61.8546 -62.4892 -63.0884 -63.6896 -64.1388 -64.2362 -64.375 -63.9879 -63.1872 -61.7857 -60.66 -59.4207 -58.202 -57.5715 -57.1604 -56.887 -56.8799 -56.9902 -57.0833 -57.1328 -57.2123 -57.3449 -56.8078 -55.7428 -53.807 -51.4175 -48.7141 -46.1147 -43.9538 -42.6689 -42.4762 -43.6516 -45.5328 -48.0069 -50.3706 -52.3483 -54.2147 -55.4329 -56.0868 -56.7413 -57.4795 -58.0813 -57.8621 -56.1578 -53.0273 -49.1407 -44.6647 -40.5923 -37.0927 -34.622 -34.2234 -34.8173 -36.2742 -38.7572 -41.4 -44.3217 -47.0127 -49.0389 -50.482 -51.3688 -51.9274 -52.2794 -52.3889 -52.7673 -53.2534 -53.7368 -54.1635 -54.5202 -54.6181 -54.7245 -54.9134 -55.01 -55.1676 -55.1405 -55.2696 -55.3027 -55.2355 -55.5112 -56.0832 -56.7131 -57.4068 -58.0231 -58.6942 -58.8703 -59.1634 -59.532 -60.0448 -60.4403 -60.0776 -59.275 -58.2662 -57.1404 -55.801 -54.7229 -53.7035 -52.5352 -51.7196 -50.483 -49.343 -48.5213 -47.7762 -47.4278 -47.7171 -48.0389 -48.9429 -50.0256 -51.2512 -52.7578 -53.8992 -54.81 -55.0888 -55.2719 -55.3533 -55.3531 -54.9046 -54.702 -54.4147 -54.6429 -55.0404 -55.5882 -56.7165 -57.9163 -58.7267 -59.0283 -58.6468 -56.7922 -54.2956 -50.589 -46.6633 -42.9799 -39.8633 -37.6916 -36.6576 -36.6921 -38.1557 -40.3969 -43.5986 -46.9222 -50.2011 -53.1496 -55.4795 -56.9271 -57.9603 -58.5927 -58.7988 -59.077 -58.8836 -59.1855 -58.9176 -59.0355 -58.9724 -58.375 -57.4882 -56.2577 -55.1946 -53.9212 -53.0607 -52.53 -52.4347 -52.4839 -52.4993 -52.532 -52.4142 -52.2753 -51.2443 -49.7897 -47.5867 -45.0634 -42.2504 -39.0152 -36.4073 -34.1219 -32.6047 -31.7309 -31.7035 -32.9245 -35.5449 -38.948 -43.6931 -47.9149 -51.6577 -55.1371 -57.2052 -58.4329 -59.1038 -59.278 -59.2622 -59.2878 -59.0295 -58.6682 -58.406 -58.6915 -59.1546 -59.8886 -60.6378 -61.1341 -60.9546 -60.6293 -59.4556 -58.4477 -57.3165 -56.3266 -55.2099 -54.5133 -53.7133 -53.1868 -52.5596 -52.464 -52.5062 -52.7535 -53.1591 -53.3206 -53.0893 -52.8737 -52.4121 -51.9209 -51.6973 -51.581 -51.616 -51.8044 -52.2281 -52.5692 -53.0154 -53.7082 -54.3201 -54.8525 -55.3146 -55.625 -55.9447 -56.0027 -55.8886 -55.672 -55.7697 -55.7908 -55.7492 -55.918 -56.0763 -56.5502 
-56.8916 -57.0862 -57.5339 -57.6169 -57.6807 -57.5123 -57.5459 -57.5467 -57.6298 -57.4872 -57.5686 -56.6822 -55.2669 -53.0329 -49.99 -46.8783 -43.6503 -40.9511 -39.7947 -40.3335 -42.0339 -44.4903 -47.1591 -49.6836 -51.5569 -52.929 -54.0891 -55.438 -56.5622 -57.3539 -58.338 -59.1859 -59.4312 -59.1096 -58.1421 -56.927 -55.238 -53.8073 -52.7935 -52.3112 -52.5847 -53.5876 -54.7236 -55.9006 -57.1931 -58.1267 -59.072 -59.7345 -60.6458 -61.3265 -62.0783 -62.4525 -61.753 -59.6119 -56.5523 -52.6471 -48.7212 -44.8153 -42.3524 -41.5962 -42.2149 -43.7787 -45.5589 -47.0426 -48.5285 -49.2865 -50.0573 -50.3985 -50.8299 -51.6147 -52.2674 -52.8532 -53.6606 -54.4277 -55.0237 -55.4614 -55.8136 -55.8242 -55.9295 -55.9133 -56.2621 -56.5671 -56.8516 -57.3962 -58.0337 -59.0277 -60.0099 -61.5062 -63.0907 -64.3616 -65.3591 -65.3955 -64.5162 -62.3823 -59.0893 -55.2138 -51.2143 -47.5862 -45.0174 -43.6385 -43.6479 -45.1881 -47.2805 -49.588 -51.5156 -53.1124 -53.9222 -54.3284 -54.7183 -55.0143 -55.4864 -55.5978 -55.7853 -55.985 -56.0813 -56.2962 -56.4919 -56.5472 -56.559 -56.5891 -56.5768 -56.6497 -56.6377 -56.9036 -56.9461 -57.0225 -57.1685 -57.5667 -58.0179 -58.8773 -59.4199 -59.8667 -60.4359 -60.1936 -59.3608 -57.897 -55.8842 -53.2553 -50.9147 -48.8114 -47.3898 -47.2108 -47.9212 -48.9344 -50.2213 -50.9526 -51.9188 -52.2156 -52.4202 -52.6444 -53.0768 -53.0902 -53.5688 -53.684 -53.5247 -53.2597 -52.9901 -52.9601 -53.2359 -53.7677 -54.2205 -54.8549 -55.1673 -55.2012 -54.7487 -54.2236 -53.3293 -53.0021 -52.5678 -52.6232 -52.6574 -52.9974 -53.7718 -54.717 -55.5534 -56.322 -56.856 -57.1483 -56.7976 -56.2599 -55.8068 -55.3944 -54.8524 -54.343 -53.413 -52.091 -50.803 -49.3047 -47.997 -46.8362 -46.294 -45.9393 -46.4848 -47.3641 -49.1683 -51.2895 -53.1107 -54.4702 -54.8208 -54.5392 -53.9555 -52.7978 -51.8539 -51.3076 -50.7168 -50.2714 -49.8772 -49.662 -49.9863 -51.0052 -52.8563 -54.9673 -57.1342 -58.3769 -58.1277 -55.5222 -51.3847 -46.2608 -41.3208 -37.0582 -33.6856 -31.7811 -30.8985 -30.8832 -31.4673 -32.6417 -35.2544 -38.1647 -41.8569 -45.6025 -48.9778 -51.7183 -53.4794 -54.488 -55.0411 -55.0584 -55.0061 -55.0476 -55.117 -55.353 -55.7373 -56.1058 -56.4043 -56.6699 -56.7326 -56.338 -55.8308 -55.1657 -54.72 -54.6979 -55.1156 -55.5149 -56.0919 -56.7662 -57.5084 -57.2868 -56.2361 -53.5916 -50.421 -46.7673 -42.8744 -39.1981 -36.9638 -35.851 -35.8438 -36.9841 -38.823 -41.3893 -44.1984 -47.034 -49.564 -51.2798 -52.547 -53.5794 -54.134 -54.6511 -55.0435 -55.4187 -55.8255 -56.5011 -56.9713 -57.7507 -58.1792 -58.815 -59.2638 -59.6504 -59.9268 -60.2902 -60.5133 -60.9432 -61.3775 -61.8214 -61.9897 -62.2552 -62.3025 -62.3749 -62.1777 -61.844 -60.8178 -59.1761 -56.8158 -54.5777 -52.0502 -50.3366 -49.5644 -49.7578 -50.5941 -51.8041 -52.9765 -53.835 -54.5904 -55.2215 -55.8617 -55.7419 -56.0245 -56.3068 -56.7518 -57.163 -57.9269 -58.6169 -59.1924 -59.9922 -60.7187 -60.9807 -61.2175 -61.1012 -60.9709 -61.2674 -61.5932 -62.2392 -63.1002 -64.0522 -64.6672 -65.2675 -65.8547 -66.1905 -66.6966 -67.2687 -67.6344 -67.8448 -68.3966 -68.512 -67.9934 -66.9325 -65.5257 -63.1795 -60.8772 -58.4085 -56.0057 -53.8982 -52.1667 -50.9589 -49.8651 -49.0236 -48.5842 -48.724 -49.4555 -50.4677 -51.5728 -51.8414 -51.3464 -49.966 -47.94 -45.5188 -43.1158 -40.9666 -40.218 -39.7713 -40.7196 -42.0819 -44.0824 -46.1905 -48.0669 -49.7339 -50.8783 -51.6463 -52.1967 -52.4481 -52.8094 -52.6351 -51.7731 -50.114 -47.6475 -44.9703 -42.0026 -39.1882 -37.4836 -36.6813 -36.9776 -38.2344 -40.4929 -43.6504 -46.8424 -50.5226 -53.6474 -56.3162 -57.9117 -59.2238 -59.9563 -59.5511 
-58.0998 -54.9807 -51.2695 -46.8241 -42.6475 -38.8755 -36.488 -35.7416 -36.4403 -38.4141 -40.9798 -43.8497 -46.5812 -48.92 -51.0164 -52.7195 -54.6336 -56.6016 -58.689 -60.16 -61.0361 -60.3455 -57.9463 -54.2958 -49.5709 -45.2428 -41.4002 -39.3363 -39.6692 -42.0529 -45.1645 -48.392 -51.5929 -53.8095 -55.1655 -55.8504 -56.1046 -56.5105 -56.8328 -57.4208 -57.43 -57.4181 -56.8556 -56.0992 -55.0629 -53.8155 -52.5434 -51.3343 -50.4984 -50.1181 -49.9573 -49.9524 -50.381 -50.8941 -51.6094 -52.5566 -53.2059 -53.6985 -54.5183 -54.9851 -55.6078 -55.6837 -55.5236 -55.0419 -54.0005 -52.4852 -50.743 -48.9969 -47.4896 -46.623 -46.8771 -47.7175 -49.083 -50.3587 -51.7392 -52.6231 -53.1599 -53.6082 -53.7278 -53.8064 -54.1672 -54.4646 -54.9035 -55.1472 -55.638 -55.8567 -55.9611 -56.1748 -56.2517 -56.4057 -56.4742 -56.4836 -56.6196 -56.444 -56.6148 -56.5397 -56.6012 -56.6691 -57.0842 -57.3674 -57.9249 -58.2911 -58.6093 -58.9948 -59.3669 -59.2308 -59.2394 -59.221 -58.8841 -58.2156 -57.6241 -56.5196 -55.2815 -53.8209 -52.1869 -50.732 -49.6533 -48.6261 -47.8819 -47.4704 -47.6124 -48.5358 -50.2601 -52.0177 -53.9599 -54.6282 -53.7731 -52.467 -49.8203 -47.0385 -44.5227 -43.301 -42.9618 -43.6698 -44.2063 -45.0745 -45.7816 -47.3712 -49.5677 -52.21 -55.7275 -58.7611 -61.4587 -63.2648 -63.2468 -61.823 -59.2951 -56.1981 -52.6714 -49.5038 -47.0609 -45.4559 -45.3385 -45.8706 -47.1124 -48.7087 -49.8177 -51.0381 -52.0294 -52.3357 -52.6255 -53.0679 -53.5304 -54.0527 -54.5695 -55.1413 -55.5314 -56.1198 -56.8406 -57.382 -57.8907 -58.3432 -58.7403 -58.9642 -58.8827 -58.8069 -58.3221 -57.8989 -57.8666 -57.5133 -57.1398 -56.9861 -56.772 -56.0453 -54.47 -52.4241 -49.6525 -46.4027 -43.0881 -40.2232 -38.2128 -37.3663 -38.1015 -40.0331 -42.7226 -46.0159 -49.6187 -52.7315 -55.746 -57.8926 -59.2403 -59.9993 -60.7497 -60.6091 -59.7995 -57.7642 -54.7649 -50.9779 -47.3253 -43.7743 -40.7746 -39.2561 -38.6117 -39.0587 -40.4309 -42.5664 -45.478 -48.4652 -51.1937 -53.4358 -55.4235 -56.7503 -56.8351 -56.7138 -55.57 -53.8263 -51.2352 -48.1609 -44.6723 -41.0798 -38.1264 -35.4604 -34.3326 -33.7538 -34.1784 -35.443 -38.0271 -41.0875 -44.2552 -47.3878 -49.7111 -51.6458 -52.2824 -52.6104 -52.6435 -52.7425 -52.9447 -53.0603 -53.3356 -53.7945 -54.2626 -54.9955 -55.5788 -56.4162 -56.9575 -57.3302 -57.5216 -57.6459 -57.7639 -57.8297 -57.5986 -58.0111 -58.1474 -58.4826 -58.7751 -59.2446 -59.3281 -59.8143 -59.8017 -58.6419 -56.4367 -52.962 -49.0982 -45.0732 -42.0482 -40.3305 -40.7749 -42.7543 -45.6042 -48.4463 -50.7979 -52.666 -53.5203 -54.0716 -54.2813 -55.0328 -56.0452 -57.5112 -58.7154 -60.1749 -61.5208 -62.4504 -62.9231 -62.7682 -62.2297 -60.9491 -59.3042 -57.9418 -56.7484 -55.9669 -55.5558 -55.4905 -55.6948 -55.6251 -55.7279 -55.6797 -55.5677 -55.3952 -54.9533 -54.0152 -51.957 -49.4905 -46.3059 -43.191 -40.2968 -38.358 -37.884 -38.9951 -41.1755 -43.8025 -47.2284 -50.2534 -53.0751 -55.3131 -56.9031 -57.8311 -58.6354 -58.8597 -59.0787 -59.0053 -59.0374 -58.684 -58.2669 -57.6865 -57.1034 -56.5437 -55.7577 -55.3806 -55.0544 -55.0358 -55.0721 -54.7402 -54.5792 -54.1434 -53.9166 -53.8629 -53.8088 -53.7366 -53.8246 -53.7631 -53.5181 -52.8284 -51.975 -50.7651 -49.266 -48.1398 -47.3473 -46.9828 -46.9849 -47.5899 -48.1301 -48.7775 -49.4608 -49.9873 -50.5256 -51.2326 -52.0334 -52.9936 -53.6135 -54.2931 -54.4707 -54.5637 -54.5707 -54.4135 -54.5539 -54.585 -55.0271 -55.3559 -55.6816 -55.7796 -55.7471 -55.7263 -55.3238 -55.1548 -55.4507 -55.5538 -56.1918 -56.8862 -57.2392 -56.9235 -56.1631 -55.2631 -55.1827 -55.4353 -57.0277 -58.3323 -59.3502 -59.9624 -59.3732 
-57.9961 -55.514 -53.3845 -52.0337 -51.0536 -51.1395 -51.186 -51.6972 -52.2369 -52.8694 -53.5877 -53.8629 -54.2735 -54.3618 -54.1872 -54.0231 -53.9057 -53.6369 -53.3602 -53.0878 -52.8766 -52.8469 -52.5639 -52.6074 -52.7958 -52.7653 -52.965 -52.971 -52.9479 -52.9873 -52.8628 -53.064 -53.3341 -53.5527 -53.1651 -53.1234 -52.7869 -52.7812 -52.8266 -53.3307 -53.7008 -54.3562 -54.5005 -54.8988 -54.7298 -54.8752 -54.8613 -55.0427 -55.0525 -54.995 -54.8854 -55.1919 -55.2954 -55.8696 -56.6956 -57.3491 -57.1637 -56.4633 -55.3269 -54.1849 -52.8228 -51.5707 -50.6662 -49.8312 -49.0572 -48.4155 -48.0528 -48.1598 -48.2535 -48.6304 -49.2678 -50.068 -51.4592 -53.374 -55.9345 -58.6323 -60.7431 -61.7525 -61.3412 -59.5419 -57.2854 -54.4993 -51.9479 -50.4402 -49.3993 -48.5808 -48.3645 -48.4608 -48.5881 -49.2702 -50.4193 -51.7032 -53.2264 -54.4723 -55.5255 -56.2599 -56.8832 -57.171 -57.8608 -58.9102 -59.792 -60.8988 -62.0342 -62.6646 -62.9758 -62.9167 -62.533 -62.2701 -62.0534 -62.1775 -62.1427 -61.7894 -60.167 -57.571 -54.028 -49.4726 -44.6812 -40.1919 -36.6771 -34.77 -34.144 -34.7878 -36.5878 -39.3082 -42.4998 -45.6935 -48.5835 -50.9533 -52.6159 -53.5931 -54.1765 -54.3995 -54.6801 -55.2199 -55.6375 -56.045 -56.1094 -55.9834 -55.7514 -55.4438 -55.2148 -55.2793 -55.3317 -55.4525 -55.517 -55.1812 -54.634 -54.0779 -53.7702 -53.3746 -52.2918 -50.4905 -47.7227 -44.227 -40.4332 -36.844 -34.0673 -32.2615 -31.6129 -31.3879 -31.4387 -31.5999 -32.1063 -33.4442 -35.4768 -38.6513 -42.1697 -45.7157 -48.939 -51.313 -52.7323 -53.3404 -53.3176 -53.0466 -52.7948 -52.726 -52.5403 -52.3895 -52.1627 -51.7686 -51.536 -51.4391 -51.1994 -51.2469 -51.1639 -51.3416 -51.5784 -51.927 -52.2231 -52.5323 -52.5539 -52.7594 -52.2892 -51.8363 -51.5062 -51.2685 -51.0443 -51.0394 -51.1656 -51.1396 -51.0813 -51.248 -51.3296 -51.5355 -51.1781 -51.2384 -51.2721 -51.2693 -51.4769 -51.6242 -51.6572 -52.1486 -52.4843 -52.639 -52.8026 -52.3791 -51.7219 -51.3712 -50.7804 -50.2796 -49.8629 -49.7097 -49.8685 -50.0314 -50.0697 -50.1935 -50.5707 -50.9854 -51.6816 -52.4723 -53.8765 -54.996 -56.2642 -57.1626 -57.3336 -56.8643 -55.0656 -52.26 -48.8283 -44.7087 -40.9551 -37.6747 -35.3748 -34.3796 -34.1337 -35.3496 -37.5392 -40.9358 -44.7397 -49.1872 -53.5221 -57.2519 -59.9377 -61.8768 -62.7919 -63.6548 -63.6258 -63.2123 -61.9149 -59.8754 -57.4205 -54.8715 -52.4811 -50.8024 -49.8705 -49.6693 -49.9469 -50.178 -50.8302 -51.583 -52.3459 -53.0671 -53.9012 -54.6376 -55.3793 -56.1862 -56.4056 -55.3126 -53.1847 -50.0069 -46.4101 -42.8227 -39.2999 -36.9596 -35.3406 -35.2799 -36.4938 -38.7122 -41.8283 -45.426 -49.1198 -52.2592 -54.9853 -56.5529 -57.3379 -57.5986 -57.7701 -57.5217 -56.2987 -54.0404 -50.9567 -47.4844 -43.6913 -40.2386 -37.7886 -36.5569 -36.713 -37.9942 -40.3657 -43.2118 -46.333 -49.3465 -51.91 -53.7856 -55.3085 -56.2203 -57.235 -57.984 -58.7346 -59.1096 -58.6814 -57.0615 -54.7926 -51.6024 -48.831 -46.1767 -44.3567 -44.1898 -45.4609 -47.4904 -49.7937 -51.7258 -53.1309 -53.7745 -53.8106 -53.8177 -53.8471 -54.1249 -54.3312 -54.4128 -54.5722 -54.3023 -54.0262 -53.8646 -53.6103 -53.7781 -53.9348 -54.4834 -54.6645 -55.0666 -55.2143 -55.3353 -55.3111 -55.2519 -55.1343 -55.1356 -54.9426 -54.6053 -54.4358 -54.0377 -53.8208 -53.5517 -53.1531 -53.0931 -52.9445 -53.226 -53.3579 -53.6306 -53.7375 -53.8985 -53.9128 -53.8072 -53.4593 -53.485 -53.3978 -53.299 -53.6063 -54.1143 -54.6768 -55.3298 -55.5492 -55.5738 -55.2615 -54.7558 -53.8566 -53.3763 -52.6746 -52.5954 -52.2843 -52.0509 -51.865 -52.0373 -51.8353 -52.2249 -52.4149 -52.8168 -53.0842 -53.4285 -53.9746 -54.4962 
-55.0224 -55.0715 -55.192 -54.239 -52.4265 -50.4468 -48.463 -46.9813 -45.6902 -46.0188 -47.4183 -49.3855 -51.5448 -53.4038 -54.9492 -56.1173 -56.9099 -57.2387 -57.804 -58.5892 -59.4519 -60.7837 -62.2034 -64.1236 -65.6906 -66.9214 -67.3442 -66.9469 -65.8726 -63.8658 -61.728 -59.6414 -57.7621 -56.2581 -55.4875 -55.1515 -55.013 -55.2951 -55.3483 -55.4656 -55.3312 -55.4845 -55.3766 -55.0337 -53.9771 -52.4576 -50.2251 -47.4805 -44.7597 -42.6823 -42.01 -42.7956 -44.6814 -47.154 -49.6761 -52.2519 -54.2803 -55.9434 -57.0636 -58.1012 -59.3548 -60.0429 -60.5486 -60.1019 -58.8452 -56.3369 -52.8389 -49.0611 -45.4282 -42.4468 -40.9386 -40.7828 -42.0797 -44.3392 -47.1016 -49.7791 -52.3441 -54.2243 -55.5322 -56.429 -57.2861 -57.357 -57.1335 -56.5348 -54.8338 -52.5043 -49.8719 -47.2092 -44.8545 -42.9177 -42.361 -42.3903 -43.7573 -44.9893 -46.9298 -48.4034 -50.1003 -51.3952 -52.6322 -53.753 -54.4506 -54.6856 -54.8722 -54.9078 -55.064 -55.0408 -55.0763 -55.0045 -54.8899 -54.4735 -54.4294 -54.2913 -54.3935 -54.8079 -55.1603 -55.6626 -55.697 -55.5297 -55.4884 -55.2745 -55.2256 -55.2839 -55.391 -55.4155 -55.0596 -54.2457 -52.4488 -50.0248 -47.1656 -44.1765 -41.5892 -39.9107 -39.6685 -41.1816 -43.2994 -45.7903 -48.5817 -51.2873 -53.436 -54.9015 -55.9276 -56.3509 -56.7744 -57.2018 -57.6191 -58.0865 -58.3216 -58.6859 -59.1942 -59.4959 -59.9035 -60.2248 -60.5089 -60.6877 -61.1834 -61.3278 -61.4727 -61.4711 -61.5003 -61.529 -61.4512 -61.4904 -61.7436 -61.5532 -61.4034 -60.9748 -60.4062 -59.9004 -59.4835 -59.5087 -59.4983 -60.2676 -61.1959 -62.0003 -62.6271 -62.7289 -62.5917 -61.821 -60.5228 -59.5407 -58.3632 -57.8183 -57.1886 -56.865 -56.7551 -56.8961 -56.7792 -56.858 -57.0045 -57.0029 -56.9649 -56.8766 -56.702 -56.8227 -56.7578 -56.6011 -56.4235 -55.9975 -55.7026 -55.5138 -55.3386 -54.8328 -53.3838 -51.3225 -48.4155 -44.6742 -41.149 -37.688 -35.047 -33.3893 -32.702 -32.6444 -33.1195 -34.3324 -36.1579 -38.4221 -41.148 -43.9333 -46.4483 -48.3114 -49.7502 -50.7361 -51.2162 -51.4227 -51.9249 -52.067 -52.4727 -52.9252 -53.0795 -53.3476 -53.7533 -53.9368 -54.1995 -54.556 -54.6697 -54.9299 -55.2098 -55.726 -56.2043 -56.8885 -57.2603 -57.7528 -58.1234 -57.6565 -56.216 -53.4426 -50.0638 -46.4283 -43.1243 -40.0651 -37.9343 -36.831 -36.5244 -37.1967 -38.1565 -39.8272 -42.2006 -44.6562 -47.1742 -49.7371 -51.391 -52.4066 -52.9443 -52.9012 -52.8778 -52.8999 -52.9553 -53.2484 -53.7003 -53.8585 -54.1949 -54.1732 -54.4537 -54.5987 -54.9181 -55.059 -55.3368 -55.4414 -55.3963 -55.3703 -55.4721 -55.6277 -56.0866 -56.4924 -57.0799 -57.7069 -58.1149 -58.5427 -58.9321 -59.3613 -59.7478 -59.7469 -59.2004 -58.3927 -56.9541 -55.7892 -54.5988 -53.6433 -52.6997 -51.603 -50.6244 -49.2949 -47.7993 -46.9091 -46.5122 -46.687 -47.3264 -48.775 -50.5136 -52.5261 -54.3107 -55.9236 -56.6549 -56.486 -55.8345 -55.0223 -54.6629 -54.1989 -53.9469 -53.9401 -54.2429 -54.826 -55.8289 -57.0126 -58.8514 -60.723 -62.3719 -63.1745 -63.5072 -62.9261 -62.4751 -61.8502 -61.2028 -60.7757 -60.358 -59.5095 -58.5642 -57.2228 -56.0588 -54.9828 -54.564 -54.3475 -54.539 -55.0354 -55.5487 -56.4854 -57.2217 -58.1854 -59.088 -59.9869 -60.9686 -61.84 -62.8795 -63.947 -64.7662 -65.4199 -65.6127 -65.748 -65.7433 -65.4386 -65.3195 -64.9736 -64.911 -64.7016 -64.7521 -64.327 -64.0766 -63.9354 -63.063 -61.0944 -58.2225 -54.7719 -50.5903 -46.3898 -42.6978 -40.0625 -39.0825 -40.0219 -42.4029 -45.0039 -48.2746 -51.0671 -53.4791 -54.9307 -55.7802 -56.2531 -56.8921 -57.4143 -57.7802 -58.3396 -58.6337 -59.0463 -59.019 -59.3811 -59.646 -60.1079 -60.7083 -61.3476 -62.0256 -62.6747 
-62.8256 -63.0079 -63.1506 -63.4372 -63.8116 -63.9781 -64.6247 -64.8841 -65.1097 -65.078 -65.1703 -64.8676 -65.1017 -64.9734 -64.7276 -64.2892 -63.3218 -61.8808 -60.0399 -57.7725 -55.8635 -53.8842 -52.6319 -51.7146 -51.3997 -52.0234 -52.562 -53.0189 -53.2508 -53.8829 -54.0672 -54.0065 -54.1538 -54.2817 -54.3557 -54.7558 -55.3835 -55.8035 -55.948 -56.2342 -56.0551 -56.1422 -55.9223 -55.8851 -55.8689 -56.2603 -56.5742 -57.0582 -57.4467 -57.7969 -58.2691 -58.8925 -59.4285 -59.9446 -60.4173 -61.1182 -61.6866 -61.983 -61.9526 -61.3974 -60.5999 -59.3654 -57.8733 -56.0205 -54.0385 -52.3601 -50.9862 -49.8813 -49.4768 -49.3792 -49.88 -51.311 -53.2409 -55.4227 -57.1 -57.8336 -57.093 -55.0686 -52.4812 -49.4739 -47.3296 -46.0744 -46.1683 -46.6139 -47.6898 -48.9153 -50.1627 -51.3887 -52.6428 -54.7324 -56.8252 -58.671 -60.2917 -61.7402 -62.3423 -61.9748 -60.1859 -57.348 -53.5867 -49.6655 -45.8323 -43.0132 -41.7728 -42.1159 -43.6275 -45.8025 -47.9871 -50.3229 -51.7945 -52.8991 -53.2119 -53.2516 -52.7786 -52.0339 -50.5571 -48.6358 -46.8006 -44.7203 -43.2986 -42.1013 -41.7161 -42.4294 -44.1659 -46.2232 -48.364 -50.324 -52.2073 -53.5768 -54.8544 -56.0411 -57.1682 -58.8574 -60.5056 -62.0765 -63.0676 -63.4015 -61.6172 -58.6449 -54.5044 -49.6427 -44.9408 -41.4842 -39.5952 -40.1985 -42.1918 -45.2723 -48.5543 -51.3186 -53.4874 -54.697 -55.2117 -55.1079 -55.046 -55.1711 -55.5756 -55.8933 -56.4001 -56.942 -57.8443 -58.5864 -59.2514 -59.5375 -59.2677 -58.6022 -57.7776 -56.8598 -55.8011 -55.0897 -54.7085 -54.5016 -54.8575 -55.4298 -56.4345 -57.6964 -58.6589 -58.5836 -57.432 -54.9851 -51.7965 -48.5944 -45.4937 -43.6059 -42.9978 -43.6677 -45.1793 -47.2049 -49.412 -51.3388 -52.7301 -53.5166 -53.8192 -53.8997 -53.5783 -53.2265 -53.2157 -53.3709 -53.604 -53.8885 -54.2255 -54.6681 -54.7936 -55.1063 -55.2056 -55.0871 -55.1611 -55.2754 -55.375 -55.4107 -55.7552 -56.2045 -57.156 -57.9825 -58.8889 -59.6619 -60.3594 -60.6413 -60.9556 -61.0935 -61.5249 -61.4446 -61.5581 -61.0648 -60.5076 -59.5968 -58.5319 -57.2482 -55.5691 -53.6248 -51.6473 -49.7405 -48.2016 -47.3151 -46.7866 -46.9725 -47.65 -48.6366 -50.2504 -52.4199 -54.796 -57.0293 -58.1492 -57.7425 -56.0258 -53.2713 -50.4921 -47.7683 -46.6241 -47.4947 -49.289 -52.1425 -55.4695 -58.6788 -61.3861 -63.3466 -65.086 -66.0361 -66.5914 -67.1081 -66.9069 -66.746 -66.6015 -66.1886 -66.0837 -65.7935 -65.2278 -64.6231 -63.9385 -63.1286 -62.2895 -61.5952 -61.2009 -60.7131 -60.4282 -59.9478 -59.6038 -59.2092 -58.656 -57.6222 -56.1562 -53.5325 -50.3417 -46.9051 -43.4373 -40.3923 -38.4785 -38.0149 -39.2506 -41.8251 -45.1923 -49.0409 -52.4866 -55.5758 -58.0133 -59.1945 -60.3032 -61.2554 -62.5243 -63.6247 -64.3761 -65.0117 -65.3634 -65.8399 -66.0087 -65.9844 -65.8922 -65.2511 -64.267 -62.5722 -60.8498 -58.7608 -57.3487 -56.4797 -55.8679 -55.455 -55.2454 -55.1999 -55.0109 -54.6749 -53.6232 -52.1168 -49.7899 -47.4894 -44.6389 -42.2125 -40.4042 -40.039 -40.3534 -41.9172 -44.3556 -47.2081 -50.3423 -53.5028 -55.9815 -58.1412 -59.6703 -60.7919 -61.0646 -61.2911 -61.7073 -62.0296 -61.8595 -61.3367 -60.0692 -58.6464 -56.8514 -55.6003 -54.9021 -55.178 -56.2661 -57.4115 -58.9627 -60.0818 -60.4383 -60.6428 -60.408 -60.0017 -59.5872 -59.0344 -58.609 -58.5495 -58.5414 -58.4517 -58.3751 -58.3557 -58.5127 -58.3869 -58.6932 -58.7964 -59.1317 -59.525 -60.0145 -60.1519 -60.1902 -60.1154 -59.6083 -58.9625 -58.3771 -57.4335 -56.6968 -55.6911 -54.5668 -53.0483 -50.8853 -48.2554 -45.3244 -42.7109 -40.7989 -39.6771 -40.0257 -41.3398 -43.2155 -45.459 -47.8907 -50.071 -51.78 -53.4943 -54.968 -56.4582 -58.3069 
-60.1795 -61.9821 -63.7082 -65.2528 -66.1517 -66.0052 -64.9634 -62.9213 -60.4127 -57.7279 -55.115 -53.1752 -52.2202 -51.8948 -52.3155 -52.5461 -53.3195 -53.8466 -54.0525 -54.0728 -54.539 -54.8196 -55.0698 -55.3279 -55.5571 -55.493 -55.658 -55.095 -54.4678 -53.5925 -52.82 -52.2562 -51.7371 -51.873 -51.9735 -52.6135 -52.995 -53.6343 -54.2995 -54.9568 -55.655 -56.2965 -56.6306 -56.6833 -56.1059 -54.4311 -51.6619 -48.4625 -44.9411 -42.0179 -40.2522 -40.0092 -41.5632 -44.5663 -47.9347 -51.4158 -54.5255 -57.1239 -58.9772 -60.3635 -61.5021 -62.5437 -63.459 -64.0276 -64.5377 -64.6757 -64.4841 -64.0392 -62.8455 -61.2924 -59.6365 -57.9364 -56.6003 -55.7005 -55.0972 -55.0264 -55.1903 -55.6501 -56.3086 -56.6927 -56.8942 -56.8735 -56.2045 -54.6052 -51.8375 -48.4691 -44.2754 -40.409 -37.0618 -34.7759 -33.8396 -34.3416 -36.2917 -38.9555 -42.0113 -45.5072 -48.7308 -51.7873 -54.0651 -56.1012 -57.6881 -58.9055 -60.01 -60.8683 -61.3912 -61.8501 -61.9538 -61.8944 -61.5339 -61.0771 -60.0521 -58.9197 -57.5347 -55.9666 -54.5208 -53.2831 -52.4952 -51.8914 -51.8626 -52.2232 -52.9278 -53.5925 -54.4201 -55.1523 -55.521 -55.7052 -55.6777 -55.5311 -55.2889 -55.1765 -55.2346 -55.5014 -55.6985 -55.9303 -55.965 -55.9261 -55.794 -55.7415 -55.9372 -56.1354 -56.8441 -57.4348 -58.5012 -59.5889 -60.6814 -61.6117 -62.3223 -62.8438 -62.9019 -62.8223 -62.5719 -62.678 -62.6823 -62.479 -61.6263 -60.7357 -59.5702 -59.1277 -59.3891 -60.669 -62.558 -64.6531 -65.9979 -66.6999 -66.9166 -66.9097 -67.5579 -69.0411 -71.2881 -74.1376 -77.7652 -81.023 -83.7293 -85.9402 -86.2857 -85.5799 -85.9852", "breathiness_timestep": "0.011609977324263039" }, { @@ -410,9 +410,9 @@ "note_slur": "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0", "f0_seq": "149.4 149.3 149.0 148.9 149.0 149.3 149.4 149.6 149.9 149.9 150.1 149.9 150.0 150.1 149.9 149.9 149.7 150.1 150.0 149.7 149.9 149.7 149.6 149.6 149.5 149.5 149.6 149.4 149.3 148.9 148.9 148.0 151.4 165.2 172.4 185.3 196.6 207.8 216.3 222.5 228.1 231.1 231.9 230.7 229.2 227.6 225.8 227.8 235.2 248.8 259.3 265.4 266.9 265.5 264.4 262.3 261.4 259.7 260.8 260.5 260.2 260.8 261.9 261.5 262.6 261.6 258.5 250.4 240.9 251.4 262.2 275.1 288.8 304.7 310.6 312.1 315.8 318.7 319.7 318.5 316.8 314.0 311.2 308.1 306.7 304.1 303.5 301.8 304.9 309.4 313.7 318.0 320.3 320.7 318.6 316.0 314.2 313.6 313.9 314.3 315.6 314.4 309.0 300.6 291.7 299.1 305.7 312.6 320.1 328.3 334.9 342.5 350.7 361.1 366.7 367.6 346.1 340.0 338.2 333.8 329.3 325.4 322.4 320.5 320.1 318.9 317.8 316.6 315.2 316.9 316.3 319.0 322.5 327.0 336.3 343.3 348.5 352.4 352.4 351.4 350.7 346.0 343.8 344.1 345.3 348.5 350.0 350.8 351.6 350.8 350.0 347.7 346.3 346.1 346.2 346.3 345.4 348.0 348.9 352.4 354.8 355.0 353.4 351.2 347.3 340.0 332.5 332.8 336.7 340.5 344.3 348.5 350.9 355.9 359.3 363.3 361.1 356.9 353.4 353.1 351.0 350.5 350.1 348.9 348.5 348.3 349.2 349.1 347.0 344.3 336.0 328.5 338.6 347.3 356.6 366.0 375.5 360.5 354.5 351.3 351.2 348.4 347.5 346.4 346.5 346.5 347.4 347.1 345.6 343.7 338.8 329.7 319.3 304.6 293.1 283.9 276.0 270.6 267.4 265.8 265.9 268.1 268.7 268.3 267.3 266.1 263.7 262.6 261.5 258.2 254.3 247.6 250.5 255.0 260.6 264.0 269.0 271.3 267.3 263.8 261.7 261.1 260.9 262.3 263.3 262.5 263.1 263.0 262.6 262.1 259.3 256.0 249.1 265.6 282.1 299.2 316.0 333.2 352.0 352.4 352.3 351.1 350.8 348.7 348.8 350.5 352.4 352.5 351.3 347.9 343.1 331.6 313.5 303.8 305.4 307.8 309.2 312.1 316.2 313.9 313.2 319.6 323.6 326.6 325.4 322.4 316.7 308.9 303.1 298.5 295.5 295.3 296.5 300.5 305.0 311.5 316.8 
323.5 326.8 327.3 324.1 315.8 307.0 298.8 294.9 291.9 291.3 293.3 298.7 307.3 316.5 323.0 325.9 324.8 319.2 309.1 302.1 301.6 299.6 295.7 287.2 280.6 276.2 274.6 272.0 269.5 267.5 265.4 263.5 260.8 260.1 258.5 257.4 253.5 253.5 253.9 252.6 252.4 256.3 259.6 262.9 265.5 269.6 271.3 274.0 275.5 282.0 290.5 295.4 304.1 311.0 315.4 315.8 315.3 310.1 304.3 288.4 285.2 297.6 311.8 327.0 320.1 314.9 312.9 311.7 311.0 311.6 312.1 313.0 312.9 311.9 312.4 310.5 305.8 296.3 283.3 265.4 263.5 259.9 254.9 252.2 249.9 250.5 251.4 254.0 256.8 258.7 260.7 261.7 262.5 264.1 264.4 263.9 263.4 262.5 262.9 260.2 256.7 254.3 253.5 266.0 299.9 310.6 310.5 315.3 316.4 314.7 313.2 309.8 308.8 308.3 309.1 310.7 310.3 307.5 303.7 293.4 288.3 303.7 320.0 335.2 350.3 363.4 355.3 351.9 350.2 350.1 349.3 348.4 347.7 347.7 348.2 347.6 347.1 345.2 339.7 331.9 330.9 337.7 344.8 354.5 361.9 364.0 368.6 374.2 376.9 377.4 374.2 373.2 373.4 374.3 371.1 371.7 378.9 390.5 399.5 408.1 415.4 419.4 423.5 422.7 421.4 416.5 412.3 410.7 407.4 408.3 411.7 414.3 414.1 413.2 411.2 408.0 398.7 384.4 378.1 380.7 386.4 392.5 398.7 405.1 412.8 421.5 416.9 396.7 382.5 374.6 369.3 361.8 356.8 352.7 350.3 349.1 350.5 351.2 350.7 349.9 345.3 340.7 328.8 317.1 308.4 309.7 313.1 311.5 311.4 312.1 311.4 311.7 311.8 312.2 313.8 315.5 314.7 312.8 309.6 305.0 296.4 279.4 257.5 255.9 256.5 257.1 259.1 261.8 256.9 258.0 263.9 266.4 268.4 270.3 268.6 267.0 263.5 260.6 258.8 256.9 256.2 257.9 259.2 261.1 261.4 262.9 264.0 263.6 262.4 261.1 259.7 259.7 259.3 260.1 262.2 265.0 267.1 269.7 265.2 257.6 258.1 259.3 260.3 262.3 264.6 265.8 266.9 270.0 267.8 250.7 239.6 233.6 231.1 228.6 228.4 229.1 229.0 229.4 230.9 232.6 233.3 235.3 235.6 236.8 236.6 235.1 233.6 232.6 232.3 235.0 236.0 236.5 235.4 234.1 232.9 233.4 233.8 234.6 234.7 230.8 225.7 229.4 233.0 238.6 242.9 248.8 255.4 261.6 268.3 277.5 270.7 270.6 270.9 270.5 268.5 264.2 260.9 258.1 257.5 255.7 254.5 254.4 255.6 259.0 262.8 266.1 270.4 270.9 268.4 263.2 260.5 256.6 255.2 254.4 252.8 252.5 251.4 251.9 252.2 255.7 257.1 260.2 264.2 268.0 270.8 272.6 273.7 272.5 266.3 256.8 249.0 241.9 237.8 232.5 228.0 225.7 223.5 221.2 219.3 217.5 216.7 215.0 214.3 212.5 210.8 209.5 207.7 206.4 205.8 204.6 203.3 201.5 200.6 201.7 204.4 208.8 212.8 225.2 237.7 249.2 257.6 260.9 264.4 266.5 267.3 267.2 263.8 261.9 260.7 258.6 258.4 259.9 260.1 258.2 258.2 259.0 259.0 259.2 258.8 258.6 258.7 260.5 261.5 261.6 258.8 251.5 241.6 229.4 231.4 238.4 245.5 251.6 244.4 238.5 234.4 232.9 234.1 234.5 234.3 234.4 234.0 234.3 232.9 230.9 227.1 223.1 213.1 207.9 207.6 207.3 206.7 206.2 205.0 205.6 203.9 206.2 208.0 210.2 209.7 210.7 207.5 204.4 202.6 201.4 201.4 202.7 204.3 206.8 207.9 208.6 208.7 208.0 207.3 207.2 207.2 207.6 208.0 207.8 207.5 207.8 207.1 207.7 206.9 207.0 207.2 207.2 207.4 207.8 207.3 208.6 208.5 208.3 209.7 215.3 228.4 236.7 239.1 242.9 244.4 242.1 238.2 233.8 229.4 226.8 225.8 226.7 229.0 232.6 237.3 241.2 244.1 244.6 244.9 242.6 240.4 235.3 229.9 225.3 222.8 220.8 221.9 223.9 226.8 231.7 237.7 244.0 246.6 247.1 247.9 246.8 242.3 236.0 229.7 222.8 217.5 213.1 207.5 199.0 195.5 192.4 189.2 186.4 183.5 180.2 179.4 177.0 170.6 165.4 162.8 159.4 156.6 154.1 151.8 148.8 146.5 143.2 142.9 145.6 153.5 163.8 174.1 182.8 194.5 204.0 210.2 209.7 209.3 206.9 207.4 207.2 205.5 205.6 203.8 207.9 212.5 220.9 228.6 230.2 232.0 232.1 233.3 233.4 233.4 234.5 234.0 233.4 233.0 230.2 224.8 216.2 214.0 213.6 214.2 214.1 213.8 214.4 213.8 209.7 207.7 207.4 206.5 206.8 207.2 207.2 206.7 207.8 208.2 209.4 210.1 209.5 207.9 203.4 
200.4 204.0 210.4 218.2 224.9 231.5 232.0 233.7 234.8 235.1 235.7 236.0 235.1 234.1 234.2 233.5 234.1 234.5 233.2 230.6 230.2 235.2 239.1 244.4 253.6 262.0 266.0 266.5 266.7 264.2 261.8 260.9 260.9 260.9 261.8 263.6 263.5 263.2 260.8 253.7 248.3 243.6 255.7 268.3 282.5 297.8 314.1 317.2 318.1 320.6 324.3 323.6 320.6 315.6 310.1 303.5 299.4 295.9 297.0 300.2 304.6 310.0 315.8 319.5 322.0 322.1 321.3 318.9 314.8 308.7 300.0 293.5 290.0 289.6 293.1 298.3 307.5 315.3 322.0 325.0 325.2 324.5 319.1 308.8 294.8 291.5 288.8 285.9 282.3 278.5 276.2 274.5 272.7 270.3 268.4 266.9 264.3 261.0 257.3 254.1 251.1 247.9 244.7 241.8 239.2 237.3 234.5 232.2 232.9 237.2 246.2 258.1 270.4 281.4 291.8 305.8 312.7 314.7 312.0 311.2 311.5 311.8 311.8 310.2 310.2 310.8 310.9 312.6 314.1 315.4 313.5 311.8 308.9 309.4 310.0 310.2 309.2 308.5 307.8 305.9 302.4 290.8 274.9 273.9 276.3 279.4 283.7 290.0 282.8 269.3 264.7 263.1 262.4 261.8 262.4 263.0 263.1 262.3 263.9 263.9 262.7 259.9 255.0 248.3 240.8 233.1 226.8 226.2 226.7 227.3 229.6 228.9 229.9 231.5 232.6 232.9 233.0 232.1 231.8 231.9 230.3 225.5 225.2 230.2 234.5 241.7 248.3 256.9 265.6 268.6 265.0 262.2 262.2 261.8 261.5 260.7 261.1 260.5 261.3 263.1 263.3 259.9 248.5 247.1 250.1 258.0 266.4 276.7 288.3 299.1 300.5 303.1 304.7 308.6 312.3 312.9 314.1 313.8 313.7 312.4 311.9 311.6 314.4 318.5 322.6 327.3 335.0 345.0 349.4 351.9 351.7 350.3 348.2 345.3 346.2 345.8 347.1 348.8 350.0 350.6 352.0 351.4 348.8 349.6 338.0 306.8 294.0 295.2 297.9 300.8 304.9 304.8 307.2 307.2 307.9 309.9 310.3 310.4 311.1 311.3 311.8 311.7 311.6 311.3 309.9 302.8 288.0 297.2 304.2 311.8 318.8 327.9 340.5 349.5 351.5 352.2 352.7 351.3 348.8 346.1 345.0 344.5 342.5 340.8 341.1 342.6 344.6 345.8 347.3 348.8 361.7 376.5 385.4 393.1 396.5 395.8 396.0 395.1 392.7 390.0 388.4 387.0 388.0 390.6 389.6 382.2 372.2 364.4 358.4 375.7 396.6 412.0 376.0 364.9 356.8 352.3 351.3 350.2 350.1 349.0 349.8 351.8 352.2 351.9 350.9 351.3 349.5 345.4 338.5 329.8 317.9 305.6 300.9 303.2 302.9 309.8 315.1 320.6 322.9 319.3 316.1 313.2 310.9 308.9 308.0 308.2 310.0 311.2 312.1 311.4 310.0 310.8 312.8 314.0 314.6 313.3 310.6 308.7 310.2 312.1 313.9 313.3 310.7 298.9 302.3 307.3 312.2 316.8 321.1 324.9 329.5 334.0 338.3 344.1 341.7 320.3 315.8 318.7 320.6 318.4 316.7 316.3 315.5 315.9 314.3 313.4 313.2 312.5 314.5 318.8 325.6 334.8 343.9 349.6 354.9 357.9 357.8 354.9 352.2 349.7 346.6 343.8 342.6 341.2 341.7 343.5 346.6 347.7 348.0 348.9 349.6 349.7 347.8 346.9 346.8 346.5 345.9 344.8 342.7 341.9 342.0 344.6 348.1 351.6 357.5 361.6 364.6 366.8 368.0 367.3 364.4 359.6 352.0 347.5 343.2 342.0 342.7 344.5 347.0 351.6 355.4 360.4 362.1 362.0 359.9 357.1 354.6 351.6 348.0 346.1 345.9 343.4 344.0 344.5 345.7 349.7 353.8 357.3 359.1 361.0 360.5 358.8 355.7 352.9 350.8 347.9 346.1 344.0 343.3 342.5 343.1 346.3 348.7 351.7 354.4 357.6 358.3 359.8 359.8 358.1 355.7 351.9 347.1 341.4 337.4 335.6 335.8 340.1 345.6 351.9 358.0 363.5 366.1 366.7 365.9 364.0 362.0 359.5 356.0 352.7 351.5 349.8 349.1 348.7 349.4 351.7 353.3 355.2 356.7 357.2 357.2 357.0 356.3 355.5 355.8 357.0 358.0 356.5 355.3 353.3 351.4 350.7 351.9 352.6 352.6 355.7 358.1 359.0 359.8 361.5 363.7 363.5 364.4 364.0 361.7 358.6 354.9 351.3 348.9 347.9 349.4 350.4 353.5 356.2 359.8 361.3 362.2 363.2 361.7 360.1 357.8 355.0 353.4 351.9 349.8 349.0 348.2 347.3 347.1 347.0 346.5 345.7 343.9 343.3 341.5 336.7 331.6 326.9 321.3 316.7 313.7 311.8 310.8 311.6 312.6 312.4 312.9 313.1 314.9 315.1 311.7 302.9 302.6 302.6 303.3 304.0 304.3 304.4 304.5 305.1 305.4 305.3 
305.4 305.6 305.0 305.7 305.5 305.4 305.0 304.7 304.8 304.9 305.5 305.3 305.7 305.7 305.8 305.8 306.1 306.3 305.7 306.0 305.3 304.9 304.4 303.5 302.9 301.6 299.6 300.5 298.5 297.5 298.4", "f0_timestep": "0.011609977324263039", - "energy": "0.0002 0.0013 0.0012 0.0022 0.0026 0.0036 0.004 0.0046 0.005 0.0056 0.0055 0.0062 0.0059 0.0064 0.0055 0.0054 0.0052 0.0049 0.0046 0.0041 0.0036 0.0033 0.0025 0.0019 0.0017 0.0007 0.0007 0.0014 0.0017 0.0032 0.0059 0.0285 0.0492 0.0664 0.08 0.0888 0.0929 0.0975 0.1016 0.1016 0.0994 0.093 0.0871 0.0839 0.079 0.076 0.0681 0.06 0.0567 0.0591 0.0651 0.0696 0.0748 0.0784 0.0801 0.0823 0.0824 0.0821 0.0804 0.079 0.0771 0.0754 0.0736 0.0735 0.0744 0.0735 0.0658 0.0541 0.0395 0.0263 0.0209 0.0205 0.0212 0.0364 0.0539 0.07 0.083 0.092 0.0952 0.0993 0.1022 0.1021 0.1021 0.1003 0.099 0.0991 0.0984 0.0972 0.0963 0.0961 0.0949 0.096 0.0968 0.0968 0.097 0.0967 0.0973 0.0974 0.0977 0.0963 0.0911 0.0836 0.0714 0.0564 0.0422 0.031 0.0277 0.0303 0.0319 0.0332 0.0327 0.0313 0.0287 0.0235 0.0368 0.0578 0.0715 0.0851 0.0913 0.0923 0.0953 0.1006 0.1021 0.1046 0.1055 0.1036 0.1021 0.0992 0.0962 0.0928 0.0908 0.088 0.086 0.0847 0.0845 0.0862 0.0887 0.0915 0.0931 0.0941 0.097 0.1015 0.1059 0.1089 0.1109 0.1111 0.1095 0.1092 0.1075 0.1073 0.1063 0.1059 0.1061 0.105 0.1043 0.1035 0.1014 0.0985 0.0968 0.096 0.096 0.096 0.0942 0.0871 0.0772 0.0654 0.0515 0.0417 0.0329 0.0303 0.0291 0.0289 0.0285 0.0263 0.0245 0.0315 0.0574 0.077 0.0951 0.1065 0.1087 0.1078 0.1038 0.1013 0.0963 0.0931 0.0907 0.0879 0.0821 0.0708 0.0548 0.0381 0.0249 0.0229 0.0232 0.0265 0.0464 0.0669 0.0869 0.103 0.1077 0.1105 0.1074 0.1046 0.1051 0.1018 0.1013 0.1002 0.0987 0.0983 0.0966 0.0934 0.0902 0.0869 0.0857 0.0869 0.0889 0.0927 0.0945 0.0996 0.1027 0.1039 0.1071 0.1044 0.103 0.1013 0.0995 0.0972 0.0913 0.0818 0.0663 0.0492 0.0319 0.0178 0.0165 0.0148 0.0296 0.0523 0.0725 0.0897 0.1024 0.1045 0.1031 0.1015 0.0952 0.0903 0.0864 0.0821 0.0783 0.0748 0.0647 0.0521 0.0375 0.0225 0.0171 0.0183 0.017 0.0267 0.0535 0.0716 0.0879 0.099 0.0995 0.0986 0.0942 0.0917 0.0875 0.083 0.0802 0.0772 0.0735 0.0645 0.0533 0.0374 0.0208 0.0112 0.0076 0.0166 0.045 0.0705 0.0917 0.111 0.1192 0.1251 0.1258 0.1245 0.1231 0.1168 0.1115 0.1055 0.1 0.1005 0.0996 0.0996 0.0993 0.0992 0.0984 0.0984 0.0968 0.0953 0.0966 0.097 0.0982 0.0997 0.0956 0.0904 0.083 0.0744 0.0703 0.0697 0.0687 0.0691 0.0689 0.0679 0.0661 0.0629 0.0552 0.0437 0.0301 0.0132 0.0057 0.0058 0.006 0.0053 0.0054 0.0047 0.0038 0.0028 0.0023 0.0015 0.0016 0.001 0.0028 0.0116 0.0278 0.0429 0.056 0.0648 0.0743 0.0813 0.09 0.0944 0.0956 0.0929 0.085 0.0785 0.0734 0.07 0.0684 0.0696 0.0703 0.0716 0.0734 0.0716 0.0688 0.0621 0.0504 0.0362 0.0186 0.0269 0.0526 0.0746 0.0918 0.1034 0.1079 0.1082 0.1037 0.099 0.0906 0.0806 0.0758 0.0732 0.0723 0.071 0.0616 0.0492 0.0346 0.0175 0.0119 0.0138 0.021 0.0375 0.0534 0.0697 0.0811 0.0877 0.0922 0.092 0.0892 0.0836 0.0761 0.0711 0.0683 0.0688 0.0697 0.0691 0.0661 0.0587 0.0478 0.0355 0.0295 0.0453 0.0652 0.084 0.0985 0.1067 0.1102 0.1108 0.1079 0.1045 0.1011 0.0962 0.0934 0.0887 0.0787 0.0653 0.0486 0.0264 0.0159 0.0142 0.0176 0.0309 0.0511 0.0658 0.0783 0.0908 0.0986 0.1043 0.1068 0.1047 0.1008 0.0983 0.0965 0.0948 0.091 0.0834 0.0731 0.0586 0.0433 0.0283 0.0168 0.032 0.0557 0.0753 0.0917 0.1023 0.1056 0.1056 0.1045 0.0995 0.0943 0.089 0.0832 0.0788 0.0784 0.0795 0.0824 0.0868 0.09 0.091 0.0918 0.0914 0.0895 0.0898 0.0889 0.0904 0.0926 0.0925 0.0927 0.0916 0.0882 0.0841 0.0744 0.0605 0.0443 0.0232 0.0075 0.0028 0.0024 0.0039 0.0048 
0.004 0.0298 0.0566 0.0738 0.0878 0.0956 0.0953 0.095 0.0934 0.0939 0.0925 0.0902 0.09 0.0878 0.0905 0.0907 0.0877 0.077 0.0613 0.0417 0.0199 0.0101 0.0313 0.0528 0.0694 0.0825 0.0882 0.0896 0.089 0.0876 0.0857 0.0825 0.0791 0.0782 0.0766 0.075 0.0691 0.0579 0.0432 0.0236 0.015 0.0146 0.0252 0.0443 0.0631 0.0802 0.0941 0.1025 0.1064 0.1074 0.1074 0.1066 0.1045 0.1036 0.0991 0.097 0.0954 0.094 0.0943 0.0928 0.0898 0.0884 0.0862 0.0839 0.085 0.0835 0.0839 0.0839 0.0824 0.0821 0.0815 0.081 0.0801 0.0757 0.0673 0.0551 0.0408 0.0264 0.0216 0.0213 0.0205 0.0196 0.0172 0.0162 0.0435 0.063 0.0771 0.091 0.0939 0.0948 0.0962 0.0963 0.0953 0.0972 0.0968 0.0951 0.095 0.0931 0.091 0.0932 0.0956 0.0975 0.103 0.1007 0.0997 0.0989 0.0976 0.0984 0.099 0.095 0.0897 0.084 0.0797 0.0764 0.0724 0.0639 0.0508 0.0378 0.0249 0.0206 0.0202 0.0193 0.0193 0.0163 0.0158 0.0422 0.0621 0.0807 0.0935 0.0973 0.0994 0.0986 0.099 0.0976 0.0942 0.0907 0.0875 0.0863 0.0861 0.0872 0.0904 0.094 0.0976 0.1018 0.1067 0.1076 0.1094 0.1107 0.1092 0.1081 0.1028 0.0976 0.0929 0.0891 0.0873 0.0864 0.0852 0.0859 0.0813 0.0777 0.0752 0.0715 0.0692 0.0662 0.0619 0.055 0.0462 0.0371 0.0268 0.0173 0.0091 0.0067 0.0064 0.0067 0.006 0.006 0.0064 0.005 0.0058 0.0044 0.0029 0.0021 0.0022 0.0021 0.0027 0.0042 0.0362 0.0593 0.0768 0.0874 0.0865 0.0788 0.0687 0.0593 0.0559 0.0547 0.0558 0.0603 0.0651 0.0674 0.0697 0.0731 0.0745 0.075 0.0763 0.0785 0.0815 0.0859 0.0905 0.0942 0.0967 0.096 0.0936 0.0918 0.0908 0.0911 0.0923 0.0897 0.0891 0.0873 0.0787 0.0697 0.0545 0.035 0.021 0.0139 0.0265 0.0468 0.0625 0.0747 0.0821 0.0833 0.0832 0.0837 0.084 0.083 0.0831 0.081 0.0784 0.0725 0.0615 0.0464 0.0301 0.018 0.0167 0.0183 0.0189 0.016 0.0268 0.0505 0.0655 0.0796 0.0869 0.0877 0.0889 0.087 0.0864 0.0834 0.0811 0.0801 0.0791 0.0778 0.0773 0.0758 0.0732 0.0718 0.0711 0.0706 0.0732 0.0745 0.0751 0.0755 0.0741 0.073 0.0712 0.0696 0.0686 0.0685 0.0679 0.0692 0.0698 0.0709 0.0707 0.0706 0.0701 0.069 0.0667 0.0662 0.0652 0.0713 0.0844 0.0943 0.1023 0.1067 0.1059 0.1048 0.1039 0.0987 0.093 0.0882 0.0853 0.0839 0.0857 0.087 0.089 0.0912 0.0937 0.0964 0.097 0.0981 0.0967 0.0951 0.0922 0.0886 0.0842 0.0806 0.0764 0.0746 0.0729 0.072 0.0715 0.0695 0.0691 0.067 0.0649 0.0632 0.0608 0.0557 0.0471 0.0361 0.0237 0.0093 0.0036 0.0035 0.0039 0.0033 0.0036 0.0032 0.0027 0.0023 0.0022 0.0012 0.002 0.0015 0.0011 0.0013 0.0028 0.0034 0.0058 0.0089 0.0228 0.0401 0.0542 0.065 0.0727 0.074 0.0723 0.0699 0.0678 0.0672 0.0658 0.0641 0.0622 0.0612 0.0617 0.0623 0.0597 0.0524 0.0427 0.0299 0.0154 0.0196 0.0377 0.0547 0.0686 0.0792 0.0836 0.0835 0.0823 0.08 0.076 0.073 0.0697 0.0629 0.0526 0.0408 0.0259 0.0159 0.0128 0.015 0.0154 0.0239 0.0415 0.0548 0.0646 0.071 0.0731 0.0743 0.075 0.0741 0.0684 0.0638 0.0606 0.0611 0.0616 0.0626 0.0593 0.0497 0.0398 0.0267 0.0171 0.0147 0.0241 0.0464 0.0625 0.0774 0.0901 0.0946 0.0982 0.0997 0.099 0.0982 0.0964 0.0963 0.0943 0.0927 0.0907 0.0864 0.0809 0.0782 0.075 0.0755 0.0807 0.0842 0.0866 0.0901 0.0917 0.0935 0.0936 0.0916 0.0882 0.0838 0.08 0.0779 0.0764 0.0752 0.069 0.0579 0.0448 0.028 0.0155 0.0129 0.0114 0.0224 0.0524 0.074 0.0962 0.113 0.1189 0.121 0.118 0.1138 0.1091 0.0996 0.094 0.0896 0.089 0.093 0.0958 0.0981 0.0995 0.1019 0.1026 0.1022 0.1003 0.1003 0.1018 0.1027 0.1043 0.1037 0.1021 0.0986 0.0947 0.0908 0.0868 0.0862 0.0851 0.0827 0.0818 0.0822 0.0822 0.0794 0.072 0.058 0.0406 0.0207 0.0069 0.0052 0.0059 0.0056 0.0057 0.0057 0.0051 0.0047 0.0044 0.004 0.0037 0.0028 0.0026 0.002 0.0027 0.0025 0.0072 0.0146 0.0344 0.0544 0.0716 0.0838 
0.0893 0.0912 0.089 0.0861 0.0847 0.0825 0.0841 0.0865 0.088 0.0894 0.089 0.0867 0.0866 0.0859 0.0856 0.0861 0.0849 0.0845 0.0872 0.0914 0.0946 0.0983 0.1005 0.1013 0.1025 0.104 0.1055 0.1058 0.1068 0.103 0.0903 0.0743 0.0511 0.0268 0.0168 0.0162 0.0178 0.0289 0.0539 0.0698 0.0819 0.0905 0.0884 0.0883 0.0874 0.0884 0.0882 0.0884 0.0894 0.0892 0.0881 0.0843 0.0798 0.0741 0.0664 0.0587 0.055 0.058 0.0642 0.0712 0.0756 0.0776 0.0786 0.079 0.08 0.0785 0.0774 0.0766 0.0763 0.0744 0.0692 0.0602 0.0466 0.0329 0.0201 0.0131 0.0136 0.0138 0.0234 0.0459 0.0615 0.0751 0.0838 0.0856 0.0854 0.0849 0.0831 0.0802 0.0783 0.0779 0.0753 0.0701 0.0603 0.0457 0.0284 0.0153 0.0121 0.0143 0.0166 0.0267 0.0468 0.0659 0.08 0.0906 0.0951 0.0944 0.096 0.0954 0.0934 0.0921 0.0886 0.0847 0.0812 0.0793 0.0782 0.0783 0.0768 0.0764 0.0765 0.0772 0.0786 0.0801 0.0806 0.0809 0.0819 0.0825 0.0839 0.0846 0.0852 0.0865 0.0876 0.0874 0.0859 0.0767 0.0623 0.0472 0.0251 0.0122 0.0121 0.0137 0.0241 0.0456 0.0617 0.0783 0.0897 0.097 0.1062 0.1086 0.1115 0.1114 0.1073 0.1038 0.0986 0.0908 0.0785 0.0632 0.044 0.0254 0.0144 0.0097 0.0185 0.0467 0.0703 0.0894 0.1067 0.114 0.1167 0.1181 0.115 0.1109 0.1062 0.1016 0.0983 0.0949 0.0945 0.0921 0.0914 0.089 0.0838 0.0814 0.0821 0.0869 0.0964 0.1058 0.1125 0.1183 0.1192 0.1151 0.1079 0.1002 0.0942 0.0923 0.0926 0.0921 0.0894 0.0819 0.0692 0.0522 0.0333 0.019 0.0204 0.0512 0.0748 0.0921 0.1051 0.1062 0.1054 0.1023 0.101 0.0993 0.0969 0.096 0.0965 0.0986 0.0996 0.1006 0.1029 0.1033 0.1034 0.1028 0.1 0.0967 0.0981 0.103 0.1072 0.1139 0.1145 0.115 0.1148 0.1142 0.1167 0.1158 0.1147 0.1127 0.1097 0.1078 0.1068 0.1043 0.104 0.1048 0.1048 0.1057 0.1075 0.1082 0.1081 0.1072 0.1052 0.1032 0.1012 0.0989 0.0931 0.0861 0.0727 0.0551 0.0395 0.0248 0.0223 0.0237 0.0248 0.0258 0.0254 0.023 0.022 0.0331 0.0578 0.0727 0.0877 0.098 0.1003 0.1063 0.1093 0.1114 0.1122 0.1099 0.1074 0.1044 0.0997 0.0951 0.0902 0.0866 0.0849 0.0865 0.0887 0.0901 0.092 0.0914 0.0924 0.0948 0.098 0.1004 0.1012 0.1006 0.0994 0.098 0.098 0.0987 0.0997 0.1018 0.1023 0.1029 0.1038 0.104 0.1057 0.107 0.1101 0.1112 0.1138 0.1151 0.1162 0.1177 0.1166 0.1181 0.1175 0.1195 0.121 0.1233 0.1233 0.1234 0.1244 0.1252 0.1287 0.1313 0.1315 0.1294 0.1253 0.1219 0.1216 0.1199 0.1211 0.123 0.1237 0.1261 0.1266 0.1269 0.1269 0.1305 0.134 0.1373 0.1402 0.1411 0.1414 0.1395 0.1376 0.1353 0.1343 0.1342 0.1356 0.1383 0.1417 0.1442 0.1459 0.1462 0.1448 0.1443 0.1436 0.1428 0.1419 0.1402 0.1376 0.1365 0.136 0.137 0.1374 0.1396 0.1434 0.1478 0.1533 0.1574 0.1608 0.1628 0.1604 0.1569 0.1508 0.1444 0.1378 0.1339 0.1309 0.1285 0.1283 0.1284 0.128 0.1262 0.1252 0.1248 0.1254 0.1278 0.1288 0.1284 0.1281 0.1274 0.1244 0.1227 0.1202 0.1167 0.1165 0.1147 0.1158 0.1178 0.1181 0.1198 0.1214 0.1245 0.1266 0.1288 0.1285 0.1287 0.1291 0.1301 0.1303 0.1289 0.1285 0.1261 0.1254 0.1266 0.1275 0.1303 0.1339 0.1342 0.1376 0.1389 0.1405 0.1422 0.1418 0.1418 0.1398 0.1405 0.1369 0.1337 0.1304 0.1257 0.1239 0.1211 0.1205 0.1181 0.115 0.1136 0.1117 0.1118 0.1114 0.1101 0.1092 0.1061 0.1048 0.1043 0.1055 0.1063 0.1048 0.1032 0.0986 0.0962 0.0942 0.0925 0.0913 0.0893 0.0875 0.0835 0.0783 0.0729 0.0672 0.063 0.0595 0.0544 0.0498 0.0457 0.042 0.0391 0.0369 0.0349 0.0309 0.0265 0.0223 0.0165 0.0095 0.0053 0.0023 0.0012 0.0007 0.001 0.0002 0.0003 0.0007 0.0002 0.0 0.0006 0.0001 0.0003 0.0 0.0 0.0001 0.0002 0.0 0.0 0.0 0.0 0.0 0.0002 0.0 0.0 0.0 0.0 0.0006 0.0 0.0 0.0002 0.0002 0.0001 0.0 0.0 0.0005 0.0 0.0005 0.0 0.0", + "energy": "-60.29 -56.248 -54.3315 -52.07 -50.1553 -48.6564 
-47.2927 -46.5325 -45.8739 -45.2955 -44.9018 -44.8888 -44.5817 -44.8736 -45.4268 -46.1463 -47.1838 -48.5215 -49.8868 -51.6363 -52.7589 -53.5267 -54.0256 -54.3926 -54.9775 -55.2097 -54.6106 -52.6143 -49.2338 -44.2314 -38.3355 -32.5606 -27.2189 -23.2013 -21.2486 -20.0919 -20.116 -20.1335 -20.3078 -20.7387 -21.4605 -21.6898 -22.4547 -22.9679 -23.4618 -24.2992 -24.4137 -24.359 -24.673 -24.1225 -23.6207 -23.0595 -22.425 -22.0094 -21.6639 -21.3234 -21.0126 -20.9434 -21.0064 -20.9738 -21.1955 -21.319 -21.3407 -21.5255 -21.8762 -22.6154 -24.8425 -27.2993 -29.8323 -31.5963 -32.5246 -32.0027 -30.3881 -27.8193 -25.1357 -22.8648 -21.062 -19.9775 -19.5008 -19.1457 -19.1331 -19.1355 -19.2882 -19.4095 -19.7675 -19.9371 -20.2686 -20.5606 -20.7152 -20.7003 -20.8013 -20.3704 -20.1442 -19.8762 -19.5876 -19.6235 -19.5937 -19.8044 -19.789 -20.107 -20.6892 -21.7127 -22.7859 -24.498 -26.2277 -27.6986 -29.0747 -29.8501 -30.5007 -30.7802 -30.7024 -30.1268 -29.1944 -27.7766 -26.4443 -24.9774 -23.6807 -22.3443 -21.7463 -21.3753 -20.9203 -20.8912 -20.7894 -20.5861 -20.5736 -20.6209 -20.7258 -20.9298 -21.2893 -21.425 -21.7711 -21.8514 -21.8192 -21.9489 -21.6788 -21.4172 -21.2829 -21.0376 -21.0661 -20.8679 -20.8411 -20.8709 -21.1364 -21.3183 -21.5729 -21.3256 -21.5106 -21.3461 -21.5318 -21.1775 -21.1291 -20.9823 -20.9118 -20.6744 -20.7152 -20.3316 -20.4586 -20.4327 -20.4735 -20.643 -20.6069 -20.9539 -21.0963 -21.8827 -23.2398 -24.5696 -26.5221 -28.2766 -29.7738 -31.0794 -31.5983 -31.375 -30.6529 -29.6036 -28.3733 -26.7971 -25.0803 -23.545 -22.1832 -21.2247 -20.624 -20.5491 -20.4716 -20.7377 -20.795 -21.0877 -21.5221 -22.1182 -23.1322 -25.212 -27.3354 -29.4904 -31.2332 -32.0049 -31.5359 -30.2811 -28.2665 -25.9178 -24.0214 -22.2678 -21.6086 -21.1184 -20.716 -20.7971 -20.5588 -20.3885 -20.4475 -20.5078 -20.9382 -20.9304 -21.1808 -21.5227 -21.1966 -21.4946 -21.4048 -21.3065 -21.2398 -21.1215 -20.96 -20.8968 -20.6856 -20.4652 -20.5646 -20.4183 -20.4426 -20.3073 -20.2142 -20.7287 -21.5638 -22.9513 -25.297 -27.3988 -29.8185 -31.3104 -31.8277 -31.1281 -29.2737 -27.2767 -24.9112 -22.9197 -21.597 -21.0685 -20.9596 -20.8396 -20.9749 -21.4072 -21.6135 -22.2817 -22.9385 -23.9305 -25.2888 -27.145 -28.9168 -30.8818 -31.9735 -32.0845 -31.0644 -29.3722 -27.0025 -24.8465 -22.8074 -21.4868 -20.9458 -20.5697 -20.7782 -20.8746 -21.1421 -21.1731 -21.6265 -22.3872 -23.4545 -24.9441 -27.1244 -29.5122 -30.8954 -31.7373 -31.13 -29.5348 -26.884 -24.0967 -21.304 -19.2292 -18.005 -17.3314 -17.3627 -17.5779 -18.0636 -18.5151 -18.9444 -19.3499 -19.7421 -19.8212 -19.883 -19.9412 -20.0164 -19.7441 -19.6472 -19.4748 -19.3175 -19.332 -19.6045 -19.9586 -20.1669 -20.8106 -21.3405 -21.9414 -22.4595 -23.0026 -23.4731 -23.7545 -23.8215 -24.0667 -24.2551 -24.911 -25.8928 -27.4831 -29.8249 -33.0521 -35.7726 -39.5583 -42.0121 -44.4965 -45.9884 -46.7694 -47.194 -47.2117 -47.5555 -48.4216 -49.6111 -50.2067 -49.7698 -47.9704 -44.6564 -40.2792 -35.3868 -30.6766 -26.7627 -24.0275 -22.3635 -21.1963 -20.4914 -20.0581 -19.8358 -19.8435 -19.9354 -20.2905 -20.3161 -20.3727 -20.6775 -20.7275 -21.1704 -21.7841 -22.6097 -24.2045 -26.2539 -28.4772 -30.3953 -31.3921 -31.1007 -29.9261 -27.8761 -25.3408 -22.7319 -21.1776 -20.1548 -20.0527 -20.4497 -20.8282 -21.3933 -21.7956 -22.2413 -22.5959 -23.1063 -24.0144 -25.6676 -27.4265 -29.5962 -31.0137 -31.628 -31.2652 -29.7986 -27.9991 -26.2028 -24.1828 -23.0069 -22.698 -22.4901 -22.5302 -22.5796 -22.8964 -23.0346 -23.2647 -23.4235 -23.5372 -24.2005 -25.3068 -26.9084 -28.5795 -29.6287 -29.8631 -29.1599 -27.5206 -25.3685 -22.8795 
-20.7049 -19.7259 -19.0407 -19.0394 -19.2059 -19.2816 -19.6318 -20.0928 -20.9181 -22.2646 -24.5129 -27.2413 -30.2142 -32.7679 -34.5799 -34.9647 -34.127 -32.0496 -29.2585 -26.8171 -24.483 -22.8902 -21.7337 -21.1142 -20.8368 -20.6818 -20.556 -20.5526 -20.8633 -21.6757 -23.3952 -25.9362 -29.278 -32.1888 -34.0315 -34.5968 -33.6071 -31.102 -28.0091 -25.0111 -22.9061 -21.7757 -21.6137 -21.4601 -21.5566 -22.0006 -22.6612 -23.0919 -23.6231 -24.0203 -24.4266 -24.724 -24.4975 -24.3275 -23.9581 -23.7411 -23.4492 -23.0063 -22.9781 -22.8238 -22.8219 -22.874 -22.9689 -22.9101 -23.1095 -23.2167 -23.2169 -23.686 -25.0751 -27.5284 -30.6983 -34.9935 -39.372 -43.291 -45.6989 -45.6188 -43.0844 -39.1969 -34.6662 -29.8034 -26.0954 -23.6634 -23.0048 -22.8701 -22.8718 -22.8407 -22.7396 -22.844 -22.7788 -22.7345 -22.8907 -23.0723 -23.7255 -25.09 -27.1341 -29.7264 -31.7015 -32.838 -32.6358 -30.9637 -28.5457 -25.6629 -23.0985 -21.6367 -20.9101 -20.5403 -20.5671 -20.9796 -21.3269 -21.9221 -22.3223 -22.7653 -23.3857 -24.6829 -26.4568 -28.4229 -29.8057 -30.4739 -30.1579 -28.8267 -26.745 -24.5227 -22.3165 -20.8258 -20.0249 -19.8307 -19.6773 -19.9274 -20.2362 -20.6144 -20.9236 -21.4696 -21.6839 -22.0593 -22.2621 -22.5071 -22.5021 -22.4094 -22.5248 -22.4495 -22.2978 -22.1257 -21.9741 -21.7053 -21.8184 -21.8577 -21.8242 -22.0926 -22.2309 -22.7398 -23.7413 -25.1828 -27.1052 -29.0929 -31.067 -32.9542 -34.4652 -34.8116 -34.4384 -32.9947 -30.8404 -28.2836 -25.5964 -23.3522 -21.6375 -20.9389 -20.4457 -20.2533 -20.5254 -20.1449 -20.4296 -20.4509 -20.2326 -20.1982 -20.1732 -20.0882 -20.2195 -20.164 -20.212 -20.4974 -20.6613 -20.7816 -20.9602 -20.8932 -21.1608 -21.2153 -21.4246 -21.7058 -22.1454 -22.878 -23.8073 -25.2821 -27.1487 -29.1561 -31.0973 -32.9701 -34.6388 -35.873 -35.8615 -34.6101 -32.6149 -30.2845 -27.3766 -24.7671 -22.7089 -21.2132 -20.2383 -19.9713 -19.8989 -19.8595 -20.1538 -20.2885 -20.4334 -20.8031 -20.9046 -20.7137 -20.7635 -20.6086 -20.4627 -20.3445 -20.2674 -20.3351 -20.4026 -20.6622 -20.6874 -20.9584 -21.046 -21.4132 -21.5435 -22.0249 -22.4511 -22.686 -22.8854 -23.32 -23.884 -24.3638 -24.9615 -25.4532 -26.201 -26.9703 -28.3173 -30.734 -33.7677 -37.2819 -40.1018 -42.8954 -45.0847 -46.1062 -46.4261 -46.5427 -46.5832 -47.2081 -47.9491 -48.8632 -50.0635 -51.1437 -52.0667 -52.5179 -51.9495 -49.1998 -45.4209 -40.5524 -35.1473 -30.1827 -25.8145 -22.9936 -22.1427 -22.1877 -22.8613 -23.4821 -24.026 -24.4128 -24.3571 -24.2022 -23.6157 -23.4152 -22.5214 -22.3355 -21.87 -22.0328 -21.8865 -21.981 -21.7719 -21.6863 -21.341 -20.7141 -20.3712 -20.1043 -20.0892 -20.0903 -20.2247 -20.0981 -20.1877 -20.1524 -20.4685 -21.0734 -21.9991 -23.7781 -26.5099 -29.3107 -31.682 -32.734 -32.8672 -31.2232 -28.8508 -26.0242 -23.5847 -21.9624 -21.432 -21.1872 -21.0491 -21.2033 -21.2607 -21.5706 -21.961 -22.9552 -24.3005 -26.186 -28.7147 -30.9742 -33.2398 -34.9162 -35.4191 -34.6096 -32.6989 -30.3305 -27.6414 -25.1919 -23.1693 -21.9777 -21.5008 -21.1589 -21.0828 -21.1281 -21.2447 -21.4188 -21.6362 -21.7976 -21.8786 -21.7557 -21.9107 -21.9201 -22.2869 -22.4479 -22.7732 -22.8974 -23.0906 -23.2681 -23.0848 -23.4488 -23.2951 -23.446 -23.6347 -23.3805 -23.6366 -23.4589 -23.3463 -23.1293 -23.0441 -22.9288 -22.7361 -22.8918 -22.8564 -22.6118 -22.4608 -22.1925 -21.7893 -21.0222 -20.353 -19.6425 -19.2197 -19.062 -19.0154 -19.2414 -19.3966 -19.8145 -20.0255 -20.5774 -20.7775 -20.9374 -20.9355 -20.9926 -20.997 -20.674 -20.8293 -20.648 -20.5804 -20.8242 -20.9137 -20.9355 -21.0909 -21.4272 -21.6919 -21.9913 -22.4362 -22.4009 -22.5548 -22.7729 -22.8694 
-23.0091 -23.4153 -23.715 -24.6041 -25.512 -27.1461 -29.397 -32.351 -36.011 -39.3245 -42.4341 -44.9155 -46.6166 -47.3224 -47.7913 -48.0028 -48.2017 -48.7532 -49.619 -50.5714 -51.8385 -52.739 -52.8986 -52.167 -49.8405 -46.541 -41.7412 -36.9636 -32.5331 -29.1805 -26.5484 -24.9089 -24.1237 -24.0146 -24.2059 -24.5327 -24.7841 -25.0397 -25.1775 -25.1943 -25.1355 -25.149 -25.3383 -25.7899 -26.9728 -28.4431 -29.3491 -30.2505 -30.328 -29.8226 -28.5862 -26.7681 -25.0541 -23.7613 -23.1781 -22.7346 -22.7198 -22.6297 -22.8895 -23.1226 -23.797 -25.0491 -26.9547 -29.122 -31.9074 -34.3088 -35.9787 -36.3086 -35.323 -33.7115 -30.8145 -28.2148 -26.2247 -24.9333 -24.3711 -24.2336 -24.4052 -24.9296 -25.0273 -25.1197 -25.2254 -25.3502 -25.7903 -26.5674 -28.0932 -30.0138 -31.8522 -32.8783 -32.9976 -32.2186 -30.2225 -27.8288 -25.214 -23.2086 -22.1772 -21.7257 -21.277 -21.3718 -21.3949 -21.3385 -21.4561 -21.6635 -21.6382 -21.766 -22.1153 -22.5132 -22.618 -22.826 -22.8615 -22.6689 -22.3956 -21.8549 -21.2063 -20.7998 -20.4398 -20.1417 -20.0891 -20.4529 -20.7509 -21.2026 -21.5674 -22.1242 -22.9041 -23.6507 -25.2219 -27.4434 -30.2466 -33.0245 -34.5082 -35.0463 -33.639 -31.0789 -27.5338 -24.0555 -20.8002 -18.6522 -17.6595 -17.3428 -17.4173 -17.84 -18.3277 -18.4633 -18.8478 -19.0908 -19.0821 -19.2078 -19.2854 -19.0977 -19.0184 -19.0801 -18.8275 -19.232 -18.9857 -19.5187 -19.6236 -19.7668 -19.9658 -20.2473 -20.6305 -21.1162 -21.2732 -21.4907 -21.6724 -22.0507 -21.8551 -22.0947 -22.4029 -23.638 -25.3297 -27.7585 -31.1569 -35.0845 -38.7698 -41.7685 -44.0085 -45.2219 -45.3871 -45.1592 -44.7507 -44.3558 -44.5697 -44.8382 -45.5488 -46.4885 -47.6439 -48.5854 -48.7219 -47.517 -45.3437 -41.6076 -37.0164 -32.2941 -27.9557 -24.5785 -22.3452 -21.3649 -20.9725 -21.0569 -21.3411 -21.5598 -21.5375 -21.7357 -21.3582 -21.3737 -21.0372 -20.8449 -20.8455 -20.885 -20.9771 -20.9551 -20.8825 -20.8257 -20.2744 -19.6549 -19.0307 -18.5361 -17.9399 -17.6797 -17.7419 -17.8116 -18.1197 -18.1313 -18.7116 -19.3518 -20.111 -21.7638 -24.1988 -26.5676 -28.7713 -30.164 -30.5224 -29.6811 -27.9735 -25.778 -23.5638 -21.8745 -20.7204 -19.9361 -19.7846 -19.3789 -19.3839 -19.2672 -19.3877 -19.5693 -19.6146 -19.9436 -20.3752 -20.5472 -20.8264 -20.945 -21.1369 -21.1966 -21.1673 -21.1081 -20.9632 -20.7874 -20.6075 -20.7161 -20.5287 -20.8027 -20.8053 -21.0958 -21.2146 -21.423 -21.929 -22.7537 -24.1771 -26.1402 -28.5895 -31.3233 -33.358 -34.5215 -34.1024 -32.8707 -30.6216 -27.9437 -25.2886 -23.2935 -21.9439 -21.2589 -20.9051 -20.6544 -20.8664 -20.8552 -21.5031 -22.2741 -23.5703 -25.6697 -28.3862 -30.8618 -32.7989 -34.3186 -34.5149 -33.2115 -30.8953 -28.1329 -26.0093 -23.7664 -22.2282 -21.2597 -20.8951 -20.6922 -20.6604 -20.8918 -21.2299 -21.2437 -21.7268 -21.9791 -22.4913 -22.6716 -22.8076 -22.962 -23.0535 -23.122 -23.083 -22.6385 -22.455 -22.2207 -22.1227 -21.896 -21.7126 -21.6517 -21.4118 -21.2539 -21.0886 -20.904 -20.8906 -20.9163 -21.3496 -23.0061 -24.9464 -27.5641 -30.0266 -31.8088 -32.4467 -31.9551 -29.9571 -27.8412 -25.3051 -23.4073 -21.9367 -20.9815 -20.4607 -20.216 -19.7571 -19.7535 -20.1197 -20.7766 -21.8869 -23.4757 -25.8056 -28.7436 -31.4237 -32.9333 -33.4904 -32.6263 -30.3355 -27.2964 -24.1431 -21.748 -20.3111 -19.6199 -19.3242 -19.4902 -19.7794 -20.0518 -20.0414 -20.3034 -20.3911 -20.3045 -20.3769 -20.5649 -21.0204 -21.5614 -21.9978 -22.3435 -22.5252 -22.504 -22.4101 -22.0081 -22.0831 -22.0822 -22.2231 -22.5792 -23.1319 -23.6008 -24.009 -24.289 -24.527 -24.6655 -25.2378 -25.9675 -26.9007 -27.9917 -28.9159 -29.1845 -28.8922 -27.5724 -25.8422 -23.9494 
-22.4083 -21.5243 -21.4468 -21.6233 -21.7262 -21.9969 -22.0678 -22.0275 -22.0952 -21.8379 -21.4676 -21.1859 -21.0977 -20.6418 -20.6319 -20.607 -20.3126 -20.2344 -19.9555 -19.5725 -19.2896 -19.2054 -19.1311 -19.0104 -18.7369 -18.6181 -18.7227 -18.7251 -18.7846 -18.8438 -18.9752 -19.0349 -19.2948 -19.4282 -19.4562 -19.5054 -19.3044 -19.5946 -19.5888 -19.6511 -19.7602 -19.8081 -20.08 -20.6077 -21.0809 -21.6449 -22.3701 -23.3697 -24.8715 -26.5988 -28.3021 -29.4703 -30.6861 -31.5382 -32.0848 -32.5096 -32.0054 -31.1697 -30.0136 -28.1386 -26.0796 -24.0244 -22.2168 -20.9423 -20.0185 -19.6874 -19.3129 -19.3767 -19.3756 -19.6807 -20.0861 -20.7055 -21.2911 -21.9537 -22.5134 -23.0162 -23.1874 -23.0497 -22.9757 -22.6481 -22.2368 -21.8304 -21.2333 -20.7383 -20.4668 -20.3143 -20.0248 -19.9627 -19.8283 -19.8305 -19.8264 -19.6108 -19.5639 -19.2579 -19.0734 -18.9253 -18.3852 -18.1166 -17.8694 -17.7747 -17.5923 -17.2383 -17.4382 -17.3997 -17.341 -17.2238 -17.1652 -17.0108 -17.048 -17.0374 -16.9434 -16.9983 -17.0935 -17.307 -17.5927 -17.9311 -18.0927 -18.2108 -18.4744 -18.7973 -19.0842 -18.7583 -18.9045 -18.7578 -18.6485 -18.4836 -18.6186 -18.2923 -18.2369 -18.2113 -18.3357 -18.2476 -18.5355 -18.4998 -18.7726 -18.8187 -18.9176 -18.8066 -18.8192 -18.6478 -18.6288 -18.5686 -18.5395 -18.4063 -18.2611 -18.4372 -18.4045 -18.3573 -18.6433 -18.6524 -18.8425 -18.9279 -18.9918 -18.9075 -19.1216 -18.8611 -18.7539 -18.8398 -18.7438 -18.6803 -18.7645 -18.4068 -18.5244 -18.479 -18.4101 -18.5367 -18.749 -18.9082 -19.2364 -19.3482 -19.6635 -19.7592 -20.1171 -20.3532 -20.4089 -20.3866 -20.6183 -20.4567 -20.4811 -20.3666 -20.3084 -20.2969 -20.2092 -20.1243 -20.0682 -20.0994 -20.1242 -20.5138 -20.309 -20.3969 -20.5224 -20.4136 -20.247 -20.3441 -20.1197 -19.6146 -19.5689 -19.1386 -18.8917 -18.7436 -18.6926 -18.8216 -18.7124 -18.7097 -18.9096 -19.1052 -18.8878 -18.9882 -18.9847 -19.1193 -18.8774 -19.0649 -19.106 -19.428 -19.5553 -19.813 -19.9772 -19.9498 -20.1579 -19.9841 -20.0402 -20.0737 -20.0539 -20.2022 -20.3205 -20.3375 -20.3677 -20.4958 -20.6122 -20.7192 -20.6928 -20.772 -20.8753 -20.8846 -21.2495 -21.4351 -21.4603 -21.5772 -21.6008 -21.8309 -21.4385 -21.5949 -21.7552 -21.608 -21.5663 -21.5271 -21.4878 -21.568 -21.573 -21.6501 -21.8905 -21.9186 -22.1948 -22.3154 -23.0767 -23.517 -24.2606 -25.1889 -25.8819 -27.0939 -27.9091 -28.8709 -29.6822 -30.5465 -31.4983 -32.7503 -35.3351 -39.099 -44.1129 -50.007 -56.2466 -62.0749 -66.5396 -69.7291 -71.2146 -71.7395 -72.1739 -72.7452 -73.6841 -74.8674 -76.3149 -77.7886 -79.5897 -81.1304 -81.9268 -82.2565 -82.4197 -82.2436 -82.4945 -82.472 -82.7845 -82.9527 -82.9509 -82.7845 -82.5385 -81.9059 -81.4429 -80.7394 -80.2036 -79.9272 -79.9735 -80.3118 -80.2411 -79.6753 -78.7096 -77.5125 -74.2785 -69.4427 -65.2144", "energy_timestep": "0.011609977324263039", - "breathiness": "0.0003 0.0012 0.0018 0.0023 0.0032 0.004 0.0049 0.0056 0.0062 0.0071 0.0071 0.0073 0.007 0.0061 0.0056 0.0051 0.0048 0.0048 0.0045 0.004 0.0035 0.002 0.0014 0.0014 0.0013 0.001 0.0012 0.0017 0.0015 0.0025 0.0032 0.0035 0.004 0.0041 0.004 0.0044 0.0043 0.0045 0.0041 0.004 0.0031 0.0022 0.0021 0.0022 0.0023 0.0023 0.0022 0.0017 0.0014 0.0011 0.0009 0.001 0.0009 0.0008 0.0008 0.001 0.0011 0.001 0.0009 0.0008 0.0006 0.0007 0.0008 0.0006 0.0008 0.0008 0.0012 0.0048 0.0088 0.0151 0.0201 0.0211 0.0206 0.0163 0.0102 0.005 0.0022 0.0019 0.0023 0.002 0.0021 0.0019 0.0021 0.0023 0.0021 0.0022 0.0022 0.0026 0.003 0.003 0.0031 0.0029 0.0026 0.0026 0.0026 0.0022 0.0023 0.0025 0.0026 0.0028 0.0029 0.0032 0.0076 0.0137 0.0185 0.023 
0.0278 0.0302 0.0322 0.0342 0.0358 0.0351 0.0324 0.0273 0.0197 0.0137 0.0077 0.0036 0.0028 0.0026 0.0023 0.0022 0.0024 0.0023 0.0022 0.0022 0.0024 0.0027 0.0029 0.0029 0.003 0.0029 0.003 0.0031 0.0034 0.0032 0.0028 0.0026 0.0023 0.0026 0.0026 0.0027 0.0028 0.0027 0.0029 0.0029 0.0027 0.0028 0.0026 0.0027 0.0028 0.003 0.0031 0.0029 0.0025 0.0025 0.0025 0.0025 0.0023 0.0025 0.0021 0.0021 0.0022 0.0027 0.0036 0.0077 0.0152 0.0209 0.0258 0.0282 0.0277 0.0268 0.026 0.0247 0.0222 0.0188 0.0132 0.0088 0.0043 0.0032 0.0031 0.0034 0.0033 0.0029 0.0027 0.0018 0.0016 0.0011 0.002 0.0063 0.0109 0.0153 0.0213 0.0243 0.0252 0.023 0.018 0.0133 0.0067 0.0027 0.0017 0.0018 0.0017 0.0014 0.0013 0.0013 0.0013 0.0012 0.0013 0.0012 0.0013 0.0016 0.002 0.0025 0.0028 0.0028 0.0023 0.002 0.002 0.002 0.0022 0.0022 0.002 0.002 0.0022 0.0022 0.0022 0.0024 0.0026 0.0026 0.0051 0.0093 0.0118 0.0139 0.0154 0.0149 0.013 0.0112 0.0081 0.0029 0.0021 0.0018 0.0017 0.0016 0.0014 0.0014 0.0015 0.0014 0.0013 0.0012 0.0026 0.0054 0.0096 0.0131 0.0164 0.0184 0.0177 0.0155 0.0115 0.0069 0.0037 0.0024 0.0021 0.002 0.0018 0.0015 0.0017 0.0015 0.001 0.0007 0.0007 0.0006 0.0017 0.0038 0.0052 0.0063 0.0066 0.0057 0.0044 0.0027 0.002 0.0021 0.0021 0.0022 0.0022 0.0024 0.0025 0.0024 0.0024 0.0025 0.0019 0.0018 0.0017 0.0021 0.0019 0.0019 0.0018 0.0017 0.0015 0.0016 0.0016 0.0021 0.0023 0.0025 0.0025 0.0023 0.0021 0.002 0.0019 0.0016 0.0016 0.0016 0.0017 0.0018 0.0017 0.0017 0.0014 0.0016 0.0019 0.0022 0.0034 0.0047 0.0052 0.0055 0.0057 0.0049 0.0044 0.0034 0.0021 0.0014 0.001 0.0009 0.0008 0.0012 0.001 0.001 0.0011 0.0012 0.0016 0.0024 0.0028 0.0034 0.0038 0.0038 0.0039 0.0035 0.0027 0.0022 0.0022 0.0021 0.0022 0.0019 0.0018 0.0015 0.0017 0.002 0.0033 0.0058 0.0069 0.0066 0.0062 0.0042 0.0019 0.0016 0.0018 0.0016 0.0016 0.0016 0.0014 0.001 0.0011 0.0008 0.0008 0.0007 0.0009 0.0023 0.0044 0.0074 0.0112 0.0125 0.0119 0.0105 0.0059 0.003 0.0031 0.0026 0.0024 0.002 0.0016 0.0015 0.0012 0.0012 0.0012 0.001 0.0009 0.0007 0.0009 0.0011 0.0017 0.0028 0.0035 0.0042 0.0039 0.0032 0.0028 0.0026 0.0026 0.0027 0.0026 0.0022 0.0017 0.0014 0.0014 0.0011 0.0012 0.0018 0.0031 0.0055 0.0081 0.0131 0.0168 0.0176 0.0167 0.0133 0.0072 0.0032 0.0024 0.0025 0.0021 0.0021 0.0021 0.0022 0.002 0.0019 0.0018 0.0019 0.0018 0.0019 0.0021 0.0018 0.0018 0.0018 0.0015 0.0011 0.0009 0.0009 0.0012 0.0013 0.0012 0.0012 0.0008 0.0009 0.0009 0.0008 0.0007 0.0008 0.0009 0.0009 0.001 0.001 0.001 0.001 0.0011 0.0012 0.0015 0.0015 0.0015 0.0013 0.0011 0.0009 0.001 0.001 0.0012 0.0015 0.0019 0.0026 0.003 0.0034 0.0037 0.0042 0.0045 0.0045 0.0037 0.0028 0.002 0.0012 0.001 0.001 0.0012 0.0013 0.0012 0.0014 0.0013 0.0011 0.0011 0.0009 0.0011 0.0011 0.0012 0.0023 0.0045 0.0052 0.0053 0.0052 0.0037 0.003 0.0026 0.0025 0.0024 0.002 0.0017 0.0014 0.0012 0.0011 0.0011 0.0011 0.001 0.0009 0.0016 0.0038 0.006 0.0101 0.0125 0.0125 0.0118 0.009 0.0047 0.0023 0.0017 0.0016 0.0019 0.002 0.0019 0.0018 0.0013 0.0012 0.001 0.0011 0.0009 0.0009 0.0008 0.0008 0.0006 0.0005 0.0006 0.0009 0.0007 0.0008 0.0007 0.0009 0.0008 0.001 0.0009 0.0009 0.0012 0.0034 0.0099 0.0141 0.0178 0.0213 0.022 0.0219 0.0206 0.0175 0.0141 0.0105 0.0069 0.0047 0.0031 0.0027 0.0024 0.0025 0.0025 0.0023 0.0023 0.0018 0.0019 0.0016 0.0014 0.0014 0.0014 0.0013 0.0013 0.0014 0.0011 0.0012 0.0012 0.0014 0.0016 0.0014 0.0012 0.0013 0.001 0.0009 0.0009 0.0029 0.0097 0.0145 0.0173 0.0192 0.0204 0.0207 0.0207 0.021 0.019 0.0147 0.0116 0.0059 0.0025 0.0022 0.0019 0.002 0.0019 0.0018 0.0016 0.0017 0.0017 0.0014 0.0016 0.0015 0.0017 
0.0017 0.0018 0.0017 0.0017 0.0018 0.0019 0.0018 0.0017 0.0018 0.0019 0.0016 0.0015 0.0013 0.0014 0.0018 0.0018 0.0018 0.002 0.002 0.0017 0.0015 0.0014 0.0014 0.0014 0.0016 0.002 0.0021 0.0021 0.0037 0.0042 0.0047 0.0054 0.0055 0.0055 0.0063 0.0063 0.0066 0.0064 0.0059 0.0049 0.0032 0.0023 0.0017 0.0021 0.0026 0.0031 0.0032 0.0029 0.0026 0.0023 0.002 0.0016 0.0012 0.0009 0.0005 0.0003 0.0002 0.0004 0.0004 0.0006 0.0005 0.0005 0.0008 0.0008 0.0007 0.0009 0.001 0.0013 0.0018 0.0018 0.002 0.0017 0.0017 0.0015 0.0014 0.0012 0.001 0.001 0.0009 0.0008 0.0006 0.0008 0.0005 0.001 0.0012 0.0015 0.0017 0.0016 0.0013 0.001 0.0007 0.0005 0.0006 0.0006 0.0004 0.0003 0.0004 0.0007 0.0005 0.001 0.0014 0.0023 0.0057 0.0113 0.0169 0.0181 0.0179 0.0155 0.0095 0.0068 0.0032 0.0022 0.002 0.0017 0.0016 0.0015 0.0013 0.0013 0.0012 0.0012 0.0013 0.001 0.001 0.0008 0.0005 0.0008 0.0006 0.0007 0.0007 0.0008 0.0006 0.0007 0.0009 0.0006 0.0006 0.0007 0.0006 0.0005 0.0004 0.0004 0.0006 0.0006 0.0006 0.0006 0.0006 0.0008 0.0005 0.0007 0.0009 0.0013 0.0015 0.002 0.0024 0.0023 0.0023 0.0021 0.0021 0.0017 0.0018 0.0015 0.0014 0.0012 0.0013 0.0014 0.0015 0.0015 0.0014 0.0013 0.0012 0.0013 0.0015 0.0014 0.0015 0.0016 0.0013 0.0013 0.001 0.001 0.001 0.0009 0.0008 0.0005 0.0004 0.0005 0.0005 0.0006 0.0006 0.0006 0.0007 0.0007 0.0011 0.0013 0.002 0.0024 0.0025 0.0032 0.0033 0.0033 0.0031 0.0026 0.0023 0.0013 0.0011 0.0012 0.0011 0.0009 0.0017 0.0026 0.0042 0.0052 0.0052 0.0047 0.0043 0.004 0.0035 0.0037 0.0033 0.0032 0.0025 0.0019 0.0013 0.0012 0.0009 0.0009 0.0009 0.0008 0.0009 0.0018 0.0046 0.0081 0.0106 0.0116 0.0111 0.0091 0.0057 0.0035 0.0029 0.0022 0.0015 0.0016 0.0016 0.0018 0.0019 0.0014 0.0021 0.0033 0.005 0.0069 0.0127 0.0145 0.0146 0.0138 0.0087 0.0045 0.0026 0.0023 0.0019 0.0014 0.0008 0.0007 0.0006 0.0008 0.0005 0.0003 0.0004 0.0003 0.0003 0.0008 0.0033 0.0073 0.0114 0.0141 0.014 0.0127 0.0089 0.0036 0.0014 0.0014 0.0013 0.0011 0.0011 0.0007 0.0009 0.001 0.0014 0.0014 0.0015 0.0012 0.0011 0.0005 0.0004 0.0005 0.0011 0.0014 0.0018 0.002 0.0018 0.0018 0.0016 0.0014 0.0013 0.001 0.0007 0.0007 0.0006 0.0008 0.0007 0.0025 0.0068 0.0094 0.01 0.0115 0.0108 0.0093 0.0085 0.0055 0.0018 0.002 0.0018 0.0018 0.0021 0.0023 0.0022 0.0022 0.0022 0.0022 0.002 0.0019 0.0019 0.0021 0.0022 0.0021 0.0017 0.0018 0.0016 0.0017 0.0018 0.0021 0.0021 0.0023 0.0021 0.002 0.0022 0.0023 0.0025 0.0022 0.0018 0.0015 0.0016 0.0018 0.0016 0.0017 0.0013 0.0014 0.0015 0.002 0.0026 0.0032 0.0039 0.0046 0.0051 0.0052 0.0052 0.0047 0.0045 0.0039 0.0032 0.0029 0.002 0.0012 0.0011 0.0012 0.0021 0.0034 0.0037 0.0034 0.0032 0.0033 0.0036 0.0035 0.0034 0.0025 0.0018 0.001 0.0008 0.0006 0.0003 0.0006 0.0007 0.0009 0.0009 0.001 0.001 0.001 0.0011 0.0005 0.0007 0.0008 0.0009 0.0009 0.001 0.0013 0.0016 0.0016 0.0016 0.0015 0.0016 0.0019 0.002 0.0024 0.0041 0.0068 0.0117 0.0149 0.0161 0.015 0.0123 0.0082 0.0026 0.0013 0.0012 0.0012 0.0012 0.001 0.0012 0.0011 0.001 0.001 0.001 0.0013 0.0012 0.0016 0.0014 0.0018 0.0018 0.0016 0.002 0.0018 0.0018 0.0017 0.0017 0.0017 0.0015 0.0014 0.0013 0.0011 0.0009 0.0012 0.0012 0.0018 0.0029 0.0058 0.0094 0.0117 0.0136 0.014 0.0122 0.0101 0.0073 0.0036 0.0018 0.0014 0.0013 0.0012 0.0012 0.0012 0.0012 0.0014 0.0013 0.0012 0.0012 0.0017 0.0028 0.005 0.0095 0.0136 0.0159 0.0157 0.0135 0.0095 0.005 0.0026 0.0025 0.0026 0.0026 0.0025 0.002 0.002 0.0018 0.0019 0.0018 0.002 0.0018 0.002 0.0019 0.0017 0.0014 0.0014 0.0016 0.0015 0.0014 0.0015 0.0015 0.0018 0.0018 0.0019 0.0019 0.0019 0.0017 0.0015 0.0015 0.0018 0.0017 0.0024 0.0042 
0.008 0.0114 0.0131 0.0129 0.0115 0.0086 0.0028 0.0018 0.0018 0.0016 0.0015 0.0016 0.0017 0.0016 0.0017 0.0017 0.0021 0.0019 0.0029 0.0035 0.0035 0.0055 0.0089 0.0095 0.0094 0.0086 0.0046 0.0029 0.0024 0.0023 0.0018 0.0016 0.0016 0.0013 0.0013 0.0009 0.0009 0.0009 0.0009 0.0009 0.0011 0.0012 0.0014 0.0018 0.0021 0.0024 0.0024 0.0026 0.0025 0.0026 0.0023 0.002 0.0016 0.0012 0.001 0.0009 0.0007 0.001 0.001 0.0013 0.0011 0.0009 0.0008 0.0011 0.0012 0.0011 0.0011 0.0009 0.0008 0.0009 0.001 0.001 0.0011 0.0012 0.0012 0.0012 0.0011 0.0011 0.0013 0.0016 0.0018 0.0015 0.0016 0.0016 0.0016 0.0018 0.0018 0.0018 0.0018 0.0016 0.0014 0.0017 0.0019 0.002 0.0019 0.0021 0.0017 0.0016 0.0018 0.002 0.0018 0.002 0.002 0.002 0.0018 0.0017 0.0017 0.0018 0.0018 0.0021 0.0021 0.0018 0.002 0.0019 0.0017 0.0081 0.0146 0.018 0.0225 0.0251 0.0259 0.0279 0.0281 0.0273 0.0256 0.0224 0.0183 0.0128 0.0076 0.0034 0.0019 0.0017 0.0015 0.0019 0.002 0.002 0.0021 0.002 0.0023 0.0024 0.0027 0.0024 0.0024 0.0023 0.0021 0.0021 0.0021 0.0019 0.0018 0.002 0.002 0.002 0.0019 0.0022 0.0022 0.0024 0.0026 0.0024 0.0025 0.0025 0.0027 0.0031 0.003 0.003 0.0027 0.0028 0.0028 0.0028 0.0026 0.0025 0.0025 0.0025 0.0029 0.003 0.0032 0.0032 0.0029 0.0031 0.0031 0.003 0.003 0.0031 0.0028 0.003 0.0033 0.0035 0.0032 0.0038 0.004 0.004 0.0043 0.0043 0.0046 0.004 0.0038 0.0034 0.0031 0.0032 0.0034 0.0031 0.0033 0.0035 0.0041 0.0044 0.0044 0.0044 0.0039 0.0035 0.0034 0.0035 0.0037 0.0036 0.0034 0.0032 0.0033 0.003 0.0034 0.0034 0.0036 0.0035 0.0034 0.0036 0.0037 0.0035 0.0034 0.003 0.0032 0.0034 0.0034 0.0035 0.0035 0.0037 0.0038 0.0043 0.0047 0.0045 0.0045 0.0041 0.0043 0.0041 0.0043 0.0044 0.004 0.0036 0.003 0.0029 0.0024 0.0026 0.0027 0.0027 0.0029 0.0034 0.0034 0.0035 0.0037 0.0032 0.0032 0.0029 0.0028 0.003 0.0033 0.003 0.0032 0.0031 0.0029 0.0029 0.0031 0.0032 0.0035 0.0037 0.0035 0.0037 0.0035 0.0033 0.0032 0.0034 0.0033 0.0033 0.0032 0.0027 0.0028 0.0029 0.0029 0.0033 0.0031 0.0031 0.0031 0.0029 0.0029 0.0026 0.0025 0.0024 0.0024 0.0023 0.0022 0.0023 0.0024 0.0025 0.0025 0.0025 0.0023 0.002 0.0021 0.0018 0.0019 0.0017 0.0016 0.0015 0.0015 0.0017 0.0017 0.0015 0.0016 0.0016 0.0018 0.0018 0.0016 0.0015 0.0015 0.0015 0.0015 0.0014 0.0011 0.0009 0.0006 0.0005 0.0006 0.0007 0.0006 0.0006 0.0007 0.0006 0.0008 0.0012 0.0028 0.0034 0.0034 0.0032 0.0018 0.0009 0.0009 0.0006 0.0006 0.0005 0.0004 0.0003 0.0001 0.0003 0.0002 0.0 0.0 0.0002 0.0001 0.0 0.0001 0.0 0.0 0.0 0.0 0.0 0.0001 0.0001 0.0001 0.0 0.0002 0.0002 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0001 0.0 0.0", + "breathiness": "-68.987 -64.3189 -61.0924 -57.3045 -53.5403 -50.6384 -48.2766 -46.6099 -45.671 -44.8201 -44.4643 -44.6199 -44.626 -44.6507 -45.2028 -45.8809 -46.9277 -48.1074 -49.5236 -51.4416 -53.5548 -55.7004 -57.8713 -59.3856 -60.9157 -61.4132 -60.4383 -58.3081 -55.5858 -52.6239 -49.9316 -47.9821 -46.8419 -46.8444 -47.2533 -47.8338 -48.1853 -48.7049 -49.2432 -50.0128 -51.3484 -52.575 -53.9475 -55.0577 -56.1453 -57.3865 -58.6301 -59.8974 -61.2149 -61.8773 -62.7816 -62.7377 -62.8628 -62.3664 -61.9179 -61.6208 -61.0136 -61.0296 -60.7592 -60.6082 -60.8829 -60.8775 -60.8049 -60.2136 -58.4128 -55.9817 -52.1981 -48.3897 -44.2964 -41.1097 -38.784 -38.367 -39.4971 -41.908 -44.7855 -48.0491 -50.539 -52.6538 -54.1029 -55.0451 -55.7508 -56.3531 -56.7869 -57.1077 -57.3484 -57.1253 -56.9098 -56.5118 -56.4196 -55.851 -55.8814 -55.9102 -55.7707 -55.9394 -55.8341 -55.7515 -55.6628 -55.0569 -54.2491 -52.5905 -50.1393 -47.1414 -43.6475 -39.9501 -36.6848 -33.9995 -31.8543 -30.6927 -30.1888 -29.9948 -30.3128 
-52.1649 -51.6478 -51.5226 -51.0673 -50.9699 -50.7439 -50.8721 -51.0046 -51.3314 -51.9045 -52.4349 -52.8437 -53.2663 -53.4047 -53.4223 -53.6278 -53.4194 -53.353 -53.1004 -53.0914 -52.9864 -52.8946 -52.6166 -52.6108 -52.723 -52.7742 -52.9851 -53.339 -53.4427 -53.9982 -53.908 -53.9317 -53.6298 -53.0566 -52.328 -51.6303 -50.8903 -50.425 -50.2071 -50.203 -50.4793 -50.7269 -51.0092 -51.0874 -51.1355 -51.1453 -51.1346 -50.7377 -50.927 -50.7233 -50.877 -51.0061 -51.1111 -51.3807 -51.5827 -52.1116 -52.3901 -52.8331 -53.1539 -53.1072 -53.3473 -53.199 -53.0432 -53.1331 -53.0344 -53.1215 -53.0087 -53.0511 -53.0823 -53.1328 -53.424 -53.3227 -53.5651 -53.4621 -53.5964 -54.016 -54.281 -54.4761 -54.9263 -55.2899 -55.613 -55.9876 -56.3824 -56.5314 -56.5732 -56.6727 -56.2382 -56.0198 -55.5596 -55.0017 -54.5655 -53.917 -53.5994 -53.393 -53.1248 -53.1911 -52.987 -53.144 -53.5522 -53.9644 -54.2828 -54.6409 -54.6722 -54.7589 -54.5238 -53.9537 -53.1804 -52.8215 -52.2896 -51.875 -51.5467 -51.3904 -51.4573 -51.7038 -51.9135 -52.3933 -52.6759 -53.0833 -53.6067 -54.0378 -54.3047 -54.5553 -54.1737 -53.8516 -53.1688 -52.5993 -52.2408 -51.9324 -51.8074 -51.9192 -52.4091 -52.9648 -53.9542 -55.5969 -56.8675 -57.9047 -58.4294 -58.5807 -58.5339 -58.2961 -57.827 -57.6024 -56.8888 -56.5426 -56.5275 -56.4701 -56.8181 -57.5002 -58.6691 -60.0952 -61.6493 -62.9092 -64.0569 -65.1172 -65.365 -65.4016 -65.0677 -64.2829 -63.1836 -61.5961 -59.8814 -58.7576 -58.1326 -58.8159 -60.6683 -63.085 -66.1606 -68.8149 -71.0262 -72.6802 -73.954 -75.3697 -76.619 -78.0454 -79.4245 -81.0939 -82.5824 -84.0567 -85.1098 -85.5096 -85.8208 -85.8318 -85.9604 -86.469 -87.152 -88.0377 -88.5062 -88.3646 -87.7617 -86.8444 -85.5661 -84.2786 -83.4264 -82.8996 -82.8371 -83.3658 -83.7583 -84.1166 -84.1742 -83.8346 -82.8483 -80.5313 -77.4644", "breathiness_timestep": "0.011609977324263039" } ] \ No newline at end of file From b5eacb135d235aaf53f59d2add97ac88df1a4781 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 2 Jun 2023 18:22:43 +0800 Subject: [PATCH 414/475] Support speaker mix in variance model --- basics/base_svs_infer.py | 97 ++++++++++++++++++++++++++++++++++++++-- inference/ds_acoustic.py | 67 +-------------------------- inference/ds_variance.py | 44 +++++++++++++++++- modules/toplevel.py | 12 +++-- scripts/infer.py | 11 +++-- 5 files changed, 154 insertions(+), 77 deletions(-) diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 17e5a977c..6390f1d6a 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -1,8 +1,10 @@ # coding=utf8 - +import numpy as np import torch +from torch import Tensor from utils.hparams import hparams +from utils.infer_utils import resample_align_curve class BaseSVSInfer: @@ -29,13 +31,100 @@ def __init__(self, device=None): self.model: torch.nn.Module = None def build_model(self, ckpt_steps=None) -> torch.nn.Module: - raise NotImplementedError + raise NotImplementedError() + + def load_speaker_mix(self, param_src: dict, summary_dst: dict, + mode: str = 'frame', mix_length: int = None) -> tuple[Tensor, Tensor]: + """ + + :param param_src: param dict + :param summary_dst: summary dict + :param mode: 'token' or 'frame' + :param mix_length: total tokens or frames to mix + :return: spk_mix_id [B=1, 1, N], spk_mix_value [B=1, T, N] + """ + assert mode == 'token' or mode == 'frame' + param_key = 'spk_mix' if mode == 'frame' else 'ph_spk_mix' + summary_solo_key = 'spk' if mode == 'frame' else 'ph_spk' + spk_mix_map = param_src.get(param_key) # { spk_name: value } or { spk_name: "value value value 
..." } + dynamic = False + if spk_mix_map is None: + # Get the first speaker + for name in self.spk_map.keys(): + spk_mix_map = {name: 1.0} + break + else: + for name in spk_mix_map: + assert name in self.spk_map, f'Speaker \'{name}\' not found.' + if len(spk_mix_map) == 1: + summary_dst[summary_solo_key] = list(spk_mix_map.keys())[0] + elif any([isinstance(val, str) for val in spk_mix_map.values()]): + print_mix = '|'.join(spk_mix_map.keys()) + summary_dst[param_key] = f'dynamic({print_mix})' + dynamic = True + else: + print_mix = '|'.join([f'{n}:{"%.3f" % spk_mix_map[n]}' for n in spk_mix_map]) + summary_dst[param_key] = f'static({print_mix})' + spk_mix_id_list = [] + spk_mix_value_list = [] + if dynamic: + for name, values in spk_mix_map.items(): + spk_mix_id_list.append(self.spk_map[name]) + if isinstance(values, str): + # this speaker has a variable proportion + if mode == 'token': + cur_spk_mix_value = values.split() + assert len(cur_spk_mix_value) == mix_length, \ + 'Speaker mix checks failed. In dynamic token-level mix, ' \ + 'number of proportion values must equal number of tokens.' + cur_spk_mix_value = torch.from_numpy( + np.array(cur_spk_mix_value, 'float32') + ).to(self.device)[None] # => [B=1, T] + else: + cur_spk_mix_value = torch.from_numpy(resample_align_curve( + np.array(values.split(), 'float32'), + original_timestep=float(param_src['spk_mix_timestep']), + target_timestep=self.timestep, + align_length=mix_length + )).to(self.device)[None] # => [B=1, T] + assert torch.all(cur_spk_mix_value >= 0.), \ + f'Speaker mix checks failed.\n' \ + f'Proportions of speaker \'{name}\' on some {mode}s are negative.' + else: + # this speaker has a constant proportion + assert values >= 0., f'Speaker mix checks failed.\n' \ + f'Proportion of speaker \'{name}\' is negative.' + cur_spk_mix_value = torch.full( + (1, mix_length), fill_value=values, + dtype=torch.float32, device=self.device + ) + spk_mix_value_list.append(cur_spk_mix_value) + spk_mix_id = torch.LongTensor(spk_mix_id_list).to(self.device)[None, None] # => [B=1, 1, N] + spk_mix_value = torch.stack(spk_mix_value_list, dim=2) # [B=1, T] => [B=1, T, N] + spk_mix_value_sum = torch.sum(spk_mix_value, dim=2, keepdim=True) # => [B=1, T, 1] + assert torch.all(spk_mix_value_sum > 0.), \ + f'Speaker mix checks failed.\n' \ + f'Proportions of speaker mix on some frames sum to zero.' + spk_mix_value /= spk_mix_value_sum # normalize + else: + for name, value in spk_mix_map.items(): + spk_mix_id_list.append(self.spk_map[name]) + assert value >= 0., f'Speaker mix checks failed.\n' \ + f'Proportion of speaker \'{name}\' is negative.' + spk_mix_value_list.append(value) + spk_mix_id = torch.LongTensor(spk_mix_id_list).to(self.device)[None, None] # => [B=1, 1, N] + spk_mix_value = torch.FloatTensor(spk_mix_value_list).to(self.device)[None, None] # => [B=1, 1, N] + spk_mix_value_sum = spk_mix_value.sum() + assert spk_mix_value_sum > 0., f'Speaker mix checks failed.\n' \ + f'Proportions of speaker mix sum to zero.' 
+ spk_mix_value /= spk_mix_value_sum # normalize + return spk_mix_id, spk_mix_value def preprocess_input(self, param: dict, idx=0) -> dict[str, torch.Tensor]: - raise NotImplementedError + raise NotImplementedError() def forward_model(self, sample: dict[str, torch.Tensor]): - raise NotImplementedError + raise NotImplementedError() def run_inference(self, params, **kwargs): raise NotImplementedError() diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 60eceaab2..ad265c7c6 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -83,72 +83,7 @@ def preprocess_input(self, param, idx=0): summary['seconds'] = '%.2f' % (length * self.timestep) if hparams['use_spk_id']: - spk_mix_map = param.get('spk_mix') # { spk_name: value } or { spk_name: "value value value ..." } - dynamic = False - if spk_mix_map is None: - # Get the first speaker - for name in self.spk_map.keys(): - spk_mix_map = {name: 1.0} - break - else: - for name in spk_mix_map: - assert name in self.spk_map, f'Speaker \'{name}\' not found.' - - if len(spk_mix_map) == 1: - summary['spk'] = list(spk_mix_map.keys())[0] - elif any([isinstance(val, str) for val in spk_mix_map.values()]): - print_mix = '|'.join(spk_mix_map.keys()) - summary['spk_mix'] = f'dynamic({print_mix})' - dynamic = True - else: - print_mix = '|'.join([f'{n}:{"%.3f" % spk_mix_map[n]}' for n in spk_mix_map]) - summary['spk_mix'] = f'static({print_mix})' - - spk_mix_id_list = [] - spk_mix_value_list = [] - if dynamic: - for name, values in spk_mix_map.items(): - spk_mix_id_list.append(self.spk_map[name]) - if isinstance(values, str): - # this speaker has a variable proportion - cur_spk_mix_value = torch.from_numpy(resample_align_curve( - np.array(values.split(), 'float32'), - original_timestep=float(param['spk_mix_timestep']), - target_timestep=self.timestep, - align_length=length - )).to(self.device)[None] # => [B=1, T] - assert torch.all(cur_spk_mix_value >= 0.), \ - f'Speaker mix checks failed.\n' \ - f'Proportions of speaker \'{name}\' on some frames are negative.' - else: - # this speaker has a constant proportion - assert values >= 0., f'Speaker mix checks failed.\n' \ - f'Proportion of speaker \'{name}\' is negative.' - cur_spk_mix_value = torch.full( - (1, length), fill_value=values, - dtype=torch.float32, device=self.device - ) - spk_mix_value_list.append(cur_spk_mix_value) - spk_mix_id = torch.LongTensor(spk_mix_id_list).to(self.device)[None, None] # => [B=1, 1, N] - spk_mix_value = torch.stack(spk_mix_value_list, dim=2) # [B=1, T] => [B=1, T, N] - spk_mix_value_sum = torch.sum(spk_mix_value, dim=2, keepdim=True) # => [B=1, T, 1] - assert torch.all(spk_mix_value_sum > 0.), \ - f'Speaker mix checks failed.\n' \ - f'Proportions of speaker mix on some frames sum to zero.' - spk_mix_value /= spk_mix_value_sum # normalize - else: - for name, value in spk_mix_map.items(): - spk_mix_id_list.append(self.spk_map[name]) - assert value >= 0., f'Speaker mix checks failed.\n' \ - f'Proportion of speaker \'{name}\' is negative.' - spk_mix_value_list.append(value) - spk_mix_id = torch.LongTensor(spk_mix_id_list).to(self.device)[None, None] # => [B=1, 1, N] - spk_mix_value = torch.FloatTensor(spk_mix_value_list).to(self.device)[None, None] # => [B=1, 1, N] - spk_mix_value_sum = spk_mix_value.sum() - assert spk_mix_value_sum > 0., f'Speaker mix checks failed.\n' \ - f'Proportions of speaker mix sum to zero.' 
- spk_mix_value /= spk_mix_value_sum # normalize - + spk_mix_id, spk_mix_value = self.load_speaker_mix(param, summary, length) batch['spk_mix_id'] = spk_mix_id batch['spk_mix_value'] = spk_mix_value diff --git a/inference/ds_variance.py b/inference/ds_variance.py index 4f77a2b58..84df8a6cc 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -34,6 +34,11 @@ def __init__( ): super().__init__(device=device) self.ph_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) + if hparams['use_spk_id']: + with open(pathlib.Path(hparams['work_dir']) / 'spk_map.json', 'r', encoding='utf8') as f: + self.spk_map = json.load(f) + assert isinstance(self.spk_map, dict) and len(self.spk_map) > 0, 'Invalid or empty speaker map!' + assert len(self.spk_map) == len(set(self.spk_map.values())), 'Duplicate speaker id in speaker map!' self.model: DiffSingerVariance = self.build_model(ckpt_steps=ckpt_steps) self.lr = LengthRegulator() self.rr = RhythmRegulator() @@ -104,6 +109,18 @@ def preprocess_input( summary['frames'] = T_s summary['seconds'] = '%.2f' % (T_s * self.timestep) + if hparams['use_spk_id']: + ph_spk_mix_id, ph_spk_mix_value = self.load_speaker_mix( + param_src=param, summary_dst=summary, mode='token', mix_length=T_ph + ) + spk_mix_id, spk_mix_value = self.load_speaker_mix( + param_src=param, summary_dst=summary, mode='frame', mix_length=T_s + ) + batch['ph_spk_mix_id'] = ph_spk_mix_id + batch['ph_spk_mix_value'] = ph_spk_mix_value + batch['spk_mix_id'] = spk_mix_id + batch['spk_mix_value'] = spk_mix_value + if load_dur: # Get mel2ph if ph_dur is needed ph_dur_sec = torch.from_numpy( @@ -219,9 +236,26 @@ def forward_model(self, sample): base_pitch = sample['base_pitch'] pitch = sample.get('pitch') + if hparams['use_spk_id']: + ph_spk_mix_id = sample['ph_spk_mix_id'] + ph_spk_mix_value = sample['ph_spk_mix_value'] + spk_mix_id = sample['spk_mix_id'] + spk_mix_value = sample['spk_mix_value'] + ph_spk_mix_embed = torch.sum( + self.model.spk_embed(ph_spk_mix_id) * ph_spk_mix_value.unsqueeze(3), # => [B, T_ph, N, H] + dim=2, keepdim=False + ) # => [B, T_ph, H] + spk_mix_embed = torch.sum( + self.model.spk_embed(spk_mix_id) * spk_mix_value.unsqueeze(3), # => [B, T_s, N, H] + dim=2, keepdim=False + ) # [B, T_s, H] + else: + ph_spk_mix_embed = spk_mix_embed = None + dur_pred, pitch_pred, variance_pred = self.model( txt_tokens, midi=midi, ph2word=ph2word, word_dur=word_dur, ph_dur=ph_dur, mel2ph=mel2ph, base_pitch=base_pitch, pitch=pitch, + ph_spk_mix_embed=ph_spk_mix_embed, spk_mix_embed=spk_mix_embed, retake=None, infer=True ) if dur_pred is not None: @@ -284,7 +318,6 @@ def run_inference( out_dir.mkdir(parents=True, exist_ok=True) for i in range(num_runs): - results = [] for param, flag, batch in tqdm.tqdm( zip(params, predictor_flags, batches), desc='infer segments', total=len(params) @@ -331,6 +364,15 @@ def run_inference( param_copy[v_name] = ' '.join([str(round(v, 4)) for v in v_pred.tolist()]) param_copy[f'{v_name}_timestep'] = str(self.timestep) + # Restore ph_spk_mix and spk_mix + if 'ph_spk_mix' in param_copy and 'spk_mix' in param_copy: + if 'ph_spk_mix_backup' in param_copy: + param_copy['ph_spk_mix'] = param_copy['ph_spk_mix_backup'] + del param['ph_spk_mix_backup'] + if 'spk_mix_backup' in param_copy: + param_copy['spk_mix'] = param_copy['spk_mix_backup'] + del param['spk_mix_backup'] + results.append(param_copy) if num_runs > 1: diff --git a/modules/toplevel.py b/modules/toplevel.py index f83483f79..4a8fb8570 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py 
@@ -117,14 +117,20 @@ def forward( base_pitch=None, pitch=None, retake=None, spk_id=None, infer=True, **kwargs ): if self.use_spk_id: - spk_embed = self.spk_embed(spk_id)[:, None, :] # [B,] => [B, T=1, H] + ph_spk_mix_embed = kwargs.get('ph_spk_mix_embed') + spk_mix_embed = kwargs.get('spk_mix_embed') + if ph_spk_mix_embed is not None and spk_mix_embed is not None: + ph_spk_embed = ph_spk_mix_embed + spk_embed = spk_mix_embed + else: + ph_spk_embed = spk_embed = self.spk_embed(spk_id)[:, None, :] # [B,] => [B, T=1, H] else: - spk_embed = None + ph_spk_embed = spk_embed = None encoder_out, dur_pred_out = self.fs2( txt_tokens, midi=midi, ph2word=ph2word, ph_dur=ph_dur, word_dur=word_dur, - spk_embed=spk_embed, infer=infer + spk_embed=ph_spk_embed, infer=infer ) if not self.predict_pitch and not self.predict_variances: diff --git a/scripts/infer.py b/scripts/infer.py index d62f26244..fe445b47a 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -2,6 +2,7 @@ import os import pathlib import sys +from collections import OrderedDict from pathlib import Path import click @@ -169,6 +170,7 @@ def variance( if not isinstance(params, list): params = [params] + params = [OrderedDict(p) for p in params] if len(params) == 0: print('The input file is empty.') @@ -196,10 +198,13 @@ def variance( assert hparams['K_step'] % speedup == 0, f'Acceleration ratio must be factor of K_step {hparams["K_step"]}.' hparams['pndm_speedup'] = speedup - # spk_mix = parse_commandline_spk_mix(spk) if hparams['use_spk_id'] and spk is not None else None + spk_mix = parse_commandline_spk_mix(spk) if hparams['use_spk_id'] and spk is not None else None for param in params: - # if spk_mix is not None: - # param['spk_mix'] = spk_mix + if spk_mix is not None: + if 'ph_spk_mix' in param: + param['ph_spk_mix_backup'] = param['ph_spk_mix'] + if 'spk_mix' in param: + param['spk_mix_backup'] = param['spk_mix'] merge_slurs(param) From 6dd8d9879fe382353809c0ed4e3f0364bb7d45e6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 2 Jun 2023 22:39:21 +0800 Subject: [PATCH 415/475] Fix parameter mismatch in acoustic speaker mix --- basics/base_svs_infer.py | 14 +++++++------- inference/ds_acoustic.py | 4 +++- inference/ds_variance.py | 4 ++-- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 6390f1d6a..3a15ee4ae 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -34,18 +34,18 @@ def build_model(self, ckpt_steps=None) -> torch.nn.Module: raise NotImplementedError() def load_speaker_mix(self, param_src: dict, summary_dst: dict, - mode: str = 'frame', mix_length: int = None) -> tuple[Tensor, Tensor]: + mix_mode: str = 'frame', mix_length: int = None) -> tuple[Tensor, Tensor]: """ :param param_src: param dict :param summary_dst: summary dict - :param mode: 'token' or 'frame' + :param mix_mode: 'token' or 'frame' :param mix_length: total tokens or frames to mix :return: spk_mix_id [B=1, 1, N], spk_mix_value [B=1, T, N] """ - assert mode == 'token' or mode == 'frame' - param_key = 'spk_mix' if mode == 'frame' else 'ph_spk_mix' - summary_solo_key = 'spk' if mode == 'frame' else 'ph_spk' + assert mix_mode == 'token' or mix_mode == 'frame' + param_key = 'spk_mix' if mix_mode == 'frame' else 'ph_spk_mix' + summary_solo_key = 'spk' if mix_mode == 'frame' else 'ph_spk' spk_mix_map = param_src.get(param_key) # { spk_name: value } or { spk_name: "value value value ..." 
} dynamic = False if spk_mix_map is None: @@ -72,7 +72,7 @@ def load_speaker_mix(self, param_src: dict, summary_dst: dict, spk_mix_id_list.append(self.spk_map[name]) if isinstance(values, str): # this speaker has a variable proportion - if mode == 'token': + if mix_mode == 'token': cur_spk_mix_value = values.split() assert len(cur_spk_mix_value) == mix_length, \ 'Speaker mix checks failed. In dynamic token-level mix, ' \ @@ -89,7 +89,7 @@ def load_speaker_mix(self, param_src: dict, summary_dst: dict, )).to(self.device)[None] # => [B=1, T] assert torch.all(cur_spk_mix_value >= 0.), \ f'Speaker mix checks failed.\n' \ - f'Proportions of speaker \'{name}\' on some {mode}s are negative.' + f'Proportions of speaker \'{name}\' on some {mix_mode}s are negative.' else: # this speaker has a constant proportion assert values >= 0., f'Speaker mix checks failed.\n' \ diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index ad265c7c6..4c987fe12 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -83,7 +83,9 @@ def preprocess_input(self, param, idx=0): summary['seconds'] = '%.2f' % (length * self.timestep) if hparams['use_spk_id']: - spk_mix_id, spk_mix_value = self.load_speaker_mix(param, summary, length) + spk_mix_id, spk_mix_value = self.load_speaker_mix( + param_src=param, summary_dst=summary, mix_mode='frame', mix_length=length + ) batch['spk_mix_id'] = spk_mix_id batch['spk_mix_value'] = spk_mix_value diff --git a/inference/ds_variance.py b/inference/ds_variance.py index 84df8a6cc..71ff9cb45 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -111,10 +111,10 @@ def preprocess_input( if hparams['use_spk_id']: ph_spk_mix_id, ph_spk_mix_value = self.load_speaker_mix( - param_src=param, summary_dst=summary, mode='token', mix_length=T_ph + param_src=param, summary_dst=summary, mix_mode='token', mix_length=T_ph ) spk_mix_id, spk_mix_value = self.load_speaker_mix( - param_src=param, summary_dst=summary, mode='frame', mix_length=T_s + param_src=param, summary_dst=summary, mix_mode='frame', mix_length=T_s ) batch['ph_spk_mix_id'] = ph_spk_mix_id batch['ph_spk_mix_value'] = ph_spk_mix_value From c1208b81c3cc1032b95a56678b520dd92081fb51 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 2 Jun 2023 23:29:41 +0800 Subject: [PATCH 416/475] Fix missing spk_mix in variance model inference --- inference/ds_variance.py | 10 ++++++++-- scripts/infer.py | 7 +++---- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/inference/ds_variance.py b/inference/ds_variance.py index 71ff9cb45..536112b45 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -367,10 +367,16 @@ def run_inference( # Restore ph_spk_mix and spk_mix if 'ph_spk_mix' in param_copy and 'spk_mix' in param_copy: if 'ph_spk_mix_backup' in param_copy: - param_copy['ph_spk_mix'] = param_copy['ph_spk_mix_backup'] + if param_copy['ph_spk_mix_backup'] is None: + del param_copy['ph_spk_mix'] + else: + param_copy['ph_spk_mix'] = param_copy['ph_spk_mix_backup'] del param['ph_spk_mix_backup'] if 'spk_mix_backup' in param_copy: - param_copy['spk_mix'] = param_copy['spk_mix_backup'] + if param_copy['ph_spk_mix_backup'] is None: + del param_copy['spk_mix'] + else: + param_copy['spk_mix'] = param_copy['spk_mix_backup'] del param['spk_mix_backup'] results.append(param_copy) diff --git a/scripts/infer.py b/scripts/infer.py index fe445b47a..239ac5f7c 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -201,10 +201,9 @@ def variance( spk_mix = parse_commandline_spk_mix(spk) if 
hparams['use_spk_id'] and spk is not None else None for param in params: if spk_mix is not None: - if 'ph_spk_mix' in param: - param['ph_spk_mix_backup'] = param['ph_spk_mix'] - if 'spk_mix' in param: - param['spk_mix_backup'] = param['spk_mix'] + param['ph_spk_mix_backup'] = param.get('ph_spk_mix') + param['spk_mix_backup'] = param.get('spk_mix') + param['ph_spk_mix'] = param['spk_mix'] = spk_mix merge_slurs(param) From a4d52211687b92b755cef53b2e87994a090ae7ba Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 3 Jun 2023 21:27:24 +0800 Subject: [PATCH 417/475] Complete configuration schemas for variance task --- docs/ConfigurationSchemas.md | 1262 ++++++++++++++++++++++++++++------ 1 file changed, 1067 insertions(+), 195 deletions(-) diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index fd7f6c048..14a41b479 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -75,7 +75,7 @@ Sampling rate of waveforms. #### visibility -all +acoustic, variance #### scope @@ -488,6 +488,78 @@ required str +### breathiness_db_max + +Maximum breathiness value in dB used for normalization to [-1, 1]. + +#### visibility + +variance + +#### scope + +inference + +#### customizability + +recommended + +#### type + +float + +#### default + +-20.0 + +### breathiness_db_min + +Minimum breathiness value in dB used for normalization to [-1, 1]. + +#### visibility + +acoustic, variance + +#### scope + +inference + +#### customizability + +recommended + +#### type + +float + +#### default + +-96.0 + +### breathiness_smooth_width + +Length of sinusoidal smoothing convolution kernel (in seconds) on extracted breathiness curve. + +#### visibility + +acoustic, variance + +#### scope + +preprocessing + +#### customizability + +normal + +#### type + +float + +#### default + +0.12 + ### clip_grad_norm The value at which to clip gradients. Equivalent to `gradient_clip_val` in `lightning.pytorch.Trainer`. @@ -570,7 +642,7 @@ path to the word-phoneme mapping dictionary file. Training data must fully cover #### visibility -acoustic +acoustic, variance #### scope @@ -590,7 +662,7 @@ Denoiser type of the DDPM. #### visibility -acoustic +acoustic, variance #### scope @@ -614,7 +686,7 @@ Loss type of the DDPM. #### visibility -acoustic +acoustic, variance #### scope @@ -666,7 +738,7 @@ Dropout rate in some FastSpeech2 modules. #### visibility -all +acoustic, variance #### scope @@ -708,13 +780,21 @@ int 4 -### enc_ffn_kernel_size +### dur_prediction_args -Size of TransformerFFNLayer convolution kernel size in FastSpeech2 encoder. +Arguments for phoneme duration prediction. + +#### type + +dict + +### dur_prediction_args.arch + +Architecture of duration predictor. #### visibility -all +variance #### scope @@ -722,23 +802,27 @@ nn #### customizability -not recommended +reserved #### type -int +str #### default -9 +fs2 -### enc_layers +#### constraints -Number of FastSpeech2 encoder layers. +Choose from 'fs2'. + +### dur_prediction_args.dropout + +Dropout rate in duration predictor of FastSpeech2. #### visibility -all +variance #### scope @@ -746,26 +830,23 @@ nn #### customizability -normal +not recommended #### type -int +float #### default -4 - -### f0_embed_type +0.1 -Map f0 to embedding using: +### dur_prediction_args.hidden_size -- `torch.nn.Linear` if 'continuous' -- `torch.nn.Embedding` if 'discrete' +Dimensions of hidden layers in duration predictor of FastSpeech2. 
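The four `*_db_min` / `*_db_max` keys above all serve the same purpose: mapping a dB-scale variance curve into the [-1, 1] range the diffusion model works in. A minimal sketch of that mapping, assuming a simple clip-then-rescale scheme (the helper name and example values are illustrative, not taken from the codebase):

```python
import numpy as np

def norm_curve_to_unit_range(curve_db: np.ndarray, db_min: float, db_max: float) -> np.ndarray:
    # Clip to the configured bounds, then map [db_min, db_max] -> [-1, 1]
    clipped = np.clip(curve_db, db_min, db_max)
    return (clipped - db_min) / (db_max - db_min) * 2.0 - 1.0

# Example with the documented breathiness defaults
breathiness_db = np.array([-90.0, -60.0, -30.0, -21.0])
print(norm_curve_to_unit_range(breathiness_db, db_min=-96.0, db_max=-20.0))
```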
#### visibility -acoustic +variance #### scope @@ -777,27 +858,19 @@ normal #### type -str +int #### default -continuous - -#### constraints - -Choose from 'continuous', 'discrete'. - -### ffn_act +512 -Activation function of TransformerFFNLayer in FastSpeech2 encoder: +### dur_prediction_args.kernel_size -- `torch.nn.ReLU` if 'relu' -- `torch.nn.GELU` if 'gelu' -- `torch.nn.SiLU` if 'swish' +Kernel size of convolution layers of duration predictor of FastSpeech2. #### visibility -all +variance #### scope @@ -805,127 +878,127 @@ nn #### customizability -not recommended +normal #### type -str +int #### default -gelu - -#### constraints - -Choose from 'relu', 'gelu', 'swish'. +3 -### ffn_padding +### dur_prediction_args.lambda_pdur_loss -Padding mode of TransformerFFNLayer convolution in FastSpeech2 encoder. +Coefficient of single phone duration loss when calculating joint duration loss. #### visibility -all +variance #### scope -nn +training #### customizability -not recommended +normal #### type -str +float #### default -SAME +0.3 -### fft_size +### dur_prediction_args.lambda_sdur_loss -Fast Fourier Transforms parameter for mel extraction. +Coefficient of sentence duration loss when calculating joint duration loss. #### visibility -all +variance #### scope -preprocessing +training #### customizability -reserved +normal #### type -int +float #### default -2048 +3.0 -### fmax +### dur_prediction_args.lambda_wdur_loss -Maximum frequency of mel extraction. +Coefficient of word duration loss when calculating joint duration loss. #### visibility -acoustic +variance #### scope -preprocessing +training #### customizability -reserved +normal #### type -int +float #### default -16000 +1.0 -### fmin +### dur_prediction_args.log_offset -Minimum frequency of mel extraction. +Offset for log domain duration loss calculation, where the following transformation is applied: +$$ +D' = \ln{(D+d)} +$$ +with the offset value $d$. #### visibility -acoustic +variance #### scope -preprocessing +training #### customizability -reserved +not recommended #### type -int +float #### default -40 +1.0 -### hidden_size +### dur_prediction_args.loss_type -Dimension of hidden layers of FastSpeech2, token and variance embeddings, and diffusion condition. +Underlying loss type of duration loss. #### visibility -acoustic +variance #### scope -nn +training #### customizability @@ -933,27 +1006,31 @@ normal #### type -int +str #### default -256 +mse -### hop_size +#### constraints -Hop size or step length (in number of waveform samples) of mel and feature extraction. +Choose from 'mse', 'huber'. + +### dur_prediction_args.num_layers + +Number of duration predictor layers. #### visibility -acoustic +variance #### scope -preprocessing +nn #### customizability -reserved +normal #### type @@ -961,39 +1038,39 @@ int #### default -512 +5 -### interp_uv +### enc_ffn_kernel_size -Whether to apply linear interpolation to unvoiced parts in f0. +Size of TransformerFFNLayer convolution kernel size in FastSpeech2 encoder. #### visibility -acoustic +acoustic, variance #### scope -preprocessing +nn #### customizability -reserved +not recommended #### type -boolean +int #### default -true +9 -### K_step +### enc_layers -Total number of diffusion steps. +Number of FastSpeech2 encoder layers. 
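Putting the `dur_prediction_args` loss keys together: the joint duration loss combines phone-, word-, and sentence-level terms, each computed in the log domain with the `log_offset` transformation $D' = \ln{(D+d)}$ and weighted by the three lambda values. A sketch under those assumptions (not the repository's exact implementation; `ph2word` is assumed to map each phone to its word index):

```python
import torch
import torch.nn.functional as F

def joint_dur_loss(dur_pred, dur_gt, ph2word, log_offset=1.0,
                   lambda_pdur=0.3, lambda_wdur=1.0, lambda_sdur=3.0):
    # phone-level loss on log(D + d), matching loss_type = 'mse'
    pdur = F.mse_loss(torch.log(dur_pred + log_offset), torch.log(dur_gt + log_offset))
    # word-level: sum phone durations within each word before taking the loss
    n_words = int(ph2word.max()) + 1
    wdur_pred = torch.zeros(n_words).scatter_add(0, ph2word, dur_pred)
    wdur_gt = torch.zeros(n_words).scatter_add(0, ph2word, dur_gt)
    wdur = F.mse_loss(torch.log(wdur_pred + log_offset), torch.log(wdur_gt + log_offset))
    # sentence-level: total duration of the whole sequence
    sdur = F.mse_loss(torch.log(dur_pred.sum() + log_offset), torch.log(dur_gt.sum() + log_offset))
    return lambda_pdur * pdur + lambda_wdur * wdur + lambda_sdur * sdur

ph2word = torch.tensor([0, 0, 1, 1, 1])
loss = joint_dur_loss(torch.tensor([0.10, 0.20, 0.30, 0.10, 0.20]),
                      torch.tensor([0.12, 0.18, 0.25, 0.15, 0.20]), ph2word)
```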
#### visibility -all +acoustic, variance #### scope @@ -1001,7 +1078,7 @@ nn #### customizability -not recommended +normal #### type @@ -1009,43 +1086,43 @@ int #### default -1000 +4 -### log_interval +### energy_db_max -Controls how often to log within training steps. Equivalent to `log_every_n_steps` in `lightning.pytorch.Trainer`. +Maximum energy value in dB used for normalization to [-1, 1]. #### visibility -all +variance #### scope -training +inference #### customizability -normal +recommended #### type -int +float #### default -100 +-12.0 -### lr_scheduler_args.gamma +### energy_db_min -Learning rate decay ratio of `torch.optim.lr_scheduler.StepLR`. +Minimum energy value in dB used for normalization to [-1, 1]. #### visibility -all +variance #### scope -training +inference #### customizability @@ -1057,59 +1134,458 @@ float #### default -0.5 - -### lr_scheduler_args - -Arguments of learning rate scheduler. Keys will be used as keyword arguments of the `__init__()` method of [lr_scheduler_args.scheduler_cls](#lr_scheduler_args.scheduler_cls). - -#### type - -dict +-96.0 -### lr_scheduler_args.scheduler_cls +### energy_smooth_width -Learning rate scheduler class name. +Length of sinusoidal smoothing convolution kernel (in seconds) on extracted energy curve. #### visibility -all +acoustic, variance #### scope -training +preprocessing #### customizability -not recommended +normal #### type -str +float #### default -torch.optim.lr_scheduler.StepLR +0.12 -### lr_scheduler_args.step_size +### f0_embed_type -Learning rate decays every this number of training steps. +Map f0 to embedding using: + +- `torch.nn.Linear` if 'continuous' +- `torch.nn.Embedding` if 'discrete' #### visibility -all +acoustic #### scope -training +nn #### customizability -recommended +normal #### type -int +str + +#### default + +continuous + +#### constraints + +Choose from 'continuous', 'discrete'. + +### ffn_act + +Activation function of TransformerFFNLayer in FastSpeech2 encoder: + +- `torch.nn.ReLU` if 'relu' +- `torch.nn.GELU` if 'gelu' +- `torch.nn.SiLU` if 'swish' + +#### visibility + +acoustic, variance + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +str + +#### default + +gelu + +#### constraints + +Choose from 'relu', 'gelu', 'swish'. + +### ffn_padding + +Padding mode of TransformerFFNLayer convolution in FastSpeech2 encoder. + +#### visibility + +acoustic, variance + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +str + +#### default + +SAME + +### fft_size + +Fast Fourier Transforms parameter for mel extraction. + +#### visibility + +acoustic, variance + +#### scope + +preprocessing + +#### customizability + +reserved + +#### type + +int + +#### default + +2048 + +### fmax + +Maximum frequency of mel extraction. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +reserved + +#### type + +int + +#### default + +16000 + +### fmin + +Minimum frequency of mel extraction. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +reserved + +#### type + +int + +#### default + +40 + +### hidden_size + +Dimension of hidden layers of FastSpeech2, token and variance embeddings, and diffusion condition. + +#### visibility + +acoustic, variance + +#### scope + +nn + +#### customizability + +normal + +#### type + +int + +#### default + +256 + +### hop_size + +Hop size or step length (in number of waveform samples) of mel and feature extraction. 
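How `hop_size` interacts with the sampling rate is worth spelling out, since it determines the frame timestep used by every frame-level curve on this page (and the `*_timestep` values written into .ds files). A short sketch with the documented defaults:

```python
audio_sample_rate = 44100  # documented default
hop_size = 512             # documented default

timestep = hop_size / audio_sample_rate
print(timestep)  # 0.011609977324263039 s per frame -- the *_timestep seen in .ds files

# Number of mel / feature frames covering a 4-second waveform
n_samples = 4 * audio_sample_rate
n_frames = n_samples // hop_size + 1
print(n_frames)
```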
+ +#### visibility + +acoustic, variance + +#### scope + +preprocessing + +#### customizability + +reserved + +#### type + +int + +#### default + +512 + +### interp_uv + +Whether to apply linear interpolation to unvoiced parts in f0. + +#### visibility + +acoustic + +#### scope + +preprocessing + +#### customizability + +reserved + +#### type + +boolean + +#### default + +true + +### lambda_dur_loss + +Coefficient of duration loss when calculating total loss of variance model. + +#### visibility + +variance + +#### scope + +training + +#### customizability + +normal + +#### type + +float + +#### default + +1.0 + +### lambda_pitch_loss + +Coefficient of pitch loss when calculating total loss of variance model. + +#### visibility + +variance + +#### scope + +training + +#### customizability + +normal + +#### type + +float + +#### default + +1.0 + +### lambda_var_loss + +Coefficient of variance loss (all variance parameters other than pitch, like energy, breathiness, etc.) when calculating total loss of variance model. + +#### visibility + +variance + +#### scope + +training + +#### customizability + +normal + +#### type + +float + +#### default + +1.0 + +### K_step + +Total number of diffusion steps. + +#### visibility + +acoustic, variance + +#### scope + +nn + +#### customizability + +not recommended + +#### type + +int + +#### default + +1000 + +### log_interval + +Controls how often to log within training steps. Equivalent to `log_every_n_steps` in `lightning.pytorch.Trainer`. + +#### visibility + +all + +#### scope + +training + +#### customizability + +normal + +#### type + +int + +#### default + +100 + +### lr_scheduler_args.gamma + +Learning rate decay ratio of `torch.optim.lr_scheduler.StepLR`. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +float + +#### default + +0.5 + +### lr_scheduler_args + +Arguments of learning rate scheduler. Keys will be used as keyword arguments of the `__init__()` method of [lr_scheduler_args.scheduler_cls](#lr_scheduler_args.scheduler_cls). + +#### type + +dict + +### lr_scheduler_args.scheduler_cls + +Learning rate scheduler class name. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +str + +#### default + +torch.optim.lr_scheduler.StepLR + +### lr_scheduler_args.step_size + +Learning rate decays every this number of training steps. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +int #### default @@ -1145,7 +1621,7 @@ Maximum number of data frames in each training batch. Used to dynamically contro #### visibility -all +acoustic, variance #### scope @@ -1193,7 +1669,7 @@ Max beta of the DDPM noise schedule. #### visibility -all +acoustic, variance #### scope @@ -1241,7 +1717,7 @@ Maximum number of data frames in each validation batch. #### visibility -all +acoustic, variance #### scope @@ -1289,7 +1765,7 @@ Maximum mel spectrogram heatmap value for TensorBoard plotting. #### visibility -all +acoustic #### scope @@ -1313,7 +1789,7 @@ Minimum mel spectrogram heatmap value for TensorBoard plotting. #### visibility -all +acoustic #### scope @@ -1331,6 +1807,30 @@ float -6.0 +### midi_smooth_width + +Length of sinusoidal smoothing convolution kernel (in seconds) on the step function representing MIDI sequence for base pitch calculation. 
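A hedged sketch of the smoothing that `midi_smooth_width` controls: a sinusoidal window of that many seconds, convolved over the step function of MIDI note values to produce a smooth base pitch. The kernel construction below is an assumption based on the description above, not copied from the codebase:

```python
import math
import torch
import torch.nn.functional as F

timestep = 512 / 44100       # hop_size / audio_sample_rate
midi_smooth_width = 0.06     # documented default

kernel_size = max(1, round(midi_smooth_width / timestep))    # about 5 frames
kernel = torch.sin(torch.linspace(0, math.pi, kernel_size))  # sinusoidal window
kernel /= kernel.sum()                                       # normalize to preserve level

note_steps = torch.tensor([60.0] * 30 + [64.0] * 30)         # MIDI step function
smoothed = F.conv1d(
    note_steps[None, None, :], kernel[None, None, :],
    padding=kernel_size // 2
)[0, 0]
```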
+ +#### visibility + +variance + +#### scope + +preprocessing + +#### customizability + +normal + +#### type + +float + +#### default + +0.06 + ### num_ckpt_keep Number of newest checkpoints kept during training. @@ -1361,7 +1861,7 @@ The number of attention heads of `torch.nn.MultiheadAttention` in FastSpeech2 en #### visibility -all +acoustic, variance #### scope @@ -1387,7 +1887,7 @@ Due to some historical reasons, old checkpoints may have 3 padding tokens called #### visibility -acoustic +acoustic, variance #### scope @@ -1435,7 +1935,7 @@ Maximum number of speakers in multi-speaker models. #### visibility -acoustic +acoustic, variance #### scope @@ -1459,7 +1959,7 @@ Number of validation plots in each validation. Plots will be chosen from the sta #### visibility -acoustic +acoustic, variance #### scope @@ -1515,15 +2015,167 @@ Parameter of the `torch.optim.AdamW` optimizer. #### visibility -all +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +0.98 + +### optimizer_args.lr + +Initial learning rate of the optimizer. + +#### visibility + +all + +#### scope + +training + +#### customizability + +recommended + +#### type + +float + +#### default + +0.0004 + +### optimizer_args.optimizer_cls + +Optimizer class name + +#### visibility + +all + +#### scope + +training + +#### customizability + +reserved + +#### type + +str + +#### default + +torch.optim.AdamW + +### optimizer_args.weight_decay + +Weight decay ratio of optimizer. + +#### visibility + +all + +#### scope + +training + +#### customizability + +not recommended + +#### type + +float + +#### default + +0 + +### permanent_ckpt_interval + +The interval (in number of training steps) of permanent checkpoints. Permanent checkpoints will not be removed even if they are not the newest ones. + +#### visibility + +all + +#### scope + +training + +#### type + +int + +#### default + +40000 + +### permanent_ckpt_start + +Checkpoints will be marked as permanent every [permanent_ckpt_interval](#permanent_ckpt_interval) training steps after this number of training steps. + +#### visibility + +all + +#### scope + +training + +#### type + +int + +#### default + +120000 + +### pitch_prediction_args + +Arguments for pitch prediction. + +#### type + +dict + +### pitch_prediction_args.dilation_cycle_length + +Equivalent to [dilation_cycle_length](#dilation_cycle_length) but only for the PitchDiffusion model. + +#### visibility + +variance + +#### default + +5 + +### pitch_prediction_args.pitd_clip_max + +Maximum clipping value (in semitones) of pitch delta between actual pitch and base pitch. + +#### visibility + +variance #### scope -training - -#### customizability - -not recommended +inference #### type @@ -1531,23 +2183,19 @@ float #### default -0.98 +12.0 -### optimizer_args.lr +### pitch_prediction_args.pitd_clip_min -Initial learning rate of the optimizer. +Minimum clipping value (in semitones) of pitch delta between actual pitch and base pitch. #### visibility -all +variance #### scope -training - -#### customizability - -recommended +inference #### type @@ -1555,47 +2203,47 @@ float #### default -0.0004 +-12.0 -### optimizer_args.optimizer_cls +### pitch_prediction_args.pitd_norm_max -Optimizer class name +Maximum pitch delta value in semitones used for normalization to [-1, 1]. 
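The four `pitd_*` values describe two distinct steps: normalization of the pitch delta (actual pitch minus base pitch, in semitones) into [-1, 1] for the diffusion model, and clipping of out-of-range predictions at inference. A sketch of both, assuming this division of labor (exact call sites in the codebase may differ):

```python
import torch

pitd_norm_min, pitd_norm_max = -8.0, 8.0     # documented defaults
pitd_clip_min, pitd_clip_max = -12.0, 12.0

def norm_pitch_delta(pitch, base_pitch):
    # map the semitone delta into [-1, 1] for PitchDiffusion
    delta = pitch - base_pitch
    return (delta - pitd_norm_min) / (pitd_norm_max - pitd_norm_min) * 2 - 1

def denorm_pitch_delta(pitd_normed, base_pitch):
    # invert the normalization, clipping deltas outside the clip bounds
    delta = (pitd_normed + 1) / 2 * (pitd_norm_max - pitd_norm_min) + pitd_norm_min
    return base_pitch + delta.clamp(pitd_clip_min, pitd_clip_max)

base = torch.full((8,), 60.0)
pitch = base + torch.linspace(-10, 10, 8)
restored = denorm_pitch_delta(norm_pitch_delta(pitch, base), base)
```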
#### visibility -all +variance #### scope -training +inference #### customizability -reserved +recommended #### type -str +float #### default -torch.optim.AdamW +8.0 -### optimizer_args.weight_decay +### pitch_prediction_args.pitd_norm_min -Weight decay ratio of optimizer. +Minimum pitch delta value in semitones used for normalization to [-1, 1]. #### visibility -all +variance #### scope -training +inference #### customizability -not recommended +recommended #### type @@ -1603,19 +2251,23 @@ float #### default -0 +-8.0 -### permanent_ckpt_interval +### pitch_prediction_args.repeat_bins -The interval (in number of training steps) of permanent checkpoints. Permanent checkpoints will not be removed even if they are not the newest ones. +Number of repeating bins in PitchDiffusion. #### visibility -all +variance #### scope -training +nn, inference + +#### customizability + +recommended #### type @@ -1623,27 +2275,31 @@ int #### default -40000 +64 -### permanent_ckpt_start +### pitch_prediction_args.residual_channels -Checkpoints will be marked as permanent every [permanent_ckpt_interval](#permanent_ckpt_interval) training steps after this number of training steps. +Equivalent to [residual_channels](#residual_channels) but only for PitchDiffusion. #### visibility -all +variance -#### scope +#### default -training +256 -#### type +### pitch_prediction_args.residual_layers -int +Equivalent to [residual_layers](#residual_layers) but only for PitchDiffusion. + +#### visibility + +variance #### default -120000 +20 ### pl_trainer_accelerator @@ -1657,7 +2313,7 @@ all training -#### customization +#### customizability not recommended @@ -1687,7 +2343,7 @@ all training -#### customization +#### customizability not recommended @@ -1711,7 +2367,7 @@ all training -#### customization +#### customizability normal @@ -1739,7 +2395,7 @@ all training -#### customization +#### customizability reserved @@ -1763,7 +2419,7 @@ all training -#### customization +#### customizability reserved @@ -1781,7 +2437,7 @@ Diffusion sampling speed-up ratio. 1 means no speeding up. #### visibility -all +acoustic, variance #### type @@ -1795,6 +2451,102 @@ int Must be a factor of [K_step](#K_step). +### predict_breathiness + +Whether to enable breathiness prediction. + +#### visibility + +variance + +#### scope + +nn, preprocessing, training, inference + +#### customizability + +recommended + +#### type + +bool + +#### default + +false + +### predict_dur + +Whether to enable phoneme duration prediction. + +#### visibility + +variance + +#### scope + +nn, preprocessing, training, inference + +#### customizability + +recommended + +#### type + +bool + +#### default + +true + +### predict_energy + +Whether to enable energy prediction. + +#### visibility + +variance + +#### scope + +nn, preprocessing, training, inference + +#### customizability + +recommended + +#### type + +bool + +#### default + +false + +### predict_pitch + +Whether to enable pitch prediction. + +#### visibility + +variance + +#### scope + +nn, preprocessing, training, inference + +#### customizability + +recommended + +#### type + +bool + +#### default + +true + ### raw_data_dir Path(s) to the raw dataset including wave files, transcriptions, etc. @@ -1821,7 +2573,7 @@ Whether to use relative positional encoding in FastSpeech2 module. #### visibility -all +acoustic, variance #### scope @@ -1899,7 +2651,7 @@ Training performance on some datasets may be very sensitive to this value. 
Chang #### visibility -all +acoustic, variance #### scope @@ -1947,7 +2699,7 @@ The diffusion schedule type. #### visibility -acoustic +acoustic, variance #### scope @@ -1999,7 +2751,7 @@ Whether to apply the _sorting by similar length_ algorithm described in [sampler #### visibility -all +acoustic, variance #### scope @@ -2023,7 +2775,7 @@ The names of speakers in a multi-speaker model. Speaker names are mapped to spea #### visibility -acoustic +acoustic, variance #### scope @@ -2043,7 +2795,7 @@ Minimum mel spectrogram value used for normalization to [-1, 1]. Different mel b #### visibility -all +acoustic #### scope @@ -2067,7 +2819,7 @@ Maximum mel spectrogram value used for normalization to [-1, 1]. Different mel b #### visibility -all +acoustic #### scope @@ -2158,6 +2910,54 @@ str train +### use_breathiness_embed + +Whether to accept and embed breathiness values into the model. + +#### visibility + +acoustic + +#### scope + +nn, preprocessing, inference + +#### customizability + +recommended + +#### type + +boolean + +#### default + +false + +### use_energy_embed + +Whether to accept and embed energy values into the model. + +#### visibility + +acoustic + +#### scope + +nn, preprocessing, inference + +#### customizability + +recommended + +#### type + +boolean + +#### default + +false + ### use_key_shift_embed Whether to embed key shifting values introduced by random pitch shifting augmentation. @@ -2192,7 +2992,7 @@ Whether to use SinusoidalPositionalEmbedding in FastSpeech2 encoder. #### visibility -acoustic +acoustic, variance #### scope @@ -2236,7 +3036,7 @@ Whether embed the speaker id from a multi-speaker dataset. #### visibility -acoustic +acoustic, variance #### scope @@ -2326,6 +3126,78 @@ str valid +### variances_prediction_args + +Arguments for prediction of variance parameters other than pitch, like energy, breathiness, etc. + +#### type + +dict + +### variances_prediction_args.dilation_cycle_length + +Equivalent to [dilation_cycle_length](#dilation_cycle_length) but only for the MultiVarianceDiffusion model. + +#### visibility + +variance + +#### default + +4 + +### variances_prediction_args.repeat_bins + +Number of repeating bins of each parameter in MultiVarianceDiffusion. Total repeating bins in MultiVarianceDiffusion are calculated as follows: +$$ +B=N\times B' +$$ +where $B'$ is the number of bins of each parameter, $N$ is the number of parameters. + +#### visibility + +variance + +#### scope + +nn, inference + +#### customizability + +recommended + +#### type + +int + +#### default + +24 + +### variances_prediction_args.residual_channels + +Equivalent to [residual_channels](#residual_channels) but only for MultiVarianceDiffusion. + +#### visibility + +variance + +#### default + +192 + +### variances_prediction_args.residual_layers + +Equivalent to [residual_layers](#residual_layers) but only for MultiVarianceDiffusion. + +#### visibility + +variance + +#### default + +10 + ### vocoder The vocoder class name. @@ -2380,7 +3252,7 @@ Window size for mel or feature extraction. 
#### visibility -all +acoustic, variance #### scope From c4dbe37925937db5ce37e0033509a81379da3c11 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 3 Jun 2023 23:29:33 +0800 Subject: [PATCH 418/475] Add missing `diff_accelerator` --- docs/ConfigurationSchemas.md | 33 ++++++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index 14a41b479..60a235b83 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -638,7 +638,7 @@ Choose from 'gloo', 'nccl', 'nccl_no_p2p'. Windows platforms may use 'gloo'; Lin ### dictionary -path to the word-phoneme mapping dictionary file. Training data must fully cover phonemes in the dictionary. +Path to the word-phoneme mapping dictionary file. Training data must fully cover phonemes in the dictionary. #### visibility @@ -656,6 +656,37 @@ normal str +### diff_accelerator + +Diffusion sampling acceleration method. The following method are currently available: + +- PNDM: the PLMS method from [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778) +- DPM-Solver 2.0 adapted from [DPM-Solver: A Fast ODE Solver for Diffusion Probabilistic Model Sampling in Around 10 Steps](https://github.com/LuChengTHU/dpm-solver) + +#### visibility + +acoustic, variance + +#### scope + +inference + +#### customizability + +normal + +#### type + +str + +#### default + +dpm-solver + +#### constraints + +Choose from 'pndm', 'dpm-solver'. + ### diff_decoder_type Denoiser type of the DDPM. From 1f6cd1008e8ccd8bab0bd3745935a19e91969ef1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 4 Jun 2023 13:19:02 +0800 Subject: [PATCH 419/475] Fix KeyError in `trans_key()` --- utils/infer_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/infer_utils.py b/utils/infer_utils.py index 90a2b90db..c962fa36c 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -72,7 +72,7 @@ def trans_key(raw_data, key): else: new_note_seq_list.append(note_seq) i["note_seq"] = " ".join(new_note_seq_list) - if i["f0_seq"]: + if i.get("f0_seq"): f0_seq_list = i["f0_seq"].split(" ") f0_seq_list = [float(x) for x in f0_seq_list] new_f0_seq_list = [] From 6a5c3afc5fe33fa45ea01d123e993e5a903fc15a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 4 Jun 2023 18:44:38 +0800 Subject: [PATCH 420/475] Remove unused code --- utils/infer_utils.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/utils/infer_utils.py b/utils/infer_utils.py index c962fa36c..12ccb2057 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -5,8 +5,6 @@ import numpy as np from scipy.io import wavfile -head_list = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"] - def merge_slurs(param): if not param.get('is_slur_seq'): @@ -45,20 +43,6 @@ def trans_f0_seq(feature_pit, transform): return round(feature_pit, 1) -def move_key(raw_data, mv_key): - head = raw_data[:-1] - body = int(raw_data[-1]) - new_head_index = head_list.index(head) + mv_key - while new_head_index < 0: - body -= 1 - new_head_index += 12 - while new_head_index > 11: - body += 1 - new_head_index -= 12 - result_data = head_list[new_head_index] + str(body) - return result_data - - def trans_key(raw_data, key): warning_tag = False for i in raw_data: From 4947f9b0ddaec0ea223c7d8325abd9f02008edee Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 12 Jun 2023 23:48:15 +0800 Subject: [PATCH 421/475] Fix typo --- scripts/infer.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/scripts/infer.py b/scripts/infer.py index 239ac5f7c..e5bfc160e 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -130,7 +130,7 @@ def acoustic( ) -@main.command(help='Run DiffSinger acoustic model inference') +@main.command(help='Run DiffSinger variance model inference') @click.argument('proj', type=str, metavar='DS_FILE') @click.option('--exp', type=str, required=True, metavar='EXP', help='Selection of model') @click.option('--ckpt', type=int, required=False, metavar='STEPS', help='Selection of checkpoint training steps') From 1ab32defca18ff333a098903ae2750d9361988e1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 00:43:39 +0800 Subject: [PATCH 422/475] Remove default value of `--gender` to allow None input --- scripts/infer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/infer.py b/scripts/infer.py index e5bfc160e..bfaedd093 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -44,7 +44,7 @@ def main(): @click.option('--title', type=str, required=False, help='Title of output file') @click.option('--num', type=int, required=False, default=1, help='Number of runs') @click.option('--key', type=int, required=False, default=0, help='Key transition of pitch') -@click.option('--gender', type=float, required=False, default=0, help='Formant shifting (gender control)') +@click.option('--gender', type=float, required=False, help='Formant shifting (gender control)') @click.option('--seed', type=int, required=False, default=-1, help='Random seed of the inference') @click.option('--speedup', type=int, required=False, default=0, help='Diffusion acceleration ratio') @click.option('--mel', is_flag=True, help='Save intermediate mel format instead of waveform') From f8b0a95b8f99f3c766764def103d889383fac925 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 13:10:55 +0800 Subject: [PATCH 423/475] Fix wrong hparams key --- deployment/exporters/variance_exporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index 7146087f7..a380867de 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -271,7 +271,7 @@ def _torch_export_model(self): ) # Prepare inputs for denoiser tracing and PitchDiffusion scripting - shape = (1, 1, hparams['pitch_prediction_args']['num_pitch_bins'], 15) + shape = (1, 1, hparams['pitch_prediction_args']['repeat_bins'], 15) noise = torch.randn(shape, device=self.device) condition = torch.rand((1, hparams['hidden_size'], 15), device=self.device) step = (torch.rand((1,), device=self.device) * hparams['K_step']).long() From af4d8ec8e64e686de69f5437f0d475896ef6ba18 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 19:24:14 +0800 Subject: [PATCH 424/475] Do not support old DS files anymore --- scripts/infer.py | 8 ++------ utils/infer_utils.py | 33 --------------------------------- 2 files changed, 2 insertions(+), 39 deletions(-) diff --git a/scripts/infer.py b/scripts/infer.py index bfaedd093..8512f29fe 100644 --- a/scripts/infer.py +++ b/scripts/infer.py @@ -83,7 +83,7 @@ def acoustic( print('The input file is empty.') exit() - from utils.infer_utils import trans_key, parse_commandline_spk_mix, merge_slurs + from utils.infer_utils import trans_key, parse_commandline_spk_mix if key != 0: params = trans_key(params, key) @@ -118,8 +118,6 @@ def acoustic( if spk_mix is not None: param['spk_mix'] = spk_mix - 
merge_slurs(param) - from inference.ds_acoustic import DiffSingerAcousticInfer infer_ins = DiffSingerAcousticInfer(load_model=not mel, ckpt_steps=ckpt) print(f'| Model: {type(infer_ins.model)}') @@ -176,7 +174,7 @@ def variance( print('The input file is empty.') exit() - from utils.infer_utils import trans_key, parse_commandline_spk_mix, merge_slurs + from utils.infer_utils import trans_key, parse_commandline_spk_mix if key != 0: params = trans_key(params, key) @@ -205,8 +203,6 @@ def variance( param['spk_mix_backup'] = param.get('spk_mix') param['ph_spk_mix'] = param['spk_mix'] = spk_mix - merge_slurs(param) - from inference.ds_variance import DiffSingerVarianceInfer infer_ins = DiffSingerVarianceInfer(ckpt_steps=ckpt, predictions=set(predict)) print(f'| Model: {type(infer_ins.model)}') diff --git a/utils/infer_utils.py b/utils/infer_utils.py index 12ccb2057..607d77c2b 100644 --- a/utils/infer_utils.py +++ b/utils/infer_utils.py @@ -1,43 +1,10 @@ import re -import warnings import librosa import numpy as np from scipy.io import wavfile -def merge_slurs(param): - if not param.get('is_slur_seq'): - return - warnings.warn( - 'You are running inference from a DS file in old format. Please re-export it in new format ' - 'or ask for a new version from the provider of this file.', - category=DeprecationWarning - ) - warnings.filterwarnings(action='default') - ph_seq = param['ph_seq'].split() - note_seq = param['note_seq'].split() - note_dur_seq = param['note_dur_seq'].split() - is_slur_seq = [int(s) for s in param['is_slur_seq'].split()] - ph_dur = [float(d) for d in param['ph_dur'].split()] - i = 0 - while i < len(ph_seq): - if is_slur_seq[i]: - ph_dur[i - 1] += ph_dur[i] - ph_seq.pop(i) - note_seq.pop(i) - note_dur_seq.pop(i) - is_slur_seq.pop(i) - ph_dur.pop(i) - else: - i += 1 - param['ph_seq'] = ' '.join(ph_seq) - param['note_seq'] = ' '.join(note_seq) - param['note_dur_seq'] = ' '.join(note_dur_seq) - param['is_slur_seq'] = ' '.join([str(s) for s in is_slur_seq]) - param['ph_dur'] = ' '.join([str(round(d, 4)) for d in ph_dur]) - - def trans_f0_seq(feature_pit, transform): feature_pit = feature_pit * 2 ** (transform / 12) return round(feature_pit, 1) From 51acdde6758fb6b9a57815e7c0154442b59fe52b Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 20:45:18 +0800 Subject: [PATCH 425/475] Restore compatibility for Python 3.8 --- basics/base_svs_infer.py | 7 ++++--- deployment/exporters/acoustic_exporter.py | 14 +++++++------- deployment/exporters/variance_exporter.py | 10 +++++----- deployment/modules/diffusion.py | 2 ++ modules/commons/common_layers.py | 2 ++ modules/diffusion/ddpm.py | 6 ++++-- modules/fastspeech/param_adaptor.py | 2 ++ scripts/infer.py | 3 ++- utils/indexed_datasets.py | 2 ++ 9 files changed, 30 insertions(+), 18 deletions(-) diff --git a/basics/base_svs_infer.py b/basics/base_svs_infer.py index 3a15ee4ae..e040993a7 100644 --- a/basics/base_svs_infer.py +++ b/basics/base_svs_infer.py @@ -2,6 +2,7 @@ import numpy as np import torch from torch import Tensor +from typing import Tuple, Dict from utils.hparams import hparams from utils.infer_utils import resample_align_curve @@ -34,7 +35,7 @@ def build_model(self, ckpt_steps=None) -> torch.nn.Module: raise NotImplementedError() def load_speaker_mix(self, param_src: dict, summary_dst: dict, - mix_mode: str = 'frame', mix_length: int = None) -> tuple[Tensor, Tensor]: + mix_mode: str = 'frame', mix_length: int = None) -> Tuple[Tensor, Tensor]: """ :param param_src: param dict @@ -120,10 +121,10 @@ def 
load_speaker_mix(self, param_src: dict, summary_dst: dict, spk_mix_value /= spk_mix_value_sum # normalize return spk_mix_id, spk_mix_value - def preprocess_input(self, param: dict, idx=0) -> dict[str, torch.Tensor]: + def preprocess_input(self, param: dict, idx=0) -> Dict[str, torch.Tensor]: raise NotImplementedError() - def forward_model(self, sample: dict[str, torch.Tensor]): + def forward_model(self, sample: Dict[str, torch.Tensor]): raise NotImplementedError() def run_inference(self, params, **kwargs): diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 610be883b..c2e6d9cd8 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -37,9 +37,9 @@ def __init__( self.diffusion_cache_path = self.cache_dir / 'diffusion.onnx' # Attributes for logging - self.fs2_class_name = self.model.fs2.__class__.__name__.removesuffix('ONNX') - self.denoiser_class_name = self.model.diffusion.denoise_fn.__class__.__name__.removesuffix('ONNX') - self.diffusion_class_name = self.model.diffusion.__class__.__name__.removesuffix('ONNX') + self.fs2_class_name = self.model.fs2.__class__.__name__[:-len('ONNX')] + self.denoiser_class_name = self.model.diffusion.denoise_fn.__class__.__name__[:-len('ONNX')] + self.diffusion_class_name = self.model.diffusion.__class__.__name__[:-len('ONNX')] # Attributes for exporting self.expose_gender = expose_gender @@ -78,7 +78,7 @@ def export(self, path: Path): model_name = self.model_name if self.freeze_spk is not None: model_name += '.' + self.freeze_spk[0] - self.export_model((path / 'dummy').with_suffix('.onnx').with_stem(model_name)) + self.export_model(path / f'{model_name}.onnx') self.export_attachments(path) def export_model(self, path: Path): @@ -94,11 +94,11 @@ def export_model(self, path: Path): def export_attachments(self, path: Path): for spk in self.export_spk: self._export_spk_embed( - (path / 'dummy').with_suffix(f'.{spk[0]}.emb').with_stem(self.model_name), + path / f'{self.model_name}.{spk[0]}.emb', self._perform_spk_mix(spk[1]) ) self._export_dictionary(path / 'dictionary.txt') - self._export_phonemes((path / 'dummy').with_suffix('.phonemes.txt').with_stem(self.model_name)) + self._export_phonemes(path / f'{self.model_name}.phonemes.txt') @torch.no_grad() def _torch_export_model(self): @@ -111,7 +111,7 @@ def _torch_export_model(self): v_name: torch.zeros(1, n_frames, dtype=torch.float32, device=self.device) for v_name in self.model.fs2.variance_embed_list } - kwargs: dict[str, torch.Tensor] = {} + kwargs: Dict[str, torch.Tensor] = {} arguments = (tokens, durations, f0, variances, kwargs) input_names = ['tokens', 'durations', 'f0'] + self.model.fs2.variance_embed_list dynamix_axes = { diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index a380867de..cc128298a 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -37,20 +37,20 @@ def __init__( self.variance_postprocess_cache_path = self.cache_dir / 'variance_post.onnx' # Attributes for logging - self.fs2_class_name = self.model.fs2.__class__.__name__.removesuffix('ONNX') + self.fs2_class_name = self.model.fs2.__class__.__name__[:-len('ONNX')] self.dur_predictor_class_name = \ self.model.fs2.dur_predictor.__class__.__name__ if self.model.predict_dur else None self.pitch_denoiser_class_name = \ - self.model.pitch_predictor.denoise_fn.__class__.__name__.removesuffix('ONNX') \ + 
self.model.pitch_predictor.denoise_fn.__class__.__name__[:-len('ONNX')] \
             if self.model.predict_pitch else None
         self.pitch_diffusion_class_name = \
-            self.model.pitch_predictor.__class__.__name__.removesuffix('ONNX') \
+            self.model.pitch_predictor.__class__.__name__[:-len('ONNX')] \
             if self.model.predict_pitch else None
         self.variance_denoiser_class_name = \
-            self.model.variance_predictor.denoise_fn.__class__.__name__.removesuffix('ONNX') \
+            self.model.variance_predictor.denoise_fn.__class__.__name__[:-len('ONNX')] \
             if self.model.predict_variances else None
         self.variance_diffusion_class_name = \
-            self.model.variance_predictor.__class__.__name__.removesuffix('ONNX') \
+            self.model.variance_predictor.__class__.__name__[:-len('ONNX')] \
             if self.model.predict_variances else None
 
         # Attributes for exporting
diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index 50e1c2d5a..ecec16b20 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import List, Tuple
 
 import torch
diff --git a/modules/commons/common_layers.py b/modules/commons/common_layers.py
index d72ec869c..7e5fd08d4 100644
--- a/modules/commons/common_layers.py
+++ b/modules/commons/common_layers.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import math
 
 import torch
diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index 64f81760c..339b159f2 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from collections import deque
 from functools import partial
 from inspect import isfunction
@@ -298,8 +300,8 @@ def __init__(self, vmin: float | int | list, vmax: float | int | list, repeat_bi
                  timesteps=1000, k_step=1000,
                  denoiser_type=None, denoiser_args=None,
                  betas=None):
-        assert (isinstance(vmin, float | int) and isinstance(vmin, float | int)) or len(vmin) == len(vmax)
-        num_feats = 1 if isinstance(vmin, float | int) else len(vmin)
+        assert (isinstance(vmin, (float, int)) and isinstance(vmax, (float, int))) or len(vmin) == len(vmax)
+        num_feats = 1 if isinstance(vmin, (float, int)) else len(vmin)
         spec_min = [vmin] if num_feats == 1 else [[v] for v in vmin]
         spec_max = [vmax] if num_feats == 1 else [[v] for v in vmax]
         self.repeat_bins = repeat_bins
diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py
index cb87eb87a..454cff09e 100644
--- a/modules/fastspeech/param_adaptor.py
+++ b/modules/fastspeech/param_adaptor.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import torch
 
 from modules.diffusion.ddpm import MultiVarianceDiffusion
diff --git a/scripts/infer.py b/scripts/infer.py
index 8512f29fe..fbb855e2b 100644
--- a/scripts/infer.py
+++ b/scripts/infer.py
@@ -6,6 +6,7 @@
 from pathlib import Path
 
 import click
+from typing import Tuple
 
 root_dir = Path(__file__).parent.parent.resolve()
 os.environ['PYTHONPATH'] = str(root_dir)
@@ -145,7 +146,7 @@ def variance(
         exp: str,
         ckpt: int,
         spk: str,
-        predict: tuple[str],
+        predict: Tuple[str],
         out: str,
         title: str,
         num: int,
diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py
index a3965c0b4..81dd52637 100644
--- a/utils/indexed_datasets.py
+++ b/utils/indexed_datasets.py
@@ -43,6 +43,7 @@ def __len__(self):
         self.dset = h5py.File(self.path, 'r')
         return len(self.dset)
 
+
 class IndexedDatasetBuilder:
     def __init__(self, path, prefix, allowed_attr=None):
         self.path = pathlib.Path(path) / f'{prefix}.data'
@@ -79,6 +80,7 @@ def
finalize(self): if __name__ == "__main__": import random from tqdm import tqdm + ds_path = './checkpoints/indexed_ds_example' size = 100 items = [{"a": np.random.normal(size=[10000, 10]), From 0b585d0fb069944907b3add166b61aaad1ebabe8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 20:45:37 +0800 Subject: [PATCH 426/475] Bump dependency versions --- requirements.txt | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 93391e72a..297c44405 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,11 +8,10 @@ librosa<0.10.0 lightning>=2.0.0 matplotlib MonkeyType==23.3.0 -numpy==1.23.5 -onnx==1.13.1 -onnxsim==0.4.27 +numpy # ==1.23.5 +onnx==1.14.0 +onnxsim==0.4.31 praat-parselmouth==0.4.3 -protobuf==3.20.3 pyworld==0.3.3 PyYAML resampy From 94c0b9f240b57b626ae1c73c5960fa25dad64b8c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 20:48:08 +0800 Subject: [PATCH 427/475] Add `PYTHONPATH` envs in binarize.py and train.py --- README.md | 1 - scripts/binarize.py | 7 +++++++ scripts/train.py | 6 ++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 05ed6228e..5de394a59 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,6 @@ This [pipeline](preparation/acoustic_preparation.ipynb) will guide you from inst The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop/) dataset. ```sh -export PYTHONPATH=. CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/acoustic.yaml ``` ### Training diff --git a/scripts/binarize.py b/scripts/binarize.py index 44256012f..767e947a3 100644 --- a/scripts/binarize.py +++ b/scripts/binarize.py @@ -1,4 +1,11 @@ import importlib +import os +import sys +from pathlib import Path + +root_dir = Path(__file__).parent.parent.resolve() +os.environ['PYTHONPATH'] = str(root_dir) +sys.path.insert(0, str(root_dir)) from utils.hparams import set_hparams, hparams diff --git a/scripts/train.py b/scripts/train.py index 48ffb921c..0ce341cac 100644 --- a/scripts/train.py +++ b/scripts/train.py @@ -1,5 +1,11 @@ import importlib import os +import sys +from pathlib import Path + +root_dir = Path(__file__).parent.parent.resolve() +os.environ['PYTHONPATH'] = str(root_dir) +sys.path.insert(0, str(root_dir)) os.environ['TORCH_CUDNN_V8_API_ENABLED'] = '1' # Prevent unacceptable slowdowns when using 16 precision From fc42b41ce722fc4e5ffe4d80c0d142c80e3e1d6c Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 21:04:21 +0800 Subject: [PATCH 428/475] Remove array indexing --- utils/indexed_datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index 81dd52637..f4ea8fbcd 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -33,7 +33,7 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - item = {k: v[()].item() if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} + item = {k: v.item() if v.shape == () else torch.from_numpy(v) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache.appendleft((i, item)) return item From 1a627c4159be1b2fade0b693c1373e5ff24f0b11 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 13 Jun 2023 23:33:24 +0800 Subject: [PATCH 429/475] Revert "Remove array indexing" This reverts commit fc42b41ce722fc4e5ffe4d80c0d142c80e3e1d6c. 
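The revert appears to be motivated by how `h5py` handles indexing: a dataset object is only a lazy handle, and both `.item()` and `torch.from_numpy()` need the value materialized as a NumPy object first, which is exactly what the `[()]` indexing does. A minimal sketch of the difference, using an illustrative file layout rather than the project's actual storage schema:

```python
import h5py
import numpy as np
import torch

# Illustrative layout only: one group per item, one dataset per attribute.
with h5py.File('example.h5', 'w') as f:
    f['0/mel'] = np.random.randn(100, 80).astype(np.float32)
    f['0/seconds'] = 2.5  # zero-dimensional (scalar) dataset

with h5py.File('example.h5', 'r') as f:
    mel_dset = f['0/mel']                 # lazy h5py.Dataset handle, nothing read yet
    mel = torch.from_numpy(mel_dset[()])  # [()] reads the whole dataset into an ndarray
    seconds = f['0/seconds'][()].item()   # numpy scalar -> plain Python float
    # torch.from_numpy(mel_dset) raises TypeError: it accepts numpy.ndarray,
    # not a dataset handle, so dropping the [()] breaks this read path.
```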
--- utils/indexed_datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/indexed_datasets.py b/utils/indexed_datasets.py index f4ea8fbcd..81dd52637 100644 --- a/utils/indexed_datasets.py +++ b/utils/indexed_datasets.py @@ -33,7 +33,7 @@ def __getitem__(self, i): for c in self.cache: if c[0] == i: return c[1] - item = {k: v.item() if v.shape == () else torch.from_numpy(v) for k, v in self.dset[str(i)].items()} + item = {k: v[()].item() if v.shape == () else torch.from_numpy(v[()]) for k, v in self.dset[str(i)].items()} if self.num_cache > 0: self.cache.appendleft((i, item)) return item From 3b48c0ba75071de6b5c3ca7b1291c406ed68fdc1 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 14 Jun 2023 20:34:35 +0800 Subject: [PATCH 430/475] Fix inference error --- inference/ds_acoustic.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/inference/ds_acoustic.py b/inference/ds_acoustic.py index 4c987fe12..b37727dad 100644 --- a/inference/ds_acoustic.py +++ b/inference/ds_acoustic.py @@ -6,6 +6,7 @@ import numpy as np import torch +from typing import Dict from basics.base_svs_infer import BaseSVSInfer from modules.fastspeech.param_adaptor import VARIANCE_CHECKLIST @@ -108,7 +109,9 @@ def preprocess_input(self, param, idx=0): if hparams.get('use_key_shift_embed', False): shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range'] - gender = param.get('gender', 0.) + gender = param.get('gender') + if gender is None: + gender = 0. if isinstance(gender, (int, float, bool)): # static gender value summary['gender'] = f'static({gender:.3f})' key_shift_value = gender * shift_max if gender >= 0 else gender * abs(shift_min) @@ -184,7 +187,7 @@ def run_inference( out_dir: pathlib.Path = None, title: str = None, num_runs: int = 1, - spk_mix: dict[str, float] = None, + spk_mix: Dict[str, float] = None, seed: int = -1, save_mel: bool = False ): From 00420a6d604b03dbdc0274065e31c0a646cc3c38 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 16 Jun 2023 12:44:18 +0800 Subject: [PATCH 431/475] Rename files; add new sample; add credits.txt --- ...64\345\206\215\350\247\201\345\225\212.ds" | 0 ...1_\351\200\215\351\201\245\344\273\231.ds" | 0 ...00\345\215\212\344\270\200\345\215\212.ds" | 0 ...22\345\250\207\345\205\253\350\277\236.ds" | 0 .../04_\344\273\231\347\221\266.ds" | 0 ...5_\346\201\213\344\272\272\345\277\203.ds" | 0 ...6_\344\270\215\350\260\223\344\276\240.ds" | 0 ...37\350\212\261\346\234\210\345\244\234.ds" | 828 ++++++++++++++++++ samples/credits.txt | 41 + 9 files changed, 869 insertions(+) rename "samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" => "samples/00_\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" (100%) rename "samples/\351\200\215\351\201\245\344\273\231.ds" => "samples/01_\351\200\215\351\201\245\344\273\231.ds" (100%) rename "samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" => "samples/02_\344\270\200\345\215\212\344\270\200\345\215\212.ds" (100%) rename "samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" => "samples/03_\346\222\222\345\250\207\345\205\253\350\277\236.ds" (100%) rename "samples/\344\273\231\347\221\266.ds" => "samples/04_\344\273\231\347\221\266.ds" (100%) rename "samples/\346\201\213\344\272\272\345\277\203.ds" => "samples/05_\346\201\213\344\272\272\345\277\203.ds" (100%) rename "samples/\344\270\215\350\260\223\344\276\240.ds" => 
"samples/06_\344\270\215\350\260\223\344\276\240.ds" (100%) create mode 100644 "samples/07_\346\230\245\346\261\237\350\212\261\346\234\210\345\244\234.ds" create mode 100644 samples/credits.txt diff --git "a/samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" "b/samples/00_\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" similarity index 100% rename from "samples/\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" rename to "samples/00_\346\210\221\345\244\232\346\203\263\350\257\264\345\206\215\350\247\201\345\225\212.ds" diff --git "a/samples/\351\200\215\351\201\245\344\273\231.ds" "b/samples/01_\351\200\215\351\201\245\344\273\231.ds" similarity index 100% rename from "samples/\351\200\215\351\201\245\344\273\231.ds" rename to "samples/01_\351\200\215\351\201\245\344\273\231.ds" diff --git "a/samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" "b/samples/02_\344\270\200\345\215\212\344\270\200\345\215\212.ds" similarity index 100% rename from "samples/\344\270\200\345\215\212\344\270\200\345\215\212.ds" rename to "samples/02_\344\270\200\345\215\212\344\270\200\345\215\212.ds" diff --git "a/samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" "b/samples/03_\346\222\222\345\250\207\345\205\253\350\277\236.ds" similarity index 100% rename from "samples/\346\222\222\345\250\207\345\205\253\350\277\236.ds" rename to "samples/03_\346\222\222\345\250\207\345\205\253\350\277\236.ds" diff --git "a/samples/\344\273\231\347\221\266.ds" "b/samples/04_\344\273\231\347\221\266.ds" similarity index 100% rename from "samples/\344\273\231\347\221\266.ds" rename to "samples/04_\344\273\231\347\221\266.ds" diff --git "a/samples/\346\201\213\344\272\272\345\277\203.ds" "b/samples/05_\346\201\213\344\272\272\345\277\203.ds" similarity index 100% rename from "samples/\346\201\213\344\272\272\345\277\203.ds" rename to "samples/05_\346\201\213\344\272\272\345\277\203.ds" diff --git "a/samples/\344\270\215\350\260\223\344\276\240.ds" "b/samples/06_\344\270\215\350\260\223\344\276\240.ds" similarity index 100% rename from "samples/\344\270\215\350\260\223\344\276\240.ds" rename to "samples/06_\344\270\215\350\260\223\344\276\240.ds" diff --git "a/samples/07_\346\230\245\346\261\237\350\212\261\346\234\210\345\244\234.ds" "b/samples/07_\346\230\245\346\261\237\350\212\261\346\234\210\345\244\234.ds" new file mode 100644 index 000000000..26d10457c --- /dev/null +++ "b/samples/07_\346\230\245\346\261\237\350\212\261\346\234\210\345\244\234.ds" @@ -0,0 +1,828 @@ +[ + { + "offset": 19.46849315068493, + "text": "SP chun jiang chao shui lian hai ping SP", + "ph_seq": "SP ch un j iang ch ao sh ui l ian h ai p ing SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A2 E3 E3 G3 A3 A3 B3 A3 A3 G3 A3 G3 E3 rest", + "note_dur": "0.2575 0.4110 0.4110 0.4110 0.4110 0.8219 0.2055 0.2055 0.4110 1.2329 0.1027 0.1027 0.2055 0.8219 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 1 0 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 26.047260273972604, + "text": "SP hai shang ming yue gong SP", + "ph_seq": "SP h ai sh ang m ing y ve g ong SP", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D3 E3 E3 G3 E3 E3 rest", + "note_dur": "0.2541 0.4110 0.4110 0.2055 0.2055 0.4110 0.4110 0.1000", + "note_slur": "0 0 0 0 1 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + 
"velocity_timestep": null, + "velocity": null + }, + { + "offset": 28.50958904109589, + "text": "SP chao sheng SP", + "ph_seq": "SP ch ao sh eng SP", + "ph_num": "2 2 1 1", + "note_seq": "rest A2 E3 rest", + "note_dur": "0.2575 0.8219 1.6438 0.1000", + "note_slur": "0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 32.70308219178082, + "text": "SP yan yan sui bo qian wan li SP", + "ph_seq": "SP y En y En s ui b o q ian w an l i SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D3 A2 D3 E3 F3 B3 C4 B3 A3 B3 rest", + "note_dur": "0.1736 0.4110 0.4110 0.4110 0.4110 0.8219 0.2055 0.6164 1.2329 0.4110 0.6164 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 39.197945205479456, + "text": "SP he chu chun jiao wu yue SP", + "ph_seq": "SP h e ch u ch un j iao w u y ve SP", + "ph_num": "2 2 2 2 2 2 1 1", + "note_seq": "rest A3 B3 B3 B3 B3 B3 D4 E4 rest", + "note_dur": "0.2541 0.2055 0.2055 0.4110 0.4110 0.4110 0.8219 0.2055 0.4110 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 42.482191780821914, + "text": "SP ming SP", + "ph_seq": "SP m ing SP", + "ph_num": "2 1 1", + "note_seq": "rest B3 rest", + "note_dur": "0.2575 1.2329 0.1000", + "note_slur": "0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 45.79383561643836, + "text": "SP jiang liu wan zhuan rao fang dian SP", + "ph_seq": "SP j iang l iu w an zh uan r ao f ang d ian SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A3 G3 G3 A3 C4 B3 C4 D4 E4 rest", + "note_dur": "0.2336 0.2055 0.2055 0.4110 0.4110 0.4110 0.8219 0.8219 0.2055 1.8493 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 52.423972602739724, + "text": "SP yue zhao hua lin jie si xian SP", + "ph_seq": "SP y ve zh ao h ua l in j ie s i0 x ian SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 C4 D4 E4 F4 G4 A4 G#4 E4 rest", + "note_dur": "0.1788 0.4110 0.4110 0.4110 0.4110 0.8219 0.2055 0.6164 1.2329 1.2329 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 58.911986301369865, + "text": "SP kong li liu shuang bu jue fei SP", + "ph_seq": "SP k ong l i l iu sh uang b u j ve f ei SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 A3 D4 E4 F4 A#4 C5 B4 A4 rest", + "note_dur": "0.2661 0.4110 0.4110 0.4110 0.4110 0.8219 0.2055 0.6164 1.6438 0.6164 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 65.47876712328767, + "text": "SP ting shang bai sha kan bu jian SP", + "ph_seq": "SP t ing sh ang b ai sh a k an b u j ian SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A3 B3 B3 B3 C4 D4 E4 D4 D4 E4 F4 E4 rest", + "note_dur": "0.2747 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 0.4110 0.2055 0.4110 0.2055 0.1027 2.5685 0.1000", + "note_slur": "0 0 1 0 0 0 0 1 1 0 0 1 1 
0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 72.07465753424658, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 A4 G4 E4 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.4384 0.1000", + "note_slur": "0 0 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 75.36232876712329, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 C5 B4 E4 G4 C4 D#4 D4 F4 E4 B3 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.2329 0.4110 0.4110 0.1027 1.1301 0.1027 0.1027 1.0274 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 81.68082191780822, + "text": "SP a SP", + "ph_seq": "SP a SP", + "ph_num": "1 1 1", + "note_seq": "rest C4 D#4 D4 E4 A4 B4 A4 G#4 B4 rest", + "note_dur": "0.1000 0.4110 0.1027 0.4110 0.7192 0.2055 0.1027 0.1027 0.7705 0.0514 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 85.22534246575341, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 A4 G4 E4 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.4384 0.1000", + "note_slur": "0 0 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 88.51301369863013, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 C5 B4 E4 G4 C4 D#4 D4 E4 B3 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.2329 0.4110 0.4110 0.1027 1.1301 0.2055 1.0274 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 94.83150684931506, + "text": "SP a SP", + "ph_seq": "SP a SP", + "ph_num": "1 1 1", + "note_seq": "rest C4 D4 E4 A4 G#4 B4 rest", + "note_dur": "0.1000 0.4110 0.5137 0.7192 0.4110 0.6164 0.8219 0.1000", + "note_slur": "0 0 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 98.39657534246575, + "text": "SP jiang tian yi se wu xian chen SP", + "ph_seq": "SP j iang t ian y i s e w u x ian ch en SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A2 E3 E3 G3 A3 B3 A3 A3 G3 A3 G3 E3 rest", + "note_dur": "0.2336 0.4110 0.4110 0.4110 0.4110 0.8219 0.4110 0.4110 1.2329 0.1370 0.1370 0.1370 0.8219 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 104.97191780821917, + "text": "SP jiao jiao kong zhong gu SP", + "ph_seq": "SP j iao j iao k ong zh ong g u SP", + "ph_num": "2 2 2 2 2 1 1", + "note_seq": "rest D3 E3 G3 E3 E3 rest", + "note_dur": "0.2336 0.4110 0.4110 0.4110 0.4110 0.4110 0.1000", + "note_slur": "0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 107.49246575342465, + "text": "SP yue lun SP", + "ph_seq": "SP y ve l un SP", + 
"ph_num": "2 2 1 1", + "note_seq": "rest A2 D3 E3 rest", + "note_dur": "0.1788 0.8219 0.2055 0.6164 0.1000", + "note_slur": "0 0 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 111.5472602739726, + "text": "SP jiang pan he ren chu jian yue SP", + "ph_seq": "SP j iang p an h e r en ch u j ian y ve SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D3 A2 D3 E3 F3 C4 B3 A3 B3 rest", + "note_dur": "0.2336 0.4110 0.4110 0.4110 0.4110 0.8219 0.8219 1.2329 0.4110 0.4110 0.1000", + "note_slur": "0 0 0 0 0 0 0 0 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 118.12260273972602, + "text": "SP jiang yue he nian chu zhao ren SP", + "ph_seq": "SP j iang y ve h e n ian ch u zh ao r en SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A3 B3 B3 B3 B3 B3 D4 E4 B3 B3 rest", + "note_dur": "0.2336 0.2055 0.2055 0.4110 0.4110 0.4110 0.8219 0.2055 0.2055 0.4110 1.6438 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 1 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 123.90513698630137, + "text": "SP ren sheng dai dai wu qiong yi SP", + "ph_seq": "SP r en sh eng d ai d ai w u q iong y i SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 E4 D4 C4 C4 C4 B3 A3 A3 rest", + "note_dur": "0.2045 0.2055 0.2055 0.4110 0.4110 0.4110 0.4110 0.2055 0.2055 0.4110 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 127.16369863013698, + "text": "SP jiang yue nian nian wang xiang si SP", + "ph_seq": "SP j iang y ve n ian n ian w ang x iang s i0 SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest B3 C4 C4 E4 E4 E4 D4 B3 rest", + "note_dur": "0.2336 0.4110 0.4110 0.2055 0.2055 0.4110 0.4110 0.4110 0.8219 0.1000", + "note_slur": "0 0 0 0 1 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 131.29383561643837, + "text": "SP bu zhi jiang yue nai he ren SP", + "ph_seq": "SP b u zh ir j iang y ve n ai h e r en SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest B3 C4 C4 C4 B3 A3 G3 F3 rest", + "note_dur": "0.2130 0.1027 0.1027 0.6164 0.4110 0.4110 0.4110 0.2055 0.2055 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 134.17226027397263, + "text": "SP dan jian chang jiang song liu shui SP", + "ph_seq": "SP d an j ian ch ang j iang s ong l iu sh ui SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest F3 G3 F3 G3 A3 A3 C4 B3 rest", + "note_dur": "0.2113 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 0.2055 1.0274 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 137.86917808219175, + "text": "SP bai yun yi pian qu you you SP", + "ph_seq": "SP b ai y vn y i p ian q v y ou y ou SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest E4 G4 A4 G4 E4 D4 C4 D4 rest", + "note_dur": "0.2130 0.4110 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 0.6164 0.1000", + "note_slur": "0 0 0 1 0 0 0 0 0 0", + "input_type": 
"phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 141.10034246575344, + "text": "SP qing feng pu shang bu sheng chou SP", + "ph_seq": "SP q ing f eng p u sh ang b u sh eng ch ou SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 A3 A3 C4 D4 E4 G4 E4 rest", + "note_dur": "0.2695 0.2055 0.2055 0.4110 0.4110 0.4110 0.4110 0.2055 0.8219 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 144.36917808219175, + "text": "SP shei jia jin ye pian zhou zi SP", + "ph_seq": "SP sh ei j ia j in y E p ian zh ou z i0 SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 C4 D4 E4 F4 E4 D4 D4 rest", + "note_dur": "0.2884 0.4110 0.4110 0.4110 0.4110 0.4110 0.1027 0.1027 0.4110 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 147.28013698630136, + "text": "SP he chu xiang si ming yue SP", + "ph_seq": "SP h e ch u x iang s i0 m ing y ve SP", + "ph_num": "2 2 2 2 2 2 1 1", + "note_seq": "rest F4 F4 E4 E4 G#4 A4 rest", + "note_dur": "0.2541 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 0.1000", + "note_slur": "0 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 149.3640410958904, + "text": "SP lou SP", + "ph_seq": "SP l ou SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 rest", + "note_dur": "0.2250 1.0274 0.1000", + "note_slur": "0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 150.9667808219178, + "text": "SP ke lian lou shang SP", + "ph_seq": "SP k e l ian l ou sh ang SP", + "ph_num": "2 2 2 2 1 1", + "note_seq": "rest B4 B4 A4 A4 rest", + "note_dur": "0.2661 0.2055 0.2055 0.2055 0.3082 0.1000", + "note_slur": "0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 152.28698630136984, + "text": "SP yue pai huai SP", + "ph_seq": "SP y ve p ai h uai SP", + "ph_num": "2 2 2 1 1", + "note_seq": "rest E4 D4 D4 rest", + "note_dur": "0.1788 0.2055 0.2055 0.3082 0.1000", + "note_slur": "0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 153.99246575342465, + "text": "SP ying zhao li ren SP", + "ph_seq": "SP y ing zh ao l i r en SP", + "ph_num": "2 2 2 2 1 1", + "note_seq": "rest D4 C4 C4 D4 D4 rest", + "note_dur": "0.1171 0.2055 0.2055 0.2055 0.2055 0.4110 0.1000", + "note_slur": "0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 155.53184931506848, + "text": "SP zhuang jing tai SP", + "ph_seq": "SP zh uang j ing t ai SP", + "ph_num": "2 2 2 1 1", + "note_seq": "rest D4 C4 A3 rest", + "note_dur": "0.2216 0.2055 0.2055 0.3082 0.1000", + "note_slur": "0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 157.28013698630136, + "text": "SP yu hu lian zhong SP", + "ph_seq": "SP y v h u l ian zh ong SP", + "ph_num": "2 2 2 2 1 1", + "note_seq": "rest A3 C4 C4 D4 
D4 rest", + "note_dur": "0.1171 0.2055 0.2055 0.2055 0.2055 0.2055 0.1000", + "note_slur": "0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 158.39657534246575, + "text": "SP juan SP", + "ph_seq": "SP j van SP", + "ph_num": "2 1 1", + "note_seq": "rest D4 rest", + "note_dur": "0.2336 0.2055 0.1000", + "note_slur": "0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 158.82808219178082, + "text": "SP bu SP", + "ph_seq": "SP b u SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 rest", + "note_dur": "0.2130 0.2055 0.1000", + "note_slur": "0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 159.18253424657536, + "text": "SP qu SP", + "ph_seq": "SP q v SP", + "ph_num": "2 1 1", + "note_seq": "rest F4 rest", + "note_dur": "0.2695 0.4110 0.1000", + "note_slur": "0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 160.4736301369863, + "text": "SP da yi zhen shang fu huan lai SP", + "ph_seq": "SP d a y i zh en sh ang f u h uan l ai SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest E4 D4 E4 E4 G#4 A4 E4 E4 rest", + "note_dur": "0.2113 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 0.2055 0.8219 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 164.0986301369863, + "text": "SP si shi xiang wang bu xiang wen SP", + "ph_seq": "SP s i0 sh ir x iang w ang b u x iang w en SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest E4 G4 A4 G4 E4 E4 D4 C4 C4 C4 D4 rest", + "note_dur": "0.2849 0.4110 0.2055 0.2055 0.2055 0.2055 0.4110 0.2055 0.2055 0.2055 0.2055 0.4110 0.1000", + "note_slur": "0 0 0 1 0 1 0 0 1 0 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 167.49246575342465, + "text": "SP yuan zhu yuan hua liu zhao jun SP", + "ph_seq": "SP y van zh u y van h ua l iu zh ao j vn SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest C4 A3 C4 D4 E4 G4 E4 rest", + "note_dur": "0.1788 0.4110 0.4110 0.4110 0.4110 0.4110 0.2055 0.4110 0.1000", + "note_slur": "0 0 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 170.70479452054792, + "text": "SP hong yan chang fei guang bu du SP", + "ph_seq": "SP h ong y En ch ang f ei g uang b u d u SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 C4 C4 D4 E4 F4 E4 D4 rest", + "note_dur": "0.2541 0.2055 0.2055 0.4110 0.4110 0.4110 0.4110 0.2055 0.4110 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 173.71849315068494, + "text": "SP yu long qian yue shui cheng wen SP", + "ph_seq": "SP y v l ong q ian y ve sh ui ch eng w en SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest E4 F4 F4 E4 E4 G#4 A4 E4 E4 rest", + "note_dur": "0.1171 0.1027 0.1027 0.2055 0.4110 0.4110 0.4110 0.2055 0.2055 1.0274 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 1 0 0", + "input_type": "phoneme", + "gender_timestep": 
null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 177.30239726027398, + "text": "SP zuo ye xian tan meng luo hua SP", + "ph_seq": "SP z uo y E x ian t an m eng l uo h ua SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest G4 A4 A4 G4 G4 F4 E4 E4 F4 rest", + "note_dur": "0.2318 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 0.1027 0.3082 0.4110 0.1000", + "note_slur": "0 0 1 0 0 0 0 1 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 180.14486301369863, + "text": "SP ke lian chun ban bu huan jia SP", + "ph_seq": "SP k e l ian ch un b an b u h uan j ia SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest F4 F4 E4 E4 E4 D4 E4 rest", + "note_dur": "0.2661 0.2055 0.2055 0.4110 0.4110 0.4110 0.4110 0.8219 0.1000", + "note_slur": "0 0 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 183.46506849315068, + "text": "SP jiang shui liu chun qu yu jin SP", + "ph_seq": "SP j iang sh ui l iu ch un q v y v j in SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest B3 C4 C4 B3 B3 B3 C4 D4 rest", + "note_dur": "0.2336 0.1027 0.1027 0.2055 0.4110 0.4110 0.4110 0.2055 0.8219 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 186.7527397260274, + "text": "SP jiang tan luo yue fu xi xia SP", + "ph_seq": "SP j iang t an l uo y ve f u x i x ia SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest E4 D4 E4 E4 G#4 A4 E4 rest", + "note_dur": "0.2336 0.2055 0.2055 0.4110 0.4110 0.4110 0.2055 1.4384 0.1000", + "note_slur": "0 0 0 0 0 0 0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 190.4308219178082, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 A4 G4 E4 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.6438 0.1000", + "note_slur": "0 0 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 193.71849315068494, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 C5 B4 E4 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 0.8219 0.1000", + "note_slur": "0 0 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 196.18424657534246, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest G4 C4 D#4 D4 F4 E4 B3 rest", + "note_dur": "0.2541 0.4110 0.4110 0.1027 1.1301 0.1027 0.2055 0.9247 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 200.03698630136984, + "text": "SP a SP", + "ph_seq": "SP a SP", + "ph_num": "1 1 1", + "note_seq": "rest C4 D#4 D4 E4 A4 G#4 B4 rest", + "note_dur": "0.1000 0.4110 0.1027 0.3082 0.8219 0.4110 0.4110 0.9247 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 203.58150684931508, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + 
"ph_num": "2 1 1", + "note_seq": "rest E4 A4 G4 E4 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.6438 0.1000", + "note_slur": "0 0 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 206.86917808219175, + "text": "SP ha SP", + "ph_seq": "SP h a SP", + "ph_num": "2 1 1", + "note_seq": "rest E4 C5 B4 E4 G4 C4 D#4 D4 rest", + "note_dur": "0.2541 0.4110 0.4110 0.4110 1.2329 0.4110 0.4110 0.1027 1.1301 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 211.74931506849313, + "text": "SP a SP", + "ph_seq": "SP a SP", + "ph_num": "1 1 1", + "note_seq": "rest B3 rest", + "note_dur": "0.1000 1.0274 0.1000", + "note_slur": "0 0 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 213.1876712328767, + "text": "SP a SP", + "ph_seq": "SP a SP", + "ph_num": "1 1 1", + "note_seq": "rest C4 D#4 D4 F4 E4 A4 G#4 B4 rest", + "note_dur": "0.1000 0.4110 0.1027 0.5137 0.1027 0.5137 0.4110 0.4110 1.8493 0.1000", + "note_slur": "0 0 1 1 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 219.1585616438356, + "text": "SP xie yue chen chen cang hai wu SP", + "ph_seq": "SP x ie y ve ch en ch en c ang h ai w u SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A3 E4 E4 G4 A4 A4 B4 A4 A4 G4 A4 G4 E4 rest", + "note_dur": "0.2935 0.4110 0.4110 0.4110 0.4110 0.8219 0.2055 0.2055 0.4110 1.2329 0.1027 0.1027 0.2055 1.0274 0.1000", + "note_slur": "0 0 0 0 0 0 0 1 1 0 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 225.79383561643837, + "text": "SP jie shi xiao xiang wu xian lu SP", + "ph_seq": "SP j ie sh ir x iao x iang w u x ian l u SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 E4 G4 E4 D4 E4 A3 D4 E4 D4 E4 D4 B3 rest", + "note_dur": "0.2336 0.4110 0.4110 0.4110 0.4110 0.2055 0.6164 0.4110 0.2055 1.4384 0.1027 0.1027 0.2055 1.0274 0.1000", + "note_slur": "0 0 0 0 0 0 1 0 0 1 1 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 232.38972602739727, + "text": "SP bu zhi cheng yue ji ren gui SP", + "ph_seq": "SP b u zh ir ch eng y ve j i r en g ui SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest D4 A3 A3 C#4 D4 E4 F4 A4 C5 B4 A4 B4 A4 rest", + "note_dur": "0.2130 0.2055 0.2055 0.4110 0.2055 0.2055 0.4110 0.8219 0.2055 0.6164 1.2329 0.1027 0.1027 0.8219 0.1000", + "note_slur": "0 0 1 0 0 1 0 0 0 1 0 1 1 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + }, + { + "offset": 238.95308219178082, + "text": "SP luo yue yao qing man jiang shu SP", + "ph_seq": "SP l uo y ve y ao q ing m an j iang sh u SP", + "ph_num": "2 2 2 2 2 2 2 1 1", + "note_seq": "rest A4 B4 B4 B4 B4 B4 C5 D5 E5 rest", + "note_dur": "0.2250 0.2055 0.2055 0.4110 0.4110 1.2329 0.4110 0.4110 0.2055 6.3699 0.1000", + "note_slur": "0 0 1 0 0 0 0 0 0 1 0", + "input_type": "phoneme", + "gender_timestep": null, + "gender": null, + "velocity_timestep": null, + "velocity": null + } +] \ No newline at end of file diff --git 
a/samples/credits.txt b/samples/credits.txt
new file mode 100644
index 000000000..6155eba05
--- /dev/null
+++ b/samples/credits.txt
@@ -0,0 +1,41 @@
+## 00_我多想说再见啊
+
+MIDI: 班超BanC
+Pitch reference: Synthesizer V AI Mai
+Tuning: 飞弦p https://space.bilibili.com/5347090; YQ之神 https://space.bilibili.com/102844209
+
+
+## 01_逍遥仙
+
+Tuning: 赤松_Akamatsu https://space.bilibili.com/29902857; YQ之神 https://space.bilibili.com/102844209
+
+
+## 02_一半一半
+
+Tuning: 笛鹿FlutyDeer https://space.bilibili.com/386270936
+
+
+## 03_撒娇八连
+
+Pitch reference: 瑶摆桃桃烤地瓜 https://space.bilibili.com/23346333
+
+
+## 04_仙瑶
+
+Tuning: 笛鹿FlutyDeer https://space.bilibili.com/386270936
+
+
+## 05_恋人心
+
+Tuning: 笛鹿FlutyDeer https://space.bilibili.com/386270936
+
+
+## 06_不谓侠
+
+MIDI: 帝国妖月
+
+
+## 07_春江花月夜
+
+Source: https://www.vsqx.top/project/vn8437
+MIDI: 何念生 https://www.vsqx.top/space/2960

From 95e81a0b4c722959a1003b91e2be5f4c66643e27 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 16 Jun 2023 13:05:25 +0800
Subject: [PATCH 432/475] Add tuning

---
 samples/credits.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/samples/credits.txt b/samples/credits.txt
index 6155eba05..d1eb0ca29 100644
--- a/samples/credits.txt
+++ b/samples/credits.txt
@@ -33,6 +33,7 @@ Tuning: 笛鹿FlutyDeer https://space.bilibili.com/386270936
 ## 06_不谓侠
 
 MIDI: 帝国妖月
+Tuning: ZhiBinShyu with PitchDiffusion https://www.bilibili.com/video/BV12T411t7Qg
 
 
 ## 07_春江花月夜

From 76602951d34c805169981d6a3259b1484450743c Mon Sep 17 00:00:00 2001
From: "llc1995@sina.com"
Date: Sat, 17 Jun 2023 01:50:29 +0800
Subject: [PATCH 433/475] Add DDIM sampling method

---
 docs/ConfigurationSchemas.md |  3 ++-
 modules/diffusion/ddpm.py    | 20 ++++++++++++++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md
index 60a235b83..16be6538a 100644
--- a/docs/ConfigurationSchemas.md
+++ b/docs/ConfigurationSchemas.md
@@ -660,6 +660,7 @@ str
 
 Diffusion sampling acceleration method. The following methods are currently available:
 
+- DDIM: the DDIM method from [DENOISING DIFFUSION IMPLICIT MODELS](https://arxiv.org/abs/2010.02502)
 - PNDM: the PLMS method from [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778)
 - DPM-Solver 2.0 adapted from [DPM-Solver: A Fast ODE Solver for Diffusion Probabilistic Model Sampling in Around 10 Steps](https://github.com/LuChengTHU/dpm-solver)
 
@@ -685,7 +686,7 @@ dpm-solver
 
 #### constraints
 
-Choose from 'pndm', 'dpm-solver'.
+Choose from 'ddim', 'pndm', 'dpm-solver'
 
 ### diff_decoder_type
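The `p_sample_ddim` method added in the diff below implements the deterministic (eta = 0) DDIM update in a factored form. As a sanity check that this matches the usual x0-prediction formulation, here is a self-contained sketch with a toy schedule (values and shapes are illustrative; no project code is involved):

```python
import torch

# Toy cumulative-alpha schedule; values are illustrative only.
alphas_cumprod = torch.linspace(0.9999, 0.01, 1000)

def ddim_step_factored(x, eps, t, t_prev):
    a_t, a_prev = alphas_cumprod[t], alphas_cumprod[t_prev]
    # Factored form used in the hunk below:
    return a_prev.sqrt() * (x / a_t.sqrt()
                            + (((1 - a_prev) / a_prev).sqrt() - ((1 - a_t) / a_t).sqrt()) * eps)

def ddim_step_textbook(x, eps, t, t_prev):
    a_t, a_prev = alphas_cumprod[t], alphas_cumprod[t_prev]
    x0_pred = (x - (1 - a_t).sqrt() * eps) / a_t.sqrt()          # predicted clean sample
    return a_prev.sqrt() * x0_pred + (1 - a_prev).sqrt() * eps   # eta = 0 (deterministic)

x, eps = torch.randn(2, 8, 64), torch.randn(2, 8, 64)
a = ddim_step_factored(x, eps, 900, 800)
b = ddim_step_textbook(x, eps, 900, 800)
print(torch.allclose(a, b, atol=1e-5))  # True: the two forms agree
```

Both forms reduce to sqrt(a_prev / a_t) * x + (sqrt(1 - a_prev) - sqrt(a_prev * (1 - a_t) / a_t)) * eps, which is why the implementation can skip computing the x0 prediction explicitly.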
diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index 339b159f2..14a6ca34e 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -154,6 +154,15 @@ def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False):
         nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
         return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
 
+    @torch.no_grad()
+    def p_sample_ddim(self, x, t, interval, cond):
+        a_t = extract(self.alphas_cumprod, t, x.shape)
+        a_prev = extract(self.alphas_cumprod, torch.max(t - interval, torch.zeros_like(t)), x.shape)
+
+        noise_pred = self.denoise_fn(x, t, cond=cond)
+        x_prev = a_prev.sqrt() * (x / a_t.sqrt() + (((1 - a_prev) / a_prev).sqrt() - ((1 - a_t) / a_t).sqrt()) * noise_pred)
+        return x_prev
+
     @torch.no_grad()
     def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=False):
         """
@@ -261,6 +270,17 @@ def wrapped(x, t, **kwargs):
                     x, torch.full((b,), i, device=device, dtype=torch.long),
                     iteration_interval, cond=cond
                 )
+        elif algorithm == 'ddim':
+            self.noise_list = deque(maxlen=4)
+            iteration_interval = hparams['pndm_speedup']
+            for i in tqdm(
+                    reversed(range(0, t, iteration_interval)), desc='sample time step',
+                    total=t // iteration_interval, disable=not hparams['infer'], leave=False
+            ):
+                x = self.p_sample_ddim(
+                    x, torch.full((b,), i, device=device, dtype=torch.long),
+                    iteration_interval, cond=cond
+                )
         else:
             raise NotImplementedError(algorithm)
     else:

From eb0eebb78a826a8446b8fbc0c0e38b1420816375 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 17 Jun 2023 12:21:28 +0800
Subject: [PATCH 434/475] Fix classname logging

---
 deployment/exporters/acoustic_exporter.py |  8 ++++----
 deployment/exporters/variance_exporter.py | 15 ++++++++-------
 utils/__init__.py                         |  7 +++++++
 3 files changed, 19 insertions(+), 11 deletions(-)

diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py
index c2e6d9cd8..64e586377 100644
--- a/deployment/exporters/acoustic_exporter.py
+++ b/deployment/exporters/acoustic_exporter.py
@@ -8,7 +8,7 @@ from basics.base_exporter import BaseExporter
 from deployment.modules.toplevel import DiffSingerAcousticONNX
-from utils import load_ckpt, onnx_helper
+from utils import load_ckpt, onnx_helper, remove_suffix
 from utils.hparams import hparams
 from utils.phoneme_utils import locate_dictionary, build_phoneme_list
 from utils.text_encoder import TokenTextEncoder
@@ -37,9 +37,9 @@ def __init__(
         self.diffusion_cache_path = self.cache_dir / 'diffusion.onnx'
 
         # Attributes for logging
-        self.fs2_class_name = self.model.fs2.__class__.__name__[:-len('ONNX')]
-        self.denoiser_class_name = self.model.diffusion.denoise_fn.__class__.__name__[:-len('ONNX')]
-        self.diffusion_class_name = self.model.diffusion.__class__.__name__[:-len('ONNX')]
+        self.fs2_class_name = remove_suffix(self.model.fs2.__class__.__name__, 'ONNX')
+        self.denoiser_class_name = remove_suffix(self.model.diffusion.denoise_fn.__class__.__name__, 'ONNX')
+        self.diffusion_class_name = remove_suffix(self.model.diffusion.__class__.__name__, 'ONNX')
 
         # Attributes for exporting
         self.expose_gender = expose_gender
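A note on this change: slicing with `[:-len('ONNX')]` assumes the suffix is always present; for a class name that does not end in `'ONNX'` (the duration predictor, for instance) it silently chops off four characters, which is presumably the logging bug this patch fixes. The `remove_suffix` helper added to `utils/__init__.py` below only strips when the suffix actually matches, mirroring Python 3.9's `str.removesuffix`. A quick illustration (the class names here are examples):

```python
def remove_suffix(string: str, suffix: str):
    # Same logic as the helper introduced in this patch.
    if string.endswith(suffix):
        string = string[:-len(suffix)]
    return string

print('GaussianDiffusionONNX'[:-len('ONNX')])      # 'GaussianDiffusion'  (fine)
print('DurationPredictor'[:-len('ONNX')])          # 'DurationPredi'      (silently wrong)
print(remove_suffix('DurationPredictor', 'ONNX'))  # 'DurationPredictor'  (unchanged)
```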
diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py
index cc128298a..64aae416a 100644
--- a/deployment/exporters/variance_exporter.py
+++ b/deployment/exporters/variance_exporter.py
@@ -8,7 +8,7 @@ from basics.base_exporter import BaseExporter
 from deployment.modules.toplevel import DiffSingerVarianceONNX
-from utils import load_ckpt, onnx_helper
+from utils import load_ckpt, onnx_helper, remove_suffix
 from utils.hparams import hparams
 from utils.phoneme_utils import locate_dictionary, build_phoneme_list
 from utils.text_encoder import TokenTextEncoder
@@ -37,20 +37,21 @@ def __init__(
         self.variance_postprocess_cache_path = self.cache_dir / 'variance_post.onnx'
 
         # Attributes for logging
-        self.fs2_class_name = self.model.fs2.__class__.__name__[:-len('ONNX')]
+        self.fs2_class_name = remove_suffix(self.model.fs2.__class__.__name__, 'ONNX')
         self.dur_predictor_class_name = \
-            self.model.fs2.dur_predictor.__class__.__name__ if self.model.predict_dur else None
+            remove_suffix(self.model.fs2.dur_predictor.__class__.__name__, 'ONNX') \
+            if self.model.predict_dur else None
         self.pitch_denoiser_class_name = \
-            self.model.pitch_predictor.denoise_fn.__class__.__name__[:-len('ONNX')] \
+            remove_suffix(self.model.pitch_predictor.denoise_fn.__class__.__name__, 'ONNX') \
             if self.model.predict_pitch else None
         self.pitch_diffusion_class_name = \
-            self.model.pitch_predictor.__class__.__name__[:-len('ONNX')] \
+            remove_suffix(self.model.pitch_predictor.__class__.__name__, 'ONNX') \
             if self.model.predict_pitch else None
         self.variance_denoiser_class_name = \
-            self.model.variance_predictor.denoise_fn.__class__.__name__[:-len('ONNX')] \
+            remove_suffix(self.model.variance_predictor.denoise_fn.__class__.__name__, 'ONNX') \
             if self.model.predict_variances else None
         self.variance_diffusion_class_name = \
-            self.model.variance_predictor.__class__.__name__[:-len('ONNX')] \
+            remove_suffix(self.model.variance_predictor.__class__.__name__, 'ONNX') \
             if self.model.predict_variances else None
 
         # Attributes for exporting
diff --git a/utils/__init__.py b/utils/__init__.py
index f42ed8b69..c4fa45114 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -258,3 +258,10 @@ def simulate_lr_scheduler(optimizer_args, scheduler_args, last_epoch=-1, num_par
         return scheduler._get_closed_form_lr()
     else:
         return scheduler.get_lr()
+
+
+def remove_suffix(string: str, suffix: str):
+    # Just for Python 3.8 compatibility, since the `str.removesuffix()` API is only available since Python 3.9
+    if string.endswith(suffix):
+        string = string[:-len(suffix)]
+    return string

From e67e2a625687b066a68bfd7d12e5b6b145ba619c Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 17 Jun 2023 12:49:32 +0800
Subject: [PATCH 435/475] Explicitly annotate `speedup` with `int`

---
 deployment/modules/diffusion.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index ecec16b20..6c2b887f8 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -68,7 +68,7 @@ def denorm_spec(self, x):
         m = (self.spec_max + self.spec_min) / 2.
return x * d + m - def forward(self, condition, speedup): + def forward(self, condition, speedup: int): condition = condition.transpose(1, 2) # [1, T, H] => [1, H, T] device = condition.device n_frames = condition.shape[2] From cff5da2f2976a82c04199a4b2afba0c55d33eae4 Mon Sep 17 00:00:00 2001 From: "llc1995@sina.com" Date: Sat, 17 Jun 2023 21:23:18 +0800 Subject: [PATCH 436/475] Add DPM-Solver++ and UniPC sampling method --- docs/ConfigurationSchemas.md | 5 +- inference/dpm_solver_pytorch.py | 738 ++++++++++++++++++-------------- inference/uni_pc.py | 731 +++++++++++++++++++++++++++++++ modules/diffusion/ddpm.py | 45 +- 4 files changed, 1196 insertions(+), 323 deletions(-) create mode 100644 inference/uni_pc.py diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index 16be6538a..62eb4dba5 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -662,7 +662,8 @@ Diffusion sampling acceleration method. The following method are currently avail - DDIM: the DDIM method from [DENOISING DIFFUSION IMPLICIT MODELS](https://arxiv.org/abs/2010.02502) - PNDM: the PLMS method from [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778) -- DPM-Solver 2.0 adapted from [DPM-Solver: A Fast ODE Solver for Diffusion Probabilistic Model Sampling in Around 10 Steps](https://github.com/LuChengTHU/dpm-solver) +- DPM-Solver++ adapted from [DPM-Solver: A Fast ODE Solver for Diffusion Probabilistic Model Sampling in Around 10 Steps](https://github.com/LuChengTHU/dpm-solver) +- UniPC adapted from [UniPC: A Unified Predictor-Corrector Framework for Fast Sampling of Diffusion Models](https://github.com/wl-zhao/UniPC) #### visibility @@ -686,7 +687,7 @@ dpm-solver #### constraints -Choose from 'ddim', 'pndm', 'dpm-solver' +Choose from 'ddim', 'pndm', 'dpm-solver', 'unipc' ### diff_decoder_type diff --git a/inference/dpm_solver_pytorch.py b/inference/dpm_solver_pytorch.py index 9d4cb8c2b..23e4d3c05 100644 --- a/inference/dpm_solver_pytorch.py +++ b/inference/dpm_solver_pytorch.py @@ -1,6 +1,6 @@ -import math - import torch +import torch.nn.functional as F +import math class NoiseScheduleVP: @@ -11,7 +11,8 @@ def __init__( alphas_cumprod=None, continuous_beta_0=0.1, continuous_beta_1=20., - ): + dtype=torch.float32, + ): """Create a wrapper class for the forward SDE (VP type). *** @@ -46,7 +47,7 @@ def __init__( betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details) alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details) - Note that we always have alphas_cumprod = cumprod(betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. + Note that we always have alphas_cumprod = cumprod(1 - betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. **Important**: Please pay special attention for the args for `alphas_cumprod`: The `alphas_cumprod` is the \hat{alpha_n} arrays in the notations of DDPM. Specifically, DDPMs assume that @@ -59,21 +60,19 @@ def __init__( 2. For continuous-time DPMs: - We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise - schedule are the default settings in DDPM and improved-DDPM: + We support the linear VPSDE for the continuous time setting. The hyperparameters for the noise + schedule are the default settings in Yang Song's ScoreSDE: Args: beta_min: A `float` number. The smallest beta for the linear schedule. 
beta_max: A `float` number. The largest beta for the linear schedule. - cosine_s: A `float` number. The hyperparameter in the cosine schedule. - cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule. T: A `float` number. The ending time of the forward process. =============================================================== Args: schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs, - 'linear' or 'cosine' for continuous-time DPMs. + 'linear' for continuous-time DPMs. Returns: A wrapper object of the forward SDE (VP type). @@ -92,10 +91,8 @@ def __init__( """ - if schedule not in ['discrete', 'linear', 'cosine']: - raise ValueError( - "Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format( - schedule)) + if schedule not in ['discrete', 'linear']: + raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear'".format(schedule)) self.schedule = schedule if schedule == 'discrete': @@ -104,40 +101,37 @@ def __init__( else: assert alphas_cumprod is not None log_alphas = 0.5 * torch.log(alphas_cumprod) - self.total_N = len(log_alphas) self.T = 1. - self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)) - self.log_alpha_array = log_alphas.reshape((1, -1,)) + self.log_alpha_array = self.numerical_clip_alpha(log_alphas).reshape((1, -1,)).to(dtype=dtype) + self.total_N = self.log_alpha_array.shape[1] + self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)).to(dtype=dtype) else: + self.T = 1. self.total_N = 1000 self.beta_0 = continuous_beta_0 self.beta_1 = continuous_beta_1 - self.cosine_s = 0.008 - self.cosine_beta_max = 999. - self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * ( - 1. + self.cosine_s) / math.pi - self.cosine_s - self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.)) - self.schedule = schedule - if schedule == 'cosine': - # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T. - # Note that T = 0.9946 may be not the optimal setting. However, we find it works well. - self.T = 0.9946 - else: - self.T = 1. + + def numerical_clip_alpha(self, log_alphas, clipped_lambda=-5.1): + """ + For some beta schedules such as cosine schedule, the log-SNR has numerical isssues. + We clip the log-SNR near t=T within -5.1 to ensure the stability. + Such a trick is very useful for diffusion models with the cosine schedule, such as i-DDPM, guided-diffusion and GLIDE. + """ + log_sigmas = 0.5 * torch.log(1. - torch.exp(2. * log_alphas)) + lambs = log_alphas - log_sigmas + idx = torch.searchsorted(torch.flip(lambs, [0]), clipped_lambda) + if idx > 0: + log_alphas = log_alphas[:-idx] + return log_alphas def marginal_log_mean_coeff(self, t): """ Compute log(alpha_t) of a given continuous-time label t in [0, T]. """ if self.schedule == 'discrete': - return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), - self.log_alpha_array.to(t.device)).reshape((-1)) + return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) elif self.schedule == 'linear': return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 - elif self.schedule == 'cosine': - log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. 
+ self.cosine_s) * math.pi / 2.)) - log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 - return log_alpha_t def marginal_alpha(self, t): """ @@ -165,32 +159,25 @@ def inverse_lambda(self, lamb): """ if self.schedule == 'linear': tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) - Delta = self.beta_0 ** 2 + tmp + Delta = self.beta_0**2 + tmp return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) elif self.schedule == 'discrete': log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb) - t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), - torch.flip(self.t_array.to(lamb.device), [1])) + t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) return t.reshape((-1,)) - else: - log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) - t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * ( - 1. + self.cosine_s) / math.pi - self.cosine_s - t = t_fn(log_alpha) - return t def model_wrapper( - model, - noise_schedule, - model_type="noise", - model_kwargs={}, - guidance_type="uncond", - condition=None, - unconditional_condition=None, - guidance_scale=1., - classifier_fn=None, - classifier_kwargs={}, + model, + noise_schedule, + model_type="noise", + model_kwargs={}, + guidance_type="uncond", + condition=None, + unconditional_condition=None, + guidance_scale=1., + classifier_fn=None, + classifier_kwargs={}, ): """Create a wrapper function for the noise prediction model. @@ -288,13 +275,11 @@ def get_model_input_time(t_continuous): For continuous-time DPMs, we just use `t_continuous`. """ if noise_schedule.schedule == 'discrete': - return (t_continuous - 1. / noise_schedule.total_N) * 1000. + return (t_continuous - 1. / noise_schedule.total_N) * noise_schedule.total_N else: return t_continuous def noise_pred_fn(x, t_continuous, cond=None): - if t_continuous.reshape((-1,)).shape[0] == 1: - t_continuous = t_continuous.expand((x.shape[0])) t_input = get_model_input_time(t_continuous) if cond is None: output = model(x, t_input, **model_kwargs) @@ -304,16 +289,13 @@ def noise_pred_fn(x, t_continuous, cond=None): return output elif model_type == "x_start": alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) - dims = x.dim() - return (x - expand_dims(alpha_t, dims) * output) / expand_dims(sigma_t, dims) + return (x - expand_dims(alpha_t, x.dim()) * output) / expand_dims(sigma_t, x.dim()) elif model_type == "v": alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) - dims = x.dim() - return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x + return expand_dims(alpha_t, x.dim()) * output + expand_dims(sigma_t, x.dim()) * x elif model_type == "score": sigma_t = noise_schedule.marginal_std(t_continuous) - dims = x.dim() - return -expand_dims(sigma_t, dims) * output + return -expand_dims(sigma_t, x.dim()) * output def cond_grad_fn(x, t_input): """ @@ -328,8 +310,6 @@ def model_fn(x, t_continuous): """ The noise predicition model function that is used for DPM-Solver. 
""" - if t_continuous.reshape((-1,)).shape[0] == 1: - t_continuous = t_continuous.expand((x.shape[0])) if guidance_type == "uncond": return noise_pred_fn(x, t_continuous) elif guidance_type == "classifier": @@ -338,7 +318,7 @@ def model_fn(x, t_continuous): cond_grad = cond_grad_fn(x, t_input) sigma_t = noise_schedule.marginal_std(t_continuous) noise = noise_pred_fn(x, t_continuous) - return noise - guidance_scale * expand_dims(sigma_t, dims=cond_grad.dim()) * cond_grad + return noise - guidance_scale * expand_dims(sigma_t, x.dim()) * cond_grad elif guidance_type == "classifier-free": if guidance_scale == 1. or unconditional_condition is None: return noise_pred_fn(x, t_continuous, cond=condition) @@ -349,20 +329,34 @@ def model_fn(x, t_continuous): noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2) return noise_uncond + guidance_scale * (noise - noise_uncond) - assert model_type in ["noise", "x_start", "v"] + assert model_type in ["noise", "x_start", "v", "score"] assert guidance_type in ["uncond", "classifier", "classifier-free"] return model_fn class DPM_Solver: - def __init__(self, model_fn, noise_schedule, predict_x0=False, thresholding=False, max_val=1.): + def __init__( + self, + model_fn, + noise_schedule, + algorithm_type="dpmsolver++", + correcting_x0_fn=None, + correcting_xt_fn=None, + thresholding_max_val=1., + dynamic_thresholding_ratio=0.995, + ): """Construct a DPM-Solver. - We support both the noise prediction model ("predicting epsilon") and the data prediction model ("predicting x0"). - If `predict_x0` is False, we use the solver for the noise prediction model (DPM-Solver). - If `predict_x0` is True, we use the solver for the data prediction model (DPM-Solver++). - In such case, we further support the "dynamic thresholding" in [1] when `thresholding` is True. - The "dynamic thresholding" can greatly improve the sample quality for pixel-space DPMs with large guidance scales. + We support both DPM-Solver (`algorithm_type="dpmsolver"`) and DPM-Solver++ (`algorithm_type="dpmsolver++"`). + + We also support the "dynamic thresholding" method in Imagen[1]. For pixel-space diffusion models, you + can set both `algorithm_type="dpmsolver++"` and `correcting_x0_fn="dynamic_thresholding"` to use the + dynamic thresholding. The "dynamic thresholding" can greatly improve the sample quality for pixel-space + DPMs with large guidance scales. Note that the thresholding method is **unsuitable** for latent-space + DPMs (such as stable-diffusion). + + To support advanced algorithms in image-to-image applications, we also support corrector functions for + both x0 and xt. Args: model_fn: A noise prediction model function which accepts the continuous-time input (t in [epsilon, T]): @@ -370,18 +364,65 @@ def __init__(self, model_fn, noise_schedule, predict_x0=False, thresholding=Fals def model_fn(x, t_continuous): return noise `` + The shape of `x` is `(batch_size, **shape)`, and the shape of `t_continuous` is `(batch_size,)`. noise_schedule: A noise schedule object, such as NoiseScheduleVP. - predict_x0: A `bool`. If true, use the data prediction model; else, use the noise prediction model. - thresholding: A `bool`. Valid when `predict_x0` is True. Whether to use the "dynamic thresholding" in [1]. - max_val: A `float`. Valid when both `predict_x0` and `thresholding` are True. The max value for thresholding. 
-
-        [1] Chitwan Saharia, William Chan, Saurabh Saxena, Lala Li, Jay Whang, Emily Denton, Seyed Kamyar Seyed Ghasemipour, Burcu Karagol Ayan, S Sara Mahdavi, Rapha Gontijo Lopes, et al. Photorealistic text-to-image diffusion models with deep language understanding. arXiv preprint arXiv:2205.11487, 2022b.
+            algorithm_type: A `str`. Either "dpmsolver" or "dpmsolver++".
+            correcting_x0_fn: A `str` or a function with the following format:
+                ```
+                def correcting_x0_fn(x0, t):
+                    x0_new = ...
+                    return x0_new
+                ```
+                This function is used to correct the outputs of the data prediction model at each sampling step. e.g.,
+                ```
+                x0_pred = data_pred_model(xt, t)
+                if correcting_x0_fn is not None:
+                    x0_pred = correcting_x0_fn(x0_pred, t)
+                xt_1 = update(x0_pred, xt, t)
+                ```
+                If `correcting_x0_fn="dynamic_thresholding"`, we use the dynamic thresholding proposed in Imagen[1].
+            correcting_xt_fn: A function with the following format:
+                ```
+                def correcting_xt_fn(xt, t, step):
+                    x_new = ...
+                    return x_new
+                ```
+                This function is used to correct the intermediate samples xt at each sampling step. e.g.,
+                ```
+                xt = ...
+                xt = correcting_xt_fn(xt, t, step)
+                ```
+            thresholding_max_val: A `float`. The max value for thresholding.
+                Valid only when using `dpmsolver++` and `correcting_x0_fn="dynamic_thresholding"`.
+            dynamic_thresholding_ratio: A `float`. The ratio for dynamic thresholding (see Imagen[1] for details).
+                Valid only when using `dpmsolver++` and `correcting_x0_fn="dynamic_thresholding"`.
+
+        [1] Chitwan Saharia, William Chan, Saurabh Saxena, Lala Li, Jay Whang, Emily Denton, Seyed Kamyar Seyed Ghasemipour,
+            Burcu Karagol Ayan, S Sara Mahdavi, Rapha Gontijo Lopes, et al. Photorealistic text-to-image diffusion models
+            with deep language understanding. arXiv preprint arXiv:2205.11487, 2022b.
         """
-        self.model = model_fn
+        self.model = lambda x, t: model_fn(x, t.expand((x.shape[0])))
         self.noise_schedule = noise_schedule
-        self.predict_x0 = predict_x0
-        self.thresholding = thresholding
-        self.max_val = max_val
+        assert algorithm_type in ["dpmsolver", "dpmsolver++"]
+        self.algorithm_type = algorithm_type
+        if correcting_x0_fn == "dynamic_thresholding":
+            self.correcting_x0_fn = self.dynamic_thresholding_fn
+        else:
+            self.correcting_x0_fn = correcting_x0_fn
+        self.correcting_xt_fn = correcting_xt_fn
+        self.dynamic_thresholding_ratio = dynamic_thresholding_ratio
+        self.thresholding_max_val = thresholding_max_val
+
+    def dynamic_thresholding_fn(self, x0, t):
+        """
+        The dynamic thresholding method.
+        """
+        dims = x0.dim()
+        p = self.dynamic_thresholding_ratio
+        s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1)
+        s = expand_dims(torch.maximum(s, self.thresholding_max_val * torch.ones_like(s).to(s.device)), dims)
+        x0 = torch.clamp(x0, -s, s) / s
+        return x0
 
     def noise_prediction_fn(self, x, t):
         """
@@ -391,24 +432,20 @@ def noise_prediction_fn(self, x, t):
 
     def data_prediction_fn(self, x, t):
         """
-        Return the data prediction model (with thresholding).
+        Return the data prediction model (with corrector).
        """
         noise = self.noise_prediction_fn(x, t)
-        dims = x.dim()
         alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t)
-        x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims)
-        if self.thresholding:
-            p = 0.995  # A hyperparameter in the paper of "Imagen" [1]. 
- s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) - s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims) - x0 = torch.clamp(x0, -s, s) / s + x0 = (x - sigma_t * noise) / alpha_t + if self.correcting_x0_fn is not None: + x0 = self.correcting_x0_fn(x0, t) return x0 def model_fn(self, x, t): """ Convert the model to the noise prediction model or the data prediction model. """ - if self.predict_x0: + if self.algorithm_type == "dpmsolver++": return self.data_prediction_fn(x, t) else: return self.noise_prediction_fn(x, t) @@ -437,11 +474,10 @@ def get_time_steps(self, skip_type, t_T, t_0, N, device): return torch.linspace(t_T, t_0, N + 1).to(device) elif skip_type == 'time_quadratic': t_order = 2 - t = torch.linspace(t_T ** (1. / t_order), t_0 ** (1. / t_order), N + 1).pow(t_order).to(device) + t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) return t else: - raise ValueError( - "Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) + raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): """ @@ -478,32 +514,31 @@ def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type if order == 3: K = steps // 3 + 1 if steps % 3 == 0: - orders = [3, ] * (K - 2) + [2, 1] + orders = [3,] * (K - 2) + [2, 1] elif steps % 3 == 1: - orders = [3, ] * (K - 1) + [1] + orders = [3,] * (K - 1) + [1] else: - orders = [3, ] * (K - 1) + [2] + orders = [3,] * (K - 1) + [2] elif order == 2: if steps % 2 == 0: K = steps // 2 - orders = [2, ] * K + orders = [2,] * K else: K = steps // 2 + 1 - orders = [2, ] * (K - 1) + [1] + orders = [2,] * (K - 1) + [1] elif order == 1: K = 1 - orders = [1, ] * steps + orders = [1,] * steps else: raise ValueError("'order' must be '1' or '2' or '3'.") if skip_type == 'logSNR': # To reproduce the results in DPM-Solver paper timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) else: - timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[ - torch.cumsum(torch.tensor([0, ] + orders), dim=0).to(device)] + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders), 0).to(device)] return timesteps_outer, orders - def denoise_fn(self, x, s): + def denoise_to_zero_fn(self, x, s): """ Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. """ @@ -515,8 +550,8 @@ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=Fal Args: x: A pytorch tensor. The initial value at time `s`. - s: A pytorch tensor. The starting time, with the shape (x.shape[0],). - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + s: A pytorch tensor. The starting time, with the shape (1,). + t: A pytorch tensor. The ending time, with the shape (1,). model_s: A pytorch tensor. The model function evaluated at time `s`. If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it. return_intermediate: A `bool`. If true, also return the model value at time `s`. 
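
The hunks below then rewrite `dpm_solver_first_update` itself for this calling convention. For reference, a minimal standalone sketch of the first-order update they compute (illustrative only, not part of the patch; `ns` is assumed to behave like `NoiseScheduleVP`, and `model_fn` like `DPM_Solver.model_fn`, i.e. predicting x0 under "dpmsolver++" and noise under "dpmsolver"):

```
import torch

def first_order_update(x, s, t, ns, model_fn, algorithm_type="dpmsolver++"):
    # s and t have shape (1,), so plain broadcasting replaces expand_dims.
    h = ns.marginal_lambda(t) - ns.marginal_lambda(s)
    log_alpha_s, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(t)
    sigma_s, sigma_t = ns.marginal_std(s), ns.marginal_std(t)
    if algorithm_type == "dpmsolver++":
        # x_t = (sigma_t / sigma_s) * x - alpha_t * (e^{-h} - 1) * x0(x, s)
        return sigma_t / sigma_s * x - torch.exp(log_alpha_t) * torch.expm1(-h) * model_fn(x, s)
    # x_t = e^{log_alpha_t - log_alpha_s} * x - sigma_t * (e^{h} - 1) * eps(x, s)
    return torch.exp(log_alpha_t - log_alpha_s) * x - sigma_t * torch.expm1(h) * model_fn(x, s)
```
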
@@ -531,13 +566,13 @@ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=Fal sigma_s, sigma_t = ns.marginal_std(s), ns.marginal_std(t) alpha_t = torch.exp(log_alpha_t) - if self.predict_x0: + if self.algorithm_type == "dpmsolver++": phi_1 = torch.expm1(-h) if model_s is None: model_s = self.model_fn(x, s) x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s + sigma_t / sigma_s * x + - alpha_t * phi_1 * model_s ) if return_intermediate: return x_t, {'model_s': model_s} @@ -548,70 +583,66 @@ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=Fal if model_s is None: model_s = self.model_fn(x, s) x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s + torch.exp(log_alpha_t - log_alpha_s) * x + - (sigma_t * phi_1) * model_s ) if return_intermediate: return x_t, {'model_s': model_s} else: return x_t - def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, return_intermediate=False, - solver_type='dpm_solver'): + def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, return_intermediate=False, solver_type='dpmsolver'): """ Singlestep solver DPM-Solver-2 from time `s` to time `t`. Args: x: A pytorch tensor. The initial value at time `s`. - s: A pytorch tensor. The starting time, with the shape (x.shape[0],). - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + s: A pytorch tensor. The starting time, with the shape (1,). + t: A pytorch tensor. The ending time, with the shape (1,). r1: A `float`. The hyperparameter of the second-order solver. model_s: A pytorch tensor. The model function evaluated at time `s`. If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it. return_intermediate: A `bool`. If true, also return the model value at time `s` and `s1` (the intermediate time). - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. Returns: x_t: A pytorch tensor. The approximated solution at time `t`. 
""" - if solver_type not in ['dpm_solver', 'taylor']: - raise ValueError("'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format(solver_type)) + if solver_type not in ['dpmsolver', 'taylor']: + raise ValueError("'solver_type' must be either 'dpmsolver' or 'taylor', got {}".format(solver_type)) if r1 is None: r1 = 0.5 ns = self.noise_schedule - dims = x.dim() lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t) h = lambda_t - lambda_s lambda_s1 = lambda_s + r1 * h s1 = ns.inverse_lambda(lambda_s1) - log_alpha_s, log_alpha_s1, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff( - s1), ns.marginal_log_mean_coeff(t) + log_alpha_s, log_alpha_s1, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(t) sigma_s, sigma_s1, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std(t) alpha_s1, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_t) - if self.predict_x0: + if self.algorithm_type == "dpmsolver++": phi_11 = torch.expm1(-r1 * h) phi_1 = torch.expm1(-h) if model_s is None: model_s = self.model_fn(x, s) x_s1 = ( - expand_dims(sigma_s1 / sigma_s, dims) * x - - expand_dims(alpha_s1 * phi_11, dims) * model_s + (sigma_s1 / sigma_s) * x + - (alpha_s1 * phi_11) * model_s ) model_s1 = self.model_fn(x_s1, s1) - if solver_type == 'dpm_solver': + if solver_type == 'dpmsolver': x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - - (0.5 / r1) * expand_dims(alpha_t * phi_1, dims) * (model_s1 - model_s) + (sigma_t / sigma_s) * x + - (alpha_t * phi_1) * model_s + - (0.5 / r1) * (alpha_t * phi_1) * (model_s1 - model_s) ) elif solver_type == 'taylor': x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - + (1. / r1) * expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * ( - model_s1 - model_s) + (sigma_t / sigma_s) * x + - (alpha_t * phi_1) * model_s + + (1. / r1) * (alpha_t * (phi_1 / h + 1.)) * (model_s1 - model_s) ) else: phi_11 = torch.expm1(r1 * h) @@ -620,36 +651,35 @@ def singlestep_dpm_solver_second_update(self, x, s, t, r1=0.5, model_s=None, ret if model_s is None: model_s = self.model_fn(x, s) x_s1 = ( - expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x - - expand_dims(sigma_s1 * phi_11, dims) * model_s + torch.exp(log_alpha_s1 - log_alpha_s) * x + - (sigma_s1 * phi_11) * model_s ) model_s1 = self.model_fn(x_s1, s1) - if solver_type == 'dpm_solver': + if solver_type == 'dpmsolver': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - (0.5 / r1) * expand_dims(sigma_t * phi_1, dims) * (model_s1 - model_s) + torch.exp(log_alpha_t - log_alpha_s) * x + - (sigma_t * phi_1) * model_s + - (0.5 / r1) * (sigma_t * phi_1) * (model_s1 - model_s) ) elif solver_type == 'taylor': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - (1. / r1) * expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * (model_s1 - model_s) + torch.exp(log_alpha_t - log_alpha_s) * x + - (sigma_t * phi_1) * model_s + - (1. / r1) * (sigma_t * (phi_1 / h - 1.)) * (model_s1 - model_s) ) if return_intermediate: return x_t, {'model_s': model_s, 'model_s1': model_s1} else: return x_t - def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. 
/ 3., model_s=None, model_s1=None, - return_intermediate=False, solver_type='dpm_solver'): + def singlestep_dpm_solver_third_update(self, x, s, t, r1=1./3., r2=2./3., model_s=None, model_s1=None, return_intermediate=False, solver_type='dpmsolver'): """ Singlestep solver DPM-Solver-3 from time `s` to time `t`. Args: x: A pytorch tensor. The initial value at time `s`. - s: A pytorch tensor. The starting time, with the shape (x.shape[0],). - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + s: A pytorch tensor. The starting time, with the shape (1,). + t: A pytorch tensor. The ending time, with the shape (1,). r1: A `float`. The hyperparameter of the third-order solver. r2: A `float`. The hyperparameter of the third-order solver. model_s: A pytorch tensor. The model function evaluated at time `s`. @@ -657,32 +687,29 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. / 3., mo model_s1: A pytorch tensor. The model function evaluated at time `s1` (the intermediate time given by `r1`). If `model_s1` is None, we evaluate the model at `s1`; otherwise we directly use it. return_intermediate: A `bool`. If true, also return the model value at time `s`, `s1` and `s2` (the intermediate times). - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. Returns: x_t: A pytorch tensor. The approximated solution at time `t`. """ - if solver_type not in ['dpm_solver', 'taylor']: - raise ValueError("'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format(solver_type)) + if solver_type not in ['dpmsolver', 'taylor']: + raise ValueError("'solver_type' must be either 'dpmsolver' or 'taylor', got {}".format(solver_type)) if r1 is None: r1 = 1. / 3. if r2 is None: r2 = 2. / 3. ns = self.noise_schedule - dims = x.dim() lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t) h = lambda_t - lambda_s lambda_s1 = lambda_s + r1 * h lambda_s2 = lambda_s + r2 * h s1 = ns.inverse_lambda(lambda_s1) s2 = ns.inverse_lambda(lambda_s2) - log_alpha_s, log_alpha_s1, log_alpha_s2, log_alpha_t = ns.marginal_log_mean_coeff( - s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(s2), ns.marginal_log_mean_coeff(t) - sigma_s, sigma_s1, sigma_s2, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std( - s2), ns.marginal_std(t) + log_alpha_s, log_alpha_s1, log_alpha_s2, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(s1), ns.marginal_log_mean_coeff(s2), ns.marginal_log_mean_coeff(t) + sigma_s, sigma_s1, sigma_s2, sigma_t = ns.marginal_std(s), ns.marginal_std(s1), ns.marginal_std(s2), ns.marginal_std(t) alpha_s1, alpha_s2, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_s2), torch.exp(log_alpha_t) - if self.predict_x0: + if self.algorithm_type == "dpmsolver++": phi_11 = torch.expm1(-r1 * h) phi_12 = torch.expm1(-r2 * h) phi_1 = torch.expm1(-h) @@ -694,21 +721,21 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. 
/ 3., mo model_s = self.model_fn(x, s) if model_s1 is None: x_s1 = ( - expand_dims(sigma_s1 / sigma_s, dims) * x - - expand_dims(alpha_s1 * phi_11, dims) * model_s + (sigma_s1 / sigma_s) * x + - (alpha_s1 * phi_11) * model_s ) model_s1 = self.model_fn(x_s1, s1) x_s2 = ( - expand_dims(sigma_s2 / sigma_s, dims) * x - - expand_dims(alpha_s2 * phi_12, dims) * model_s - + r2 / r1 * expand_dims(alpha_s2 * phi_22, dims) * (model_s1 - model_s) + (sigma_s2 / sigma_s) * x + - (alpha_s2 * phi_12) * model_s + + r2 / r1 * (alpha_s2 * phi_22) * (model_s1 - model_s) ) model_s2 = self.model_fn(x_s2, s2) - if solver_type == 'dpm_solver': + if solver_type == 'dpmsolver': x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - + (1. / r2) * expand_dims(alpha_t * phi_2, dims) * (model_s2 - model_s) + (sigma_t / sigma_s) * x + - (alpha_t * phi_1) * model_s + + (1. / r2) * (alpha_t * phi_2) * (model_s2 - model_s) ) elif solver_type == 'taylor': D1_0 = (1. / r1) * (model_s1 - model_s) @@ -716,10 +743,10 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. / 3., mo D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1) D2 = 2. * (D1_1 - D1_0) / (r2 - r1) x_t = ( - expand_dims(sigma_t / sigma_s, dims) * x - - expand_dims(alpha_t * phi_1, dims) * model_s - + expand_dims(alpha_t * phi_2, dims) * D1 - - expand_dims(alpha_t * phi_3, dims) * D2 + (sigma_t / sigma_s) * x + - (alpha_t * phi_1) * model_s + + (alpha_t * phi_2) * D1 + - (alpha_t * phi_3) * D2 ) else: phi_11 = torch.expm1(r1 * h) @@ -733,21 +760,21 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. / 3., mo model_s = self.model_fn(x, s) if model_s1 is None: x_s1 = ( - expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x - - expand_dims(sigma_s1 * phi_11, dims) * model_s + (torch.exp(log_alpha_s1 - log_alpha_s)) * x + - (sigma_s1 * phi_11) * model_s ) model_s1 = self.model_fn(x_s1, s1) x_s2 = ( - expand_dims(torch.exp(log_alpha_s2 - log_alpha_s), dims) * x - - expand_dims(sigma_s2 * phi_12, dims) * model_s - - r2 / r1 * expand_dims(sigma_s2 * phi_22, dims) * (model_s1 - model_s) + (torch.exp(log_alpha_s2 - log_alpha_s)) * x + - (sigma_s2 * phi_12) * model_s + - r2 / r1 * (sigma_s2 * phi_22) * (model_s1 - model_s) ) model_s2 = self.model_fn(x_s2, s2) - if solver_type == 'dpm_solver': + if solver_type == 'dpmsolver': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - (1. / r2) * expand_dims(sigma_t * phi_2, dims) * (model_s2 - model_s) + (torch.exp(log_alpha_t - log_alpha_s)) * x + - (sigma_t * phi_1) * model_s + - (1. / r2) * (sigma_t * phi_2) * (model_s2 - model_s) ) elif solver_type == 'taylor': D1_0 = (1. / r1) * (model_s1 - model_s) @@ -755,10 +782,10 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. / 3., mo D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1) D2 = 2. * (D1_1 - D1_0) / (r2 - r1) x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x - - expand_dims(sigma_t * phi_1, dims) * model_s - - expand_dims(sigma_t * phi_2, dims) * D1 - - expand_dims(sigma_t * phi_3, dims) * D2 + (torch.exp(log_alpha_t - log_alpha_s)) * x + - (sigma_t * phi_1) * model_s + - (sigma_t * phi_2) * D1 + - (sigma_t * phi_3) * D2 ) if return_intermediate: @@ -766,28 +793,26 @@ def singlestep_dpm_solver_third_update(self, x, s, t, r1=1. / 3., r2=2. 
/ 3., mo else: return x_t - def multistep_dpm_solver_second_update(self, x, model_prev_list, t_prev_list, t, solver_type="dpm_solver"): + def multistep_dpm_solver_second_update(self, x, model_prev_list, t_prev_list, t, solver_type="dpmsolver"): """ Multistep solver DPM-Solver-2 from time `t_prev_list[-1]` to time `t`. Args: x: A pytorch tensor. The initial value at time `s`. model_prev_list: A list of pytorch tensor. The previous computed model values. - t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],) - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (1,) + t: A pytorch tensor. The ending time, with the shape (1,). + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. Returns: x_t: A pytorch tensor. The approximated solution at time `t`. """ - if solver_type not in ['dpm_solver', 'taylor']: - raise ValueError("'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format(solver_type)) + if solver_type not in ['dpmsolver', 'taylor']: + raise ValueError("'solver_type' must be either 'dpmsolver' or 'taylor', got {}".format(solver_type)) ns = self.noise_schedule - dims = x.dim() - model_prev_1, model_prev_0 = model_prev_list - t_prev_1, t_prev_0 = t_prev_list - lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_1), ns.marginal_lambda( - t_prev_0), ns.marginal_lambda(t) + model_prev_1, model_prev_0 = model_prev_list[-2], model_prev_list[-1] + t_prev_1, t_prev_0 = t_prev_list[-2], t_prev_list[-1] + lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t) log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) alpha_t = torch.exp(log_alpha_t) @@ -795,55 +820,55 @@ def multistep_dpm_solver_second_update(self, x, model_prev_list, t_prev_list, t, h_0 = lambda_prev_0 - lambda_prev_1 h = lambda_t - lambda_prev_0 r0 = h_0 / h - D1_0 = expand_dims(1. / r0, dims) * (model_prev_0 - model_prev_1) - if self.predict_x0: - if solver_type == 'dpm_solver': + D1_0 = (1. / r0) * (model_prev_0 - model_prev_1) + if self.algorithm_type == "dpmsolver++": + phi_1 = torch.expm1(-h) + if solver_type == 'dpmsolver': x_t = ( - expand_dims(sigma_t / sigma_prev_0, dims) * x - - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 - - 0.5 * expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * D1_0 + (sigma_t / sigma_prev_0) * x + - (alpha_t * phi_1) * model_prev_0 + - 0.5 * (alpha_t * phi_1) * D1_0 ) elif solver_type == 'taylor': x_t = ( - expand_dims(sigma_t / sigma_prev_0, dims) * x - - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 - + expand_dims(alpha_t * ((torch.exp(-h) - 1.) 
/ h + 1.), dims) * D1_0 + (sigma_t / sigma_prev_0) * x + - (alpha_t * phi_1) * model_prev_0 + + (alpha_t * (phi_1 / h + 1.)) * D1_0 ) else: - if solver_type == 'dpm_solver': + phi_1 = torch.expm1(h) + if solver_type == 'dpmsolver': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 - - 0.5 * expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * D1_0 + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * phi_1) * model_prev_0 + - 0.5 * (sigma_t * phi_1) * D1_0 ) elif solver_type == 'taylor': x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 - - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1_0 + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * phi_1) * model_prev_0 + - (sigma_t * (phi_1 / h - 1.)) * D1_0 ) return x_t - def multistep_dpm_solver_third_update(self, x, model_prev_list, t_prev_list, t, solver_type='dpm_solver'): + def multistep_dpm_solver_third_update(self, x, model_prev_list, t_prev_list, t, solver_type='dpmsolver'): """ Multistep solver DPM-Solver-3 from time `t_prev_list[-1]` to time `t`. Args: x: A pytorch tensor. The initial value at time `s`. model_prev_list: A list of pytorch tensor. The previous computed model values. - t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],) - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (1,) + t: A pytorch tensor. The ending time, with the shape (1,). + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. Returns: x_t: A pytorch tensor. The approximated solution at time `t`. """ ns = self.noise_schedule - dims = x.dim() model_prev_2, model_prev_1, model_prev_0 = model_prev_list t_prev_2, t_prev_1, t_prev_0 = t_prev_list - lambda_prev_2, lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_2), ns.marginal_lambda( - t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t) + lambda_prev_2, lambda_prev_1, lambda_prev_0, lambda_t = ns.marginal_lambda(t_prev_2), ns.marginal_lambda(t_prev_1), ns.marginal_lambda(t_prev_0), ns.marginal_lambda(t) log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) alpha_t = torch.exp(log_alpha_t) @@ -852,39 +877,44 @@ def multistep_dpm_solver_third_update(self, x, model_prev_list, t_prev_list, t, h_0 = lambda_prev_0 - lambda_prev_1 h = lambda_t - lambda_prev_0 r0, r1 = h_0 / h, h_1 / h - D1_0 = expand_dims(1. / r0, dims) * (model_prev_0 - model_prev_1) - D1_1 = expand_dims(1. / r1, dims) * (model_prev_1 - model_prev_2) - D1 = D1_0 + expand_dims(r0 / (r0 + r1), dims) * (D1_0 - D1_1) - D2 = expand_dims(1. / (r0 + r1), dims) * (D1_0 - D1_1) - if self.predict_x0: + D1_0 = (1. / r0) * (model_prev_0 - model_prev_1) + D1_1 = (1. / r1) * (model_prev_1 - model_prev_2) + D1 = D1_0 + (r0 / (r0 + r1)) * (D1_0 - D1_1) + D2 = (1. / (r0 + r1)) * (D1_0 - D1_1) + if self.algorithm_type == "dpmsolver++": + phi_1 = torch.expm1(-h) + phi_2 = phi_1 / h + 1. 
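+            # Expanded, these coefficients are phi_2 = (e^{-h} - 1)/h + 1 and
+            # phi_3 = (e^{-h} - 1 + h)/h**2 - 0.5, i.e. exactly the factors that
+            # the removed expand_dims expressions below wrote out inline.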
+ phi_3 = phi_2 / h - 0.5 x_t = ( - expand_dims(sigma_t / sigma_prev_0, dims) * x - - expand_dims(alpha_t * (torch.exp(-h) - 1.), dims) * model_prev_0 - + expand_dims(alpha_t * ((torch.exp(-h) - 1.) / h + 1.), dims) * D1 - - expand_dims(alpha_t * ((torch.exp(-h) - 1. + h) / h ** 2 - 0.5), dims) * D2 + (sigma_t / sigma_prev_0) * x + - (alpha_t * phi_1) * model_prev_0 + + (alpha_t * phi_2) * D1 + - (alpha_t * phi_3) * D2 ) else: + phi_1 = torch.expm1(h) + phi_2 = phi_1 / h - 1. + phi_3 = phi_2 / h - 0.5 x_t = ( - expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x - - expand_dims(sigma_t * (torch.exp(h) - 1.), dims) * model_prev_0 - - expand_dims(sigma_t * ((torch.exp(h) - 1.) / h - 1.), dims) * D1 - - expand_dims(sigma_t * ((torch.exp(h) - 1. - h) / h ** 2 - 0.5), dims) * D2 + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * phi_1) * model_prev_0 + - (sigma_t * phi_2) * D1 + - (sigma_t * phi_3) * D2 ) return x_t - def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False, solver_type='dpm_solver', r1=None, - r2=None): + def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False, solver_type='dpmsolver', r1=None, r2=None): """ Singlestep DPM-Solver with the order `order` from time `s` to time `t`. Args: x: A pytorch tensor. The initial value at time `s`. - s: A pytorch tensor. The starting time, with the shape (x.shape[0],). - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + s: A pytorch tensor. The starting time, with the shape (1,). + t: A pytorch tensor. The ending time, with the shape (1,). order: A `int`. The order of DPM-Solver. We only support order == 1 or 2 or 3. return_intermediate: A `bool`. If true, also return the model value at time `s`, `s1` and `s2` (the intermediate times). - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. r1: A `float`. The hyperparameter of the second-order or third-order solver. r2: A `float`. The hyperparameter of the third-order solver. Returns: @@ -893,26 +923,24 @@ def singlestep_dpm_solver_update(self, x, s, t, order, return_intermediate=False if order == 1: return self.dpm_solver_first_update(x, s, t, return_intermediate=return_intermediate) elif order == 2: - return self.singlestep_dpm_solver_second_update(x, s, t, return_intermediate=return_intermediate, - solver_type=solver_type, r1=r1) + return self.singlestep_dpm_solver_second_update(x, s, t, return_intermediate=return_intermediate, solver_type=solver_type, r1=r1) elif order == 3: - return self.singlestep_dpm_solver_third_update(x, s, t, return_intermediate=return_intermediate, - solver_type=solver_type, r1=r1, r2=r2) + return self.singlestep_dpm_solver_third_update(x, s, t, return_intermediate=return_intermediate, solver_type=solver_type, r1=r1, r2=r2) else: raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order)) - def multistep_dpm_solver_update(self, x, model_prev_list, t_prev_list, t, order, solver_type='dpm_solver'): + def multistep_dpm_solver_update(self, x, model_prev_list, t_prev_list, t, order, solver_type='dpmsolver'): """ Multistep DPM-Solver with the order `order` from time `t_prev_list[-1]` to time `t`. Args: x: A pytorch tensor. The initial value at time `s`. 
model_prev_list: A list of pytorch tensor. The previous computed model values. - t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],) - t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (1,) + t: A pytorch tensor. The ending time, with the shape (1,). order: A `int`. The order of DPM-Solver. We only support order == 1 or 2 or 3. - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. Returns: x_t: A pytorch tensor. The approximated solution at time `t`. """ @@ -925,8 +953,7 @@ def multistep_dpm_solver_update(self, x, model_prev_list, t_prev_list, t, order, else: raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order)) - def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol=0.05, theta=0.9, t_err=1e-5, - solver_type='dpm_solver'): + def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol=0.05, theta=0.9, t_err=1e-5, solver_type='dpmsolver'): """ The adaptive step size solver based on singlestep DPM-Solver. @@ -941,15 +968,15 @@ def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol theta: A `float`. The safety hyperparameter for adapting the step size. The default setting is 0.9, followed [1]. t_err: A `float`. The tolerance for the time. We solve the diffusion ODE until the absolute error between the current time and `t_0` is less than `t_err`. The default setting is 1e-5. - solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. - The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + solver_type: either 'dpmsolver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpmsolver' type. Returns: x_0: A pytorch tensor. The approximated solution at time `t_0`. [1] A. Jolicoeur-Martineau, K. Li, R. Piché-Taillefer, T. Kachman, and I. Mitliagkas, "Gotta go fast when generating data with score-based models," arXiv preprint arXiv:2105.14080, 2021. """ ns = self.noise_schedule - s = t_T * torch.ones((x.shape[0],)).to(x) + s = t_T * torch.ones((1,)).to(x) lambda_s = ns.marginal_lambda(s) lambda_0 = ns.marginal_lambda(t_0 * torch.ones_like(s).to(x)) h = h_init * torch.ones_like(s).to(x) @@ -958,17 +985,11 @@ def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol if order == 2: r1 = 0.5 lower_update = lambda x, s, t: self.dpm_solver_first_update(x, s, t, return_intermediate=True) - higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, - solver_type=solver_type, - **kwargs) + higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, solver_type=solver_type, **kwargs) elif order == 3: r1, r2 = 1. / 3., 2. / 3. 
- lower_update = lambda x, s, t: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, - return_intermediate=True, - solver_type=solver_type) - higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_third_update(x, s, t, r1=r1, r2=r2, - solver_type=solver_type, - **kwargs) + lower_update = lambda x, s, t: self.singlestep_dpm_solver_second_update(x, s, t, r1=r1, return_intermediate=True, solver_type=solver_type) + higher_update = lambda x, s, t, **kwargs: self.singlestep_dpm_solver_third_update(x, s, t, r1=r1, r2=r2, solver_type=solver_type, **kwargs) else: raise ValueError("For adaptive step size solver, order must be 2 or 3, got {}".format(order)) while torch.abs((s - t_0)).mean() > t_err: @@ -988,10 +1009,45 @@ def dpm_solver_adaptive(self, x, order, t_T, t_0, h_init=0.05, atol=0.0078, rtol print('adaptive solver nfe', nfe) return x - def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time_uniform', - method='singlestep', denoise=False, solver_type='dpm_solver', atol=0.0078, - rtol=0.05, - ): + def add_noise(self, x, t, noise=None): + """ + Compute the noised input xt = alpha_t * x + sigma_t * noise. + + Args: + x: A `torch.Tensor` with shape `(batch_size, *shape)`. + t: A `torch.Tensor` with shape `(t_size,)`. + Returns: + xt with shape `(t_size, batch_size, *shape)`. + """ + alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) + if noise is None: + noise = torch.randn((t.shape[0], *x.shape), device=x.device) + x = x.reshape((-1, *x.shape)) + xt = expand_dims(alpha_t, x.dim()) * x + expand_dims(sigma_t, x.dim()) * noise + if t.shape[0] == 1: + return xt.squeeze(0) + else: + return xt + + def inverse(self, x, steps=20, t_start=None, t_end=None, order=2, skip_type='time_uniform', + method='multistep', lower_order_final=True, denoise_to_zero=False, solver_type='dpmsolver', + atol=0.0078, rtol=0.05, return_intermediate=False, + ): + """ + Inverse the sample `x` from time `t_start` to `t_end` by DPM-Solver. + For discrete-time DPMs, we use `t_start=1/N`, where `N` is the total time steps during training. + """ + t_0 = 1. / self.noise_schedule.total_N if t_start is None else t_start + t_T = self.noise_schedule.T if t_end is None else t_end + assert t_0 > 0 and t_T > 0, "Time range needs to be greater than 0. For discrete-time DPMs, it needs to be in [1 / N, 1], where N is the length of betas array" + return self.sample(x, steps=steps, t_start=t_0, t_end=t_T, order=order, skip_type=skip_type, + method=method, lower_order_final=lower_order_final, denoise_to_zero=denoise_to_zero, solver_type=solver_type, + atol=atol, rtol=rtol, return_intermediate=return_intermediate) + + def sample(self, x, steps=20, t_start=None, t_end=None, order=2, skip_type='time_uniform', + method='multistep', lower_order_final=True, denoise_to_zero=False, solver_type='dpmsolver', + atol=0.0078, rtol=0.05, return_intermediate=False, + ): """ Compute the sample at time `t_end` by DPM-Solver, given the initial `x` at time `t_start`. @@ -1040,15 +1096,19 @@ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time Some advices for choosing the algorithm: - For **unconditional sampling** or **guided sampling with small guidance scale** by DPMs: - Use singlestep DPM-Solver ("DPM-Solver-fast" in the paper) with `order = 3`. - e.g. - >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, predict_x0=False) + Use singlestep DPM-Solver or DPM-Solver++ ("DPM-Solver-fast" in the paper) with `order = 3`. 
+            e.g., DPM-Solver:
+            >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, algorithm_type="dpmsolver")
+            >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=3,
+                        skip_type='time_uniform', method='singlestep')
+            e.g., DPM-Solver++:
+            >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, algorithm_type="dpmsolver++")
             >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=3,
                         skip_type='time_uniform', method='singlestep')
 
         - For **guided sampling with large guidance scale** by DPMs:
-            Use multistep DPM-Solver with `predict_x0 = True` and `order = 2`.
+            Use multistep DPM-Solver with `algorithm_type="dpmsolver++"` and `order = 2`.
             e.g.
-            >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, predict_x0=True)
+            >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, algorithm_type="dpmsolver++")
             >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=2,
                         skip_type='time_uniform', method='multistep')
 
@@ -1074,72 +1134,116 @@ def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time
             order: A `int`. The order of DPM-Solver.
             skip_type: A `str`. The type for the spacing of the time steps. 'time_uniform' or 'logSNR' or 'time_quadratic'.
             method: A `str`. The method for sampling. 'singlestep' or 'multistep' or 'singlestep_fixed' or 'adaptive'.
-            denoise: A `bool`. Whether to denoise at the final step. Default is False.
-                If `denoise` is True, the total NFE is (`steps` + 1).
-            solver_type: A `str`. The taylor expansion type for the solver. `dpm_solver` or `taylor`. We recommend `dpm_solver`.
+            denoise_to_zero: A `bool`. Whether to denoise to time 0 at the final step.
+                Default is `False`. If `denoise_to_zero` is `True`, the total NFE is (`steps` + 1).
+
+                This trick was first proposed by DDPM (https://arxiv.org/abs/2006.11239) and
+                score_sde (https://arxiv.org/abs/2011.13456). It can improve the FID when
+                sampling from diffusion models by diffusion SDEs on low-resolution images
+                (such as CIFAR-10). However, we observed that this trick does not matter for
+                high-resolution images. As it needs an additional NFE, we do not recommend
+                it for high-resolution images.
+            lower_order_final: A `bool`. Whether to use lower order solvers at the final steps.
+                Only valid for `method=multistep` and `steps < 15`. We empirically find that
+                this trick is a key to stabilizing the sampling by DPM-Solver with very few steps
+                (especially for steps <= 10). So we recommend setting it to `True`.
+            solver_type: A `str`. The taylor expansion type for the solver. `dpmsolver` or `taylor`. We recommend `dpmsolver`.
             atol: A `float`. The absolute tolerance of the adaptive step size solver. Valid when `method` == 'adaptive'.
             rtol: A `float`. The relative tolerance of the adaptive step size solver. Valid when `method` == 'adaptive'.
+            return_intermediate: A `bool`. Whether to save the xt at each step.
+                When set to `True`, the method returns a tuple (x0, intermediates); when set to `False`, it returns only x0.
         Returns:
             x_end: A pytorch tensor. The approximated solution at time `t_end`.
 
         """
         t_0 = 1. / self.noise_schedule.total_N if t_end is None else t_end
         t_T = self.noise_schedule.T if t_start is None else t_start
+        assert t_0 > 0 and t_T > 0, "Time range needs to be greater than 0. 
For discrete-time DPMs, it needs to be in [1 / N, 1], where N is the length of betas array" + if return_intermediate: + assert method in ['multistep', 'singlestep', 'singlestep_fixed'], "Cannot use adaptive solver when saving intermediate values" + if self.correcting_xt_fn is not None: + assert method in ['multistep', 'singlestep', 'singlestep_fixed'], "Cannot use adaptive solver when correcting_xt_fn is not None" device = x.device - if method == 'adaptive': - with torch.no_grad(): - x = self.dpm_solver_adaptive(x, order=order, t_T=t_T, t_0=t_0, atol=atol, rtol=rtol, - solver_type=solver_type) - elif method == 'multistep': - assert steps >= order - timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device) - assert timesteps.shape[0] - 1 == steps - with torch.no_grad(): - vec_t = timesteps[0].expand((x.shape[0])) - model_prev_list = [self.model_fn(x, vec_t)] - t_prev_list = [vec_t] + intermediates = [] + with torch.no_grad(): + if method == 'adaptive': + x = self.dpm_solver_adaptive(x, order=order, t_T=t_T, t_0=t_0, atol=atol, rtol=rtol, solver_type=solver_type) + elif method == 'multistep': + assert steps >= order + timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device) + assert timesteps.shape[0] - 1 == steps + # Init the initial values. + step = 0 + t = timesteps[step] + t_prev_list = [t] + model_prev_list = [self.model_fn(x, t)] + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) # Init the first `order` values by lower order multistep DPM-Solver. - for init_order in range(1, order): - vec_t = timesteps[init_order].expand(x.shape[0]) - x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, init_order, - solver_type=solver_type) - model_prev_list.append(self.model_fn(x, vec_t)) - t_prev_list.append(vec_t) + for step in range(1, order): + t = timesteps[step] + x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, t, step, solver_type=solver_type) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) + t_prev_list.append(t) + model_prev_list.append(self.model_fn(x, t)) # Compute the remaining values by `order`-th order multistep DPM-Solver. for step in range(order, steps + 1): - vec_t = timesteps[step].expand(x.shape[0]) - x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, vec_t, order, - solver_type=solver_type) + t = timesteps[step] + # We only use lower order for steps < 10 + if lower_order_final and steps < 10: + step_order = min(order, steps + 1 - step) + else: + step_order = order + x = self.multistep_dpm_solver_update(x, model_prev_list, t_prev_list, t, step_order, solver_type=solver_type) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) for i in range(order - 1): t_prev_list[i] = t_prev_list[i + 1] model_prev_list[i] = model_prev_list[i + 1] - t_prev_list[-1] = vec_t + t_prev_list[-1] = t # We do not need to evaluate the final model value. 
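+                    # (once this loop ends, model_prev_list is never read again)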
if step < steps: - model_prev_list[-1] = self.model_fn(x, vec_t) - elif method in ['singlestep', 'singlestep_fixed']: - if method == 'singlestep': - timesteps_outer, orders = self.get_orders_and_timesteps_for_singlestep_solver(steps=steps, order=order, - skip_type=skip_type, - t_T=t_T, t_0=t_0, - device=device) - elif method == 'singlestep_fixed': - K = steps // order - orders = [order, ] * K - timesteps_outer = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=K, device=device) - for i, order in enumerate(orders): - t_T_inner, t_0_inner = timesteps_outer[i], timesteps_outer[i + 1] - timesteps_inner = self.get_time_steps(skip_type=skip_type, t_T=t_T_inner.item(), t_0=t_0_inner.item(), - N=order, device=device) - lambda_inner = self.noise_schedule.marginal_lambda(timesteps_inner) - vec_s, vec_t = t_T_inner.repeat(x.shape[0]), t_0_inner.repeat(x.shape[0]) - h = lambda_inner[-1] - lambda_inner[0] - r1 = None if order <= 1 else (lambda_inner[1] - lambda_inner[0]) / h - r2 = None if order <= 2 else (lambda_inner[2] - lambda_inner[0]) / h - x = self.singlestep_dpm_solver_update(x, vec_s, vec_t, order, solver_type=solver_type, r1=r1, r2=r2) - if denoise: - x = self.denoise_fn(x, torch.ones((x.shape[0],)).to(device) * t_0) - return x + model_prev_list[-1] = self.model_fn(x, t) + elif method in ['singlestep', 'singlestep_fixed']: + if method == 'singlestep': + timesteps_outer, orders = self.get_orders_and_timesteps_for_singlestep_solver(steps=steps, order=order, skip_type=skip_type, t_T=t_T, t_0=t_0, device=device) + elif method == 'singlestep_fixed': + K = steps // order + orders = [order,] * K + timesteps_outer = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=K, device=device) + for step, order in enumerate(orders): + s, t = timesteps_outer[step], timesteps_outer[step + 1] + timesteps_inner = self.get_time_steps(skip_type=skip_type, t_T=s.item(), t_0=t.item(), N=order, device=device) + lambda_inner = self.noise_schedule.marginal_lambda(timesteps_inner) + h = lambda_inner[-1] - lambda_inner[0] + r1 = None if order <= 1 else (lambda_inner[1] - lambda_inner[0]) / h + r2 = None if order <= 2 else (lambda_inner[2] - lambda_inner[0]) / h + x = self.singlestep_dpm_solver_update(x, s, t, order, solver_type=solver_type, r1=r1, r2=r2) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) + else: + raise ValueError("Got wrong method {}".format(method)) + if denoise_to_zero: + t = torch.ones((1,)).to(device) * t_0 + x = self.denoise_to_zero_fn(x, t) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step + 1) + if return_intermediate: + intermediates.append(x) + if return_intermediate: + return x, intermediates + else: + return x + ############################################################# @@ -1198,4 +1302,4 @@ def expand_dims(v, dims): Returns: a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. 
""" - return v[(...,) + (None,) * (dims - 1)] + return v[(...,) + (None,)*(dims - 1)] \ No newline at end of file diff --git a/inference/uni_pc.py b/inference/uni_pc.py new file mode 100644 index 000000000..4226570cc --- /dev/null +++ b/inference/uni_pc.py @@ -0,0 +1,731 @@ +import torch +import torch.nn.functional as F +import math + + +class NoiseScheduleVP: + def __init__( + self, + schedule='discrete', + betas=None, + alphas_cumprod=None, + continuous_beta_0=0.1, + continuous_beta_1=20., + dtype=torch.float32, + ): + """Create a wrapper class for the forward SDE (VP type). + *** + Update: We support discrete-time diffusion models by implementing a picewise linear interpolation for log_alpha_t. + We recommend to use schedule='discrete' for the discrete-time diffusion models, especially for high-resolution images. + *** + The forward SDE ensures that the condition distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ). + We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper). + Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have: + log_alpha_t = self.marginal_log_mean_coeff(t) + sigma_t = self.marginal_std(t) + lambda_t = self.marginal_lambda(t) + Moreover, as lambda(t) is an invertible function, we also support its inverse function: + t = self.inverse_lambda(lambda_t) + =============================================================== + We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]). + 1. For discrete-time DPMs: + For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by: + t_i = (i + 1) / N + e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1. + We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3. + Args: + betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details) + alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details) + Note that we always have alphas_cumprod = cumprod(1 - betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. + **Important**: Please pay special attention for the args for `alphas_cumprod`: + The `alphas_cumprod` is the \hat{alpha_n} arrays in the notations of DDPM. Specifically, DDPMs assume that + q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ). + Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have + alpha_{t_n} = \sqrt{\hat{alpha_n}}, + and + log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}). + 2. For continuous-time DPMs: + We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise + schedule are the default settings in DDPM and improved-DDPM: + Args: + beta_min: A `float` number. The smallest beta for the linear schedule. + beta_max: A `float` number. The largest beta for the linear schedule. + cosine_s: A `float` number. The hyperparameter in the cosine schedule. + cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule. + T: A `float` number. The ending time of the forward process. + =============================================================== + Args: + schedule: A `str`. The noise schedule of the forward SDE. 
'discrete' for discrete-time DPMs, + 'linear' or 'cosine' for continuous-time DPMs. + Returns: + A wrapper object of the forward SDE (VP type). + + =============================================================== + Example: + # For discrete-time DPMs, given betas (the beta array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', betas=betas) + # For discrete-time DPMs, given alphas_cumprod (the \hat{alpha_n} array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod) + # For continuous-time DPMs (VPSDE), linear schedule: + >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.) + """ + + if schedule not in ['discrete', 'linear', 'cosine']: + raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(schedule)) + + self.schedule = schedule + if schedule == 'discrete': + if betas is not None: + log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0) + else: + assert alphas_cumprod is not None + log_alphas = 0.5 * torch.log(alphas_cumprod) + self.total_N = len(log_alphas) + self.T = 1. + self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)).to(dtype=dtype) + self.log_alpha_array = log_alphas.reshape((1, -1,)).to(dtype=dtype) + else: + self.total_N = 1000 + self.beta_0 = continuous_beta_0 + self.beta_1 = continuous_beta_1 + self.cosine_s = 0.008 + self.cosine_beta_max = 999. + self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.)) + self.schedule = schedule + if schedule == 'cosine': + # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T. + # Note that T = 0.9946 may be not the optimal setting. However, we find it works well. + self.T = 0.9946 + else: + self.T = 1. + + def marginal_log_mean_coeff(self, t): + """ + Compute log(alpha_t) of a given continuous-time label t in [0, T]. + """ + if self.schedule == 'discrete': + return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) + elif self.schedule == 'linear': + return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 + elif self.schedule == 'cosine': + log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.)) + log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 + return log_alpha_t + + def marginal_alpha(self, t): + """ + Compute alpha_t of a given continuous-time label t in [0, T]. + """ + return torch.exp(self.marginal_log_mean_coeff(t)) + + def marginal_std(self, t): + """ + Compute sigma_t of a given continuous-time label t in [0, T]. + """ + return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) + + def marginal_lambda(self, t): + """ + Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. + """ + log_mean_coeff = self.marginal_log_mean_coeff(t) + log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff)) + return log_mean_coeff - log_std + + def inverse_lambda(self, lamb): + """ + Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t. + """ + if self.schedule == 'linear': + tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. 
* lamb, torch.zeros((1,)).to(lamb)) + Delta = self.beta_0**2 + tmp + return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) + elif self.schedule == 'discrete': + log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb) + t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) + return t.reshape((-1,)) + else: + log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) + t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + t = t_fn(log_alpha) + return t + + +def model_wrapper( + model, + noise_schedule, + model_type="noise", + model_kwargs={}, + guidance_type="uncond", + condition=None, + unconditional_condition=None, + guidance_scale=1., + classifier_fn=None, + classifier_kwargs={}, +): + """Create a wrapper function for the noise prediction model. + """ + + def get_model_input_time(t_continuous): + """ + Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time. + For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N]. + For continuous-time DPMs, we just use `t_continuous`. + """ + if noise_schedule.schedule == 'discrete': + return (t_continuous - 1. / noise_schedule.total_N) * noise_schedule.total_N + else: + return t_continuous + + def noise_pred_fn(x, t_continuous, cond=None): + t_input = get_model_input_time(t_continuous) + if cond is None: + output = model(x, t_input, **model_kwargs) + else: + output = model(x, t_input, cond, **model_kwargs) + if model_type == "noise": + return output + elif model_type == "x_start": + alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) + return (x - alpha_t * output) / sigma_t + elif model_type == "v": + alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) + return alpha_t * output + sigma_t * x + elif model_type == "score": + sigma_t = noise_schedule.marginal_std(t_continuous) + return -sigma_t * output + + def cond_grad_fn(x, t_input): + """ + Compute the gradient of the classifier, i.e. nabla_{x} log p_t(cond | x_t). + """ + with torch.enable_grad(): + x_in = x.detach().requires_grad_(True) + log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs) + return torch.autograd.grad(log_prob.sum(), x_in)[0] + + def model_fn(x, t_continuous): + """ + The noise predicition model function that is used for DPM-Solver. + """ + if guidance_type == "uncond": + return noise_pred_fn(x, t_continuous) + elif guidance_type == "classifier": + assert classifier_fn is not None + t_input = get_model_input_time(t_continuous) + cond_grad = cond_grad_fn(x, t_input) + sigma_t = noise_schedule.marginal_std(t_continuous) + noise = noise_pred_fn(x, t_continuous) + return noise - guidance_scale * sigma_t * cond_grad + elif guidance_type == "classifier-free": + if guidance_scale == 1. 
or unconditional_condition is None: + return noise_pred_fn(x, t_continuous, cond=condition) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t_continuous] * 2) + c_in = torch.cat([unconditional_condition, condition]) + noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2) + return noise_uncond + guidance_scale * (noise - noise_uncond) + + assert model_type in ["noise", "x_start", "v"] + assert guidance_type in ["uncond", "classifier", "classifier-free"] + return model_fn + + +class UniPC: + def __init__( + self, + model_fn, + noise_schedule, + algorithm_type="data_prediction", + correcting_x0_fn=None, + correcting_xt_fn=None, + thresholding_max_val=1., + dynamic_thresholding_ratio=0.995, + variant='bh1' + ): + """Construct a UniPC. + + We support both data_prediction and noise_prediction. + """ + self.model = lambda x, t: model_fn(x, t.expand((x.shape[0]))) + self.noise_schedule = noise_schedule + assert algorithm_type in ["data_prediction", "noise_prediction"] + + if correcting_x0_fn == "dynamic_thresholding": + self.correcting_x0_fn = self.dynamic_thresholding_fn + else: + self.correcting_x0_fn = correcting_x0_fn + + self.correcting_xt_fn = correcting_xt_fn + self.dynamic_thresholding_ratio = dynamic_thresholding_ratio + self.thresholding_max_val = thresholding_max_val + + self.variant = variant + self.predict_x0 = algorithm_type == "data_prediction" + + def dynamic_thresholding_fn(self, x0, t=None): + """ + The dynamic thresholding method. + """ + dims = x0.dim() + p = self.dynamic_thresholding_ratio + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims(torch.maximum(s, self.thresholding_max_val * torch.ones_like(s).to(s.device)), dims) + x0 = torch.clamp(x0, -s, s) / s + return x0 + + def noise_prediction_fn(self, x, t): + """ + Return the noise prediction model. + """ + return self.model(x, t) + + def data_prediction_fn(self, x, t): + """ + Return the data prediction model (with corrector). + """ + noise = self.noise_prediction_fn(x, t) + alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) + x0 = (x - sigma_t * noise) / alpha_t + if self.correcting_x0_fn is not None: + x0 = self.correcting_x0_fn(x0) + return x0 + + def model_fn(self, x, t): + """ + Convert the model to the noise prediction model or the data prediction model. + """ + if self.predict_x0: + return self.data_prediction_fn(x, t) + else: + return self.noise_prediction_fn(x, t) + + def get_time_steps(self, skip_type, t_T, t_0, N, device): + """Compute the intermediate time steps for sampling. + """ + if skip_type == 'logSNR': + lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device)) + lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device)) + logSNR_steps = torch.linspace(lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1).to(device) + return self.noise_schedule.inverse_lambda(logSNR_steps) + elif skip_type == 'time_uniform': + return torch.linspace(t_T, t_0, N + 1).to(device) + elif skip_type == 'time_quadratic': + t_order = 2 + t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) + return t + else: + raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) + + def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): + """ + Get the order of each step for sampling by the singlestep DPM-Solver. 
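+        For example, with steps=6 and order=3 this yields K = 3 and
+        orders = [3, 2, 1] (the orders always sum to `steps`); with steps=5
+        and order=3 it yields K = 2 and orders = [3, 2].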
+ """ + if order == 3: + K = steps // 3 + 1 + if steps % 3 == 0: + orders = [3,] * (K - 2) + [2, 1] + elif steps % 3 == 1: + orders = [3,] * (K - 1) + [1] + else: + orders = [3,] * (K - 1) + [2] + elif order == 2: + if steps % 2 == 0: + K = steps // 2 + orders = [2,] * K + else: + K = steps // 2 + 1 + orders = [2,] * (K - 1) + [1] + elif order == 1: + K = steps + orders = [1,] * steps + else: + raise ValueError("'order' must be '1' or '2' or '3'.") + if skip_type == 'logSNR': + # To reproduce the results in DPM-Solver paper + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) + else: + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders), 0).to(device)] + return timesteps_outer, orders + + def denoise_to_zero_fn(self, x, s): + """ + Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. + """ + return self.data_prediction_fn(x, s) + + def multistep_uni_pc_update(self, x, model_prev_list, t_prev_list, t, order, **kwargs): + if len(t.shape) == 0: + t = t.view(-1) + if 'bh' in self.variant: + return self.multistep_uni_pc_bh_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + else: + assert self.variant == 'vary_coeff' + return self.multistep_uni_pc_vary_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + + def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order, use_corrector=True): + #print(f'using unified predictor-corrector with order {order} (solver type: vary coeff)') + ns = self.noise_schedule + assert order <= len(model_prev_list) + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_t = ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = (lambda_prev_i - lambda_prev_0) / h + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) 
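+        # rks now holds the normalized half-logSNR offsets r_i = (lambda_prev_i - lambda_prev_0) / h
+        # of the history points, with a trailing 1 for the target step itself, and D1s holds
+        # the matching scaled first differences (model_prev_i - model_prev_0) / r_i.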
+ rks = torch.tensor(rks, device=x.device) + + K = len(rks) + # build C matrix + C = [] + + col = torch.ones_like(rks) + for k in range(1, K + 1): + C.append(col) + col = col * rks / (k + 1) + C = torch.stack(C, dim=1) + + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + C_inv_p = torch.linalg.inv(C[:-1, :-1]) + A_p = C_inv_p + + if use_corrector: + #print('using corrector') + C_inv = torch.linalg.inv(C) + A_c = C_inv + + hh = -h if self.predict_x0 else h + h_phi_1 = torch.expm1(hh) + h_phi_ks = [] + factorial_k = 1 + h_phi_k = h_phi_1 + for k in range(1, K + 2): + h_phi_ks.append(h_phi_k) + h_phi_k = h_phi_k / hh - 1 / factorial_k + factorial_k *= (k + 1) + + model_t = None + if self.predict_x0: + x_t_ = ( + sigma_t / sigma_prev_0 * x + - alpha_t * h_phi_1 * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - alpha_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + else: + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + x_t_ = ( + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * h_phi_1) * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - sigma_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + return x_t, model_t + + def multistep_uni_pc_bh_update(self, x, model_prev_list, t_prev_list, t, order, x_t=None, use_corrector=True): + #print(f'using unified predictor-corrector with order {order} (solver type: B(h))') + ns = self.noise_schedule + assert order <= len(model_prev_list) + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = (lambda_prev_i - lambda_prev_0) / h + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) 
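+        # As in the vary_coeff branch, rks are the normalized half-logSNR offsets of the
+        # history points; below, a small Vandermonde-style system R @ rhos = b is solved
+        # for the predictor weights (rhos_p) and corrector weights (rhos_c).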
+ rks = torch.tensor(rks, device=x.device) + + R = [] + b = [] + + hh = -h if self.predict_x0 else h + h_phi_1 = torch.expm1(hh) # h\phi_1(h) = e^h - 1 + h_phi_k = h_phi_1 / hh - 1 + + factorial_i = 1 + + if self.variant == 'bh1': + B_h = hh + elif self.variant == 'bh2': + B_h = torch.expm1(hh) + else: + raise NotImplementedError() + + for i in range(1, order + 1): + R.append(torch.pow(rks, i - 1)) + b.append(h_phi_k * factorial_i / B_h) + factorial_i *= (i + 1) + h_phi_k = h_phi_k / hh - 1 / factorial_i + + R = torch.stack(R) + b = torch.cat(b) + + # now predictor + use_predictor = len(D1s) > 0 and x_t is None + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + if x_t is None: + # for order 2, we use a simplified version + if order == 2: + rhos_p = torch.tensor([0.5], device=b.device) + else: + rhos_p = torch.linalg.solve(R[:-1, :-1], b[:-1]) + else: + D1s = None + + if use_corrector: + #print('using corrector') + # for order 1, we use a simplified version + if order == 1: + rhos_c = torch.tensor([0.5], device=b.device) + else: + rhos_c = torch.linalg.solve(R, b) + + model_t = None + if self.predict_x0: + x_t_ = ( + sigma_t / sigma_prev_0 * x + - alpha_t * h_phi_1 * model_prev_0 + ) + + if x_t is None: + if use_predictor: + pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) + else: + pred_res = 0 + x_t = x_t_ - alpha_t * B_h * pred_res + + if use_corrector: + model_t = self.model_fn(x_t, t) + if D1s is not None: + corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) + else: + corr_res = 0 + D1_t = (model_t - model_prev_0) + x_t = x_t_ - alpha_t * B_h * (corr_res + rhos_c[-1] * D1_t) + else: + x_t_ = ( + torch.exp(log_alpha_t - log_alpha_prev_0) * x + - sigma_t * h_phi_1 * model_prev_0 + ) + if x_t is None: + if use_predictor: + pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) + else: + pred_res = 0 + x_t = x_t_ - sigma_t * B_h * pred_res + + if use_corrector: + model_t = self.model_fn(x_t, t) + if D1s is not None: + corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) + else: + corr_res = 0 + D1_t = (model_t - model_prev_0) + x_t = x_t_ - sigma_t * B_h * (corr_res + rhos_c[-1] * D1_t) + return x_t, model_t + + def sample(self, x, steps=20, t_start=None, t_end=None, order=2, skip_type='time_uniform', + method='multistep', lower_order_final=True, denoise_to_zero=False, atol=0.0078, rtol=0.05, return_intermediate=False, + ): + """ + Compute the sample at time `t_end` by UniPC, given the initial `x` at time `t_start`. + """ + t_0 = 1. / self.noise_schedule.total_N if t_end is None else t_end + t_T = self.noise_schedule.T if t_start is None else t_start + assert t_0 > 0 and t_T > 0, "Time range needs to be greater than 0. For discrete-time DPMs, it needs to be in [1 / N, 1], where N is the length of betas array" + if return_intermediate: + assert method in ['multistep', 'singlestep', 'singlestep_fixed'], "Cannot use adaptive solver when saving intermediate values" + if self.correcting_xt_fn is not None: + assert method in ['multistep', 'singlestep', 'singlestep_fixed'], "Cannot use adaptive solver when correcting_xt_fn is not None" + device = x.device + intermediates = [] + with torch.no_grad(): + if method == 'multistep': + assert steps >= order + timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device) + assert timesteps.shape[0] - 1 == steps + # Init the initial values. 
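+                # The first model evaluation below seeds the multistep history;
+                # t_prev_list/model_prev_list are then grown to `order` entries via
+                # lower-order warm-up updates before full `order`-th order stepping.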
+ step = 0 + t = timesteps[step] + t_prev_list = [t] + model_prev_list = [self.model_fn(x, t)] + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) + + # Init the first `order` values by lower order multistep UniPC. + for step in range(1, order): + t = timesteps[step] + x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, t, step, use_corrector=True) + if model_x is None: + model_x = self.model_fn(x, t) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) + t_prev_list.append(t) + model_prev_list.append(model_x) + + # Compute the remaining values by `order`-th order multistep DPM-Solver. + for step in range(order, steps + 1): + t = timesteps[step] + if lower_order_final: + step_order = min(order, steps + 1 - step) + else: + step_order = order + if step == steps: + #print('do not run corrector at the last step') + use_corrector = False + else: + use_corrector = True + x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, t, step_order, use_corrector=use_corrector) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step) + if return_intermediate: + intermediates.append(x) + for i in range(order - 1): + t_prev_list[i] = t_prev_list[i + 1] + model_prev_list[i] = model_prev_list[i + 1] + t_prev_list[-1] = t + # We do not need to evaluate the final model value. + if step < steps: + if model_x is None: + model_x = self.model_fn(x, t) + model_prev_list[-1] = model_x + else: + raise ValueError("Got wrong method {}".format(method)) + + if denoise_to_zero: + t = torch.ones((1,)).to(device) * t_0 + x = self.denoise_to_zero_fn(x, t) + if self.correcting_xt_fn is not None: + x = self.correcting_xt_fn(x, t, step + 1) + if return_intermediate: + intermediates.append(x) + if return_intermediate: + return x, intermediates + else: + return x + + +############################################################# +# other utility functions +############################################################# + +def interpolate_fn(x, xp, yp): + """ + A piecewise linear function y = f(x), using xp and yp as keypoints. + We implement f(x) in a differentiable way (i.e. applicable for autograd). + The function f(x) is well-defined for all x-axis. (For x beyond the bounds of xp, we use the outmost points of xp to define the linear function.) + + Args: + x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver). + xp: PyTorch tensor with shape [C, K], where K is the number of keypoints. + yp: PyTorch tensor with shape [C, K]. + Returns: + The function values f(x), with shape [N, C]. 
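+    Example (illustrative values):
+        With xp = [[0., 1.]] and yp = [[0., 2.]], x = [[0.5]] gives f(x) = [[1.0]],
+        while x = [[2.0]] falls outside the keypoints and extrapolates to [[4.0]].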
+ """ + N, K = x.shape[0], xp.shape[1] + all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) + sorted_all_x, x_indices = torch.sort(all_x, dim=2) + x_idx = torch.argmin(x_indices, dim=2) + cand_start_idx = x_idx - 1 + start_idx = torch.where( + torch.eq(x_idx, 0), + torch.tensor(1, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1) + start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) + end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) + start_idx2 = torch.where( + torch.eq(x_idx, 0), + torch.tensor(0, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) + start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2) + end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2) + cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) + return cand + + +def expand_dims(v, dims): + """ + Expand the tensor `v` to the dim `dims`. + + Args: + `v`: a PyTorch tensor with shape [N]. + `dim`: a `int`. + Returns: + a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. + """ + return v[(...,) + (None,)*(dims - 1)] \ No newline at end of file diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py index 14a6ca34e..c49be05f9 100644 --- a/modules/diffusion/ddpm.py +++ b/modules/diffusion/ddpm.py @@ -247,16 +247,54 @@ def wrapped(x, t, **kwargs): # (We recommend singlestep DPM-Solver for unconditional sampling) # You can adjust the `steps` to balance the computation # costs and the sample quality. - dpm_solver = DPM_Solver(model_fn, noise_schedule) + dpm_solver = DPM_Solver(model_fn, noise_schedule, algorithm_type="dpmsolver++") steps = t // hparams["pndm_speedup"] self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer'], leave=False) x = dpm_solver.sample( x, steps=steps, - order=3, + order=2, skip_type="time_uniform", - method="singlestep", + method="multistep", + ) + self.bar.close() + elif algorithm == 'unipc': + from inference.uni_pc import NoiseScheduleVP, model_wrapper, UniPC + # 1. Define the noise schedule. + noise_schedule = NoiseScheduleVP(schedule='discrete', betas=self.betas) + + # 2. Convert your discrete-time `model` to the continuous-time + # noise prediction model. Here is an example for a diffusion model + # `model` with the noise prediction type ("noise") . + def my_wrapper(fn): + def wrapped(x, t, **kwargs): + ret = fn(x, t, **kwargs) + self.bar.update(1) + return ret + + return wrapped + + model_fn = model_wrapper( + my_wrapper(self.denoise_fn), + noise_schedule, + model_type="noise", # or "x_start" or "v" or "score" + model_kwargs={"cond": cond} + ) + + # 3. Define uni_pc and sample by multistep UniPC. + # You can adjust the `steps` to balance the computation + # costs and the sample quality. 
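+            # e.g. t = 1000 diffusion steps with pndm_speedup = 10 gives a
+            # 100-step multistep UniPC schedule (steps = t // pndm_speedup).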
+ uni_pc = UniPC(model_fn, noise_schedule, variant='bh2') + + steps = t // hparams["pndm_speedup"] + self.bar = tqdm(desc="sample time step", total=steps, disable=not hparams['infer'], leave=False) + x = uni_pc.sample( + x, + steps=steps, + order=2, + skip_type="time_uniform", + method="multistep", ) self.bar.close() elif algorithm == 'pndm': @@ -271,7 +309,6 @@ def wrapped(x, t, **kwargs): iteration_interval, cond=cond ) elif algorithm == 'ddim': - self.noise_list = deque(maxlen=4) iteration_interval = hparams['pndm_speedup'] for i in tqdm( reversed(range(0, t, iteration_interval)), desc='sample time step', From 58942a387676e95d44cb1d4f46d7d9ae81cdbd27 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 17 Jun 2023 23:37:13 +0800 Subject: [PATCH 437/475] Explicitly annotate `interval` with `int` --- deployment/modules/diffusion.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py index 6c2b887f8..bcca20d1a 100644 --- a/deployment/modules/diffusion.py +++ b/deployment/modules/diffusion.py @@ -45,7 +45,7 @@ def plms_get_x_pred(self, x, noise_t, t, t_prev): return x_pred - def p_sample_plms(self, x_prev, t, interval, cond, noise_list: List[Tensor], stage: int): + def p_sample_plms(self, x_prev, t, interval: int, cond, noise_list: List[Tensor], stage: int): noise_pred = self.denoise_fn(x_prev, t, cond) t_prev = t - interval t_prev = t_prev * (t_prev > 0) From 626e54ce3f9d31d14d4482e765b9e3761949ea1e Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 17 Jun 2023 23:38:21 +0800 Subject: [PATCH 438/475] Fix KeyError when training a variance model with incomplete functionalities --- training/variance_task.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/training/variance_task.py b/training/variance_task.py index 5bd52e9bd..b9f3d8f73 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -18,34 +18,37 @@ class VarianceDataset(BaseDataset): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + need_energy = hparams['predict_energy'] + need_breathiness = hparams['predict_breathiness'] + self.predict_variances = need_energy or need_breathiness + def collater(self, samples): batch = super().collater(samples) tokens = utils.collate_nd([s['tokens'] for s in samples], 0) ph_dur = utils.collate_nd([s['ph_dur'] for s in samples], 0) - midi = utils.collate_nd([s['midi'] for s in samples], 0) - ph2word = utils.collate_nd([s['ph2word'] for s in samples], 0) - mel2ph = utils.collate_nd([s['mel2ph'] for s in samples], 0) - base_pitch = utils.collate_nd([s['base_pitch'] for s in samples], 0) - pitch = utils.collate_nd([s['pitch'] for s in samples], 0) batch.update({ 'tokens': tokens, - 'ph_dur': ph_dur, - 'midi': midi, - 'ph2word': ph2word, - 'mel2ph': mel2ph, - 'base_pitch': base_pitch, - 'pitch': pitch, + 'ph_dur': ph_dur }) + + if hparams['use_spk_id']: + batch['spk_ids'] = torch.LongTensor([s['spk_id'] for s in samples]) + if hparams['predict_dur']: + batch['ph2word'] = utils.collate_nd([s['ph2word'] for s in samples], 0) + batch['midi'] = utils.collate_nd([s['midi'] for s in samples], 0) + if hparams['predict_pitch'] or self.predict_variances: + batch['mel2ph'] = utils.collate_nd([s['mel2ph'] for s in samples], 0) + if hparams['predict_pitch']: + batch['base_pitch'] = utils.collate_nd([s['base_pitch'] for s in samples], 0) + if hparams['predict_pitch'] or self.predict_variances: + batch['pitch'] = 
utils.collate_nd([s['pitch'] for s in samples], 0) if hparams['predict_energy']: - energy = utils.collate_nd([s['energy'] for s in samples], 0) - batch['energy'] = energy + batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0) if hparams['predict_breathiness']: - breathiness = utils.collate_nd([s['breathiness'] for s in samples], 0) - batch['breathiness'] = breathiness - if hparams['use_spk_id']: - spk_ids = torch.LongTensor([s['spk_id'] for s in samples]) - batch['spk_ids'] = spk_ids + batch['breathiness'] = utils.collate_nd([s['breathiness'] for s in samples], 0) return batch From 1090e01599fe976cd22257781928ddc626e512e5 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 19 Jun 2023 12:19:27 +0800 Subject: [PATCH 439/475] Adjust configs --- configs/acoustic.yaml | 2 +- configs/variance.yaml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index c674eb779..232c5ae08 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -65,7 +65,7 @@ K_step: 1000 timesteps: 1000 max_beta: 0.02 rel_pos: true -diff_accelerator: dpm-solver +diff_accelerator: ddim pndm_speedup: 10 hidden_size: 256 residual_layers: 20 diff --git a/configs/variance.yaml b/configs/variance.yaml index 5524e69a3..44cc0374e 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -84,15 +84,15 @@ timesteps: 1000 max_beta: 0.02 diff_decoder_type: 'wavenet' diff_loss_type: l2 -diff_accelerator: dpm-solver +diff_accelerator: ddim pndm_speedup: 10 # train and eval num_sanity_val_steps: 1 optimizer_args: - lr: 0.0004 + lr: 0.0006 lr_scheduler_args: - step_size: 50000 + step_size: 40000 gamma: 0.5 max_batch_frames: 80000 max_batch_size: 48 From f1e8dfbefd435706dede6eb9e197cbec37be08f8 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 20 Jun 2023 22:02:43 +0800 Subject: [PATCH 440/475] Remove unused module parameter --- modules/fastspeech/variance_encoder.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/modules/fastspeech/variance_encoder.py b/modules/fastspeech/variance_encoder.py index 80b967b8a..b7cbad651 100644 --- a/modules/fastspeech/variance_encoder.py +++ b/modules/fastspeech/variance_encoder.py @@ -25,9 +25,6 @@ def __init__(self, vocab_size): else: self.ph_dur_embed = Linear(1, hparams['hidden_size']) - if hparams['use_spk_id']: - self.spk_embed = Embedding(hparams['num_spk'], hparams['hidden_size']) - self.encoder = FastSpeech2Encoder( self.txt_embed, hidden_size=hparams['hidden_size'], num_layers=hparams['enc_layers'], ffn_kernel_size=hparams['enc_ffn_kernel_size'], num_heads=hparams['num_heads'] From 224fd33f39b7796d0b1c47db5cfd46e68e03fce9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 24 Jun 2023 02:20:51 +0800 Subject: [PATCH 441/475] Support custom spk_id arrangements --- basics/base_binarizer.py | 27 +++++++++++++++++---------- configs/acoustic.yaml | 1 + configs/base.yaml | 1 - configs/variance.yaml | 1 + docs/ConfigurationSchemas.md | 24 ++++++++++++++++++++++++ preprocessing/acoustic_binarizer.py | 8 +++++--- 6 files changed, 48 insertions(+), 14 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 6d061dcc7..8cd6738c7 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -49,16 +49,16 @@ def __init__(self, data_dir=None, data_attrs=None): if not isinstance(data_dir, list): data_dir = [data_dir] - speakers = hparams['speakers'] - assert isinstance(speakers, list), 'Speakers must be a list' - assert len(speakers) == len(set(speakers)), 'Speakers cannot 
contain duplicate names' + self.speakers = hparams['speakers'] + assert isinstance(self.speakers, list), 'Speakers must be a list' + assert len(self.speakers) == len(set(self.speakers)), 'Speakers cannot contain duplicate names' self.raw_data_dirs = [pathlib.Path(d) for d in data_dir] self.binary_data_dir = pathlib.Path(hparams['binary_data_dir']) self.data_attrs = [] if data_attrs is None else data_attrs if hparams['use_spk_id']: - assert len(speakers) == len(self.raw_data_dirs), \ + assert len(self.speakers) == len(self.raw_data_dirs), \ 'Number of raw data dirs must equal number of speaker names!' self.binarization_args = hparams['binarization_args'] @@ -66,12 +66,15 @@ def __init__(self, data_dir=None, data_attrs=None): self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') self.spk_map = None + self.spk_ids = hparams['spk_ids'] + self.build_spk_map() + self.items = {} self.phone_encoder = TokenTextEncoder(vocab_list=build_phoneme_list()) self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] # load each dataset - for ds_id, data_dir in enumerate(self.raw_data_dirs): + for ds_id, data_dir in zip(self.spk_ids, self.raw_data_dirs): self.load_meta_data(pathlib.Path(data_dir), ds_id) self.item_names = sorted(list(self.items.keys())) self._train_item_names, self._valid_item_names = self.split_train_valid_set() @@ -127,9 +130,15 @@ def valid_item_names(self): return self._valid_item_names def build_spk_map(self): - spk_map = {x: i for i, x in enumerate(hparams['speakers'])} - assert len(spk_map) <= hparams['num_spk'], 'Actual number of speakers should be smaller than num_spk!' - self.spk_map = spk_map + if not self.spk_ids: + self.spk_ids = list(range(len(self.raw_data_dirs))) + else: + assert len(self.spk_ids) == len(self.raw_data_dirs), \ + 'Length of explicitly given spk_ids must equal the number of raw datasets.' + assert max(self.spk_ids) < hparams['num_spk'], \ + f'Index in spk_id sequence {self.spk_ids} is out of range. All values should be smaller than num_spk.' + self.spk_map = {x: i for x, i in zip(self.speakers, self.spk_ids)} + print("| spk_map: ", self.spk_map) def meta_data_iterator(self, prefix): if prefix == 'train': @@ -144,8 +153,6 @@ def process(self): os.makedirs(hparams['binary_data_dir'], exist_ok=True) # Copy spk_map and dictionary to binary data dir - self.build_spk_map() - print("| spk_map: ", self.spk_map) spk_map_fn = f"{hparams['binary_data_dir']}/spk_map.json" json.dump(self.spk_map, open(spk_map_fn, 'w', encoding='utf-8')) shutil.copy(locate_dictionary(), self.binary_data_dir / 'dictionary.txt') diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml index 232c5ae08..a08022d58 100644 --- a/configs/acoustic.yaml +++ b/configs/acoustic.yaml @@ -5,6 +5,7 @@ task_cls: training.acoustic_task.AcousticTask num_spk: 1 speakers: - opencpop +spk_ids: [] test_prefixes: [ '2044', '2086', diff --git a/configs/base.yaml b/configs/base.yaml index 58aa46298..7d9f37325 100644 --- a/configs/base.yaml +++ b/configs/base.yaml @@ -25,7 +25,6 @@ win_size: 2048 # For 22050Hz, 1100 ~= 50 ms (If None, win_size: fft_size) (0.05 fmin: 40 # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) fmax: 16000 # To be increased/reduced depending on data. 
fft_size: 2048 # Extra window size is filled with 0 paddings to match this parameter -num_spk: 1 mel_vmin: -6 mel_vmax: 1.5 sampler_frame_count_grid: 6 diff --git a/configs/variance.yaml b/configs/variance.yaml index 44cc0374e..631500a72 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -5,6 +5,7 @@ task_cls: training.variance_task.VarianceTask num_spk: 1 speakers: - opencpop +spk_ids: [] test_prefixes: [ '2044', '2086', diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index 62eb4dba5..8602927e8 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -2822,6 +2822,30 @@ required list +### spk_ids + +The IDs of speakers in a multi-speaker model. If an empty list is given, speaker IDs will be automatically generated as $0,1,2,...,N_{spk}-1$. IDs can be duplicate or discontinuous. + +#### visibility + +acoustic, variance + +#### scope + +preprocessing + +#### customizability + +required + +#### type + +List[int] + +#### default + +[] + ### spec_min Minimum mel spectrogram value used for normalization to [-1, 1]. Different mel bins can have different minimum values. diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 1f7536e69..7a6f532a1 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -241,20 +241,22 @@ def arrange_data_augmentation(self, data_iterator): aug_args = self.augmentation_args['fixed_pitch_shifting'] targets = aug_args['targets'] scale = aug_args['scale'] + spk_id_size = max(self.spk_ids) + 1 + min_num_spk = (1 + len(targets)) * spk_id_size assert not self.augmentation_args['random_pitch_shifting']['enabled'], \ 'Fixed pitch shifting augmentation is not compatible with random pitch shifting.' assert len(targets) == len(set(targets)), \ 'Fixed pitch shifting augmentation requires having no duplicate targets.' assert hparams['use_spk_id'], 'Fixed pitch shifting augmentation requires use_spk_id == True.' - assert hparams['num_spk'] >= (1 + len(targets)) * len(self.spk_map), \ - 'Fixed pitch shifting augmentation requires num_spk >= (1 + len(targets)) * len(speakers).' + assert hparams['num_spk'] >= min_num_spk, \ + f'Fixed pitch shifting augmentation requires num_spk >= (1 + len(targets)) * (max(spk_ids) + 1).' assert scale < 1, 'Fixed pitch shifting augmentation requires scale < 1.' 
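         # A quick sanity check of the arithmetic above (hypothetical numbers): with
         # spk_ids = [0, 0, 1], spk_id_size = max(spk_ids) + 1 = 2, so a single
         # shifting target requires num_spk >= (1 + 1) * 2 = 4, and augmented copies
         # for target i are stored under the original spk_id + (i + 1) * 2.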
aug_ins = SpectrogramStretchAugmentation(self.raw_data_dirs, aug_args) for i, target in enumerate(targets): aug_item_names = random.choices(all_item_names, k=int(scale * len(all_item_names))) for aug_item_name in aug_item_names: - replace_spk_id = int(aug_item_name.split(':', maxsplit=1)[0]) + (i + 1) * len(self.spk_map) + replace_spk_id = int(aug_item_name.split(':', maxsplit=1)[0]) + (i + 1) * spk_id_size aug_task = { 'name': aug_item_name, 'func': aug_ins.process_item, From 65b4be379b84b022ef841ca9e21ff0178f54149d Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 24 Jun 2023 02:36:49 +0800 Subject: [PATCH 442/475] Modify param names and docs for ds_id and spk_id --- basics/base_binarizer.py | 6 +++--- docs/ConfigurationSchemas.md | 2 +- preprocessing/acoustic_binarizer.py | 6 +++--- preprocessing/variance_binarizer.py | 6 +++--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 8cd6738c7..63abfab89 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -74,8 +74,8 @@ def __init__(self, data_dir=None, data_attrs=None): self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] # load each dataset - for ds_id, data_dir in zip(self.spk_ids, self.raw_data_dirs): - self.load_meta_data(pathlib.Path(data_dir), ds_id) + for spk_id, data_dir in zip(self.spk_ids, self.raw_data_dirs): + self.load_meta_data(pathlib.Path(data_dir), spk_id) self.item_names = sorted(list(self.items.keys())) self._train_item_names, self._valid_item_names = self.split_train_valid_set() @@ -83,7 +83,7 @@ def __init__(self, data_dir=None, data_attrs=None): random.seed(hparams['seed']) random.shuffle(self.item_names) - def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): + def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id): raise NotImplementedError() def split_train_valid_set(self): diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index 8602927e8..a1f4ef15b 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -2921,7 +2921,7 @@ List of data item names or name prefixes for the validation set. For each string - If `s` equals to an actual item name, add that item to validation set. - If `s` does not equal to any item names, add all items whose names start with `s` to validation set. -For multi-speaker datasets, "spk_id:name_prefix" can be used to apply the rules above within data from a specific speaker, where spk_id represents the speaker index. +For multi-speaker combined datasets, "ds_id:name_prefix" can be used to apply the rules above within data from a specific sub-dataset, where ds_id represents the dataset index. 
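+For example, in a combined dataset where sub-dataset 0 is opencpop, the entry `0:2044` matches only those items from opencpop whose names start with `2044`.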
#### visibility diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 7a6f532a1..e7bff9cbb 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -53,7 +53,7 @@ def __init__(self): self.need_energy = hparams.get('use_energy_embed', False) self.need_breathiness = hparams.get('use_breathiness_embed', False) - def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): + def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id): meta_data_dict = {} if (raw_data_dir / 'transcriptions.csv').exists(): for utterance_label in csv.DictReader( @@ -64,11 +64,11 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): 'wav_fn': str(raw_data_dir / 'wavs' / f'{item_name}.wav'), 'ph_seq': utterance_label['ph_seq'].split(), 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()], - 'spk_id': ds_id + 'spk_id': spk_id } assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \ f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.' - meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + meta_data_dict[f'{spk_id}:{item_name}'] = temp_dict else: raise FileNotFoundError( f'transcriptions.csv not found in {raw_data_dir}. ' diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 431025900..23ae71fad 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -50,14 +50,14 @@ def __init__(self): self.lr = LengthRegulator().to(self.device) # self.smooth: nn.Conv1d = None - def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): + def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id): meta_data_dict = {} for utterance_label in csv.DictReader( open(raw_data_dir / 'transcriptions.csv', 'r', encoding='utf8') ): item_name = utterance_label['name'] temp_dict = { - 'spk_id': ds_id, + 'spk_id': spk_id, 'wav_fn': str(raw_data_dir / 'wavs' / f'{item_name}.wav'), 'ph_seq': utterance_label['ph_seq'].split(), 'ph_dur': [float(x) for x in utterance_label['ph_dur'].split()] @@ -79,7 +79,7 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id): assert any([note != 'rest' for note in temp_dict['note_seq']]), \ f'All notes are rest in \'{item_name}\'.' 
- meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict + meta_data_dict[f'{spk_id}:{item_name}'] = temp_dict self.items.update(meta_data_dict) From b2f9aaffef97dd1f9683aa115bd4bd057b5ab4a6 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 24 Jun 2023 23:11:53 +0800 Subject: [PATCH 443/475] Change `repeat_bins` to `total_repeat_bins` --- configs/variance.yaml | 2 +- deployment/exporters/variance_exporter.py | 3 ++- docs/ConfigurationSchemas.md | 11 +++-------- modules/fastspeech/param_adaptor.py | 7 ++++++- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/configs/variance.yaml b/configs/variance.yaml index 631500a72..edf947e2f 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -70,7 +70,7 @@ breathiness_db_max: -20.0 breathiness_smooth_width: 0.12 variances_prediction_args: - repeat_bins: 48 + total_repeat_bins: 48 residual_layers: 10 residual_channels: 192 dilation_cycle_length: 4 # * diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index 64aae416a..5c1743303 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -358,7 +358,8 @@ def _torch_export_model(self): ) if self.model.predict_variances: - repeat_bins = hparams['variances_prediction_args']['repeat_bins'] + total_repeat_bins = hparams['variances_prediction_args']['total_repeat_bins'] + repeat_bins = total_repeat_bins // len(self.model.variance_prediction_list) # Prepare inputs for preprocessor of MultiVarianceDiffusion pitch = torch.FloatTensor([[60.] * 15]).to(self.device) diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md index a1f4ef15b..f4c2d195c 100644 --- a/docs/ConfigurationSchemas.md +++ b/docs/ConfigurationSchemas.md @@ -3203,14 +3203,9 @@ variance 4 -### variances_prediction_args.repeat_bins - -Number of repeating bins of each parameter in MultiVarianceDiffusion. Total repeating bins in MultiVarianceDiffusion are calculated as follows: -$$ -B=N\times B' -$$ -where $B'$ is the number of bins of each parameter, $N$ is the number of parameters. +### variances_prediction_args.total_repeat_bins +Total number of repeating bins in MultiVarianceDiffusion. Repeating bins are distributed evenly to each variance parameter. #### visibility variance @@ -3229,7 +3224,7 @@ int #### default -24 +48 ### variances_prediction_args.residual_channels diff --git a/modules/fastspeech/param_adaptor.py b/modules/fastspeech/param_adaptor.py index 454cff09e..7d905066e 100644 --- a/modules/fastspeech/param_adaptor.py +++ b/modules/fastspeech/param_adaptor.py @@ -39,10 +39,15 @@ def build_adaptor(self, cls=MultiVarianceDiffusion): clamps.append((hparams['breathiness_db_min'], 0.)) variances_hparams = hparams['variances_prediction_args'] + total_repeat_bins = variances_hparams['total_repeat_bins'] + assert total_repeat_bins % len(self.variance_prediction_list) == 0, \ + f'Total number of repeat bins must be divisible by number of ' \ + f'variance parameters ({len(self.variance_prediction_list)}).' 
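+        # e.g. the default total_repeat_bins = 48 with two predicted variances
+        # (energy and breathiness) yields repeat_bins = 24 per parameter.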
+ repeat_bins = total_repeat_bins // len(self.variance_prediction_list) return cls( ranges=ranges, clamps=clamps, - repeat_bins=variances_hparams['repeat_bins'], + repeat_bins=repeat_bins, timesteps=hparams['timesteps'], k_step=hparams['K_step'], denoiser_type=hparams['diff_decoder_type'], From cabfad7fdf54f7aae04fc77dc7d71c0c2776d3c4 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 25 Jun 2023 21:26:24 +0800 Subject: [PATCH 444/475] Show more detailed validation losses on TensorBoard (#101) --- basics/base_task.py | 14 ++++++++++---- training/acoustic_task.py | 6 +----- training/variance_task.py | 6 +----- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/basics/base_task.py b/basics/base_task.py index d7292a65f..636332329 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -4,6 +4,7 @@ import shutil import sys from datetime import datetime +from typing import Dict import matplotlib @@ -13,7 +14,7 @@ matplotlib.use('Agg') import torch.utils.data -from torchmetrics import MeanMetric +from torchmetrics import Metric, MeanMetric import lightning.pytorch as pl from lightning.pytorch.callbacks import LearningRateMonitor from lightning.pytorch.loggers import TensorBoardLogger @@ -76,7 +77,7 @@ def __init__(self, *args, **kwargs): self.skip_immediate_validation = False self.skip_immediate_ckpt_save = False - self.valid_metrics = { + self.valid_metrics: Dict[str, Metric] = { 'total_loss': MeanMetric() } @@ -171,9 +172,14 @@ def validation_step(self, sample, batch_idx): return {} with torch.autocast(self.device.type, enabled=False): outputs, weight = self._validation_step(sample, batch_idx) + outputs = { + 'total_loss': sum(outputs.values()), + **outputs + } for k, v in outputs.items(): - if isinstance(self.valid_metrics[k], MeanMetric): - self.valid_metrics[k].update(v, weight=weight) + if k not in self.valid_metrics: + self.valid_metrics[k] = MeanMetric().to(self.device) + self.valid_metrics[k].update(v, weight=weight) return outputs def on_validation_epoch_end(self): diff --git a/training/acoustic_task.py b/training/acoustic_task.py index 6d951a5d5..deb603776 100644 --- a/training/acoustic_task.py +++ b/training/acoustic_task.py @@ -123,10 +123,6 @@ def _on_validation_start(self): def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, infer=False) - total_loss = sum(losses.values()) - outputs = { - 'total_loss': total_loss - } if batch_idx < hparams['num_valid_plots'] \ and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: @@ -136,7 +132,7 @@ def _validation_step(self, sample, batch_idx): self.plot_wav(batch_idx, sample['mel'], mel_pred, f0=sample['f0']) self.plot_mel(batch_idx, sample['mel'], mel_pred, name=f'diffmel_{batch_idx}') - return outputs, sample['size'] + return losses, sample['size'] ############ # validation plots diff --git a/training/variance_task.py b/training/variance_task.py index b9f3d8f73..c0426fce0 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -160,10 +160,6 @@ def run_model(self, sample, infer=False): def _validation_step(self, sample, batch_idx): losses = self.run_model(sample, infer=False) - total_loss = sum(losses.values()) - outputs = { - 'total_loss': total_loss - } if batch_idx < hparams['num_valid_plots'] \ and (self.trainer.distributed_sampler_kwargs or {}).get('rank', 0) == 0: @@ -190,7 +186,7 @@ def _validation_step(self, sample, batch_idx): curve_name=name ) - return outputs, sample['size'] + return losses, sample['size'] ############ # validation plots From 
f94ec947dca7ce326318e6070e03ae43a41e4465 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sun, 25 Jun 2023 21:57:05 +0800 Subject: [PATCH 445/475] Fix code backup folder structure --- basics/base_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/basics/base_task.py b/basics/base_task.py index 636332329..faf18812e 100644 --- a/basics/base_task.py +++ b/basics/base_task.py @@ -345,7 +345,7 @@ def train_payload_copy(): code_dir = work_dir / 'codes' / datetime.now().strftime('%Y%m%d%H%M%S') code_dir.mkdir(exist_ok=True, parents=True) for c in hparams['save_codes']: - shutil.copytree(c, code_dir, dirs_exist_ok=True) + shutil.copytree(c, code_dir / c, dirs_exist_ok=True) print(f'| Copied codes to {code_dir}.') # Copy spk_map.json and dictionary.txt to work dir binary_dir = pathlib.Path(hparams['binary_data_dir']) From d83460ccda5583e139d58136cb8a989064f43664 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Tue, 27 Jun 2023 20:34:09 +0800 Subject: [PATCH 446/475] Fix dict iteration error when getting the first speaker --- deployment/exporters/acoustic_exporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py index 64e586377..947b975c7 100644 --- a/deployment/exporters/acoustic_exporter.py +++ b/deployment/exporters/acoustic_exporter.py @@ -56,7 +56,7 @@ def __init__( # In case the user did not specify any speaker settings: if len(self.spk_map) == 1: # If there is only one speaker, freeze him/her. - first_spk = next(self.spk_map.keys()) + first_spk = next(iter(self.spk_map.keys())) self.freeze_spk = (first_spk, {first_spk: 1.0}) else: # If there are multiple speakers, export them all. From c2e0027b2c70769f721960913e5ceb37887b797a Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Wed, 28 Jun 2023 23:51:55 +0800 Subject: [PATCH 447/475] Fix conflict of item name when merging speakers --- basics/base_binarizer.py | 6 +++--- preprocessing/acoustic_binarizer.py | 4 ++-- preprocessing/variance_binarizer.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index 63abfab89..edde29704 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -74,8 +74,8 @@ def __init__(self, data_dir=None, data_attrs=None): self.timestep = hparams['hop_size'] / hparams['audio_sample_rate'] # load each dataset - for spk_id, data_dir in zip(self.spk_ids, self.raw_data_dirs): - self.load_meta_data(pathlib.Path(data_dir), spk_id) + for ds_id, spk_id, data_dir in zip(range(len(self.raw_data_dirs)), self.spk_ids, self.raw_data_dirs): + self.load_meta_data(pathlib.Path(data_dir), ds_id=ds_id, spk_id=spk_id) self.item_names = sorted(list(self.items.keys())) self._train_item_names, self._valid_item_names = self.split_train_valid_set() @@ -83,7 +83,7 @@ def __init__(self, data_dir=None, data_attrs=None): random.seed(hparams['seed']) random.shuffle(self.item_names) - def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id): + def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id): raise NotImplementedError() def split_train_valid_set(self): diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index e7bff9cbb..74a9ecdd0 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -53,7 +53,7 @@ def __init__(self): self.need_energy = hparams.get('use_energy_embed', False) self.need_breathiness = hparams.get('use_breathiness_embed', False) - def 
load_meta_data(self, raw_data_dir: pathlib.Path, spk_id):
+    def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id):
         meta_data_dict = {}
         if (raw_data_dir / 'transcriptions.csv').exists():
             for utterance_label in csv.DictReader(
@@ -68,7 +68,7 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id):
                 }
                 assert len(temp_dict['ph_seq']) == len(temp_dict['ph_dur']), \
                     f'Lengths of ph_seq and ph_dur mismatch in \'{item_name}\'.'
-                meta_data_dict[f'{spk_id}:{item_name}'] = temp_dict
+                meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict
         else:
             raise FileNotFoundError(
                 f'transcriptions.csv not found in {raw_data_dir}. '
diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py
index 23ae71fad..612253341 100644
--- a/preprocessing/variance_binarizer.py
+++ b/preprocessing/variance_binarizer.py
@@ -50,7 +50,7 @@ def __init__(self):
         self.lr = LengthRegulator().to(self.device)
         # self.smooth: nn.Conv1d = None

-    def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id):
+    def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id):
         meta_data_dict = {}
         for utterance_label in csv.DictReader(
                 open(raw_data_dir / 'transcriptions.csv', 'r', encoding='utf8')
@@ -79,7 +79,7 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, spk_id):
             assert any([note != 'rest' for note in temp_dict['note_seq']]), \
                 f'All notes are rest in \'{item_name}\'.'

-            meta_data_dict[f'{spk_id}:{item_name}'] = temp_dict
+            meta_data_dict[f'{ds_id}:{item_name}'] = temp_dict

         self.items.update(meta_data_dict)

From b9e0c6d6b63fb69809a13a88f6d014ca15298fbc Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Thu, 29 Jun 2023 01:44:22 +0800
Subject: [PATCH 448/475] Fix incompatibility with Python 3.8 in
 `NSFHiFiGANExporter`

---
 deployment/exporters/nsf_hifigan_exporter.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/deployment/exporters/nsf_hifigan_exporter.py b/deployment/exporters/nsf_hifigan_exporter.py
index fb893edfd..aab00fdd5 100644
--- a/deployment/exporters/nsf_hifigan_exporter.py
+++ b/deployment/exporters/nsf_hifigan_exporter.py
@@ -9,7 +9,7 @@

 from basics.base_exporter import BaseExporter
 from deployment.modules.nsf_hifigan import NSFHiFiGANONNX
-from utils import load_ckpt
+from utils import load_ckpt, remove_suffix
 from utils.hparams import hparams


@@ -25,7 +25,7 @@ def __init__(
         self.model_path = model_path
         self.model_name = model_name
         self.model = self.build_model()
-        self.model_class_name = self.model.__class__.__name__.removesuffix('ONNX')
+        self.model_class_name = remove_suffix(self.model.__class__.__name__, 'ONNX')
         self.model_cache_path = (self.cache_dir / self.model_name).with_suffix('.onnx')

     def build_model(self) -> nn.Module:
From 8bd93d15a9ca3a129bc6f8f15c637396ecc84f25 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 30 Jun 2023 02:11:05 +0800
Subject: [PATCH 449/475] Add draw.io files of architecture graphs

---
 docs/resources/arch-acoustic.drawio | 179 +++++++++++++++
 docs/resources/arch-overview.drawio | 123 +++++++++++
 docs/resources/arch-variance.drawio | 329 ++++++++++++++++++++++++++++
 3 files changed, 631 insertions(+)
 create mode 100644 docs/resources/arch-acoustic.drawio
 create mode 100644 docs/resources/arch-overview.drawio
 create mode 100644 docs/resources/arch-variance.drawio

diff --git a/docs/resources/arch-acoustic.drawio b/docs/resources/arch-acoustic.drawio
new file mode 100644
index 000000000..89aa81c7b
--- /dev/null
+++ b/docs/resources/arch-acoustic.drawio
@@ -0,0 +1,179 @@
[179 lines of draw.io XML diagram markup]
diff --git a/docs/resources/arch-overview.drawio b/docs/resources/arch-overview.drawio
new file mode 100644
index 000000000..06b94b659
--- /dev/null
+++ b/docs/resources/arch-overview.drawio
@@ -0,0 +1,123 @@
[123 lines of draw.io XML diagram markup]
diff --git a/docs/resources/arch-variance.drawio b/docs/resources/arch-variance.drawio
new file mode 100644
index 000000000..a2e3d3ae8
--- /dev/null
+++ b/docs/resources/arch-variance.drawio
@@ -0,0 +1,329 @@
[329 lines of draw.io XML diagram markup]
From 33befcfc57339604acf71ccd72a64258b506252d Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 2 Jul 2023 19:26:25 +0800
Subject: [PATCH 450/475] Use typing alias

---
 inference/ds_variance.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/inference/ds_variance.py b/inference/ds_variance.py
index 536112b45..ca76a7a6a 100644
--- a/inference/ds_variance.py
+++ b/inference/ds_variance.py
@@ -11,6 +11,7 @@
 import torch.nn as nn
 import torch.nn.functional as F
 from scipy import interpolate
+from typing import List, Tuple

 from basics.base_svs_infer import BaseSVSInfer
 from modules.fastspeech.tts_modules import (
@@ -288,7 +289,7 @@ def run_inference(
             seed: int = -1
     ):
         batches = []
-        predictor_flags: list[tuple[bool, bool, bool]] = []
+        predictor_flags: List[Tuple[bool, bool, bool]] = []
         for i, param in enumerate(params):
             param: dict

From 1eaa2ca1e494501ad55935686668b199193df1e0 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 2 Jul 2023 21:31:05 +0800
Subject: [PATCH 451/475] Fix element replacing bug

---
 utils/onnx_helper.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/utils/onnx_helper.py b/utils/onnx_helper.py
index e8d56d0ed..9fc3f6fad 100644
--- a/utils/onnx_helper.py
+++ b/utils/onnx_helper.py
@@ -141,13 +141,11 @@ def _add_prefixes_recursive(subgraph):
                 if io_value in initializers and initializer_prefix is not None:
                     new_value = initializer_prefix + io_value
                     _verbose('| add prefix:', io_value, '->', new_value)
-                    io_list.pop(i)
-                    io_list.insert(i, new_value)
+                    io_list[i] = new_value
                 if io_value in value_infos and value_info_prefix is not None:
                     new_value = value_info_prefix + io_value
                     _verbose('| add prefix:', io_value, '->', new_value)
-                    io_list.pop(i)
-                    io_list.insert(i, new_value)
+                    io_list[i] = new_value

     _record_initializers_and_value_infos_recursive(model.graph)
     _add_prefixes_recursive(model.graph)
From 
21de5a111b3ed535beb965ac53a726f99dedefc7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 3 Jul 2023 22:37:37 +0800 Subject: [PATCH 452/475] Fix missing `mel2ph` when not training duration predictor --- preprocessing/variance_binarizer.py | 2 +- training/variance_task.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 612253341..bb75553eb 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -110,7 +110,7 @@ def process_item(self, item_name, meta_data, binarization_args): self.lr, ph_dur_sec, length, self.timestep, device=self.device ) - if hparams['predict_dur'] and (hparams['predict_pitch'] or self.predict_variances): + if hparams['predict_pitch'] or self.predict_variances: processed_input['mel2ph'] = mel2ph.cpu().numpy() # Below: extract actual f0, convert to pitch and calculate delta pitch diff --git a/training/variance_task.py b/training/variance_task.py index c0426fce0..87c1c5321 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -39,11 +39,10 @@ def collater(self, samples): if hparams['predict_dur']: batch['ph2word'] = utils.collate_nd([s['ph2word'] for s in samples], 0) batch['midi'] = utils.collate_nd([s['midi'] for s in samples], 0) - if hparams['predict_pitch'] or self.predict_variances: - batch['mel2ph'] = utils.collate_nd([s['mel2ph'] for s in samples], 0) if hparams['predict_pitch']: batch['base_pitch'] = utils.collate_nd([s['base_pitch'] for s in samples], 0) if hparams['predict_pitch'] or self.predict_variances: + batch['mel2ph'] = utils.collate_nd([s['mel2ph'] for s in samples], 0) batch['pitch'] = utils.collate_nd([s['pitch'] for s in samples], 0) if hparams['predict_energy']: batch['energy'] = utils.collate_nd([s['energy'] for s in samples], 0) From 782f004a571445eacfbaea5f7542d6deae853618 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Mon, 3 Jul 2023 23:47:17 +0800 Subject: [PATCH 453/475] Change variance retaking masks from zero inputs to zero embeddings (#104) * Change variance retaking from zero inputs to zero embeddings * Support separate variance retaking masks * Improve generation algorithm of retaking masks * Fix device mismatch * Adjust function signature * Remove padding index * Adapt ONNX exporter to new retaking mechanism --- deployment/exporters/variance_exporter.py | 10 ++++-- deployment/modules/toplevel.py | 14 +++++---- inference/ds_variance.py | 2 +- modules/toplevel.py | 37 +++++++++-------------- training/variance_task.py | 27 ++++++++++++----- utils/__init__.py | 15 +++++++++ 6 files changed, 65 insertions(+), 40 deletions(-) diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index 5c1743303..85c137322 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -367,7 +367,7 @@ def _torch_export_model(self): v_name: torch.FloatTensor([[0.] 
* 15]).to(self.device) for v_name in self.model.variance_prediction_list } - retake = torch.ones_like(pitch, dtype=torch.bool) + retake = torch.ones_like(pitch, dtype=torch.bool)[..., None].tile(len(self.model.variance_prediction_list)) torch.onnx.export( self.model.view_as_variance_preprocess(), ( @@ -609,10 +609,14 @@ def _optimize_merge_variance_predictor_graph( ignored_variance_names = '|'.join([f'({v_name})' for v_name in self.model.variance_prediction_list]) onnx_helper.model_add_prefixes( - var_pre, node_prefix='/pre', ignored_pattern=fr'.*((embed)|{ignored_variance_names}).*' + var_pre, node_prefix='/pre', value_info_prefix='/pre', + ignored_pattern=fr'.*((embed)|{ignored_variance_names}).*' ) onnx_helper.model_add_prefixes(var_pre, dim_prefix='pre.', ignored_pattern='(n_tokens)|(n_frames)') - onnx_helper.model_add_prefixes(var_post, node_prefix='/post', ignored_pattern=None) + onnx_helper.model_add_prefixes( + var_post, node_prefix='/post', value_info_prefix='/post', + ignored_pattern=None + ) onnx_helper.model_add_prefixes(var_post, dim_prefix='post.', ignored_pattern='n_frames') print(f'Merging {self.variance_diffusion_class_name} subroutines...') diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index df8290e3a..9649af20f 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -156,10 +156,10 @@ def forward_pitch_preprocess( pitch=None, retake=None ): condition = self.forward_mel2x_gather(encoder_out, ph_dur, x_dim=self.hidden_size) - condition += self.retake_embed(retake.long()) + condition += self.pitch_retake_embed(retake.long()) frame_midi_pitch = self.forward_mel2x_gather(note_midi, note_dur, x_dim=None) base_pitch = self.smooth(frame_midi_pitch) - base_pitch += (pitch - base_pitch) * ~retake + base_pitch = base_pitch * retake + pitch * ~retake pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) return pitch_cond, base_pitch @@ -177,12 +177,14 @@ def forward_variance_preprocess( self, encoder_out, ph_dur, pitch, variances: dict = None, retake=None ): condition = self.forward_mel2x_gather(encoder_out, ph_dur, x_dim=self.hidden_size) - condition += self.retake_embed(retake.long()) variance_cond = condition + self.pitch_embed(pitch[:, :, None]) - non_retake = (~retake).float() + non_retake_masks = [ + v_retake.float() # [B, T, 1] + for v_retake in (~retake).split(1, dim=2) + ] variance_embeds = [ - self.variance_embeds[v_name]((variances[v_name] * non_retake)[:, :, None]) - for v_name in self.variance_prediction_list + self.variance_embeds[v_name](variances[v_name][:, :, None]) * v_masks + for v_name, v_masks in zip(self.variance_prediction_list, non_retake_masks) ] variance_cond += torch.stack(variance_embeds, dim=-1).sum(-1) return variance_cond diff --git a/inference/ds_variance.py b/inference/ds_variance.py index ca76a7a6a..41d67f821 100644 --- a/inference/ds_variance.py +++ b/inference/ds_variance.py @@ -257,7 +257,7 @@ def forward_model(self, sample): txt_tokens, midi=midi, ph2word=ph2word, word_dur=word_dur, ph_dur=ph_dur, mel2ph=mel2ph, base_pitch=base_pitch, pitch=pitch, ph_spk_mix_embed=ph_spk_mix_embed, spk_mix_embed=spk_mix_embed, - retake=None, infer=True + infer=True ) if dur_pred is not None: dur_pred = self.rr(dur_pred, ph2word, word_dur) diff --git a/modules/toplevel.py b/modules/toplevel.py index 4a8fb8570..350198609 100644 --- a/modules/toplevel.py +++ b/modules/toplevel.py @@ -1,6 +1,9 @@ +from typing import Dict + import torch import torch.nn as nn import torch.nn.functional as F 
+from torch import Tensor from basics.base_module import CategorizedModule from modules.commons.common_layers import ( @@ -69,6 +72,7 @@ def category(self): def __init__(self, vocab_size): super().__init__() self.predict_dur = hparams['predict_dur'] + self.predict_pitch = hparams['predict_pitch'] self.use_spk_id = hparams['use_spk_id'] if self.use_spk_id: @@ -80,12 +84,8 @@ def __init__(self, vocab_size): self.rr = RhythmRegulator() self.lr = LengthRegulator() - self.predict_pitch = hparams['predict_pitch'] - - if self.predict_pitch or self.predict_variances: - self.retake_embed = Embedding(2, hparams['hidden_size']) - if self.predict_pitch: + self.pitch_retake_embed = Embedding(2, hparams['hidden_size']) pitch_hparams = hparams['pitch_prediction_args'] self.base_pitch_embed = Linear(1, hparams['hidden_size']) self.pitch_predictor = PitchDiffusion( @@ -114,7 +114,8 @@ def __init__(self, vocab_size): def forward( self, txt_tokens, midi, ph2word, ph_dur=None, word_dur=None, mel2ph=None, - base_pitch=None, pitch=None, retake=None, spk_id=None, infer=True, **kwargs + base_pitch=None, pitch=None, pitch_retake=None, variance_retake: Dict[str, Tensor] = None, + spk_id=None, infer=True, **kwargs ): if self.use_spk_id: ph_spk_mix_embed = kwargs.get('ph_spk_mix_embed') @@ -147,16 +148,13 @@ def forward( if self.use_spk_id: condition += spk_embed - if retake is None: - retake_embed = self.retake_embed(torch.ones_like(mel2ph)) - else: - retake_embed = self.retake_embed(retake.long()) - condition += retake_embed if self.predict_pitch: - if retake is not None: - base_pitch = base_pitch * retake + pitch * ~retake - pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None]) + if pitch_retake is None: + pitch_retake = torch.ones_like(mel2ph, dtype=torch.bool) + pitch_cond = condition + self.pitch_retake_embed(pitch_retake.long()) + base_pitch = base_pitch * pitch_retake + pitch * ~pitch_retake + pitch_cond += self.base_pitch_embed(base_pitch[:, :, None]) if infer: pitch_pred_out = self.pitch_predictor(pitch_cond, infer=True) else: @@ -172,17 +170,12 @@ def forward( condition += self.pitch_embed(pitch[:, :, None]) variance_inputs = self.collect_variance_inputs(**kwargs) - if retake is None: - variance_embeds = [ - self.variance_embeds[v_name](torch.zeros_like(pitch)[:, :, None]) - for v_name in self.variance_prediction_list - ] - else: + if variance_retake is not None: variance_embeds = [ - self.variance_embeds[v_name]((v_input * ~retake)[:, :, None]) + self.variance_embeds[v_name](v_input[:, :, None]) * ~variance_retake[v_name][:, :, None] for v_name, v_input in zip(self.variance_prediction_list, variance_inputs) ] - condition += torch.stack(variance_embeds, dim=-1).sum(-1) + condition += torch.stack(variance_embeds, dim=-1).sum(-1) variance_outputs = self.variance_predictor(condition, variance_inputs, infer) diff --git a/training/variance_task.py b/training/variance_task.py index 87c1c5321..1a8e044b8 100644 --- a/training/variance_task.py +++ b/training/variance_task.py @@ -52,6 +52,15 @@ def collater(self, samples): return batch +def random_retake_masks(b, t, device): + # 1/4 segments are True in average + B_masks = torch.randint(low=0, high=4, size=(b, 1), dtype=torch.long, device=device) == 0 + # 1/3 frames are True in average + T_masks = utils.random_continuous_masks(b, t, dim=1, device=device) + # 1/4 segments and 1/2 frames are True in average (1/4 + 3/4 * 1/3 = 1/2) + return B_masks | T_masks + + class VarianceTask(BaseTask): def __init__(self): super().__init__() @@ -114,25 +123,27 @@ 
def run_model(self, sample, infer=False):
         energy = sample.get('energy')  # [B, T_s]
         breathiness = sample.get('breathiness')  # [B, T_s]
 
+        pitch_retake = variance_retake = None
         if (self.predict_pitch or self.predict_variances) and not infer:
             # randomly select continuous retaking regions
             b = sample['size']
             t = mel2ph.shape[1]
             device = mel2ph.device
-            start, end = torch.sort(
-                torch.randint(low=0, high=t + 1, size=(b, 2), device=device), dim=1
-            )[0].split(1, dim=1)
-            idx = torch.arange(0, t, dtype=torch.long, device=device)[None]
-            retake = (idx >= start) & (idx < end)
-        else:
-            retake = None
+            if self.predict_pitch:
+                pitch_retake = random_retake_masks(b, t, device)
+            if self.predict_variances:
+                variance_retake = {
+                    v_name: random_retake_masks(b, t, device)
+                    for v_name in self.variance_prediction_list
+                }
 
         output = self.model(
             txt_tokens, midi=midi, ph2word=ph2word,
             ph_dur=ph_dur, mel2ph=mel2ph,
             base_pitch=base_pitch, pitch=pitch,
             energy=energy, breathiness=breathiness,
-            retake=retake, spk_id=spk_ids, infer=infer
+            pitch_retake=pitch_retake, variance_retake=variance_retake,
+            spk_id=spk_ids, infer=infer
         )
 
         dur_pred, pitch_pred, variances_pred = output
diff --git a/utils/__init__.py b/utils/__init__.py
index c4fa45114..6ef9d7a69 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pathlib
 import re
 import time
@@ -34,6 +36,19 @@ def collate_nd(values, pad_value=0, max_len=None):
     return res
 
 
+def random_continuous_masks(*shape: int, dim: int, device: str | torch.device = 'cpu'):
+    start, end = torch.sort(
+        torch.randint(
+            low=0, high=shape[dim] + 1, size=(*shape[:dim], 2, *((1,) * (len(shape) - dim - 1))), device=device
+        ).expand(*((-1,) * (dim + 1)), *shape[dim + 1:]), dim=dim
+    )[0].split(1, dim=dim)
+    idx = torch.arange(
+        0, shape[dim], dtype=torch.long, device=device
+    ).reshape(*((1,) * dim), shape[dim], *((1,) * (len(shape) - dim - 1)))
+    masks = (idx >= start) & (idx < end)
+    return masks
+
+
 def _is_batch_full(batch, num_frames, max_batch_frames, max_batch_size):
     if len(batch) == 0:
         return 0

From af06e30d111562f085016a03eb34a84b4581a911 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Tue, 4 Jul 2023 01:34:48 +0800
Subject: [PATCH 454/475] Update architecture graph

---
 docs/resources/arch-variance.drawio | 355 ++++++++++++++--------------
 1 file changed, 183 insertions(+), 172 deletions(-)

diff --git a/docs/resources/arch-variance.drawio b/docs/resources/arch-variance.drawio
index a2e3d3ae8..8b8aa49a0 100644
--- a/docs/resources/arch-variance.drawio
+++ b/docs/resources/arch-variance.drawio
@@ -1,327 +1,338 @@
 [draw.io XML diff omitted — diagram markup stripped during extraction]

From aba86a5183cf95467ef160fa073e78ff77cbe6a1 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Wed, 5 Jul 2023 00:31:05 +0800
Subject: [PATCH 455/475] Fix unexpected access to `pitch` when it is None

---
 modules/toplevel.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/modules/toplevel.py b/modules/toplevel.py
index 350198609..a93ed1e34 100644
--- a/modules/toplevel.py
+++ b/modules/toplevel.py
@@ -151,9 +151,11 @@ def forward(
 
         if self.predict_pitch:
             if pitch_retake is None:
-                pitch_retake = torch.ones_like(mel2ph, dtype=torch.bool)
-            pitch_cond = condition + self.pitch_retake_embed(pitch_retake.long())
-            base_pitch = base_pitch * pitch_retake + pitch * ~pitch_retake
+                pitch_retake_embed = self.pitch_retake_embed(torch.ones_like(mel2ph))
+            else:
+                pitch_retake_embed = self.pitch_retake_embed(pitch_retake.long())
+                base_pitch = base_pitch * pitch_retake + pitch * ~pitch_retake
+            pitch_cond = condition + pitch_retake_embed
             pitch_cond += self.base_pitch_embed(base_pitch[:, :, None])
             if infer:
                 pitch_pred_out = self.pitch_predictor(pitch_cond, infer=True)

From 801e05eddf601d953e1f0eebe88de0f766e2b326 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Wed, 5 Jul 2023 23:58:25 +0800
Subject: [PATCH 456/475] Adjust comments

---
 preprocessing/variance_binarizer.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py
index bb75553eb..3c9f3d0e4 100644
--- a/preprocessing/variance_binarizer.py
+++ b/preprocessing/variance_binarizer.py
@@ -33,7 +33,7 @@
     'breathiness',  # frame-level RMS of aperiodic parts (dB), float32[T_s,]
 ]
 
-# These operators are used as global variable due to a PyTorch shared memory bug on Windows.
+# These operators are used as global variables due to a PyTorch shared memory bug on Windows platforms.
 # See https://github.com/pytorch/pytorch/issues/100358
 midi_smooth: SinusoidalSmoothingConv1d = None
 energy_smooth: SinusoidalSmoothingConv1d = None
@@ -48,7 +48,6 @@ def __init__(self):
         predict_breathiness = hparams['predict_breathiness']
         self.predict_variances = predict_energy or predict_breathiness
         self.lr = LengthRegulator().to(self.device)
-        # self.smooth: nn.Conv1d = None
 
     def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id):
         meta_data_dict = {}

From 21d160e93fe678d2c79d26a64f0ab548ac856b43 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 9 Jul 2023 20:35:31 +0800
Subject: [PATCH 457/475] Update diagram

---
 docs/resources/arch-variance.drawio | 27 ++++++++++++++++-----------
 1 file changed, 16 insertions(+), 11 deletions(-)

diff --git a/docs/resources/arch-variance.drawio b/docs/resources/arch-variance.drawio
index 8b8aa49a0..b32a20e9d 100644
--- a/docs/resources/arch-variance.drawio
+++ b/docs/resources/arch-variance.drawio
 [draw.io XML diff omitted — diagram markup stripped during extraction]

From 8a818b269c04b0bf1273abff609da321c73b2025 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 9 Jul 2023 20:36:00 +0800
Subject: [PATCH 458/475] Update descriptions and logging

---
 deployment/exporters/acoustic_exporter.py | 3 ++-
 scripts/export.py                         | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py
index 947b975c7..3c85bc37b 100644
--- a/deployment/exporters/acoustic_exporter.py
+++ b/deployment/exporters/acoustic_exporter.py
@@ -37,6 +37,7 @@ def __init__(
         self.diffusion_cache_path = self.cache_dir / 'diffusion.onnx'
 
         # Attributes for logging
+
self.fs2_class_name = remove_suffix(self.model.fs2.__class__.__name__, 'ONNX') self.denoiser_class_name = remove_suffix(self.model.diffusion.denoise_fn.__class__.__name__, 'ONNX') self.diffusion_class_name = remove_suffix(self.model.diffusion.__class__.__name__, 'ONNX') @@ -283,7 +284,7 @@ def _merge_fs2_diffusion_graphs(self, fs2: onnx.ModelProto, diffusion: onnx.Mode onnx_helper.model_add_prefixes(fs2, dim_prefix='fs2.', ignored_pattern=r'(n_tokens)|(n_frames)') onnx_helper.model_add_prefixes(diffusion, dim_prefix='diffusion.', ignored_pattern='n_frames') print(f'Merging {self.fs2_class_name} and {self.diffusion_class_name} ' - f'back into {self.model.__class__.__name__}...') + f'back into {self.model_class_name}...') merged = onnx.compose.merge_models( fs2, diffusion, io_map=[('condition', 'condition')], prefix1='', prefix2='', doc_string='', diff --git a/scripts/export.py b/scripts/export.py index f9a9492db..69c5dc1f9 100644 --- a/scripts/export.py +++ b/scripts/export.py @@ -49,7 +49,8 @@ def main(): help='(for random time stretching) Expose velocity control functionality.') @click.option('--export_spk', type=str, required=False, multiple=True, metavar='', help='(for multi-speaker models) Export one or more speaker or speaker mix keys.') -@click.option('--freeze_spk', type=str, required=False) +@click.option('--freeze_spk', type=str, required=False, metavar='', + help='(for multi-speaker models) Freeze one speaker or speaker mix into the model.') def acoustic( exp: str, ckpt: int = None, From 608858d17a987535ae4c647a5a763ceca501a9aa Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Jul 2023 00:38:01 +0800 Subject: [PATCH 459/475] Finish documentation for version 2.0.0 (#106) * Refactor README * Delete old docs * Adjust diagram * Update image in README.md * Add images * Update references * Update README.md * Finish GettingStarted.md * Remove redundant paragraph * Remove docs * Remove images * Remove unused images * Add link to Releases * Add link to discord server * Add explanations to the configuration system * Add custom dictionary guidelines * Server -> server * Add performance tuning guidelines --- README.md | 199 ++++-------------- docs/BestPractices.md | 176 ++++++++++++++++ docs/ConfigurationSchemas.md | 20 +- docs/GettingStarted.md | 116 ++++++++++ docs/README-SVS-custom-phonemes.md | 119 ----------- docs/README-SVS-deployment.md | 75 ------- docs/README-SVS-opencpop-cascade.md | 111 ---------- docs/README-SVS-opencpop-e2e.md | 107 ---------- docs/README-SVS-opencpop-pndm.md | 114 ---------- docs/README-SVS.md | 85 -------- docs/resources/arch-acoustic.jpg | Bin 0 -> 112564 bytes docs/resources/arch-overview.drawio | 72 +++---- docs/resources/arch-overview.jpg | Bin 0 -> 74626 bytes docs/resources/arch-variance.jpg | Bin 0 -> 215408 bytes docs/resources/diffspeech-fs2-1.png | Bin 202692 -> 0 bytes docs/resources/diffspeech-fs2-2.png | Bin 145505 -> 0 bytes docs/resources/diffspeech-fs2.png | Bin 218998 -> 0 bytes docs/resources/model_a.png | Bin 55023 -> 0 bytes docs/resources/model_b.png | Bin 178518 -> 0 bytes ...tribution.jpg => phoneme-distribution.jpg} | Bin docs/resources/tfb.png | Bin 218539 -> 0 bytes 21 files changed, 380 insertions(+), 814 deletions(-) create mode 100644 docs/BestPractices.md create mode 100644 docs/GettingStarted.md delete mode 100644 docs/README-SVS-custom-phonemes.md delete mode 100644 docs/README-SVS-deployment.md delete mode 100644 docs/README-SVS-opencpop-cascade.md delete mode 100644 docs/README-SVS-opencpop-e2e.md delete mode 100644 
docs/README-SVS-opencpop-pndm.md delete mode 100644 docs/README-SVS.md create mode 100644 docs/resources/arch-acoustic.jpg create mode 100644 docs/resources/arch-overview.jpg create mode 100644 docs/resources/arch-variance.jpg delete mode 100644 docs/resources/diffspeech-fs2-1.png delete mode 100644 docs/resources/diffspeech-fs2-2.png delete mode 100644 docs/resources/diffspeech-fs2.png delete mode 100644 docs/resources/model_a.png delete mode 100644 docs/resources/model_b.png rename docs/resources/{phoneme_distribution.jpg => phoneme-distribution.jpg} (100%) delete mode 100644 docs/resources/tfb.png diff --git a/README.md b/README.md index 5de394a59..db261b2e0 100644 --- a/README.md +++ b/README.md @@ -1,179 +1,62 @@ -# Usage of Refactor Branch -This is a cleaner version of DiffSinger, which provides: -- fewer code: scripts unused or obsolete in the DiffSinger are removed; -- better readability: many important functions are annotated (however, **we assume the reader already knows how the neural networks work**); -- abstract classes: the bass classes are filtered out into the "basics/" folder and are annotated. Other classes directly inherent from the base classes. -- re-organized project structure: pipelines are seperated into preparation, preprocessing, augmentation, training, inference and deployment. -- main command-line entries are collected into the "scripts/" folder. +# DiffSinger (OpenVPI maintained version) -## Progress since we forked into this repository - -TBD - -## Getting Started - -[**[ 中文教程 | Chinese Tutorial ]**](https://www.yuque.com/sunsa-i3ayc/sivu7h) - -### Installation - -#### Environments and dependencies - -```bash -# Install PyTorch manually (1.8.2 LTS recommended) -# See instructions at https://pytorch.org/get-started/locally/ -# Below is an example for CUDA 11.1 -pip3 install torch==1.8.2 torchvision==0.9.2 torchaudio==0.8.2 --extra-index-url https://download.pytorch.org/whl/lts/1.8/cu111 - -# Install other requirements -pip install -r requirements.txt -``` - -#### Pretrained models - -- **(Required)** Get the pretrained vocoder from the [DiffSinger Community Vocoders Project](https://openvpi.github.io/vocoders) and unzip it into `checkpoints/` folder, or train a ultra-lightweight [DDSP](https://github.com/yxlllc/pc-ddsp) vocoder first by yourself, then configure it according to the relevant [instructions](https://github.com/yxlllc/pc-ddsp/blob/master/DiffSinger.md). -- Get the acoustic model from [releases](https://github.com/openvpi/DiffSinger/releases) or elsewhere and unzip into the `checkpoints/` folder. - -### Building your own dataset - -This [pipeline](preparation/acoustic_preparation.ipynb) will guide you from installing dependencies to formatting your recordings and generating the final configuration file. 
+[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446)
+[![downloads](https://img.shields.io/github/downloads/openvpi/DiffSinger/total.svg)](https://github.com/openvpi/DiffSinger/releases)
+[![Bilibili](https://img.shields.io/badge/Bilibili-Demo-blue)](https://www.bilibili.com/video/BV1be411N7JA/)
+[![license](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/openvpi/DiffSinger/blob/main/LICENSE)
+
+This is a refactored and enhanced version of _DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism_ based on the original [paper](https://arxiv.org/abs/2105.02446) and [implementation](https://github.com/MoonInTheRiver/DiffSinger), which provides:
+
+- Cleaner code structure: useless and redundant files are removed and the others are re-organized.
+- Better sound quality: the sampling rate of synthesized audio is adapted to 44.1 kHz instead of the original 24 kHz.
+- Higher fidelity: improved acoustic models and diffusion sampling acceleration algorithms are integrated.
+- More controllability: variance models and parameters are introduced for prediction and control of pitch, energy, breathiness, etc.
+- Production compatibility: functionalities are designed to match the requirements of production deployment and the SVS communities.
+
-### Preprocessing
+| Overview | Variance Model | Acoustic Model |
+|:---:|:---:|:---:|
+| arch-overview | arch-variance | arch-acoustic |
-
-The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop/) dataset.
+
+## User Guidance
-
-```sh
-CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/acoustic.yaml
-```
-### Training
+
+> 中文教程 / Chinese Tutorials: [Text](https://www.yuque.com/sunsa-i3ayc/sivu7h), [Video](https://www.bilibili.com/video/BV1xN411F7yM)
-
-The following is **only an example** for [opencpop](http://wenet.org.cn/opencpop/) dataset.
+
+- **Installation & basic usage**: See [Getting Started](docs/GettingStarted.md)
+- **Dataset creation pipelines & tools**: See [MakeDiffSinger](https://github.com/openvpi/MakeDiffSinger)
+- **Best practices & tutorials**: See [Best Practices](docs/BestPractices.md)
+- **Editing configurations**: See [Configuration Schemas](docs/ConfigurationSchemas.md)
+- **Deployment & production**: [OpenUTAU for DiffSinger](https://github.com/xunmengshe/OpenUtau), [DiffScope (under development)](https://github.com/SineStriker/qsynthesis-revenge)
+- **Communication groups**: [QQ Group](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=fibG_dxuPW5maUJwe9_ya5-zFcIwaoOR&authKey=ZgLCG5EqQVUGCID1nfKei8tCnlQHAmD9koxebFXv5WfUchhLwWxb52o1pimNai5A&noverify=0&group_code=907879266) (907879266), [Discord server](https://discord.gg/k5Uk2RfUFs)
-
-```sh
-CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/acoustic.yaml --exp_name $MY_DS_EXP_NAME --reset
-```
-### Inference
-
-#### Infer from *.ds file
-
-```sh
-python scripts/infer.py path/to/your.ds --exp $MY_DS_EXP_NAME
-```
-See more supported arguments with `python scripts/infer.py -h`. See examples of *.ds files in the [samples/](samples/) folder.
+## Progress & Roadmap -### Deployment +- **Progress since we forked into this repository**: See [Releases](https://github.com/openvpi/DiffSinger/releases) +- **Roadmap for future releases**: See [Project Board](https://github.com/orgs/openvpi/projects/1) +- **Thoughts, proposals & ideas**: See [Discussions](https://github.com/openvpi/DiffSinger/discussions) -#### Export model to ONNX format +## Architecture & Algorithms -Please see this [documentation](docs/README-SVS-deployment.md) before you run the following command: +TBD -```sh -python scripts/export.py acoustic --exp $MY_DS_EXP_NAME -``` +## Development Resources -See more supported arguments with `scripts/export.py acoustic --help`. +TBD -#### Use DiffSinger via OpenUTAU editor +## References -OpenUTAU, an open-sourced SVS editor with modern GUI, has unofficial temporary support for DiffSinger. See [OpenUTAU for DiffSinger](https://github.com/xunmengshe/OpenUtau/) for more details. +- Original DiffSinger: [paper](https://arxiv.org/abs/2105.02446), [implementation](https://github.com/MoonInTheRiver/DiffSinger) +- [HiFi-GAN](https://github.com/jik876/hifi-gan) and [NSF](https://github.com/nii-yamagishilab/project-NN-Pytorch-scripts/tree/master/project/01-nsf) for waveform reconstruction +- [pc-ddsp](https://github.com/yxlllc/pc-ddsp) for waveform reconstruction +- [DDIM](https://arxiv.org/abs/2010.02502) for diffusion sampling acceleration +- [PNDM](https://arxiv.org/abs/2202.09778) for diffusion sampling acceleration +- [DPM-Solver++](https://github.com/LuChengTHU/dpm-solver) for diffusion sampling acceleration +- [UniPC](https://github.com/wl-zhao/UniPC) for diffusion sampling acceleration -### Algorithms, principles and advanced features +## Disclaimer -See the original [paper](https://arxiv.org/abs/2105.02446), the [docs/](docs/) folder and [releases](https://github.com/openvpi/DiffSinger/releases) for more details. +Any organization or individual is prohibited from using any functionalities included in this repository to generate someone's speech without his/her consent, including but not limited to government leaders, political figures, and celebrities. If you do not comply with this item, you could be in violation of copyright laws. ## License -This forked DiffSinger is licensed under [Apache 2.0 License](LICENSE). +This forked DiffSinger repository is licensed under the [Apache 2.0 License](LICENSE). ---- - -Below is the README inherited from the original repository. - - - -# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism -[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446) -[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger) -[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases) - | [Interactive🤗 TTS](https://huggingface.co/spaces/NATSpeech/DiffSpeech) - | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger) - - -This repository is the official PyTorch implementation of our AAAI-2022 [paper](https://arxiv.org/abs/2105.02446), in which we propose DiffSinger (for Singing-Voice-Synthesis) and DiffSpeech (for Text-to-Speech). - - - - - - - - - - -
DiffSinger/DiffSpeech at training | DiffSinger/DiffSpeech at inference
Training | Inference
- -:tada: :tada: :tada: **Updates**: - - Sep.11, 2022: :electric_plug: [DiffSinger-PN](docs/README-SVS-opencpop-pndm.md). Add plug-in [PNDM](https://arxiv.org/abs/2202.09778), ICLR 2022 in our laboratory, to accelerate DiffSinger freely. - - Jul.27, 2022: Update documents for [SVS](docs/README-SVS.md). Add easy inference [A](docs/README-SVS-opencpop-cascade.md#4-inference-from-raw-inputs) & [B](docs/README-SVS-opencpop-e2e.md#4-inference-from-raw-inputs); Add Interactive SVS running on [HuggingFace🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger). - - Mar.2, 2022: MIDI-B-version. - - Mar.1, 2022: [NeuralSVB](https://github.com/MoonInTheRiver/NeuralSVB), for singing voice beautifying, has been released. - - Feb.13, 2022: [NATSpeech](https://github.com/NATSpeech/NATSpeech), the improved code framework, which contains the implementations of DiffSpeech and our NeurIPS-2021 work [PortaSpeech](https://openreview.net/forum?id=xmJsuh8xlq) has been released. - - Jan.29, 2022: support MIDI-A-version SVS. - - Jan.13, 2022: support SVS, release PopCS dataset. - - Dec.19, 2021: support TTS. [HuggingFace🤗 TTS](https://huggingface.co/spaces/NATSpeech/DiffSpeech) - -:rocket: **News**: - - Feb.24, 2022: Our new work, NeuralSVB was accepted by ACL-2022 [![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2202.13277). [Demo Page](https://neuralsvb.github.io). - - Dec.01, 2021: DiffSinger was accepted by AAAI-2022. - - Sep.29, 2021: Our recent work `PortaSpeech: Portable and High-Quality Generative Text-to-Speech` was accepted by NeurIPS-2021 [![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2109.15166) . - - May.06, 2021: We submitted DiffSinger to Arxiv [![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446). - -## Environments -```sh -conda create -n your_env_name python=3.8 -source activate your_env_name -pip install -r requirements_2080.txt (GPU 2080Ti, CUDA 10.2) -or pip install -r requirements_3090.txt (GPU 3090, CUDA 11.4) -``` - -## Documents -- [Run DiffSpeech (TTS version)](docs/README-TTS.md). -- [Run DiffSinger (SVS version)](docs/README-SVS.md). - -## Tensorboard -```sh -tensorboard --logdir_spec exp_name -``` - - - - -
Tensorboard
-
-## Audio Demos
-Old audio samples can be found in our [demo page](https://diffsinger.github.io/). Audio samples generated by this repository are listed here:
-
-### TTS audio samples
-Speech samples (test set of LJSpeech) can be found in [demos_1213](https://github.com/MoonInTheRiver/DiffSinger/blob/master/resources/demos_1213).
-
-### SVS audio samples
-Singing samples (test set of PopCS) can be found in [demos_0112](https://github.com/MoonInTheRiver/DiffSinger/blob/master/resources/demos_0112).
-
-## Citation
-    @article{liu2021diffsinger,
-      title={Diffsinger: Singing voice synthesis via shallow diffusion mechanism},
-      author={Liu, Jinglin and Li, Chengxi and Ren, Yi and Chen, Feiyang and Liu, Peng and Zhao, Zhou},
-      journal={arXiv preprint arXiv:2105.02446},
-      volume={2},
-      year={2021}}
-
-## Acknowledgements
-Our codes are based on the following repos:
-* [denoising-diffusion-pytorch](https://github.com/lucidrains/denoising-diffusion-pytorch)
-* [PyTorch Lightning](https://github.com/PyTorchLightning/pytorch-lightning)
-* [ParallelWaveGAN](https://github.com/kan-bayashi/ParallelWaveGAN)
-* [HifiGAN](https://github.com/jik876/hifi-gan)
-* [espnet](https://github.com/espnet/espnet)
-* [DiffWave](https://github.com/lmnt-com/diffwave)
-
-Also thanks [Keon Lee](https://github.com/keonlee9420/DiffSinger) for fast implementation of our work.
diff --git a/docs/BestPractices.md b/docs/BestPractices.md
new file mode 100644
index 000000000..723c89c3c
--- /dev/null
+++ b/docs/BestPractices.md
@@ -0,0 +1,176 @@
+# Best Practices
+
+## Using custom dictionaries
+
+This section is about using a custom grapheme-to-phoneme dictionary for any language(s).
+
+### Dictionary format
+
+A dictionary is a .txt file, in which each line represents a mapping rule from one syllable to its phoneme sequence. The syllable and the phonemes are separated by `tab`, and the phonemes are separated by `space` (a short example is given after the list below):
+
+```
+<syllable>	<phoneme1> <phoneme2> ...
+```
+
+Syllable names and phoneme names can be customized, but with the following limitations/suggestions:
+
+- `SP` (rest), `AP` (breath) and `<PAD>` (padding) cannot be phoneme names because they are reserved.
+- `-` and `+` cannot be phoneme names because they are defined as slur tags in most singing voice synthesis editors.
+- Special characters, including but not limited to `@`, `#`, `&`, `|`, `/`, `<`, `>`, etc., should be avoided because they may be used as special tags in future format changes. Using them now is okay, and all modifications will be announced in advance.
+- ASCII characters are preferred for the best encoding compatibility, but all UTF-8 characters are acceptable.
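+
+As an illustration, a few valid tab-separated lines could look like this (a made-up fragment in the spirit of the mandarin presets, not a complete dictionary):
+
+```
+a	a
+ba	b a
+zhi	zh ir
+```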
+
+### Add a dictionary
+
+Assume you have made a dictionary file named `my_dict.txt`. Edit your configuration file:
+
+```yaml
+dictionary: my_dict.txt
+```
+
+Then you can binarize your data as normal. The phonemes in your dataset must cover, and must only cover, the phonemes that appear in your dictionary. Otherwise, the binarizer will raise an error:
+
+```
+AssertionError: transcriptions and dictionary mismatch.
+ (+) ['E', 'En', 'i0', 'ir']
+ (-) ['AP', 'SP']
+```
+
+This means there are 4 unexpected symbols in the data labels (`ir`, `i0`, `E`, `En`) and 2 missing phonemes that are not covered by the data labels (`AP`, `SP`).
+
+Once the coverage checks pass, a phoneme distribution summary will be saved into your binary data directory. Below is an example.
+
+![phoneme-distribution](resources/phoneme-distribution.jpg)
+
+During the binarization process, each phoneme will be assigned a unique phoneme ID according to the order of their names. By default, there is one padding index before all real phoneme IDs. You may edit the number of padding indices, but it is not recommended to do so:
+
+```yaml
+num_pad_tokens: 1
+```
+
+The dictionary used to binarize the dataset will be copied to the binary data directory by the binarizer, and will be copied again to the experiment directory by the trainer. When exported to ONNX, the dictionary and the phoneme sequence ordered by IDs will be saved to the artifact directory. You do not need to carry the original dictionary file around for training and inference.
+
+### Preset dictionaries
+
+There are currently some preset dictionaries for users to use directly:
+
+| dictionary | filename | description |
+|:----------:|:--------:|:------------|
+| Opencpop | opencpop.txt | The original dictionary used by the Opencpop mandarin singing dataset, which is fully aligned with the pinyin writing system. We copied the dictionary from [here](http://wenet.org.cn/opencpop/resources/annotationformat/), removed 5 syllables that have no occurrences in the data labels (`hm`, `hng`, `m`, `n` and `ng`) and added some aliases for some syllables (e.g. `jv` for `ju`). Due to pronunciation issues, this dictionary is deprecated and retained only for backward compatibility. |
+| Opencpop extension | opencpop-extension.txt | The modified version of the Opencpop dictionary, with stricter phoneme division rules for some pinyin syllables. For example, `ci` is mapped to `c i0` and `chi` is mapped to `ch ir` to distinguish them from `bi` (`b i`). This dictionary is now used as the default dictionary for mandarin Chinese. There are also many new syllables for more phoneme combinations. |
+
+### Submit or propose a new dictionary
+
+You can submit or propose a new dictionary by raising a topic in [Discussions](https://github.com/openvpi/DiffSinger/discussions). Any dictionary to be formally supported in the main branch must match the following principles:
+
+- Only monolingual dictionaries are accepted for now. Support for multilingual dictionaries will be designed in the future.
+- All syllables and phonemes in the dictionary should have linguistic meanings. Style tags (vocal fry, falsetto, etc.) should not appear in the dictionary.
+- Its syllables should be standard spellings or phonetic transcriptions (like pinyin in mandarin Chinese and romaji in Japanese) for easy integration with G2P modules.
+- Its phonemes should cover all (or almost all) possible pronunciations in that language.
+- Every syllable and every phoneme should have one, and only one, certain pronunciation in all or almost all situations in that language. Slight context-based pronunciation differences are allowed, as the networks can learn them.
+- Most native speakers/singers of that language should be able to easily cover all phonemes in the dictionary.
This means the dictionary should not contain extremely rare or highly customized phonemes of some dialects or accents.
+- It should not bring too much difficulty or complexity to the data labeling workflow, and it should be easy to use for end users of voicebanks.
+
+## Performance tuning
+
+This section is about accelerating training and utilizing hardware.
+
+### Data loader and batch sampler
+
+The data loader loads data pieces from the binarized dataset, and the batch sampler forms batches according to data lengths.
+
+To configure the data loader, edit your configuration file:
+
+```yaml
+ds_workers: 4  # number of DataLoader workers
+dataloader_prefetch_factor: 2  # load data in advance
+```
+
+To configure the batch sampler, edit your configuration file:
+
+```yaml
+sampler_frame_count_grid: 6  # lower value means higher speed but less randomness
+```
+
+For more details of the batch sampler algorithm and this configuration key, see [sampler_frame_count_grid](ConfigurationSchemas.md#sampler_frame_count_grid).
+
+### Automatic mixed precision
+
+Enabling automatic mixed precision (AMP) can accelerate training and save GPU memory. DiffSinger has adapted the latest version of PyTorch Lightning for AMP functionalities.
+
+By default, the training runs in FP32 precision. To enable AMP, edit your configuration file:
+
+```yaml
+pl_trainer_precision: 16  # FP16 precision
+```
+
+or
+
+```yaml
+pl_trainer_precision: bf16  # BF16 precision
+```
+
+For more precision options, please check out the official [documentation](https://lightning.ai/docs/pytorch/stable/common/trainer.html#precision).
+
+### Training on multiple GPUs
+
+Using distributed data parallel (DDP) can divide training tasks across multiple GPUs and synchronize gradients and weights between them. DiffSinger has adapted the latest version of PyTorch Lightning for DDP functionalities.
+
+By default, the trainer will utilize all CUDA devices defined in the `CUDA_VISIBLE_DEVICES` environment variable (empty means using all available devices). If you want to specify which GPUs to use, edit your configuration file:
+
+```yaml
+pl_trainer_devices: [0, 1, 2, 3]  # use the first 4 GPUs defined in CUDA_VISIBLE_DEVICES
+```
+
+Please note that `max_batch_size` and `max_batch_frames` are values for **each** GPU.
+
+By default, the trainer uses NCCL as the DDP backend. If this gets stuck on your machine, try disabling P2P via
+
+```yaml
+ddp_backend: nccl_no_p2p  # disable P2P in NCCL
+```
+
+Or, if your machine does not support NCCL, you can switch to Gloo instead:
+
+```yaml
+ddp_backend: gloo  # however, it has a lower performance than NCCL
+```
+
+### Gradient accumulation
+
+Gradient accumulation means accumulating losses over several batches before each weight update. This can simulate a larger batch size at a lower GPU memory cost.
+
+By default, the trainer calls `backward()` each time the losses are calculated through one batch of data. To enable gradient accumulation, edit your configuration file:
+
+```yaml
+accumulate_grad_batches: 4  # the effective batch size will be 4x
+```
+
+Please note that enabling gradient accumulation will slow down training, because the losses must be calculated several times before the weights are updated (1 update to the weights = 1 actual training step).
+
+### Optimizer and learning rate
+
+The optimizer and the learning rate scheduler can play an important role in accelerating the training process. DiffSinger uses a flexible configuration logic for these two modules.
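+
+As background for the options below, both modules are constructed dynamically from the class names given in the configuration, with unneeded keys filtered out. The following is a minimal sketch of that mechanism, not the exact DiffSinger implementation; the helper name `build_object_from_config` is hypothetical:
+
+```python
+import importlib
+import inspect
+
+import torch
+
+
+def build_object_from_config(cls_path: str, *args, **kwargs):
+    # Resolve a dotted class path like 'torch.optim.AdamW' to the class itself
+    module_name, cls_name = cls_path.rsplit('.', 1)
+    cls = getattr(importlib.import_module(module_name), cls_name)
+    # Keep only the kwargs the constructor actually accepts,
+    # so extra configuration keys are silently tolerated
+    accepted = inspect.signature(cls.__init__).parameters
+    filtered = {k: v for k, v in kwargs.items() if k in accepted}
+    return cls(*args, **filtered)
+
+
+model = torch.nn.Linear(4, 4)
+optimizer = build_object_from_config(
+    'torch.optim.AdamW', model.parameters(),
+    lr=0.0004, weight_decay=0,
+    warmup_steps=2000,  # not an AdamW argument; filtered out above
+)
+```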
+
+You can modify options of the optimizer and learning rate scheduler, or even use other classes of them, by editing the configuration file:
+
+```yaml
+optimizer_args:
+  optimizer_cls: torch.optim.AdamW  # class name of optimizer
+  lr: 0.0004
+  beta1: 0.9
+  beta2: 0.98
+  weight_decay: 0
+lr_scheduler_args:
+  scheduler_cls: torch.optim.lr_scheduler.StepLR  # class name of learning rate scheduler
+  warmup_steps: 2000
+  step_size: 50000
+  gamma: 0.5
+```
+
+Note that `optimizer_args` and `lr_scheduler_args` will be filtered down to the parameters the constructors need and passed to `__init__` as keyword arguments (`kwargs`) when constructing the optimizer and scheduler. Therefore, you can specify all arguments according to your needs in the configuration file to directly control the behavior of optimization and LR scheduling. Parameters that exist in the configuration but are not needed in `__init__` are tolerated.
+
+Also note that the LR scheduler performs scheduling at the granularity of steps, not epochs.
+
+A special case applies when a tuple is needed in `__init__`: `beta1` and `beta2` are treated separately and form a tuple in the code. You can try to pass in an array instead (and, as an experiment, AdamW does accept `[beta1, beta2]`). If another special treatment is required, please submit an issue.
+
+If you find other optimizers or learning rate schedulers useful, you can raise a topic in [Discussions](https://github.com/openvpi/DiffSinger/discussions), raise [Issues](https://github.com/openvpi/DiffSinger/issues) or submit [PRs](https://github.com/openvpi/DiffSinger/pulls) if it introduces new code or dependencies.
diff --git a/docs/ConfigurationSchemas.md b/docs/ConfigurationSchemas.md
index f4c2d195c..de5fdb523 100644
--- a/docs/ConfigurationSchemas.md
+++ b/docs/ConfigurationSchemas.md
@@ -1,16 +1,18 @@
-# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism
+# Configuration Schemas
 
-[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446)
-[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger)
-[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases)
-| [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger)
+## The configuration system
 
-## Configuration Schemas
+DiffSinger uses a cascading configuration system based on YAML files. All configuration files originally inherit and override [configs/base.yaml](../configs/base.yaml), and each file directly overrides another file by setting the `base_config` attribute. The overriding rules are:
 
-This document explains the meaning and usages of all editable keys in a configuration file.
+- Configuration keys with the same path and the same name will be replaced. Other paths and names will be merged.
+- All configurations in the inheritance chain will be squashed (via the rule above) into the final configuration.
+- The trainer will save the final configuration in the experiment directory, which is detached from the chain and made independent from other configuration files.
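+
+For example, under these rules a minimal user configuration might look like the following sketch (the values are made up for illustration; keys not listed here are inherited from the chain unchanged):
+
+```yaml
+base_config: configs/acoustic.yaml  # the file to inherit and override
+raw_data_dir: data/my_singer/raw    # overrides the key of the same name
+max_batch_size: 24                  # overrides the key of the same name
+```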
-Each configuration key (including nested keys) are described with a brief explanation and several attributes listed as
-follows:
+## Configurable parameters
+
+The following are the meanings and usages of all editable keys in a configuration file.
+
+Each configuration key (including nested keys) is described with a brief explanation and several attributes listed as follows:
 
 | Attribute | Explanation |
 |:---------------:|:----------------|
diff --git a/docs/GettingStarted.md b/docs/GettingStarted.md
new file mode 100644
index 000000000..479ad082d
--- /dev/null
+++ b/docs/GettingStarted.md
@@ -0,0 +1,116 @@
+# Getting Started
+
+## Installation
+
+### Environments and dependencies
+
+DiffSinger requires Python 3.8 or later. We strongly recommend you create a virtual environment via Conda or venv before installing dependencies.
+
+Install PyTorch 1.13 or later following the [official instructions](https://pytorch.org/get-started/locally/) according to your OS and hardware.
+
+Install other dependencies via the following command:
+
+```bash
+pip install -r requirements.txt
+```
+
+### Pretrained models
+
+- **(Required)** Get the pretrained vocoder from the [DiffSinger Community Vocoders Project](https://openvpi.github.io/vocoders) and unzip it into the `checkpoints/` folder, or train an ultra-lightweight [DDSP](https://github.com/yxlllc/pc-ddsp) vocoder yourself first, then configure it according to the relevant [instructions](https://github.com/yxlllc/pc-ddsp/blob/master/DiffSinger.md).
+- Get acoustic or variance models from [Releases](https://github.com/openvpi/DiffSinger/releases) or elsewhere and unzip them into the `checkpoints/` folder.
+
+## Preprocessing
+
+Raw data pieces and transcriptions should be binarized into dataset files before training.
+
+Assume that you have a configuration file called `my_config.yaml`. Run:
+
+```bash
+python scripts/binarize.py --config my_config.yaml
+```
+
+Preprocessing can be accelerated through multiprocessing. See [binarization_args.num_workers](ConfigurationSchemas.md#binarization_args.num_workers) for more explanations.
+
+## Training
+
+Assume that you have a configuration file called `my_config.yaml` and the name of your model is `my_experiment`. Run:
+
+```bash
+python scripts/train.py --config my_config.yaml --exp_name my_experiment --reset
+```
+
+Checkpoints will be saved in the `checkpoints/my_experiment/` directory. If you interrupt the program and run the above command again, training resumes automatically from the latest checkpoint.
+
+For more suggestions related to training performance, see [performance tuning](BestPractices.md#performance-tuning).
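+
+While training runs, progress and losses can be monitored with TensorBoard (an assumption based on the original repository's docs, which used the same tool; the exact log location may vary with your setup):
+
+```bash
+tensorboard --logdir checkpoints/my_experiment
+```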
+
+## Inference
+
+Inference of DiffSinger is based on DS files. Assume that you have a DS file named `my_song.ds` and your model is named `my_experiment`.
+
+If your model is a variance model, run:
+
+```bash
+python scripts/infer.py variance my_song.ds --exp my_experiment
+```
+
+or run
+
+```bash
+python scripts/infer.py variance --help
+```
+
+for more configurable options.
+
+If your model is an acoustic model, run:
+
+```bash
+python scripts/infer.py acoustic my_song.ds --exp my_experiment
+```
+
+or run
+
+```bash
+python scripts/infer.py acoustic --help
+```
+
+for more configurable options.
+
+## Deployment
+
+DiffSinger uses [ONNX](https://onnx.ai/) as the deployment format. Due to TorchScript issues, exporting to ONNX currently requires PyTorch **1.13**. Assume that you have a model named `my_experiment`.
+
+If your model is a variance model, run:
+
+```bash
+python scripts/export.py variance --exp my_experiment
+```
+
+or run
+
+```bash
+python scripts/export.py variance --help
+```
+
+for more configurable options.
+
+If your model is an acoustic model, run:
+
+```bash
+python scripts/export.py acoustic --exp my_experiment
+```
+
+or run
+
+```bash
+python scripts/export.py acoustic --help
+```
+
+for more configurable options.
+
+## Other utilities
+
+There are other useful CLI tools in the [scripts/](../scripts/) directory that are not mentioned above:
+
+- drop_spk.py - delete speaker embeddings from checkpoints (for data security reasons when distributing models)
+- migrate.py - migrate old transcription files or checkpoints from previous versions of DiffSinger
+- vocoder.py - bypass the acoustic model and only run the vocoder on given mel-spectrograms.
diff --git a/docs/README-SVS-custom-phonemes.md b/docs/README-SVS-custom-phonemes.md
deleted file mode 100644
index c7809bf5f..000000000
--- a/docs/README-SVS-custom-phonemes.md
+++ /dev/null
@@ -1,119 +0,0 @@
-# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism
-[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446)
-[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger)
-[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases)
- | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger)
-
-## Customizing your phoneme system for DiffSinger*
-
-*Exclusive in this forked repository.
-
-### 0. Requirements
-
-#### Limitations for dictionaries
-
-The current code implementation supports customized grapheme-to-phoneme dictionaries, with the following limitations:
-
-- The dictionary must be a two-part phoneme system. Namely, one syllable should contain at most two phones, where only two cases are allowed: 1. one consonant + one vowel, 2. one single vowel.
-- `AP` (aspiration) and `SP` (space) will be included in the phoneme list and cannot be removed.
-
-#### Requirements for data labels
-
-The preprocessing schedule introduced validations for the data labels to avoid mismatch between phoneme labels and the dictionary. Thus, your data label must meet the following requirements:
-
-- The data must contain labels for `AP` and `SP` This is due to some code implementation issues, and may be fixed in the future.
-- Tha data labels must, and must only contain all phonemes that appear in the dictionary. Otherwise, the coverage and oov checks will fail.
-
-### 1.
Preparation - -A dictionary file is required to use your own phoneme system. A dictionary is a syllable-to-phoneme table, with each line in the following form (`\t` to separate the syllable and its phonemes): - -``` - [] -``` - -For example, the following rules are valid in a dictionary file: - -``` -a a -ai ai -ang ang -ba b a -bai b ai -ban b an -``` - -Note that you do not need (actually you must not) put `AP` and `SP` in the dictionary. - -If one syllable is mapped to one phoneme, the phoneme will be regarded as a vowel. If one syllable is mapped to two phonemes, the first one will be regarded as a consonant and the other as a vowel. Syllables that are mapped to more than two phonemes are not allowed. - -Vowel phonemes are used to align with the head of the note to keep the song in a correct rhythm. See this [issue](https://github.com/MoonInTheRiver/DiffSinger/issues/60) for explanations. - -It is reasonable for the dictionary to design a unique symbol for each pronunciation. If one symbol have multiple pronunciations based on different context, especially when one pronunciation has many occurences while the others have only a few, the network may not learn very well of the rules, leading to more pronunciation errors at inference time. - -### 2. Preprocessing and inference - -#### Configurations - -To preprocess your data with a customized dictionary, you should specify the dictionary path in the config file: - -```yaml -dictionary: path/to/your/dictionary.txt -``` - -If not specified, this hyperparamerter will fall back to `dictionaries/opencpop.txt` for backward compatibility. - -#### Phoneme distribution summary - -When preprocessing, the program will generate a summary of the phoneme occurrence distribution summary. The summary includes messages in the standard output and a JPG file in the preprocessing directory. The summary only covers phonemes in the dictionary, along with `AP` and `SP`. Try to balance number of occurrences of each phoneme for more stable pronunciations at inference time. - -#### Coverage and OOV checks - -The program will perform phoneme coverage checks and OOV detection based on the given dictionary and data label. These checks fail when: - -- Some phonemes in the dictionary have not appeared in the data labels (not a full coverage of the dictionary) -- Some phonemes are not in the dictionary but appear in the data labels (unrecognized symbols) - -The program will throw an `AssertionError` and show differences of the dictionary phoneme set and the actual data phoneme set like below: - -``` -AssertionError: transcriptions and dictionary mismatch. - (+) ['E', 'En', 'i0', 'ir'] - (-) ['AP', 'SP'] -``` - -This means there are 4 unexpected symbols in the data labels (`ir`, `i0`, `E`, `En`) and 2 missing phonemes that are not covered by the data labels (`AP`, `SP`). - -#### Inference with a custom dictionary - -When doing inference, the program will read the dictionary file from the checkpoint folder and generate a phoneme set. There are two ways of inference: - -- Inference with automatic phoneme durations, i.e. inputting syllables and matching the left row of the dictionary. Each vowel is aligned with the head of the note, and consonants have their duration predicted by the network. -- Inference with manual phoneme durations, i.e. directly inputting phoneme-level durations. Every phoneme should be in the phoneme set. - -### 3. Preset dictionaries - -There are currently two preset dictionaries. 
- -#### The original Opencpop dictionary [[source]](../dictionaries/opencpop.txt) - -The original Opencpop dictionary, which you can find [here](http://wenet.org.cn/opencpop/resources/annotationformat/), are fully aligned with the standard pinyin format of Mandarin Chinese. We copied the dictionary from the website, removed 5 syllables that has no occurrence in the data labels (`hm`, `hng`, `m`, `n` and `ng`) and added some aliases for some syllables (e.g. `jv` for `ju`). It has the most compatibility with the previous model weights, but may cause bad cases in pronunciations, especially in cases that the note is a slur. Thus, this dictionary is deprecated by default and remained only for backward compatibility. - -Phoneme distribution of Opencpop dataset on this dictionary can be found [here](http://wenet.org.cn/opencpop/resources/statisticalinformation/). - -#### The new strict pinyin dictionary [[source]](../dictionaries/opencpop-strict.txt) - -We distinguished some different pronunciations of some phonemes, and added 4 phonemes to the original dictionary: `ir`, `i0`, `E` and `En`. - -Some mapping rules are changed: - -- `zhi`, `chi`, `shi`, `ri` are mapped to `zh ir`, `ch ir`, `sh ir`, `r ir` (distinguished from orignal `i`) -- `zi`, `ci`, `si` are mapped to `z i0`, `c i0`, `s i0` (distinguished from original `i`) -- `ye` are mapped to `y E` (distinguished from original `e`) -- `yan` are mapped to `y En` (distinguished from original `an`) - -Phoneme distribution* of Opencpop dataset on this dictionary is shown below. - -![img](resources/phoneme_distribution.jpg) - -*`AP` and `SP` are not included. diff --git a/docs/README-SVS-deployment.md b/docs/README-SVS-deployment.md deleted file mode 100644 index 3eed6657d..000000000 --- a/docs/README-SVS-deployment.md +++ /dev/null @@ -1,75 +0,0 @@ -# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism - -[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446) -[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger) -[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases) - | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger) - -## DiffSinger (ONNX Deployment) - -Currently, we only support exporting MIDI-less acoustic model to ONNX format. - -### 0. Environment Preparation - -** Important:** The exporting script is only tested under **PyTorch 1.13**. For the most stability, we recommend you to export your model with the same version of PyTorch as we used to test this functionality. - -The `onnxruntime` package is required to run inference with ONNX model and ONNXRuntime. See the [official guidance](https://onnxruntime.ai/) for instructions to install packages matching your hardware. CUDA, DirectML and default CPU are recommended since the model has been tested on these execution providers. - -### 1. Export to ONNX format - -Run with the command - -```commandline -python scripts/export.py acoustic --exp EXP [--out OUT] -``` - -where `EXP` is the name of experiment, `OUT` is the output directory for all artifacts exported. For more functionalities of this script, run - -```commandline -python scripts/export.py acoustic --help -``` - -This script will export the acoustic model to the ONNX format and do a lot of optimization (25% ~ 50% faster with ONNXRuntime than PyTorch). 
Note: DPM-Solver acceleration is not currently included, but PNDM is wrapped into the model. Use any `speedup` larger than 1 to enable it.

These attachments will be exported along with the model:
- the dictionary which the model uses
- a text file carrying all phonemes representing the tokens in the model
- all speaker mix embeddings, if a multi-speaker model is exported with the `--export_spk` option specified

### 2. Inference with ONNXRuntime

See `deployment/infer/infer_acoustic.py` for details.

#### Issues related to CUDAExecutionProvider

In some cases, especially when you are using a virtual environment, you may get the following error when creating a session with CUDAExecutionProvider, even if you have already installed CUDA and cuDNN on your system:

```text
RuntimeError: D:\a\_work\1\s\onnxruntime\python\onnxruntime_pybind_state.cc:574 onnxruntime::python::CreateExecutionProviderInstance CUDA_PATH is set but CUDA wasn't able to be loaded. Please install the correct version of CUDA and cuDNN as mentioned in the GPU requirements page (https://onnxruntime.ai/docs/reference/execution-providers/CUDA-ExecutionProvider.html#requirements), make sure they're in the PATH, and that your GPU is supported.
```

There are two ways to solve this problem.

1. Simply import PyTorch and leave it unused before you create the session:

```python
import torch
```

This looks silly, but if your PyTorch build includes CUDA, CUDAExecutionProvider will just work.

2. When importing PyTorch, its `__init__.py` actually adds CUDA and cuDNN to the system DLL path. This can be done manually, with the following lines before creating the session:

```python
import os
os.add_dll_directory(r'path/to/your/cuda/dlls')
os.add_dll_directory(r'path/to/your/cudnn/dlls')
```

See the [official requirements](http://www.onnxruntime.ai/docs/execution-providers/CUDA-ExecutionProvider.html#requirements) for all DLLs that should be included in the paths above.

In this way you can also switch between your system CUDA and PyTorch CUDA in your virtual environment.
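For reference, creating a session with an explicit provider list might look like the following minimal sketch. The model path is a placeholder; the actual pre- and post-processing lives in the script above.

```python
import onnxruntime as ort

# Prefer CUDA, falling back to CPU if CUDA cannot be loaded.
session = ort.InferenceSession(
    'path/to/your/acoustic.onnx',  # placeholder path
    providers=['CUDAExecutionProvider', 'CPUExecutionProvider'],
)
print(session.get_providers())  # shows which providers were actually loaded
```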
diff --git a/docs/README-SVS-opencpop-cascade.md b/docs/README-SVS-opencpop-cascade.md
deleted file mode 100644
index 225380bf4..000000000
--- a/docs/README-SVS-opencpop-cascade.md
+++ /dev/null
@@ -1,111 +0,0 @@
# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism
[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446)
[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger)
[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases)

## DiffSinger (MIDI SVS | A version)
### 0. Data Acquisition
For the Opencpop dataset: please strictly follow the instructions of [Opencpop](https://wenet.org.cn/opencpop/). We have no right to grant you access to Opencpop.

The pipeline below is designed for the Opencpop dataset:

### 1. Preparation

#### Data Preparation
a) Download and extract Opencpop, then create a link to the dataset folder: `ln -s /xxx/opencpop data/raw/`

b) Run the following scripts to pack the dataset for training/inference.

```sh
export PYTHONPATH=.
CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml

# `data/binary/opencpop-midi-dp` will be generated.
```

#### Vocoder Preparation
We provide a pre-trained model of [HifiGAN-Singing](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0109_hifigan_bigpopcs_hop128.zip), which is specially designed for SVS with the NSF mechanism.
Please unzip this file into `checkpoints` before training your acoustic model.

(Update: you can also move [a ckpt with more training steps](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/model_ckpt_steps_1512000.ckpt) into this vocoder directory.)

This singing vocoder is trained on ~70 hours of singing data and can be viewed as a universal vocoder.

#### Exp Name Preparation
```bash
export MY_FS_EXP_NAME=0302_opencpop_fs_midi
export MY_DS_EXP_NAME=0303_opencpop_ds58_midi
```

```
.
|--data
    |--raw
        |--opencpop
            |--segments
                |--transcriptions.txt
                |--wavs
|--checkpoints
    |--MY_FS_EXP_NAME (optional)
    |--MY_DS_EXP_NAME (optional)
    |--0109_hifigan_bigpopcs_hop128
        |--model_ckpt_steps_1512000.ckpt
        |--config.yaml
```

### 2. Training Example
First, you need a pre-trained FFT-Singer checkpoint. You can either use our pre-trained model, or train FFT-Singer from scratch by running:
```sh
CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/cascade/opencs/aux_rel.yaml --exp_name $MY_FS_EXP_NAME --reset
```

Then, to train DiffSinger, run:

```sh
CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME --reset
```

Remember to adjust the `fs2_ckpt` parameter in `configs/midi/cascade/opencs/ds60_rel.yaml` to fit your path.

### 3. Inference from packed test set
```sh
CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer
```

We also provide:
 - the pre-trained model of DiffSinger;
 - the pre-trained model of FFT-Singer.

They can be found [here](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/adjust-receptive-field.zip).

Remember to put the pre-trained models in the `checkpoints` directory.

### 4. Inference from raw inputs
```sh
python inference/ds_acoustic.py --config configs/midi/cascade/opencs/ds60_rel.yaml --exp_name $MY_DS_EXP_NAME
```
Raw inputs:
```
inp = {
    'text': '小酒窝长睫毛AP是你最美的记号',
    'notes': 'C#4/Db4 | F#4/Gb4 | G#4/Ab4 | A#4/Bb4 F#4/Gb4 | F#4/Gb4 C#4/Db4 | C#4/Db4 | rest | C#4/Db4 | A#4/Bb4 | G#4/Ab4 | A#4/Bb4 | G#4/Ab4 | F4 | C#4/Db4',
    'notes_duration': '0.407140 | 0.376190 | 0.242180 | 0.509550 0.183420 | 0.315400 0.235020 | 0.361660 | 0.223070 | 0.377270 | 0.340550 | 0.299620 | 0.344510 | 0.283770 | 0.323390 | 0.360340',
    'input_type': 'word'
}  # user input: Chinese characters
or,
inp = {
    'text': '小酒窝长睫毛AP是你最美的记号',
    'ph_seq': 'x iao j iu w o ch ang ang j ie ie m ao AP sh i n i z ui m ei d e j i h ao',
    'note_seq': 'C#4/Db4 C#4/Db4 F#4/Gb4 F#4/Gb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 F#4/Gb4 F#4/Gb4 F#4/Gb4 C#4/Db4 C#4/Db4 C#4/Db4 rest C#4/Db4 C#4/Db4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 F4 F4 C#4/Db4 C#4/Db4',
    'note_dur_seq': '0.407140 0.407140 0.376190 0.376190 0.242180 0.242180 0.509550 0.509550 0.183420 0.315400 0.315400 0.235020 0.361660 0.361660 0.223070 0.377270 0.377270 0.340550 0.340550 0.299620 0.299620 0.344510 0.344510 0.283770 0.283770 0.323390 0.323390 0.360340 0.360340',
    'is_slur_seq': '0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0',
    'input_type': 'phoneme'
}  # input in the Opencpop dataset format
```
### 5. Some issues.
a) The HifiGAN-Singing vocoder is trained on our [vocoder dataset](https://dl.acm.org/doi/abs/10.1145/3474085.3475437) and the training set of [PopCS](https://arxiv.org/abs/2105.02446). Opencpop is an out-of-domain dataset (unseen speaker). This may degrade audio quality, and we are considering fine-tuning this vocoder on the training set of Opencpop.

b) In this version of the code, we use the melody frontend ([lyric + MIDI] -> [F0 + ph_dur]) to predict the F0 contour and phoneme durations.

c) Generated audio demos can be found in [MY_DS_EXP_NAME](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/adjust-receptive-field.zip).
diff --git a/docs/README-SVS-opencpop-e2e.md b/docs/README-SVS-opencpop-e2e.md
deleted file mode 100644
index 4dfd0a709..000000000
--- a/docs/README-SVS-opencpop-e2e.md
+++ /dev/null
@@ -1,107 +0,0 @@
# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism
[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446)
[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger)
[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases)
 | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger)

Substantial update: we 1) **abandon** the explicit prediction of the F0 curve; 2) increase the receptive field of the denoiser; 3) make the linguistic encoder more robust.
**By doing so, 1) the synthesized recordings are more natural in terms of pitch; 2) the pipeline is simpler.**

In short: we let the generative model capture the dynamics of the F0 curve, instead of constraining log-domain F0 with an MSE loss as before.

## DiffSinger (MIDI SVS | B version)
### 0. Data Acquisition
For the Opencpop dataset: please strictly follow the instructions of [Opencpop](https://wenet.org.cn/opencpop/). We have no right to grant you access to Opencpop.

The pipeline below is designed for the Opencpop dataset:

### 1. Preparation

#### Data Preparation
a) Download and extract Opencpop, then create a link to the dataset folder: `ln -s /xxx/opencpop data/raw/`

b) Run the following scripts to pack the dataset for training/inference.

```sh
export PYTHONPATH=.
CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml

# `data/binary/opencpop-midi-dp` will be generated.
```

#### Vocoder Preparation
We provide a pre-trained model of [HifiGAN-Singing](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0109_hifigan_bigpopcs_hop128.zip), which is specially designed for SVS with the NSF mechanism.

Also, please unzip the pre-trained vocoder and [this companion model](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0102_xiaoma_pe.zip) into `checkpoints` before training your acoustic model.

(Update: you can also move [a ckpt with more training steps](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/model_ckpt_steps_1512000.ckpt) into this vocoder directory.)

This singing vocoder is trained on ~70 hours of singing data and can be viewed as a universal vocoder.

#### Exp Name Preparation
```bash
export MY_DS_EXP_NAME=0228_opencpop_ds100_rel
```
```
.
|--data
    |--raw
        |--opencpop
            |--segments
                |--transcriptions.txt
                |--wavs
|--checkpoints
    |--MY_DS_EXP_NAME (optional)
    |--0109_hifigan_bigpopcs_hop128 (vocoder)
        |--model_ckpt_steps_1512000.ckpt
        |--config.yaml
```

### 2. Training Example
```sh
CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset
```

### 3. Inference from packed test set
```sh
CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer
```

We also provide:
 - the pre-trained model of DiffSinger.

It can be found [here](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0228_opencpop_ds100_rel.zip).

Remember to put the pre-trained models in the `checkpoints` directory.

### 4. Inference from raw inputs
```sh
python inference/ds_e2e.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME
```
Raw inputs:
```
inp = {
    'text': '小酒窝长睫毛AP是你最美的记号',
    'notes': 'C#4/Db4 | F#4/Gb4 | G#4/Ab4 | A#4/Bb4 F#4/Gb4 | F#4/Gb4 C#4/Db4 | C#4/Db4 | rest | C#4/Db4 | A#4/Bb4 | G#4/Ab4 | A#4/Bb4 | G#4/Ab4 | F4 | C#4/Db4',
    'notes_duration': '0.407140 | 0.376190 | 0.242180 | 0.509550 0.183420 | 0.315400 0.235020 | 0.361660 | 0.223070 | 0.377270 | 0.340550 | 0.299620 | 0.344510 | 0.283770 | 0.323390 | 0.360340',
    'input_type': 'word'
}  # user input: Chinese characters
or,
inp = {
    'text': '小酒窝长睫毛AP是你最美的记号',
    'ph_seq': 'x iao j iu w o ch ang ang j ie ie m ao AP sh i n i z ui m ei d e j i h ao',
    'note_seq': 'C#4/Db4 C#4/Db4 F#4/Gb4 F#4/Gb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 F#4/Gb4 F#4/Gb4 F#4/Gb4 C#4/Db4 C#4/Db4 C#4/Db4 rest C#4/Db4 C#4/Db4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 F4 F4 C#4/Db4 C#4/Db4',
    'note_dur_seq': '0.407140 0.407140 0.376190 0.376190 0.242180 0.242180 0.509550 0.509550 0.183420 0.315400 0.315400 0.235020 0.361660 0.361660 0.223070 0.377270 0.377270 0.340550 0.340550 0.299620 0.299620 0.344510 0.344510 0.283770 0.283770 0.323390 0.323390 0.360340 0.360340',
    'is_slur_seq': '0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0',
    'input_type': 'phoneme'
}  # input in the Opencpop dataset format
```
diff --git a/docs/README-SVS-opencpop-pndm.md b/docs/README-SVS-opencpop-pndm.md deleted file mode 100644 index 8ec2b89f0..000000000 --- a/docs/README-SVS-opencpop-pndm.md +++ /dev/null @@ -1,114 +0,0 @@ -# DiffSinger-PNDM -[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446) -[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger) -[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases) - -Highlights: - -Training diffusion model: 100 steps - -Default pndm_speedup: 1 - -Inference diffusion model: (100 / pndm_speedup) steps - -You can freely control the inference steps, by adding these arguments in your experiment scripts : ---hparams="pndm_speedup=5" or --hparams="pndm_speedup=10". - -Contributed by @luping-liu . - -## DiffSinger (MIDI SVS | B version | +PNDM) -### 0. Data Acquirement -For Opencpop dataset: Please strictly follow the instructions of [Opencpop](https://wenet.org.cn/opencpop/). We have no right to give you the access to Opencpop. - -The pipeline below is designed for Opencpop dataset: - -### 1. Preparation - -#### Data Preparation -a) Download and extract Opencpop, then create a link to the dataset folder: `ln -s /xxx/opencpop data/raw/` - -b) Run the following scripts to pack the dataset for training/inference. - -```sh -export PYTHONPATH=. -CUDA_VISIBLE_DEVICES=0 python scripts/binarize.py --config configs/midi/cascade/opencs/aux_rel.yaml - -# `data/binary/opencpop-midi-dp` will be generated. -``` - -#### Vocoder Preparation -We provide the pre-trained model of [HifiGAN-Singing](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0109_hifigan_bigpopcs_hop128.zip) which is specially designed for SVS with NSF mechanism. - -Also, please unzip pre-trained vocoder and [this pendant for vocoder](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0102_xiaoma_pe.zip) into `checkpoints` before training your acoustic model. - -(Update: You can also move [a ckpt with more training steps](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/model_ckpt_steps_1512000.ckpt) into this vocoder directory) - -This singing vocoder is trained on ~70 hours singing data, which can be viewed as a universal vocoder. - -#### Exp Name Preparation -```bash -export MY_DS_EXP_NAME=0228_opencpop_ds100_rel -``` - -``` -. -|--data - |--raw - |--opencpop - |--segments - |--transcriptions.txt - |--wavs -|--checkpoints - |--MY_DS_EXP_NAME (optional) - |--0109_hifigan_bigpopcs_hop128 (vocoder) - |--model_ckpt_steps_1512000.ckpt - |--config.yaml -``` - -### 2. Training Example -```sh -CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset -``` - -### 3. Inference from packed test set -```sh -CUDA_VISIBLE_DEVICES=0 python scripts/train.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME --reset --infer -``` - -We also provide: - - the pre-trained model of DiffSinger; - -They can be found in [here](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0228_opencpop_ds100_rel.zip). - -Remember to put the pre-trained models in `checkpoints` directory. - -### 4. 
### 4. Inference from raw inputs
```sh
python inference/ds_e2e.py --config configs/midi/e2e/opencpop/ds100_adj_rel.yaml --exp_name $MY_DS_EXP_NAME
```
Raw inputs:
```
inp = {
    'text': '小酒窝长睫毛AP是你最美的记号',
    'notes': 'C#4/Db4 | F#4/Gb4 | G#4/Ab4 | A#4/Bb4 F#4/Gb4 | F#4/Gb4 C#4/Db4 | C#4/Db4 | rest | C#4/Db4 | A#4/Bb4 | G#4/Ab4 | A#4/Bb4 | G#4/Ab4 | F4 | C#4/Db4',
    'notes_duration': '0.407140 | 0.376190 | 0.242180 | 0.509550 0.183420 | 0.315400 0.235020 | 0.361660 | 0.223070 | 0.377270 | 0.340550 | 0.299620 | 0.344510 | 0.283770 | 0.323390 | 0.360340',
    'input_type': 'word'
}  # user input: Chinese characters
or,
inp = {
    'text': '小酒窝长睫毛AP是你最美的记号',
    'ph_seq': 'x iao j iu w o ch ang ang j ie ie m ao AP sh i n i z ui m ei d e j i h ao',
    'note_seq': 'C#4/Db4 C#4/Db4 F#4/Gb4 F#4/Gb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 F#4/Gb4 F#4/Gb4 F#4/Gb4 C#4/Db4 C#4/Db4 C#4/Db4 rest C#4/Db4 C#4/Db4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 A#4/Bb4 A#4/Bb4 G#4/Ab4 G#4/Ab4 F4 F4 C#4/Db4 C#4/Db4',
    'note_dur_seq': '0.407140 0.407140 0.376190 0.376190 0.242180 0.242180 0.509550 0.509550 0.183420 0.315400 0.315400 0.235020 0.361660 0.361660 0.223070 0.377270 0.377270 0.340550 0.340550 0.299620 0.299620 0.344510 0.344510 0.283770 0.283770 0.323390 0.323390 0.360340 0.360340',
    'is_slur_seq': '0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0',
    'input_type': 'phoneme'
}  # input in the Opencpop dataset format
```

### 5. Some issues.
a) The HifiGAN-Singing vocoder is trained on our [vocoder dataset](https://dl.acm.org/doi/abs/10.1145/3474085.3475437) and the training set of [PopCS](https://arxiv.org/abs/2105.02446). Opencpop is an out-of-domain dataset (unseen speaker). This may degrade audio quality, and we are considering fine-tuning this vocoder on the training set of Opencpop.

b) In this version of the code, we use the melody frontend ([lyric + MIDI] -> [ph_dur]) to predict phoneme durations. The F0 curve is implicitly predicted together with the mel-spectrogram.

c) Example [generated audio](https://github.com/MoonInTheRiver/DiffSinger/blob/master/resources/demos_0221/DS/).
More generated audio demos can be found in [DiffSinger](https://github.com/MoonInTheRiver/DiffSinger/releases/download/pretrain-model/0228_opencpop_ds100_rel.zip).
diff --git a/docs/README-SVS.md b/docs/README-SVS.md
deleted file mode 100644
index 9fa88b059..000000000
--- a/docs/README-SVS.md
+++ /dev/null
@@ -1,85 +0,0 @@
# DiffSinger: Singing Voice Synthesis via Shallow Diffusion Mechanism
[![arXiv](https://img.shields.io/badge/arXiv-Paper-.svg)](https://arxiv.org/abs/2105.02446)
[![GitHub Stars](https://img.shields.io/github/stars/MoonInTheRiver/DiffSinger?style=social)](https://github.com/MoonInTheRiver/DiffSinger)
[![downloads](https://img.shields.io/github/downloads/MoonInTheRiver/DiffSinger/total.svg)](https://github.com/MoonInTheRiver/DiffSinger/releases)
 | [Interactive🤗 SVS](https://huggingface.co/spaces/Silentlin/DiffSinger)

## DiffSinger (SVS)

### PART1. [Run DiffSinger on PopCS](README-SVS-popcs.md)
In PART1, we only focus on spectrum modeling (the acoustic model) and assume the ground-truth (GT) F0 to be given as the pitch information, following these papers [1][2][3]. If you want to conduct experiments with F0 prediction, please move to PART2.
Thus, the pipeline of this part can be summarized as:

```
[lyrics] -> [linguistic representation] (Frontend)
[linguistic representation] + [GT F0] + [GT phoneme duration] -> [mel-spectrogram] (Acoustic model)
[mel-spectrogram] + [GT F0] -> [waveform] (Vocoder)
```

[1] Adversarially Trained Multi-Singer Sequence-to-Sequence Singing Synthesizer. Interspeech 2020.

[2] Sequence-to-Sequence Singing Synthesis Using the Feed-Forward Transformer. ICASSP 2020.

[3] DeepSinger: Singing Voice Synthesis with Data Mined From the Web. KDD 2020.

Click here for detailed instructions: [link](README-SVS-popcs.md).

### PART2. [Run DiffSinger on Opencpop](README-SVS-opencpop-cascade.md)
Thanks to the [Opencpop team](https://wenet.org.cn/opencpop/) for releasing their SVS dataset with MIDI labels on **Jan. 20, 2022** (after we published our paper).

Since there are elaborately annotated MIDI labels, we are able to supplement the pipeline in PART1 by adding a naive melody frontend.

#### 2.A
Thus, the pipeline of [2.A](README-SVS-opencpop-cascade.md) can be summarized as:

```
[lyrics] + [MIDI] -> [linguistic representation (with MIDI information)] + [predicted F0] + [predicted phoneme duration] (Melody frontend)
[linguistic representation] + [predicted F0] + [predicted phoneme duration] -> [mel-spectrogram] (Acoustic model)
[mel-spectrogram] + [predicted F0] -> [waveform] (Vocoder)
```

Click here for detailed instructions: [link](README-SVS-opencpop-cascade.md).

#### 2.B
In 2.A, we find that if we predict F0 explicitly in the melody frontend, there are many bad cases of uv/v prediction. Therefore, we abandon the explicit prediction of the F0 curve in the melody frontend and make a joint prediction with the spectrogram.

Thus, the pipeline of [2.B](README-SVS-opencpop-e2e.md) can be summarized as:
```
[lyrics] + [MIDI] -> [linguistic representation] + [predicted phoneme duration] (Melody frontend)
[linguistic representation (with MIDI information)] + [predicted phoneme duration] -> [mel-spectrogram] (Acoustic model)
[mel-spectrogram] -> [predicted F0] (Pitch extractor)
[mel-spectrogram] + [predicted F0] -> [waveform] (Vocoder)
```

Click here for detailed instructions: [link](README-SVS-opencpop-e2e.md).

### PART3. [Customize your phonemes](README-SVS-customize-phonemes.md) (🎉**Exclusive in this forked repository!**)

In PART2, we observed many bad cases with phonemes that have multiple pronunciations, e.g. `i` in `bi`, `ci`, `chi` and `e` in `ce`, `ye`. However, the original codebase depends heavily on the Opencpop dataset and labels, including its phoneme system, which makes it difficult to change the phoneme system.

In this repository, we decoupled the code from the Opencpop phoneme system and dictionary, configured all information of the phoneme system in one single file, and released a revised version of the Opencpop pinyin dictionary. This refactor also makes customized phoneme systems and dictionaries possible, such as Japanese, Korean, etc.

Click here for requirements and instructions: [link](README-SVS-customize-phonemes.md).

### FAQ

Q1: Why do I need F0 in the vocoder?

A1: See the vocoder parts of HiFiSinger, DiffSinger or SingGAN. This is common practice now.

Q2: Why not run the MIDI version of SVS on the PopCS dataset? Or why not release MIDI labels for the PopCS dataset?

A2: Our laboratory has no funds to label the PopCS dataset, but there are funds for labeling another singing dataset, which is coming soon.
Q3: Why "'HifiGAN' object has no attribute 'model'"?

A3: Please put the pretrained vocoders in your `checkpoints` directory.

Q4: How can I check whether I am using GT information or predicted information during inference from a packed test set?

A4: Please see the code [here](https://github.com/MoonInTheRiver/DiffSinger/blob/55e2f46068af6e69940a9f8f02d306c24a940cab/tasks/tts/fs2.py#L343).

...
diff --git a/docs/resources/arch-acoustic.jpg b/docs/resources/arch-acoustic.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a98b21e98490552c24172a7d83bb1138efc18a75
GIT binary patch
literal 112564
(binary JPEG data omitted)
z`(uBqIYmI(uwQ1ts?6&fL$cR^@0Y{_bTRxhJV9OavhKBG`eiP|qr>BEmtLV3bO`ym zkqMy~=?5{0qLzor?E%&?gra}Ebp$cs@TolUX6c~+hrG88i(~E5wqX-O5-cRaB?;~X zcL+(4MuUgOg1a=*NMkV|KsTX*#@!tnclY29jk`3#9rE?wGxI!q=9`&!=HGi9@83GA ztM0D4tL{~6UF*DrkeeD5+xjXR7UuR&ZPCl+0TeBUrC9fHM~oAo?H<$-HTai$9pUA| zx6xq-ogsd8()Q3+wEMp+~EQyjW&h)`u3#;kd+_W}Ovd<<^txiV4gZ>-NThkX@~N)BJ1v}}i8u=&#z zDLaioroJzt7N;;3b^mWkJ4K10yFh{d-&hNd;=i#x*?V($wM6_z8!Zdc3dbRmlUokq z<`p2>A1Ip_pLO8+940wQ5#R4`ZY4JS#^RAJxgS|C7M;e{+9ft<1;ux7YW_B_;OkC6 zN|aryHSiGeYW3NtMwax!Dsyo5+YHFNA z)RHApmzHnjIg$a#c|1HL4fs;6^kW81h+~)T3^Oxdi8$O=qy7Hr zX{HDrIS}ocUC@b+Y~$h#C?)Z|_Emf@82F;T~*!E5tKVGjXly8?<0PwgnzDYh$jwm6o$jG%a7JF}-~ zWSlP@q4?c8h)Gsrzz!m(&?T_HQ91XkX5J}R#B?Ooh|9X&;o^2Sc_Ms53OU?!q@Rjd zN>mFRHgw)A0X}9o_R?K<+rrbJ z`HZlQ*mJw^T?4JSPL&W5(16HKf&F9(K7BcTtL)MNx#7_(y)}{nEUr0E!-uY+5KL&` zrUAoG2ioOOM62S4PVWSYYLk3AD}YWS*A=IOUv%~CtVfiU?t9o_*(~OA{CYA16>DSP z-sQa()HHnila-;cZep_CiO7lVS%jA2na1Zu7HeQ@8AC1q#QvK!3Di_ijCSn}SMiJ59=>SLjsv|(Dls?BOH)%5h{PWb$ayQG72A9qky@aKD${OU7- z!m$Vef%~+fI10G@Qy})R3UMxn>xvV%A5EbjKWH1J*cl?rNh`mM>4c_!5#I8h`Fko;*sk zyjBx2;wRru_FeuP(CvISS{WgZ`>~{=U2vxfP;`u4DsV~^JAe<~?&%IZl@yB_lGT2g zS%@fsUOu=6Ff@$V-PMj6E!tuTU`+$M0^n&w8m@@(QCQYK)30eP<8rSmyy(V$oCj%D z1{YWkTwU<^B4|6ehLC86U5l%})F9Sn*X@o|>DFZIDy5{Hg-QEX?*6G;nfib_rQZqT8Ty&wtbsx228&ya{PC2rQveFxLHNl?-U6UTe+2YUM zC?zU$IforTXUPv+yAEaNR|-9^pTB&d#?YiSSGquk7B-5Fo~@cE+^xWBWh|;DSMTP= zD9q#;0wnQc7YplE49c|hJ>y3S!XM|@>r#&uBMQHj1>hQ{Ma^d)V;{6Ltr4>LhMv*x zD5sA=1=Gj6Y~CKW8;5iZ_vCU%SCSJAr0%!OurvBV30}C^FZY(N&wVWCHGI|XJf$Rv zfGu6cnFw?tm%)V?B-Im-y*PDEPB>EXDp0UBHQ@DDTn{_n_?6^j_4%)tv@cr@>Za^E z1&w&@lSvSUY;iPEXjg{sC(bctPzFmpx||q?9_n-48D~3vP2;wOc&&-PZ6=`y6b745 z6dcjikw;in2qMagQ1YxE>pcNVE>16F{t`w@;TWm1{keE~$qCuEH&j&S_av&paG-*r zT!Kv?Wv1uP4~{r8dNP(dAh&1HK&` zMfF~g@Rg31Z-UxICy-P8gu(nb7TK78zHLE+%3XByb4!f-fj*ZpreqVuO$EYt$YJXH zhB0OSJf}JW`w(htJHukW5LpkRmFjlrfDupcPm0&7KHbh5Z(`#6HEw;o?fI?}f4#)4 zYtPyF9<+5aSDfTr_s|91{hZXt+m7&yjN9az+ag_`T_MH>Hg-%YMjA5q4V=hP!T7`bnMk$~6z zw@Nq;;5^%D4i2omu-cLKX7Sj0*glTQCi_DA9}OQHJ~%hqk4m^ax^->ku*~k>ewjQ4 z@XjtQiWtbY_IU#r7SnA`qxtmpCWLUQZbUn6R>ccc=S+~BqQ^O zWLZr&3y--45414cVZLt|8|CDuj;-YjO`Ibf5RqyJB|@0f3Tr<&45ikO%Df*_m*L2a zj8bgz!hpO+U*B`5&fR={kx_Awq@>=4iBCN5R4f`kaBD}Hkt>QbX}2Z3OhZ`p;+ahI z{&K6AHWiv1mAQw%75pfvBj~Z-28q=eb(PFpkJeWhE(#)rt-e)Zo10AE$x=nY1rc;1 zP%a)NxkN69A3sVb8bcepc(O^p+D~2F0ZO<5qY6~Bv(jn-_5}#L%C~m$5CC&5PUc8c=F#1zVjopjisTrresr61qgf~z147ADJC6zTYrPxud` zO%cHi-yzw;Wle(<+wG2_C+-4b8KM2SqsLCQ!#}x8gY&nxiwZ4d->Gr0!$taOf1bgy zs3WVj-AAzHE@-D{X+my0d`ErNx?X0AeRJ;7mzWYUGb;+DpkQxLTq2M_bD{AfYiYfq zmv_%9H|5X`q&u3$z!;c@3c$6`GCm#>#XHzFsih?rx#j2#LVE>RE>(?fZzDL=WP}KLd7nr&z?WRdxvvt5{0sqiO=@sUxknnft zsohL_Pv8BjWsAIwtctE-uXG=zU?Ds;qG$5mCVHY{Z=uw&cKDzbFkYHmASfcoXeK2*QqJV3ZP_#v4vlM7! zdsP3RxMtD-kd;$4|1yMb$~*Arf_lgnow@lDH>>^>>&h7<8?{<1qS~8+eQr+ePI|a) z>D!>ZvrhN$>ilGR0B2%Yo*nhi^P0V3D&c7?F%EXjSH>+q?A{%N^r&4oCbeI17j{6} z2G;EiC-JuOA5D4Gb)aS|HMP|Mak2Vz;ZOz0Zwl`F#F%&_Aif3j;GPpU8Sz(){<{nn zXVBmDH&(#x2gdyT5!U5EGD=3ca{9q!S*oJ}pa65y{bra4njBLzEj;JlMhA71=DTs^gz3FbL_=P*V=a@n~Rwy?C)3fpJmUYN7Cposq zhJ2An98rlTqfsluj8S;UN-`K4v$E;{a~kMroTtO!yyDfBy-WK`2MLX`#640$RqRoF z86tC)kM|szJau#f)kcvid$ZwPk47y2)BM$+aB@ktC2!QX(?^TmQcsxNw8fz}9DspU zCH{s*+Aaz?2dDHpLvFWmv_0Yupp>46kSR~2y(y8%Xmp=$O!zDj%7>giK~80niVBea zu_-U~lCWZ8^g@)F$8|f1pThgquMyO>PkLM(xA|f+BoIm~o%V4=UYExptos{%=fEb?*$0!&+u3!uJ;Y;UC| zX*C;@+Bu6LGX^W0sCQ#!MAlE>hZCRXXfp(3+&@+rAh?A?Rnkhu&~BOOr2KsGIO^hN zPx_9_8Ju`&1{wFL#NfyYn<<|Rtigk#7nAsJKjx+7{oIR=62=>IkZ@MmJm#-Yq&xT^ ztvzO{75e>KHN zlpwvb&&*=&&iI2yP)=iJdbC4SNHr$O)fd&8txTk~BS~32MA796(?jxg6q$Ak z&WvO=gd2eTn)yHuPVg3o3kMAFG^bqO9t{KRrrY7na~LwrJY^ts1;FLYk~QV7e-P-! 
zEHd=D4KFy5k}HNXk^@D#1tR!twr0eY59N;D(w*V_oUEywdmf%s z6sUM6P{^mIsm*U1!c257y8lAJCo_K_HAYF&pA0ypFB76|hKrzqwC8fL(dWR~!?)AV zrID@~eoJ3wdH3y_4RmtC3^hAllPQjE>r|wg4LLQ28xe&`XBW?3+rCSK`n>L4=CXP5 zN!pY_@_}Cmsr^v28GZ+o$`JgvGq13x?Am8?$NL)H6tCaV2(1&%kL}h0^cnX)VDX~u z!z2_pKdQM*vsT?E*=$I~O1B^MHrN_)VxS>M4&#u~QmbY3L+I2c$X?Vn; z&uDINpSCHa?F`fooen3$eI&EMAmU zuTJ?GO=zaaN3afLKf6F{npVmZwr)s`q*4~dZUL;`&m>TXr}66#(^@konD2y0^$NmZ z?asCD-+!D@m04lc6Wq~P7C;Kk4LrA|IO`RE&T%Q^!ATeuAa}d$LDv73+|Ge%GOom4 z@Q1uQh38CbiM2C|SgKRDP!Aw%{suVq$<#Zekb*nKN+*SKnkNj7Xo{>x(9P1XD0!{8 z8ZD%{+kG7TD6)6vxH$8w-OuYtydPJ1}mZ!l01c{Le73HB16p ziza7!;Y{jPgC#oFd{&f}Fuhk+u&pjzIr+GsP(qm}V)M%B=OnxtPUY2hmk%<`jmdAK z6Zo7x5v*mfApqA_W32|-C=FRXi;k@Ak}peaXf1~Hq5{#FT+PiYZl@p%bzXZpL_*h& z$!-NhoF8-!NtI_Xw__w^NJ<>A;gnJem}&VeGc#KxR6tl2KVVj+pz;Bs9bCL4S*Dy`0QoH@qaPpRz`!;!O`e{lmdrkMlwe41GNY+in#C>6}jL2SV zEDtO^*;^cKoNw_P&b!5;LwJ-V0#*G`cU8OWW64Z;O}jjhuQopIuT5_-SR%*F9=YK^{dPSGbXn z)pBK%WUb~|V{~6W#a$5L+dsP^x-Pk>(X}YJO2EHA;NKPE7^%a*yQu{x=FN5ALJh5Iw*4Z-Nm{3` z2IR)$R5^m)zj+va8jZz!N-Kr`!tCd_&0eCt)~)rITilUz0{n7^v`iH}GbUX{5gGt~ zxi)WDNwPx&>Znd0eS3s+;oA{jR^Q=4bVmv~B(9cFC9=BMZ-2VJXZAclH|MUNs67un z80ObdP~R&2z~}y98Mt~-$JDe{qJ0!Gp9#WmlU)c1IVp;lkJ{2kPl*-rlT%56p+hJ875h@hzz8H-&-A?{=gPE@GY~Ltw%1M*+TRl;NKtKtw z;6}msm6+MxBjaukP3jY)(-n_%VT_g9ddjkucW|iI-dBVDR8x^EP%x@&ubZ~o2wnbq zvkBf59obF3c93u^@O zmUM89aGEaDp*l`aZfAok+JHGDLvaS(0*oGpvD{%C$R#-^DhsplR{ByavZ}FP|W5shxXfndG zpVHL;xI{B5x7_ksqT_MKo~90nOQUhE&c?Na2b@rp9xoQsuOsUOO(vCRbB1}g8#6El z*M4<>@mYuq_YIJ|gd#BP;2MV94z{R7DEVsp2;*lZ|J(_|e`iGfiYV#&OW#PtEP_+mAy|*s&v{8Z=w(}Wd9ZCvB)ET^p__xu9A6u^UXX?V5XK*zI$+p73u(>e_Uhtr7fHu>8u_QOBhQ5%w( zl`U5?Gun4#d;}WvY9-Ydpt?G08qo;@IB?F!8NnsvKRFSJ8>%GSY+bgd69H|oPOXW%RRdT#@kHJ9HVz3<@ zBOk#2f?GefHRunX^tu0<*%Xx!4efDO?~`&zGn}O)(S(oyDP8y z;UCEe&cYw{no*h;!CGw~xzF`oH-h`7bVR0nS$mwC26un|!c)1P6 z$uEDs_^y4vK%iVUmqyZ(dCH?^6j_(^ys2lKjTs?_*Ib&1)l0UPl5TE*Yb7=HGhi)lc&uZ1dUf+h?LMJ3A#O)f04jP!B2IQck=t60 z&NFf>iK*dTiAVi-I5$+@{^`U^Dtf4Ms{n8K4zI9>M;zCP0|@f_6%~9OHlQ}v;K4Sk z>~|SIV_huZHBq;zt0L*942MIDBE7%I8#aKP#**fUqX8a%FKavt3ahnd1bGPi_?U;5 zsvy4U=PJ5-lJ77UJtSp68V3o%t8pDJ6+~vJtPZU~9fg6$UO=y4PR1z*SG}gdVS~#8 z*Apu;<|&B6=h=_K4{95Dqcs$+K()IZdh299FT{S3MFu4qj#eAh1lK|3yBY{*9AsxI z=>&qx-AU4Og0%!ub7BlHIEm@hgjLR8>T78xhUD|9p07IT@-$?8AkzF;?;M%wk-*g( zqo}OLU#;fr-dLRPjug`BJFiQNz!Q5lDG6h0v7h?Gn|6FCTB@_yyFSa~R;XdrsK`^r z*I{QJQmr_%P0>9jbzegW{G?m)yg{V&32?gCnHsveCNEweJZ!^S-HTGEF3fc08LGS==UGkYdg6jI*G#?U?3IFl}AlQDUm+}&cbF;b0Q*S@Y=mGEs%ZqCxRf= z7ZxgHR%V@PNYr5*8zcE6vD|BsA&Hj_ZG0P-eCQspAj82vG`-cylY{*MW5bJD-*x3F z2c?#xnJFUP`J;S%gYg-xqTJ7`ET0C{RCj%btMp70q7O@Z-o8!5Vm#5mHp<`5^szGs zd-2cVbKEBf$jnGJF-L0CYK;Q0828NW{WN1GQZdflh3U*LmERXEk|2kRy0W#3W;rvC za9gPa5SLle&+UC3+l2XrrcAvZeRa(x;+(KY!5>DwW)0f=Fxhr!wVntp&t@Zc$|gEN zbc&G8+|$C4t1Y}!58K^vp)zWN6?nl6<5ep2VuJc)4PyPVux=@+N#5s5!(7!KxOVu# zF;a?>K0iL8I#3QN*Ep=$vQg9h!p7{ z)@`gyZg36eX=YE%P}_NQlw2>WFDl8*q6o6FJGUG2(+rqM*iRm_CDL>26pWa)$pa(W z@$>T8^|;6sarw)@+8ddy8?!{(6?9D5J%KRX?(CJ7S& z{290-*c*i6(1tnt>VtM#3EtMnGZ7WM8NO#&_T;l&^ApfuO;BX?X@u(ED(1Q4&#%2Y zjrg6)eW?iE#49F*{oI*O#0h7UK2Cb-S08-L;ZpvwBCjdsvw(*Jn*Fi&xPhQ*yo|>Y+n~xjMkj0;&%LC_JJMMJ8Q#MCoN#XgF^-XcGDPIm4!8pD`J~w$?O}{%3$-b_Kw-1L&O*v6#)aACv@o8qQDsZe zu3KWyWA=5uJj8xhNfco3|gqC^?-4P{=_2lhGtdo0qDP^FO@7poI zrq)9y#&@Nu>Qu7M$(AlcH^-CY3jNJwf=r7^qr;<;nqX&md~sjWK#_ffJ5)>gIoJ)P zHj@?3312LM-*SN|#&6vYWBqu^bOi1dnMq=EpF(R zMcG*r(^vtwAVI2-4cm)_bZ7B;y~PIPxnEHy_EYD}_KkFv+-M(Fkf?J;jjcml+_(@# zk^u8+CaD(p-L`?PNm>2z4*uC=zU2vi;x2dapRTO`vA|sha=Lf1;FRN?Ai(2WJg;-b zK2@1{W-w^?>1w?(@A)xVw(yqzwh;@&)xrA>@2V}py{5cQcJtoBgbb~UKp}!MmJudL zJ9~x^tY5a`kJu(6-nF8VWSRc0r!|>?nGq?If?_6IV^O 
zklDIA??(xpL?t#^V}Sq;v=?q2ykFP5dU%=%kxNY3uA+x$!%657buouAaAS4E*Bff; zAVrtN*Je3R8qsm+=TY?M&3P5nidoKoyG##sSweqx`f((P9nW^9ec_F zQ3Bw{r~61kV(z8(Aj>H?i>TN<4%JS1>@B5;=AazEL037dYBK5UJY zk_B>f=rWg+npvwuG5Fm2BxJY1zJI5T#-HG<3{r1KZWqmnh;P|id(yvhX%>C5zwS!3 z-a=OWQMKPJa_NFObxS96-9W%B&S5lzJ}WIAk~Q0e7P!Rw+(-BETQy6aE}B}&>+-FE z0Tq7Lwu`#z#cA|Xf#S-}SpIC^_Y&{!Pdl)PkCmR8C7dHqZmb5-6sQ*LDipX;G&-W%=A4`j%dW~7n@i%X zaN9S_eeu0si32!j_JWIGF*d1+_6?MIf=f7K$>Uw_j%&d)_h;uQlgKxAt7v+6CO1B< zAYtFR8U8-M)!utf3&PD9`o0UPrmX(`mZY#&}r_=h$`n*dFmqxqWl;lJM*+||rZ8`yb|B@rEzZm{rTi$ zY?ww1u(j_Zh=WRqnjUVhm4m6iSNj^0t&2j?!bF~T-)#`&&O^0jKs|=kQcG|`v$?}W z#mO(OJW};1)8~W{#oQ8K5~7zr*K^bI8DyW_WY4+v=N^ZWBJ-2QnJv^*p2_{fPv3B* zyYTxWz9Rmr|1qIBy7WaJF>bp}X}^VvIOQAmVnNlCqJu{=O3GHQz_FPQ{95XA>V*{m z3Jx%&FNh(5KZ~~oex_%S>&f`uJc`WNoAvKvA2C@>qh`6NO1Rb3e!rsFfGVu=m8g_o z7B2j}if^+WH-Sq1LA2no0L07bJK^xKP9-Z)AuW_nR3Gcc6V8c9!vus+dcTb6IHmch zNHiWfYtf%p3-4dD{(L8kiFN-&l`hM`eh2X|9rfX$JTE@R_fSaK^6VET^@1-bZ!tS( zlNP?^Ty`Yx#boA#NUBEdVKWrmg-HM5Pv^fifJxu|>4{y9Mo`J&QcM*EZ zYHVdzB1H*;odpd-{Xg`#_YphEy@zKfm06V!K4ibE!M)xy!|-V&B;y-os9 z*S9O*kOufKW4$2z=7j5uR=6N-`Lj2?)SX|bIxccsx0R3%ly%5mVM~d+1c|SFNStja zwvF8lqSqg2ZJO3Es$$JW&ngcG{8HnKp+CQ8?VHUI3^$NtmpX%{e|Q<9 z%1*?p^|<5R?HhJ8K*4;WVaoaOq0iHklas%fSkseJ=0uN5)+by2G1B~6%+X8Y3xC<} z7lB{oownaBVz@uk)U7iRp$}z6&BLG_ZdY7yQ^TqUCx1$GI?vGM8lS$^KE+ep64=#p z;e_DkW>!d;P?F+Cx78_vhZrNZ04^E%j6!mPMPad5BK2|eRJdXNu-88wSDCp$scCic zs^?=vhVQOwD3*ke(#^|6=NGKPt5NAN0(Wu2Njkbcs!zGP6dX>~R3RJV{c6h7^s{1l z6qA+EEh?(v``5`y0P*OwFWcUrBO`+)!b=;&)>O;S-;1!DMX2D>4hirV%*tN*DJb%T`u#~g?eaR(l z_7GuAr+rbfNW&BMU`9xXL$XZAJeDuMy$QvO|VkHo>xmI7L}nRS}r$9Ol^##>6^wOP1-X> z#U4{Z^eRs}>xPG{aBX|;xYYHg8Mirrr2_V9G_4l0;@6&8nc=raMX@Qxh+JnS z%{w6MzA5ARMx!Eacbg%%hu_f*VLbbuC5seAwk+5|qG&p#sbsX&(2h2b4ULJvYp35u z$5})d`MHGduP~j;rvdGWXW@FhbbTpSr!(NHt?FT?x@re?6}zg z2kc}r%2q#+OLR3YgwZjO=#r{W7IF0Mnvjr&z5I)i_CElvkR`n8;G|p-*9$d2AuOn# z;)l!8eam#Wn}y6SfnUH@$kv|;WES?namis45f{vh)=;H$-&M8nB|=z%uPZ}iW3Bg7 zg+9m}6VB>a`|H%Eo*#WX{>Ps#vgK06QTtx`OMKU@CcD|@0pO@++ZSB+pWjCHKd&y} z|B2a<{BLtKWHQ~E!WDDlnHgp@_wKSZ{xKodtZ7WNe`6_K7;(uDja6@&tX=xyH)H>Y zdHRx;Zn=(z?q=^dmPF6<$aN%6@B0G?l}cB4`Nqzs!?4Jw(k&|Saa3@X%_x+0qm{7xo#9YLT-Qjg7Kf5)z+vuoP+hGlyjS9GtQv6V_ zC!m;y^kB$mx&2Ag3^HMS3Yf*080m%JE!c~$!T|5IH)uA>oc$wbD~14#=xSSb ziJ`Y^=2h5kAP31ohUZN0o{IQ9<g@4tv>epQtl@YM}=-Fe?vU*3HZDulGjsvD;{ zpGBXa&D(KL&}0>H!(Y*VPINTv{dr#78h{9RTjNVoxoc4$QlvR}OnATfLz%%kV%i}_ zw1mtn><<|EtN37(LLVqGOYdI$kor&R@lfU9Mzhtjz@79s9 zVXU&f8-aO6*AJrc{(3Qyqf$Jl`l@v)8jCITm_*|?LN~y?xZBlR31jo|vGA)eS^9+j z(>Y`E{$6}z*2PrbUyI0c&92LjaMlesRS3R5gV2TvLxWQd$|vn@JXfloUy6aswU+x3 z!vuBZ%UsxybpU3Ub?O0CyJ<%fo78b?zq4b-zza2`{ z1-g+VxQC&-+@V0v=TtH_NBmqM{~SwVt=zF@{2BFEpueL?I68aec&k}d@}GwqG|2GE z8-NhDaY8~4dF#ovBE&>b0jG| zoS^9t8e~!~UXXh@AeB=yR(eIvWY^wvsiuWnD67TPE*XKAqI+&SnZ!2{oW9FMBm)!c zmKZn=PElCtY92Q7^5yW&max{_kHa_!?w;YekD%Wb-cz3|aY5?4Pk+W15JbFg}XgowWkl%Cn2I9 zqN+ZUruG;F&GY7U&MpU~r4O}jv1eNdP!0<6?|ZJnMFJ~Ss*ct7J*ELaRp<8bv~VkA zzuVRWOR8q=24L$-pz6tvneF^-X8YkG>OeHc=^D5ZG5#3L!SyF>Mz-P2&P}|0V7InS zj)nH!q3|bQ9%^)X)?r-%DD~KJH79A%dY?&Au>{0(D_DG`FE1TK;9EFgvQLc0UfeHh zcINWYT33w(O%>Qo_ILt48|+QAjo8t}`poEGWqorMLdJ7wzLgq^Z9Mk+~V*4!K}WWBKPQpq7G8#ni;*;h-?TIaiHn$mQR z{R518t3m^>Av|Xk2oPrZ_)vX5G{pBsALrqkNdK{2WB$igT6dxpbJAzO))~cZDBl6W zFTPBr;VP!#o}9ZXQmd5eCsBf?St)q&o>*=6b&+ zN7Ie6{_~g3#02qr)n`oon?d7QUsX0z1c?<)Z!>0PCTjMb+Cy*aw_&pSh9)@?CABjg z#skzDET;Pbe~8_@z)4uFr7my}v;a43l2)!y>c*(gvn8lgNI(2~;!4~%^~c!J=X9Fl z2l*hoPJumQ81PY`WKK;**V3EQr7jzb;l2lJUalr31!mZ{N)T0trrqz?c4iOe#p-9r zCk8DIzJfkTkqcfc^fF&+su^?WOsZN3*U7{F*UQU)EknNp@A`6A9R9}Aj~ch0SE;>H 
z%Q_cXuAb#KEYz9}^30Ad$qNBcoJA|{no9df&-bmq(@9sJ9<$MGfQQ<1v%EtIEXot&njhN~5Sl3Pqp*L{6vZWUR zY(7EjUawsYcy^Oh?BuYfUtQyX#Iw1VZJL%xTV}n(yz>$&#_AqZ-Gfx3V1$Iuv@Jn` z1go`=lYFL^0DuiIr%wU3UkSW9BBc zRe}WO8anUY3D;^Kgf3jcn%jc`(B4=|N*I6*MzGRJ}Fju_$L8?%BK}x$<)k zI1B$AXpDUSS~| z5!TmJVDtTMlTnq(^Ef!*>ddj%BejQfoc!9Lv52$}!QN*hZTu{Z*yQ@-*Za2r7Quu< zC#lc8Lbi*p*)S{G?A3^@PvDkMNf3XXux^ zgTrx=p4YRMdeR&c0Ke!6@aXgLv2lN3g6ps_xX1s)W3oJLDW+3A$%!P@w572(M^vGt zlgsb8YtJ;5+l<5Y3`^}_)p%%+JC#F1flG3&oj=RZ>l^P&Tb4A>GwtOTRUjv|darsi zOdvzESo!^XUS_9LDohQB52N1x#=_X5`xW$Yta<2m$}#z9X|-Tvi!kPxS6*K;eLu1g z(vA3v4ExZBs-1LG=TyvYQ;DJ`zepO?dugh%v?1iZP`?U=u(0RSF#@{3)z_@{CquQb zW;89D=GeG}uBMZ4RhSI8;we|1XMRFx@C`=ATdCPw|Gw~L;<8vYl}w*c*cs?te=|Ud z?Qo5?7|Mz#v9|GO(yqEWftV^Z#hqouU+t@@pD2~qB-xG5(N%6sVE}Xl_U>d#_VJ-f ztA1^!!ht5YCoX{sQaZTQ&MbMljVjCGPmB82AV>*MwJ@XlxAiyNpLI23L@rSqe8Q32 zUa!@*(r$fLhGc5&s_F`~j`I~2&*f(|XY!=8xwRPvdU4cV;ngSI6rR*@kw0jVTf5O* zB`?wuVeKr9nWFX+q}9p>?gj)$NNQ#J5*BXK3zap@iHUkZ>g?}b{l{|Mf7_2r&!vg{ zE6Bo&Ha=rqT$;ZieYfnS>B^}YL`6|J%N0Gpl37qoX1J198A1p(ukYCqtE`j_L&abd z^RA2I3>T=x8ahrIfQy@YzoU?flhtqz_aF9=+2L`*sWhC1UN675PRVG4o zfzQd+KNWI@E)u7Cg8&$nb@Vy@AV7dOJhfl*9@9^vt5MZUtP|)>KZ8W=#45^qI}AL= zVAcn+n;`5Bqf;)-cGGCv^dE3tEIW2hQA7)dtL)DRt`d$bYY3_QSn?2_(j1kNp|;z$ z2qOZBK&U3jk1voZx?tvlu%Q>eD(||I z0WWP{hM57Uno~iHPROW<#!dsK#L;y%RMA?mI|&)T^(6@FMTZH-lkH_g;5iy=m6eK~Ep%Y<%> z!>|H2L4XgkwjQ{$%{Dd2nbIhB$&zkZop zr69i$>+Gx7a-!3$9g}ga7s_E zCj-ohsGEdwsOcqz#G%7v?*D^tE9zIl$fk-O+0 z>8EWwj@@k;zi=uXD1i)p2Ho#DT)wYnvDfwMpzfjmd#aI5*VtYPysTw_7+b!Mq}`~G zt~O|!VuuqkL+Hdnbo;f#L4f6YYn?dkrjV6r<~<$1&&NS}d%Q1BMCaX9loCE%lHmaf z&&|Py&2}f;V+qJTubYxD&DWy|BB3ih92tGv7f#DfA@P?5F+Jjd%^$AAD^c8Y@lBO; zk7;l|!5C^mT#S!_Gvdj#B5s``UY+b*lRL8s8!A32U6ev9&Jw3n`J0Z$oF$Auh7I=R z9N)@F?sJ3wU^^2JgkMLal0vED#=jW+a?AMf8_UVZqrTqw(Z*4|l%rq26HAQ$#97Ex z{!E81QLTlXo?1r?VP({$iat_iW_(P+i`Au%3_TzNoNwWNnJUjVT@m0gE&Oz@k1Lf4~8@B7P?J5!BkJswSIiHj;Sh*{o{H86d6Q$dZlk3@%#`2D)i6SYk!|w0~=~ka7-2H73p0qPm zV#7_cny}Ekkl_@P`iHKe0Aiqhfk*gm!cOHXk~|gKXiQ2G4WAs3xE&OUr#jwv^5wK} z0zM)WK9kEyR#VT;;lQcoHgbn#ou+6#3wZFT^KQp!ACuTr7`q)`L>1gc6lh_Znf~`1 zrw&#PApT(GxwhMa3$^e9AC}o>rxn;@m50reZ}W&ak)NaC&78xS#a;MmV0*F!(9W;3 zV)PbZ+T$cpoSI#)6@-{e;E7;*OEp*=9T8I}w2$p+xklLc>B8|$SOm1Z*y`l>@@yL+ z)At*Tb}`+&G3?8!*2!R=J~0PNx^Z*d|u=M>bt zc6^K(tXBy*gy8dl@y9yZ*%5<0i;TSR(?f-}?(nLI+L6;oJsg;L$ZPo*Tf)D@L%N|; zUHgB=LzmY1um6mPxJ-_Nzh5o+VC+Z|uA}`&1%6|>3!k%N>bkZQ^f;FVExOIS3rSxf zA&|w}Y=5mAJ#3XBzgk3 zn9fQt9QxJ!!~5r(CG2MtX!t66v8ze?An2mIt1%}Ke0^zHZMS~>YjI%0-`dmZE||E6 z8+Tv$*N3y16)Lr-XUWqL$MzroM(da|lq)#pPBX1C__qy>E3~gI=^XfZ8n@mj*1qRcg5dCT*A)bWgMOMp&k%z9 zI4)F1^JjB@`S0IPd^OL?rDyFA=2Gex$haTQ*;%O5pU`0SR`xxIE!|+BSV-(Lj~$g% zu)IxECwl+*oL(3F1pd_^g@iesp6o#gH&qIvyF(c$t7l%(ZjnM2e@STW{zOTeW9EI& z7lYBsD&Fy=AgSwcR);bs7bF~7tjJoMq}WqZ1H<#}tJLH+F&5sItYWys*o^_ws9l0+kvIFN1Gw zNP=~ZD`4XgE=JGFNkW2M+$;UITCFq((;2CaJCWkwT|+9|c-|&CSg)8u3DH^XZY<$N zi>ow2XB?^)h^HJK{X%ZdQ_|*grZq9$EF7Dk7e}RHZY*>!T7f}h#W@GO>pTZ}w-*-* zzp=#C!O3D)&`Z%9R!`j^a6|p^rEdw#h|sT%^`rW;o#qCS^Alws8T+%cUy7KdO#8~O zduQ#_p29Eo>ZObl6BFwl#vEdtI;{S{D5!a#oz4`~^;-fMz?QO>>scj4b7?;U^h7S1 z?5b!x8+Y@TzbNwi5{!n>GW>M8vd>PTh&};x6wtfzE7@>MHRoR+Q=PoWq`LqAZI81Tmf9+xL<|9UTLQQp?g+S@69)Bkj&|D?CLw_&1PH-`6Wj^z5JGTwXe_ui-q1KnAV46{1P|^` zuBvC%eCC>yD9Wc z#KE-=wJ%HE%~xt3_a2i?{@Bp8lTKV6Fyt_%)Bg0x2ZR3E*3IG7(TtF$_d zeG^CS!bd;JA$;ZN9vz?Nz3R}@j$QD*>xD}&#kcN9Cy>)}x=Y{U{4o{53Kg0Ud0o5$ zFZ!)|SiYGer!Iye&5lp+f`=Hm_> zOo!E%Gx@der}v}BrKrq$6At^(Q+rj0)=`YfCpfFG$J1!4c|z}a+F)2(_A`SN#e{s| z&Jn4IE@1S4E`jNu8;ALtZ&d0}8r@n*vylPpUG4a!A3f|?z0EqrPDbcO=Lt*FZqIS} zdxOG_0P$Wx0L};>G5`xc=YFtL=8o-7I2eW%2d{iqr%-_8#c!dG7BY99F9Rmmg5>_V 
zMARs^{yp~m7a01VhJWBw#?`k}=LjuQ0eMHqm>s`CA?T$~RJ?+bSc6vYq?RI`y?JPk zCxeVYLR=Y8rps*RW0vB}U0axm1B@TI4v$}Tf`$%jjcT{fWTpvVr*u{PgbmPUKC)NC z6Tq4!&cEUM6qX`O?Pvt(oc8tAE#)@E{F*!+{|-=S4~m820Jma7iRoOb<*| z9QhWcycuw3>rX7H&9h_OQmJPG>Y|HkM|4tLCPjka8{%eQq_Z#wNb{;{RVsuWXS0BG zthJQFyL?6j&%~>XCwT1b7+ZfZ_pH}Sv?~Q&2^q==t40J(?%{5cM;8XG=}2mA_t>wU zkpMdeF?`z{+7cIn#5gA8O`<`H|gp=`IUs6 zxETzY-L<2dK{HEQDe~WFZ#~z~pcg-mOsruW)k!U7Gv`x!?k~gfWnCKEyP=cOa&&6L z0-hN&^HO!+--5gp(9@TFRL{wD&WZBF2dP~Oxo-D6=h!{@ndwSD!jMwIhw*QhaKDVd zR1C#-Hl_YN(A{VDcmF9%etT28b)9nIxTxipv6#AF9dawESiGY!P3nY<70jMQ1i7j6 zt8wxI0np}nVHXuI$YmmdGAY1LJm~^b^JvX^sJjFCKJZgov{6HWb>y-%t?$0a6j{L2 z{RzkM1FXGErO7fqUQF@2t&1!jG$04RTq1kjo+RC~9ZUy;+xkL}ck&z_T!2?q5*9wH zA)id#43uDIOm(tGk!y|>Lp;8nI@;A0WoN-SChyRv{$h4lb%_GI+w2P)O&s0gcl`)d z((+16t=gXBRXc7r9=wLyP^_gUoces4`;8WUC8*p|p-i-Vi&I!WcGa(DnMkQM*`i_O zTI3cqu7)%JIa=s=Pvv0mIz;xdsOAx<(0j-^ui-co$~nNqY-RcW6+a)={*N_R-Voja z_?V8p2X#-c%&nvBdrbbmAd^bGqDh3q#Z9fJ`&e;)h%g*n_d0nYQ$%KZdI_r_6TBA9 z?=?r6zvW?S20*pI68=jI>?jg?rq2J&PC?G#`5T#$D!oo#3|NL68g@Y69}8jHA475T zFqJ;GC|<1t9_!X7+{aD%v6rNG2dhjHHCz&3d~&!nUab3rTjJClM!a8lfKH8Po9rSiyHoc+EK;#V@jOXwP)Hr|hkgu`74j%U)H( zOOu)klp~v#)p^HH`Sk*gS+am6{hbF3viLQ!IO}B?u`$*f4TgmZT)GY)7oJsfWE65~9iIvqvw&skeXo|+FQtp+^^wDYs$nFt*t?GCtDz?k*r4H-7_ z31VtoT~n!(tFlX1<>!X|AY|ReNd%GdKr}b!KzKKpqTOh4k-o^trq#wQH8nPlfA3`x zOWvC^m(UX<3v(*?tzhR8t`Ve9S3NVad`gkB)WDszM;X|)H`G^trSvT>tU&vpC#hKI_EFArX3;9|sr%C*Bmd z9aRaRzj+r-Ks)Hm@?}Xr7lQkDrTH+Qj1B;qk2_{<_F3j!2fDPwl4INUUzqp}IArx&nsJHP*jv1vFe5CGUK= z;w7+78{U0o-~VBpHP1ae>C|_u+Z7NzVL?>O&YQquhX|BX8ir>OjrA^J^0XN>&G!;S z*=nZ01h0+w-%-sEkn7)<8sR{;CbLP7?b|rBq0rvEj5H2pMdpW12#G0ZhAa=XO1}r* zw{^C)oeuYL&@rOw{k>p zLvBQi7Q-gYXCg*h;!L$UOR6m%M-(TH$~{TZ>b(21m*8qVj~u_IuYY_imF4wl7V0?B zkgl--t<}&=H%|)=8+pt}x9656`-0Lk3M!i2v&pgek<*Nnq&q>yLsz#7;Qa(1oVHu) z%ZqX;*-x<;7Z5qm+BE?N2W<0+5jKTCH6Rqq0e->1eolUn80o?R?#TY|##Q7%HT+DZl3&ed_)}L%%fxW7 zKU!J`%oed7kB`rqXpkHCddE;xgz$v!Es}kG6x`Ujiq5?p$<<^F#?AG-FV`FyUXF{u zIo1VrA`Qfp#yQOkXGKWEz+JD{L`lTywij7@eCy>_M*2OFrggh^yn$LQu-2Ip?wvGI=KYI63-`?0 zlnh0c%S!oFWYUVk>!>f)C-JbHhf`7gUf^XC{?D2_8|53bRgOg&eI%=-&BW%MIq~xv z-3F@9qvUQ72HlgvhlBGq23MIMIvKl9=t75r}#c@?pb)U$-w&Qd2acF(Ck?jyEn_ z%$bPW?X!>PmcB;@H?pIs*$M^Ozv4_O^(?Fab|@V_hp;>gY3!=ym03|hEkKf(4fj5! 
zGX+|e75hBB)AUg>y5E(%RbcWnb(i>@%=|G(^{}h;{l?dCd7kC}v_ktgp!a`dhoZ8| z$g|KLr$f&5FnYTDggf` zv-OO9g;B6gle?o1O2r}lx!o7SX1*h;G1gbi-Xh8eI--TTggUCDu=-T{p7bl&+o%cu!Ly(~r(5P~GO2OWAHCf4u*cY$H~o=YhSA~R84(v$ z4X(2q>r`#^qA)x?LA2NY%7Hehb*Pe=1~AByVGNj)qM;9?nE=I}3unzG*IRQkuiKih zlBz*01-BJ0>(`wkt{%NNV5I)HTjD>327vOC%)X=XPQLBCHv2=r&M)?3j;iKIua9i% z)a)An@aF#S%k}=JX7|qzR*f*W4qKY6F|OP_r!S4jH>H=l;FN%x%f_Cb@V?B15rP3P z;^dhmjhg5jqaC})mI(#or{&$B-0Ni-{dK>h>#j{afKPD>-KMwK=MJCR zMaKo!BY>5G1xHLJVzmG6)7+Vblh}eKE1`5fGkdOOlxsN6I?Kz-Ez4`^N4ICy5<9oe zcjSivpFcwgu~Zs7{)|%N;^lPD3<3V(=r9VXJNS)u25PkZyAQ2fpvjRf(cHcN`I9c$ zl_j1f2A5cpohTHczn-boQS0u$yf%D*eY5!?*Q`Kd(LgRr_*-jOUD&2P zq2C{Hf&MHN!I2!QZ!>LW3g?S&rccQxS6=Fm_wWDpI{)9`VnbiCHO~({?LX>Xwe^|v z`GoGO&Cz9(-VS#17~h*4P&dsiawm+QzKkL>ckJp#cT6Tq@JF;Cj?}aI{BUb8v+(cI z`i)lPTI|dH;6NX<&p`d^-R7#l)+%PP;*pr~W^`@{G&FLk!7QD|!*L!=C&;A)NMdMF zj+o7g2(+q*b%AC0etNvU??iF;sm6CwLSs6s&B9Hf=cLN$&uu;iT^~QNFY-l>!&!K; zCq%0_>gO}Y*ofvmtEV}*XNwO6SXT?4!?+&IR>t#d4$Pk5_bNfY5TjR6JBA6cvJ46% zMVf)0hkK9fH!5yoW*0w z@o@fB?@n>{kgqdoMpNB3FTbD!A|U*#h`~)A(bm5SE2Z|cifcW(Ex2q1jQa5VMHy@r z01s3OUyh9={4n3!p$o@Ih*p)s?m>bO(;gU(-{KZ?a+5tk)fY98`=16)K&nD9=xTF0 z(P2{8jsbxp&CEZ`!7S8*XT2BL__w|Fh$AFgoz9#x>sBUvvOH;BlqSWRSy5Cp6sL>$ zZ5X-cWT>_^u{&dR_!-B(+sJ^WfB1wUV^OE8pozXUz6eN_2?vb4#J;8+#q@gap*(oe zh;kjwf9Y*3`_Trat+no`Gx@k-Mk-Vclg#b=H>gTu^`L+sJAc_B{QDoqgP-w$uA-2( zuX@0f@VsMr6qjdvQ5)rJ#snet{2>5ZSRF=tK}M%_fo;2D@Xb5XY}p@z%{TVahpS+Q zqI$r{9WB0bvKMm?>%pQe{WL`#ksuK?#kh+j#v;4*yiJ`?K--bqOSA90r8ZZ~z*ZQk zhYYQCWAq#atcZgI%u0~Ld8c;|#97-W>UalI8ttCHADz@)O|d&_ z7@{!K|ME*}DYl)o9(#wsC3du}Id;b`4+qeN_LTiQNW?E`{w9O{hUrr3d{wzNsEsVy z7PfrOd)&JN*kSo`ONw;R^rsM0qB|rH92v)($O(A%{rf*y694aV;r}7$7ewM;Xd9od zDzBKf!@(7yZ0u!gUO&IRHLvD@v}rH1{b(Yy>m}_DLt#6xDHcFkrmVy;gMGS`|F6g` ze=LMi#Q{nPFOFh(kp7JW)zrS`s4>Gg7o|<=Ub|vwBKCYktgeq@w*vutp(l1kcsK?u z=j$cM^r<6Za*JF2qmkrE_cT+uCzJgQsF35FH{On!k+aC0vd0qg8I*F6Y@NOA{m;T){g!+KTcf+S1yxW*t_A#W|KD*K~!2R!f@F{p6VT8=@x? zR@d|jwG0j0#@V(?q!ZsP-A-3DHS01bB5i@0Ya-%8BsDx1z=r678paO-bB>Qbg;CZW z3w&Ei6bqpm7o~mEW>cBei7ZLz8(?RunVb-HBBvi7muKf$yon$0!eh+;h{6a!vFKEsN_? zd4~#sEhg_o&9it(EMcb#P&e6`2+rw-iJ+BbgdGv9{XjXgm;4^Nn^;`84(w4y8GTfO`{a|tE_^^Q!QQeO4jrn6`3X2!rJ%t?qljlUQAC-34I$}8gt@jc#;(T z1=1HvN&UA-+~COLut2~mEmn`EXhSq#_y=GrT^2vuh5d@*!<6IO2?lwy8Vg=7tqx^S zZdfIeR_j(SknRIWYWe_*vI7qjwR))e)^gj-Dim>6j5f z=rMjPmSuRNMWoIpdMK`7@%u@f3;Y9f5UaE{ZCUrOD- ze(P@Yr^A$RKJmELY!0ZMqByb@B*nj2IP;ToNsgHCU%50uR|BRVuHCp#bH&gFuAKNE zI%o7%R6#7yZA?mTv74gjqzIx^-_P zA(>~J@ys)HmeIU5RUWJ@qU{+HdDs8gY&h>Z=T{1f6?COTFQRB`c;Bz*uyNt6Cpl|H zxeSbMNtpX|viG33;%4tcuKr?B+?*S!c|}?C+V5S>0*Y~Eghxbc&(fD*(^+_MXsjk>BH##1iy1hZ4gI8mG^pZoM+UFTaxSP9R5wpFz>2~oq;j>$99 zxHPlzOQ={s*E7X%0Uw{|@A85-Rf&0iSfJn=k?N37Q!iv)+a~SU6~_#5Nmg*lCQ`C0 zHyz@~R(;rwO!$hHrA1aDdL0}{Zpm5483Atmg%$78Zw77~BYEUZyJY%qNz1Oyvm7-B zje`YT(K?fTxSRs4Z-H+ggqveJ17+6EMSfW;aFZ|Mv~BT3yw7b~;oowoVX=D#e4AeY z^<@Jt6S~t_SK7=jO*xRiIVGxbm06nStv)6l@7y($2a=Zy2xq(y^xS+%|4d&l!j5Q{30luHO}QV6-8=@z(_}?I7QG`4*K=$Rb3w z5<)bQwg*}XOxagBe!TZ@CWx``7Eu`yh*;@xAIiymG=7od?*As{77oURJ5QDXyCHIA{sM@Hq45n)2h zkSvVG09k%!8ji?#m8k0S0F-+icFQvDGA3W7>>j|0vJ$X;_#<`AsNz>%0)ek~ELrz9 zePm3VoQIH?tw?aZ<7gK}3d8aG8f&7qf%Lw=igvci>xMyYGHAWp{>|$ToqU+DD-F0R z+=?wDX&_`N2hkt`E5Oa{77La^_lgpTI6)}0E7snVJZZh1|8-kw<-}Um`c<{ynWpY? 
z0#?LmaM9il&npJf(2OKk!>$1IzUw}@`SyU^=eHpZLqb*}31dL=+QGcl>XVVKSba9A zrI@y8h=;V;qcOP|l3E}_DH-G_^K(yvhl2`K(j{}41azHDbR_OC!bNYi@Gy7|5;{ZT z&1jB}^*z#Bas-R(`h;%Ky-;SwXK|-=j~U<3b+uc+a!XB1S*kH$@AMtrE*s#23r1IT$!Vu}f!t(g9Z5U_BAS%HT2QN3mgOm;dJS8fkK!D4e2EXw zf8h;BiuRc6N<=i&M>Lo1&s33aogd zM?$V%Z9mDEoee-kdoYaF7)P~SMzbQNUJIGH6B?*Z>#<`aHS);I$qtU8EN|m$i6$0( z7V7(GPspHmnM`jL@p@pZ>>*lM)G^@?m&buFwmGD#p)OQ0!{)+;>Mlp!6lN95eASxO zqTETk!_fxopG4GYgXazRCp}))wUv<*iV%$_59XlM)DBiX!bbeEn`R4aIo#g8HDnC% zew7h%i&5X>CR>XuzVy75@y*S`tMg}+{Acmw8zXSJ0+>dJlA>AttnpEcv%Ps{Q9dXT z(To!7qJAp0-;s7`-_1@Su*+_*9e*3gs~sDMM=`_8%_|mZdq(tKc1q79SvpdzowY>3 z+f>`Sd%tp2``z&k8g3#H!*MqL=60hN7O$w#n0)=}Jx+x(J99M)g@m|u-d%QoK# z$H1GXN%X{O00!sSN}Vqnp9mF|UY})Ej@X91R+R=;^SGde;SCFGm@r3m47piR%z3F1 z+5XS5P5)9_KjMnrz7KSX_%tzFwWl)mT?1Ll2d_b42ix_Q7Wl%?lMO2y1v~e_w;d3LOK|ioT_LDBr>`p-mx*Hy4f~Tb&N_Xy)x^-Y+;2xuJ#)^+H zT;&hf=)#{jaz{)}1V5z{K^N23I^c-f%-D=t@5)$2Jb36swh=K)7R#3!>N)=Wy9x&K zH=1QCqWKDZc*-)R{sCboN(ZFWkIj(t|;yqM)97{?x6aX_@^FP4X0_G3K8 z1CX}iGCxYr2!rG9qZ_ivO;`lHO%MC*-TEC^ZLht2UcVr3NNE%TBP|{~A+~if0a5w! zDJQ$JYu9;ve9C+ddBOv#-5lyehU6j`(vNB|`6kHMczGN@tVkk@cBsb%PdTV7@3=;9 zRU|$%txVQ?^}2@LV5l1LYJ>Cb%&?S@X#b%v;@3c6ul4Vo*{Qocesw{aL!UD^w(-te1Ja<8`T9 z)f?3(2N2I6$AnU{KNP0+K9z~Gjcf3{6Aa98e21(KFiv>JopdUfX$XSvFU0vAz~(Pcp&7S~|)VG5lpng6PlFA?)V0Yh^Lb-1Z_M z?BS`8ukRj%ex{|eeK^;X7EW+S{ez~pYRZ$ow#o0)XQSD&<{cKOWv%CX^EQohyBx!n zZ_u8HPb#uzn|nzD_1@a}&gpV)ql=L7d4RsQF8SDy#sz*eb&_?H z43B>U)Y*pGZR-WzBJN(`BBbH&91ek6`Vhydqjk^Fpzi6nK(r>$vUw@WjocG4)! zn@24L%o}@`#f?0R`R<-jzUbLWC%;8s3ne`;PIf5QSb3OkXP$hAul$-UqVuWe_HVS# z_0_#VME09ySWw%Kom4z@|C}fDyU(!30ZV9fR`ZQP37s;%4W<>e3u1nHDh#rAy&w*hna@ov002W)bPL_<*y$Yk-nF#jcQ zbrYlDi>XiYz^@_{kS@zEnYGvSzRodmACCkwyPI$#nzDMg3R({n8)W?)p|#1h136+0i% zCoSkvDyRB)1y^!$BVOYjbZ4V2gi5hPQ4n*hue?p%GA#o*kLw}FwY2$aveJ5IiFIAc zc97zwsRde)jQUcIw=k<)b$p3mbRWSP^he@1Xeo{}2NE0%N$n$zI7`Ma{4okU06a2u zB((0X(De3KFN#B0B1$91&js{ssMEf{O80+UYDhk^Y<}<)(a`3Ks~q2M$B|Cjr`%xe zd0f=oH9Py^IENy)CB5`>1F~0K$B{s9Z%gDL!{2C`b8^;#+X>UgHUPAi6Rhoh|y+XvEm$P^{grAM{cRm8MBTH}hD<@X27x0oR^9(t;D;8&D87(z-jXFs)v3{cgPRSyf zuW_|86rnQ51g53JpChJ^cJIZMi{gJ-v+{=()w|&tGLty_B;uzm43Y$X3tn=^UL1bc zJ7V~wS<<<`jI$ZXmTQ)!?lz(`-(k+w);GHMbd%9_MQdXVVRUS570tqO9clNZ=c+g} zxBXmgZ8{E2qHRRMj@_D;wMeLFw`$7v20LwOA-Mt32C5*7V>KHwhkN%IEh`uB=?3?> zx-0u?Puc9yR5e#Qo*jR#3G5_AzoxiZ@!eWAqg7W`pUfJ@mi!sfI%P}J_-V>`1s}=W zV2KMfA|X5dIq0zNxBw0yl2QF(CMaFK077}if2aw;Ag#}J(<(ouTppqg^tcvKH@ogL z8mQyYeK8TiK2D0*dc&^J;R;C7Yp-gvX&BRy$A3C4G!S~gPIfkTW~2dV;iGqx7SbJ~ z&qcjXnOrH7n}Pk*#p|K#V3Zo}Vb7y?NmhflpF4c=|k36}RMlYRSGr#Z$=f_b#+ znn(u`%68MJC^K&XCN0-Gi!DV8YHfZIg_ONLsEcdapByuCtFygxSCRgLw9Q5NqNX@j zYO*=x^S$iC^VMkF^YRV1ar@*bB>&dj+5K$g*yuM}1OIQdb5&hwbqHligW@*05vWpV z*}!b+ItydjeC!8j1lq!Gj{-CD*qEBijW-20bfB?(|!)9D6ukEyPV2KDIb9*;7+= zv3v^@+RpX^FK0qGD9Wr>o7(y040VsrV|L*r5WC4A$er>#%T`5>+U4T-R2a60th{4k zGzryHVDSElrJHHjGEsu#w(ce(HQdxe7n!#HtgXM;~V6RLB9*L8}Y` zOEszw=*kiYlX8DvcRA1{Q@wqHyQI_PoLDBxH6g}7@n8cd z4}PQhaHR`3Cm1Lf-8mFinXtzukoj3yktQkB8u0SGxrFUGvpb6i@Ec-$cRtSp;2K3q zD6nhlvFr1_cb@>uS#i{~=}gAgdD4J4h{YNrKmad(h*;7*WJ&7&$Yj2-L-)Gt-ZAaZ z5UPP{oNXY!v~$tEclnK$od1{jU%HkKAK%TUXn#!kxUv0bxHCb65^11&bl$*cX_n#`O>-%R$Z=2U5O{wK| z<$19gYl+`f`x9z-nw5o{64tIQbvReeSwZqJ&wPa7)KTYc$n#lU3F530;q;&9iI^%F zb%q*&zuvbN1;^P=iln-bwPP!0AF zM#`e@DDkY`ZPK0}TXSY{BMlGvMayh)Y|mauQY-~z8K^$<7WhzWSUSnXqv+tip)S^$ z7bDT!#%}S+o{9$vcn(3&;Gip#&Phyy3J8$Vjr#f-$<8A&+MQDw``iwMdk*^JXFM9X z$zbncr?6LYZ!ZE*WLlLb?f=MYiQ23!8DN@Wy*Gjh=ct za3~qmNn4=rcgd}V)Oo_B$dG+*%lKO}KMq+@AyzZUuDW`>Q697qB6dx~=Tyi8mtg+c zIfMQL^!X8f6TB=4N>uuji5Fhfci5^I^zQ-Ujo`DGF~cF 
zM$0gdq`>G4%MK;mckTi@z5%;%&wtFv2h6DJ`qT|;DJv>$^%D8Ak9WAa4i_433uH)(hzS{wHn-9o#yYOU&QFCgQ3`vP@j zI_Rp!Ck8gf9MzRN$ZZE2&E6gw`wHUGundwKyUsn4i#d{_WTjIUcB@Jt{iTkJa9>P` zc6=?n&U0hC2i{+*U=gEY=jY@rG{b-cqfcPuwWQL-v^a%pd(O|@3LnGiwb%uWUCl!M zrN&_7lXf51;K(=VS?g}uArZvG*R1)Y{SN~=#QJVoz~cIOQT2yCvkIGns8_8|sA5x? z8HB)VtF4AX$(^L@p6JSdDBr<+bS<_^o0(lZfAS1#-bxu+El*qO;5N4(o7OOLQUS$W z3g*TBgryrf-(Kll9JFR1IosIVeKW-mIwSp7LZF@`H-{@5H9$K%c9R#l@G!l|(`#fl z8G3TG$1RxKqj#}D9Z;D^ips&>B0e$ti3!SJDVA*$vjBXjqulk5H()zpbQ@~?!0rO^ zc55vsE0_m2P;LN0A0~rG=ZYOX%Ga{UVx1)rI%7AS&@_0r2GTUI8g*$=bk2p3Nvoq= zh9a?}Dn8i>0re&m?(3~m>#plK6qQQ&h3w}v^w+9Jgg+-5^}~PsT7!Ic(B5opGS8QdHkYG{mJa3mQ&Or7}>>x>}Xo{ zE)NI57b*BE1Ar0Ql^H{1shUgWTqi%)oY*Y)Y^gutO4ZFz9znA_$Vq4N;^3@jwH;~T zaW$lhzQ^8@kCzv}IAK4awKJYKdXl0~N?xRMwlUOPb^I$y`nF@i_zaZD+XpNWZAMrl z+?z-$?{+*<7j3R3=llUfMv#iRqdhIgo#UkPOJQ-ni-Tbu&S9{TM6D^x7h={x^uoZ{ zAb!$#pD1x#{bF4JVVGkQLi*^Q#_*MjeyUd*~=)9uE=@Xok#BQ zXqB|)lfq4d)xpIbMlOU*(W!Xa@y1JLDX^KDZci$OLX75N!#| zKB5G&8fJ`Y;Y`GpWN*@?1Qev#@OV81FIb~d+H*(p#f0orOzqOz3&TDm%SPVe25QNT zQD`%Zd)mEvAC)cjg^qNSlk#sO z3vG2`WNP__Bk&uV-&Cx^JbpEq4~T2V=m>ARM-$dblQ9?d&8LvC@v+f zlI;+9c|mNxSIMncefr=>D0UdLuy9?KO9j;D?X*LKpTNw3jb-_NNMr25-%r{_e-#z6 z`(NWXXfNt`diT-xv+!xYKPXpQC>C(E%g}Tss@TSuddwk#jEUxA(cJZ2srP>@I zR4MRA@McCl?^=3ycx*Ij&8A>s`YI*&=CM_9sFk&B6NzttzjE60FH13)3;i*!z#=Zx zAQmu(wcm6|TeVBYK}mJ;t3&2uqCKHqbkSG5`fm};QC*N2>9-&?De=qmW8Wzu8B+J9 z$%|9Jqnok6MH7hkOC}DNUbuR{(T-7o7-zqhlY^nWuhhV0+pg;0BAs>o&we%YYhvcV z(Yiu@HFK#q{w_$u*DGlT~|%Ij}MvrB%J_r%cKY)tBNcDet5{FYexw zn`nrzD*ZZ``EDpFjw#SnpR%Re^M-M$>>%r2&3S0g+2#`8mw5XkYs`ql6P&5Vzh&f? z>6OG1(&1JNp%oKJK}VoEj)Vnd4(S|Kkeu7_gi&kwzC?uqpLk_J4RdCWCHo-!^MM;- zbl2LIlvmQ|81;ko94`(~(o?jTm>Pdqnf_Dp#Qy;Q`#*&b`;W`&qNa|nQN}!zW|Mf< zKb(sG(mVDa_CEU0IL1HDj)vVzDr&|>QL85Tu2on;&YQTou?vMqObm*Do-i*4q&{N9n)!snw6q48hnH5A_H+RtPKF|6QRddIDzk0l57CPJvSRaPd`tB2p-6QOzvt}aa` zal_*M&KF>73kR+uA1h-w5aud535bFjl!05f;NpV3n~=0v0>heGWZiwauU6Gz>3RO2 zQ5gwmvhEF^1he?(PQSEq-yQ@3?Ll)16(QU0Zlmp=(Qc>W#>9PuYOff2_^nvMb&M;M z?EFVPaTfi{DetjAqp9$Of#N&T^jL39k_#X8)BnHUWje#**}KTzQ0~s8a=-gFxeXal|3vtR5cr! 
zC_wYYZ|Xd7B{$vA11k1sW(F54zROgEpRJ~Z`0Wr ze>c3n%}5?2gPZQSBCaN91US1Ab);fahcIIC2`Xx;Y^_gbqqIO&N&wL-sDiZTE6pFU z4l$KnTB84*zaCVl@So}0{=+tde>xTX>vO)i>f3plL}{H7ozHQ&zqzWk5>9{W?YYa@ zVGY+c*1NRB$|fv|PMg%;t8E65YDVx&0{V;*ZqcV@=3s6erA^=b9;#1|mo8Bqnntm2 z61nq7_=KYkHTnnCRpX3xftro)m4KJ(`@dfzhxNNo}yi?W3=b+Nwc z42ES7qaB#oeDk7y|GJr%Z-#2id;5X_rM;}hl_`<&1f_Rz6vVUk zF>@CsD{}u@t@F$Mr`NYEPOZdW`W83HId8Q;67r0!r58-xjvPh3rlDEj{$JGk#UMPr zRdl{3GHGWuYBSk3#YC-lePNII2OBwzsebkSA*xumlE;<4T=ho=EHpGXPuE-jj~gSv z?Jwe+WHF^zu)#&Ay2fNptvMzWeO0oWLtnNeVo`@?Nm~E9X_|+SncAe4mF!qe0PJ}#@EgSG?x0rO( z>$5W=8ndtBrZ-o|%F`cnuhPWz`6g?XIUH_I!>DUioRyQ0>rQS6wsTywk#2vQXRH}j6y??V6%b#Ul_gUhFVmeFHRQrEZI7AisQpbvC~7hRuTVk0 z(7I34*XlB4u1o7=PiAAl8n${y;t+kQYYPo)pGkhd(bc<^6AAZOtXlOkLWsKzdoT;x zd_auM9%si257Dld1hdSdge6tt?Lt$@$ZE|Wfac4A9wK{_5rZwdP~p`@oN=Xkk@iB~ zF^()~8`Wbrq8V!Y1RtouA}hsnEtkV?k{L(iZ6PV6`fId#;D$o(I$_38I;pLUQngLd zecExA0eFg38FLaFyPLQ7(#JfvT+z8StdunnRm5-2l6Fz*qLFT6teP!>HPiBTAoMx4 zOWh~4g@KF(kg#A#lK5m%Q>j$7+mVcLlPu*YH}pa}G=)%&6u>t?PlQ|#e_A$F+8AaY z3Vk>H%ZgooJmRUl_)A9AIVJncQy5Jy%ovhRsmf0Cv)52l4(8tCUyL`}jrS8B=R4aq zQp(EjYh=x@TFtF_&?)DxZ*YR|49XXsy#x3UWmV)&zOqC)cBzCBz=4o_>mLnX7g07o zPM#Br&&RnJDqq_`5t188N$D*cb0IAuhIF9?&V)>4#U@^S0Xl10DHqCsfrFNP)AJSORe{=>E!C7M#WzfLz zjHHxrv`^gtYh&ZV$e8Ps;C7r+pUcH!VPgu&JL43?@Mk0 z?$i+L39bIq9#wSJcc%!(Y{2RWN}|YOO2Zv%;gEs2P*|()c>=J59KM^Hb;bd|rod0x zlxAIg{!)sOQ|-BQzIjz0+q9jdHf>5Z4-rk4*1B~raT_{fa-rNFd<;s5my}hHYK_bH zk`2)=15@^s(8!`yexlVBhqPCU5WUWVlV>y3X(AnT8tfoNm8F@#D%$Xwe;BU#S4O%T zQ@aPp;DfVGQ+VbMJ?mfd`Dm}b7@l9xK~JuMgjB{j8?0l9>nPUvJlHDV5E8>MJSWUa zi+X>f*xr;1w2F-Yj9rxxv+xVHsM`mGj?OXf$RK}i{ZNe?f3bi4g!je z{{GCDPT}j$TGLKbnu0 zxi7CnuJI+V`57H%YKEQQ$~Gnn9JWtA;+<*`d1x zi71DbRF#E*T^_N$0Q>%S@Zx>cZLw(w#Lywd&3VSEo8`UU8E9gHt5WbussXhdSM4vT zOE~UzUt8F`_zIO~ZTM*F`=3`nO*+$OovUo;JmV%M;|H~{vG$d%S3s;lFS9vt3WOd> zRAWi!;?#PlOH68FQL84c{be)$DR%3?st}D^>?j4Ia5FkyUhe_B>1M>?I=k)eI9o+5 zZR4gmLtU9`ba-xhQtj9-Ec8<3=P`wH`+*?8hk3xEd0of!#7O@pU!{PvZ0soc=#xi8 z-z6!Y<@xsz=nSwe!4p)g^PJwZmpF{Tu5$ zg4qqt?FFE8KA6msl}wr6TDM2vIsWxp^B@c($`ssG*cRCuJLE>m2ApcA)(*BQTGpuM zdB#pcR3G|92aW$q$E?~T>8qL5$`XE-A~NIX_9{n)#Xg_tsIOl=`qkYM_|Tc+>1Hqw z;xs)@LHL2=N>xbsy960mC`+LFJ`wef50SiOv-;>qg#aJHqKsl+Cu^o@knF6zu>C$F z(knZO(6?$QUbCH{aKJG^s5c`lfKXSpziY zHYS;`Ee#mp?Zyh+%kxD-nrO;M=z=Z*l$}T?ixALba-JscPNzB1uaT-ki$haI7rH2& zCcc)AjQB-|`j;XNHS8(~=fYAwfftS|`Cp3ajg$}87L)=>5Gt?3mTMpUDs=%q@dV00 z?q_$#!P$9twPLR9*}hWzwv*Oi!jT5}PC~#{<##K1Yh6Fe zKfSKPQNof3&7L>M*U-vW&i4~PJ%xk^z}s46p1m$h3k!qSkOi!Yvg3f5lRCS-1&N{> z97F_PfTsg+di`r`RlGc_#%At8U#4`m7cr{Q zQb%usQ`eJ4rl;iD&AHyTNxTmZR$<*j#w>< zefNtpVOx0ECRgUDl@>{yuGsU8WtB!*LpiTX4VRj3&TuBnc0-XjcZtga>eHvPpM8vG z7vjLGPjQKq`^7}nx~P&`0gV3398UWG*Tw5kqbL9BpZ4Fsk#6f1a}*ZGL@SDkX53`L zJJ%n65+kKcq%jhxp+`r^Zpo2lT9$gea}99W*g4Ja{Yua~+>pPTyI|DA%5N-|BeNDU z8X1n&W$(RuD$hlHcy}I-$4yc<4SF0ODybc+pROP(0L;!)o@1Yhw9`T+iS5&V57Fcc zQ)GtQS41di4ehGP$unlGaq0&7FY{v({V!g2MvhYl;i*+G1MRksI+j-SZ0K~n%~_jl z2X$3D7wqnUSVu!MPDd2cG1MPi%R^N1`epr=dbzjTDF>*wRDR^`ZDBneAJyX3_qXEY z{*mf((0jond+6^r@?LeVIL}<+!=IP74HiGDy4`J0l~>x}YIYf)an>pU{_z%NQn^%Q zdIFcU00m_PXLALap|C}<_>X+Qy_xS}mw86Z{z}}jamd>3qud|)gk|a6ru|K8Z;xuf z7B9c*Q}tL#t^AqqM&_X2tG+nK#1O*HSi`Ac?qt64)F1g4$XvIqsG^qnRE>UCVoyeu zE1He%p?KV@oTPnq$Z2v*l4Cy*pdOGQn`&IrQUesAgDKfL1>iNyC&j#XNKh+tn7o~W zf?Y&4-1xLXeZ+)+7^x7}@!b9pGm1m<>8N;@#Tb4~TMyb4HDZ%f_4OH3P8Qa`@5{_M zzcs-Op^C#qFV$-u&EQKvVNts{vK2NK7CP-t0gg>*y%mocxJZ!P$n0==G;vwpvRR?= zJTzLyP)fW@Mh5>MwT1ri+U9S1dRUaue_b{HsZ=kKVh08NYk1aDJB(f#xIRtmumrz4Pe2kiqep9O*Wt(fs#0sPRMQWk<#s zyV0d2>fvfVre^oo@tz8HXA4-~&mW3V=A%Ac5q~nH_}1|Rr2zShY(bmwH7xrrcS?HX z{;xPtU{UZT#?9ym*oo!fq#iduK95~GY~57LVEK=yktmv6RwBzePO3HnMyu&l>DAn) 
zzqM}aMGTVUzYI*xg?XQfdPYm|L>v!Kca_XeYp40wq%8M9Y==60t!z2WH=Fd*QPwnxB*iPMAP7Nr8dSW1so zL9#T%TaS{MKG@Y8`5nUVGb_%0^{Tw?NcTwU`mhm3L{ZH+2(HWo{uQT0`8it9(whf& zzA}8FJf`ORJ6a9`mC}YlS3^3i8hPxvcc!q5-EduCg7}>Yd9pAPlPO&V{b3dOGA6KMW!9!-Ux~D(clw zzSBzf(EZ;pV^ft&{%5+@&Hp?mu3YW@+1^_+DlWWXAfuRi1CkZ|JfLj5_#pxe@VT*z z$9E4;zk+IqlQuMDYU(x|h2?ym~HHrWnr_#T-k@yUY`S)Icw>9>ecP3hA zglVsukv;r#<&}d2m^q0W-+2d*dI7}y@1;IocjIclzuitHPL06!>u*gG-o?P8QprE< zDrEm={nPA~?S3{^ig?Z}HLr39pbBENnZdI;9*qwEI^+NJeRQU8uG2APBnijTa?zFo zgs1oz<(U&DN@zev7Cl6iaGS@UCvzD$qg&>+|D7=5ye~Rj%&Dl4_2Zi)eH9v{PSL2{+K@VGolOT{TybAqud1x{`n%(p&mgTp@7#*_HU}36;xBHp z++R?5zDM+0bRwr_uZ_FPaYWT1zi3%X%E?D5rE(MnhdP)-Oi4m{+WQYe-ZAvC{xA8H ztetu_E+adP^QBWqN--y#Ztc3T=bhy0Y1xPZqLr|the;-e?i{1-)olGFGuG|F45!@^Fh6B@Lqep9stLZ9;|TMc}rL5t=0Z`qd=!W zqi11GVlYO@g6S4v(OqjyP7bO9lEkdX;9(TbWfQG`^Sa<7{l@kJ;Cs{eh`WE~LGiCU z{rkHqxr5AuQ-zeoi^2toW=?8w2(qH8s8Y2cU&{1y4BT9yw1|YUyVv$o<(`*%Anb_f z*=}VFnbbCu4j%93Pmx23{4?Yvr%W@vIiaDnzI8r08+(CV&R+A@RfUV>6d5rn(KEgW zz4TO|ZTbU#;+^CK{p7?K%hqp`mbh-ZwnYp6n`6t-FtiP9)`2V)3CXsjJRG~0>;pK+Bb4F_L9A>L! z(p2c7LVh-Z%FmjC=KhRx z`0rL!2OHt%IlwG|Y)VBuhI9L9V)c+}*mK=~Kpu6s!rzSWV2fB;cxiA)fqpD1xB<%q9;TNw$(z7EM?!)3%Bi9Zm=7s=~22^S-}r zQDw9H4(eYM1*3VlMtUtVjstlgNZuca3iz%#1X`ZGcr_hd54`a*E1Cf@`yRTI$nJvX zKPVTT#EMJJ3y_ZfjVsIUCmX){UG530Dc2NKDM%eeb72-0^t}xE7*4!- z12?yYp%#7du6!n(ioe2UTOkg+A+!{?UdbUH8Xi=umI%nIuoFo2rVfS0$)dD?^gp?- zsXr01g-w6PzYM3;NMzRfH9T}rXIh*#q3?OCy^zE&6<0q$@6YRFTh}mF z{f*A&vpX2BwF;#CZ#2cfMbR+wjyZ?GL1;;CVJL0Ld`!8JH{w>e#_&>_leX`GLT0VO zbgz3h*uzdV(`5~H1lSh4KsxiO*x|r54qL}dx(nm2W3;T`YIEC%F684W#X#DeS;cp` z^8wawuE`U`^AuLX#RobcHN77Bt9CU|#N9|2KLwItAq$1YqV&LSc~Xk0mft=7{?#jl zukb*kb|r6<@T~4s;l};FCmORONj7y~y@n6k8G@52Dn_1u?pjRCHK7*sm4V`LBlw$N zoR{p!uq=@ug5wYjM`tC!;`4duwfhg}tKPe0*L4)g{E~JAxh9>D*5N}7r`OFw*M&$Ekg8!COJuGy6OjI# zb~S19>>U1G+N?4k8JiDhvMEcjLN4Py{usB2k0*`eCvmqr;W^Dy(35Gw7d`G_oFF^F zSGcWz6R>*w0JjXV{jwjl34A#Qd{POPfP#_$UbK;-V-cexaeXky-BcW=yt@JL2M##2 zl#C2?5gbUw=2>l=0$1XN2My!vdNSj&fS+O;zI6@G4{9E&Wtb_sg(AIu`l}rAxMTD z!f;e_;dt3*2fyPIXTy}8QSd4Vc>!|bge7&5Re;(X9FWhhdgDH&hs3n~A{nU86WQE) zF{#3u)-S#Is2AM38&6vZsvNmpYcllNB96Dn=G)A0Mdh7#g7=^2KewP)S8a*mU&2>7 zB11y9Fu&r+>?>)um4$GQ81Qb2zr^6h48{tZM-@piZzO-l3uOo>8?Xfk!-&y3VITI% zMy$@8@Y7`pW{y;J>{3iC!Hv6gZH4)K0H_%#>LuGIlPA9HVogb`2QNo{#`nG1us@7J)80 zclRbqyvg9axth_+rLR~j;iwX@Nv8xf>KIegp0h(7f?(bazkHOIY)r^}JWW?003Z4augtselD3Qi_Ey*zwHD7rK!hBF(WB zwo;tf6d8egq=XScUKA5x*!rc!!CsL^hD+=!9UO*~-|lT%s5AbG6ZGn$x9#lD#v!m^ z!o)irWjOiDyO7Yh(j@QO5r4nmeRFYnFc*G;I`~S}wkv=4XuyHha=s0A^t`6(-Oipx z>&s&e`^p_fGnBQ|q)*n7o^Zgq#VIz5k>+(XOYLhmA#=!hayp}T3#8F9Hn{`u4%4ew42waQNWLA=#dgY zNlr_-^#Rst|F}jZGw<*&na(@NYH>0k0CH*Y`iY*+Xi&a&Tr5a$O`rwVVo`@(W=5~O z8U%cClrxrJ729SJ++5Er+87k{M8kn@tid6w0u;ndh(vWexQL#q7q2~`URp+KVgbwP z)A!QiIpF*Iv#zLnVx1pH5OGRXa<2v0R-5KRj2tyll7_Zrvlmu%aS9J<#d<|HT%UI+ zeSXk|Bldw>aFr*qcXy~U2<;*2qQk-9(Ptgyqunx3gQ-8J^Q^vAW9}if>5u=W!b%Wi z-2tUY>X0Q$h)?kZ%21}OJhbo)_8%36Y1QxtDH+L}oI@mdxROvXN$OW<% zK~9z51Y&O%0LZ+^aUbE8mrH^_Rlq--apJ=IQBgXfg@~Fs7FU5`v)7dIGmo)!xVSD4 zLpgklut%9I)uZrm+wF`bZ?5MDJFDA*#5S)BT{0SMKnG&6jpm{tw<_IypbOgHXI}+` zH!RQJMEamuBHb3EixX1AfAXEF((ydE(6xyGR5)!U9jfrgwaeZf}`ks?~H~$576ThRmh9hMrG!eRWnqKO||to7~g}dec>~6 zsGS+sl?fxl^GbUJk<#>adr}{@$i$xTY~IE4Z-@@HOkXUbqsQX0Gv)F7-ILe5by^-Z zT!5>1TIFAH@F|!4NY5>uzEfE>ziKNfZzfZC8W>XK^6GbY`hPsP{GYk3|Ao8x-y%^veg`y@XxY*a8A6qSWBJSRQGd>^&C% z=V3REMKtV;Pyy-Ct4l|~Ght;`sC{4n9PXw4By}JUi^r@E+0_jn^GS=($tae!w#_+8|y(G~uiyC6xzfHBCRamdhzWZvr z5L$hI!Ss=lnD3(dZR0jR6VyM^9tQN;%{7q+PV># zR{OIdGY9;Hm7}37p*1v{xV?pHOsirIc?RYAHjJklEdJ~Mi3Z0q#L2|B)O$% z8Vdj@@(rZr?k)4Y+f_nAB#tP_M<9GQECa`bgO&R;64?sxANH&{VemXV-#l-diq;J2 
zcYZvY+r%Mc^YxvR+C;JLQAmFdvWUG<*_yJHZFd5)(h?2{c=z$!i+qNRM3>&#IhIs; zUFw?$@`k$2nbk=!j%>^@r?FW4a3O}aC8~2Fa<~Z*#qP?620Z*|PT=3&ClVsX+`zU# zwkD8Qn33(?$Q|m##?!C=G0^hJzT#ST@WA%t>E&$bEDK0Z$d5`!X`|~*;pv<+?_+%e z%Y|BEs^YqIlL~W9#U(|UOY{ki|C&cN2&4}@vXiIP$nG;D8@r2*L2LY69rtxT>mn~&UPGE_7h`LYtFgyC*+GE4wqZ4r-$cornmyVKMxb3OFyaUx#rGGnL zCh1%1uX2v zjnTl>DMFI_`%PTCc7}G=4yun9lZ1Oq+;m*i+YUJ4!L2GzQ425!!i`USDa;w+*&_lG zci#>&)F1WRBz4N~OBfDuQk2Mm9}6wVZc=eDb=cqF(kLC4ljlfY&qSD(-<(|UJ(Krm z^YwKG&;_?KSI-n{1Yp7OD9XHl8qSp0V*+b` z?@#IA%pbv~5}2?Zwg6Pxx3_cm!7dst^EEc-Z=A%wlNp9ny8Y=gq1tvicABA!Cu3mJ z^1H|0zwol_w>z>wZr!#3m&DUoExwo9hJV=5{Y=%b!y+_16&BsalddR9nz?VpQ>`qq zcka8P257@kC2`b(R1LT4R?lxNho!|qIT+#(w?%*H588Eb9|dKLjv#cuRDrS>bNe=S7-2!o%*96ZhsmzYGroU3|h^&Gi)^7o$`2>#`oPSd|FD zcBc;#Rq$gFb*vQg>(via3RKhl2ZsdqLbg&gln7O5Gm zql>rSuhH?cn}&ez-R2EqoTE8g&FczQy=8RFd*7`F3lbohT1>_!s*=1aEZ@|AL{O;Q z>bXQ?3dbN{qPyVrZLb>=8mx2j7Fq(U)93ZwCya%4Uv)SNt)m^soW4FiWAx8)mcBQ0 z{Xzr-38eqda;C+SPIWn_@h#;3D92Gzf++F3*#PvsFsVO}I z!*%tmJVkERS$r#T7e`S;R>)b*Zj-*Df$Gb1r1C?*^6vUR_clUZd6Z{>ER9h-F+{cH zkyyU4e~FnU;F!-ib+qMSVkB`UM=_v9fD(PkS8&Wq!hn{20o>Ktth<_{Va!(R zh3QodLu3cLF^$kK*E?pj?mXLpPFRImjq6_!mUM9=XWUg9uFp=3$qr>c!Ugwd z6}Jqe8r5HOlb`kpv8SR^##++*G42-hz>D2c2ORf6=8QN%BfgPNUM>*s^{`Ub@O=Vt z>b_R+V&&PmymO`bDS8~y6uhNxWgL!(+@ny!73C#DakM4@bVo`Lv{<4f*YYW7VTisX zdX6@3Z4lIy0w{n#>3TFG!wwTIlkrGl$#uFHteC{mnuKf-Ue{ zy+?5K=jJoM+ZO*)_+9Aov1rQGE$+@+UxCRaUoYl@I?*f>&9U@;Sjb8M9(L=SmiqHJ zM~w9kYd>W1%Ewc2bNR_WJH4yRC+)d)_{1?8{btMOL2fF|Cn4jrw%c&DN6k=a|2$|y z=&RfVC|*5S2e0#!(N_v<75Ajsiv8xVn38SQCu8rsu###MSn%%mXO$-TbWsSTw3~Xd z=t>c_yV+=wguqfAv*#C> z*m=B5A7fhXz%D9cWs{Bv%4kz!))J~5VD6+uOM9Gw_1MA6DW<$qt>-ICxK$3X=8Jie zWRh1;Z3z$;KIJ_eqFp1|nlBUc#Tsw!d)z#O98lL6<44sYyXBO$4Dd*z?ps+n@<@EP zsYo5Kd54v^^^SR}11#ifbBSo(FO)1WD?zRyLkeMEHOB?BBYPDmXS?(7A0IoGh!23b zCjn)r3`eiewX{4s1R^xQ`Fmi|TVCb5Kur|7@uW9otANc=v%9SnsUzeX;qne5FJ@D^ zo+%XC-E6R0)d|~*(tK}m;;BB&Y4WOMeA9~1?;xVZB0J|nCt>Z32(`Yv7>?-f=#HB+ z*SH4rQS-63mC5<>L$7&&AMLx zD1vR&9l;XT?z`&DE|zmo;p5EuzCt#zJg;AEejHa-!i+OCD7i#TnuUkUMo6km82nX@ z97C2>IpyBVn}M+({a&r87o_s0To=N|Jq2v))R8cg@p(a|%EIPoCy*w-P2AQc$l|31 zEf4(f&Unltflz)gZmArtSPKz@p8Tnmptwgfh(pj91ICZ%^FDI-^8;;XZU6voAEa(% zT4X6#*t!-B9sxCSRPXh1LY9dhhB?&g8m!{Ih0}9&Zm|}=s1g$%4K|R9nShx3+k<%IeF)9Ll@)m`Xv#cG} zGb=!(a-k2qCB&xKAlfDn6O!AfvR_bPHDPydO+zDQ6DEe%J-fWJPLKFB*Lb6Z zcm^K&Ww)C9>=+E7PNG$ZvJybBm(!Ie6o&VY`!Q~4%wT(}#2(zGwLK-*2Y%t|83A57 zutE1vz>X?FT-Ft@3Qk{tEozB-Wuq|>((c7a)RT)`bF$5IaDP43+3R(2MDv+$@y2>R z&m-yF2GC+nv7}R!0fwqc$f>X-qi%vaKC>{(1OMaGflyy~&)#asc;&Wubl*cQU4DD# zSbTD|ZhD8&5aMbrbGL|?Mam*}WI3T2P=v@JKXy}iWBT06m*y_d$Dr_?wPW7AO2Oij%@XRPF<0|odA?+Z3=&?`#*jHz!$o17AMWI9Ol>W-xRsad<4XZIA0)e zF(sU8E(LZF2}Kf;aNS$ZWmv79j^SHz72;|W$&1yB&%d>7ie>ZVFEcMZPAzsh)19Z0 zd_^vs(N*C^V~Al^FPom;A`%ZbD?gDLutZad`5!TmPRvKq%whtk9PGd&g-sGl+Q%Zq zcI|Iw?~)uxegxa+#3D3{^P>4sf}jt^V@Q1==4x$(K0|D;{MK==f{C;rv##0~qr z_D^Dkg=s0}pP9p|Lhn5NJYG?O-T^{9N~KtAVk}p4O%u!fH{ZLZhY!1}X2-_sUFP>Q zTQx8iK@~z%=?Fz?CH-;4lgu5ZaPN>ovW~;+$I|Z$-c5 z?hO1i|0qF5Gx@X+rsrm^BbV9(TxZ!ZSwiEQ*_>c%k#|P3%qnC=q@S6cqrYAgN5xUX znkD|E@|6pB#*%}+DKlYqRa6Z9Gi6;Cwx0CbS=&(b3-shP)>N$qZ^;`dyz#kJ;6FmU z1eJ1MHd)>|XV&gWtuKwbf!602J%o+jtTI%tB32;k*_Fqqkd@c7Hglq#DhLSEhQhuN!o z38tL&8*Qal4aFtzqHSlXM;>s9*8*HQ=trtUaxQ0Vd*uExjeoRh$Mc2 z_5PGfwvgtmqcVJ8&1Eg{Ey9&P-v->eVn!+Efg*lV8klFFJZGE-7NkHrq+OI0AUV zK_XFMrgI&rXVz*j<`lf7-AWRmrAKc_V^?zrlVQ4->KXCOuv7x$Y#*{-&N&xAdGTU& z8;-1NRbfuBZXDuE6nd__(D(w+>GVN>P`$CBUS!*BEc7zY{w(1Lp;mZ*J=VWg#Yr7k zvuaJs;>#vPX`)^=Z8(g4LIV5KAegwM+wD81$N7rd4P3a#UOg(3-9D?A05_3Gch;u6 zZgP`QM|@g9(pV{(RDCg&^7egHr(eKhF|fJ0&SG79V+m@4cjvJ{(e{aZi0&Nc9r=wv 
z8^cwwdxUc$)K`HeNlA@8vxkK2L;cF#0wzRbH;$xfvyL+rfRC#$)DxeNxkzhI$5Dxk zQ#jar-o|kiI;dBwyELw2OV)8J0gfe&XX@;bxjNxd^n@r$iag|-&&|b%(?_L`MZxwS zI*Wz6Kj|BH=4}2sbbo?8g=Iv#sx#~;n@)hvOPd1mCnUAxaW_sF@<*4naxr0}eVG*S zRwpNpRQ*khapdd1j(LTfP%+aL77O1|Gph*_oBpjLD$Qhz6IzkpvS2MeOf|&mffJ7l z06ClJ1{O3O`sJ&%Jm60GpH1M{Ijr$8lJ2+aOCBC8;rX(Ra14Cf?67n0=dfqhBkwrk ze>K!cNW4$&l4cox4bYJD_U7tLO}$fkgYD6$l^Uc_hB2`xvA*A67ASmk^JUqzf_JaIxNOx_^08>$ z`lhL093itQl&FOQ)2m8o>X1zB%)B6ccUX$SClip9Zd>Nj%*WOUzD!(j3q9mr0fk8c z)m5@yuEzNuPiXw7Y0t_mFz0lL>_C%?cqeK1q2K4dp-Tb8dA1Hg+-=WjE|(n=%nj5| z*_;oE`T$0T^|$yZro69uN7^-; z`sU(Ur`NA%aag9vL`BddlAv}#fV?~@rS{&J-%j#KN&kgnX1}@AVd6F z`K(#({89DoV`TjMJe4OwXSFbuz#Oi)=XzNiS zUFR=Y3DNFvmeUh&6-*LlFwC~AgCm(*-$eeTCH{cat-g^^_`zmOWDuPDvwhWnHmcrx z#sB!&kqhNEZ>zr>OEMU3gOquL{Hi|uTzTb#%p|IL{F-S-bkwL!f%3~?=zAHpSHIsd zNw4@BSI=Y26N=s@>Xiq7ZO*o8`4gnG!|Vaj!^UYVV&Gl=xxyu^L&b z!Vkzx_2lEkzsMldukY)XKj5zPdQ^=F&Su;PYuG19ljkxVoc^fSUiuD~)Avi<5r zc`gu+zeF*u!BnNB0di9taGv&iYkWs9iG|sa+Ets?fU6#o#pgt6xzuI&r#=!)-G3qk zYrRu?64?oSv-ByDLt{o>&pcM@mmGC%~+ z4EN#jf3KT78MZO@)pX$W8u_s2SELEa4q$8P-9cIbipAzr#y3pQ`Js@{56k*?;E^>B z78^tPiM7}N)=Y7zpZ5FT4T}F!SLn#7*26^J2Ue1Oi~Lw?N-8M0d-PU^HMkuBMZcGM z_PaavzfdCR_p9*VJDEV$o(Ylv%WQ%@nH@EY|tOmZ$Xxfsx^a`#LNZ zj5K}3V4y8Qo*3a2As`gEk>tsL#N7=^4u1Z8lW45a|&3 z49=}7tcKD)$+u(Muq#6^xgee3Z3ta08Z&Vb7UiF_Vc{5FJ>HFLAi);t7X`N`?fbkM zHG$*iTZY#3V!TQnAZ_m5?b_$~E4AoBa&2FtTSFB=OUNiE#1q>!B!r>IB{xA^7y3KqcezYVq0EQYro6n0Y0aIFX?n{Vlv zbzauH%r2?OK8QwM&5qq;*bCAnWWvx9>I}&7y~DLHD)<2T&K^rU0cavDWO(M+3 zbx+2$chnAXn4GOT_fKd3;y z{70p;e<~dCKlW9BnHQh>Sc=00xphM5bSSOL!FDKO>cBSdFEvq# zNb}UzHU6XHp03UOmB|mTrlGO*#y7<1-rsqf^BuY{v{6l5NAL5hYqMi1VU`6JwtSEf z%C$UK;>=?+jkc-D8ihSNb)g`#^~yCFJ55f&$IrpMZ;#-KEKkjOI^eQ3vm_o>Q4&)L zg;Z@DIEt2hfND-hj$8`E&IF-RSc0`thQAG~p3C^<^VWF7 z_`Hr_Dp>OFH05eyy1gek^5u8}Lt1t)A5h6OvZ~)PITRr9?lUE6?Ii7KIFo~f{es~= zbL7`e@>bP-J&-9bYw2up>?w$%9i(dKICRamZCG})lahmG~E=YN;?{Z+F0r8HHjt_XwSLA}<)Y&%6S2U^Y;79?<=HLL{nW3!onX2&zNbjA&tx2-7)@}j`wfQN6f%fhuCk2Q0BECD8iSVb912Sd-8y#Rfw!2B3A3={iiP5^>j8qu5!_p zt`vFuBI)AFZ>H_yH)$hO_c2<*O{r73MRqz3Du!4CT-z6wtHWL6<`si}pSS7SY(j3} zH&1bofVZmnsB?YVgV8`r(I75WL2C95x}BGgcY;8xL?t{nk2&D>rCM!X#;p zYOFLxuC?t2!)99b{ehEVz_ag~l#` zODuTytm5<%dz{M_n&h|RuQHnloyJ^;QaLf=%4pz{ERSLiCp=*>=jg*M(Cg-luFqm> zPxV!xV(1>)9zyK_lm1$((yl-5U&2pAE*Yu-zv4I{KAnKc0@JQ|uxH0ul^H%Ok2$`I zsV#0{43=Rpr=>&VBydni@R;{T&|37)@eVaH>S*d(uRFj{-aPTrPxV(Ew;0=|-dG86 z?VhK7^H|&9eUpiLs6idHp(fj7kNn#MbW@^> z6|@>~2^7$;I{gt43)e-I5dKk?+6zwtX)6>yQ(1Ztv!l~`(ww^X@^v>#pwQjq2X}=Y6RBez* z;r>JS2T}2(<#OAu5ms_B%l@eaFPEAH#~L5BSW_oupBFopl15G!jUnGzSA$f?TGds% zPU2LQ&k`PWLQxE`*7@BrOV1CXy4AEp24Y=2iss0s-W;9m;6np>DMGBjC6pQIkcj2~ zr3pE#?n+SNcBD2NCtkJ!3mJwAA1OrXB*bgy3S2!X%(gD)O?y(B8*YgLDjd%_+@twY zc2GT)b<-iN*l7HcynOzQD{;0c6gkIR4+?GyR(KlQBqg4zWv2Q79U}dfbL#7kNc9>3 zxXltOPRTe}h09sDM1^FdlK{d+0(DqviCO?30e#b&U3v}u3=|182e0A4aLJYH7tGWf z*~KIMoM}Zp#%Xn-i>=Jc61!91#3LF@7!P~Ztg$y1 zvl&1P8l^Ir)G;qC)zH)LSshZ$-FnkF62@Fo70FcA~&l)On7wds6Kq`y^((^XI>8MI_q0c_O7 zB0mi9)#*AlZWT=XS0NyD^u4!LoH@L18@-L3u~T=#rg$Z+?DX^nS2QEg#@{b zEib$M&X-73W(MHyY`C`w-q9*8C3)-GT`l&3o0x`|5Q(uE&|_T)>_k2-H~h{%&9J{5 z;6AR@RFR3tTF!Dx&(xet#PIC^f5m`T89hm`j)$Gunc4LQ^PWw&dJ@=PV>I@AW?dKy`C85sQhM|TPIKSi zRpEFFpyl$n1Af|C4PF({oK|=QEYQX`Gf2IZ)3HfJzN;z~=uBj7+iU1#;eKX%pnP!% zognaoJ$U@xSJPWk}$8v^B$-3ZLi~fB^~We_yMn#a72t|_x)RLTt?G}{r-s*{#6a_6)$!Ut^w*^&zI=g zH+p>4gF#YNhewv9W5$q>{A`N{d>X?&CXPimo6>IyY$>We8C2i@gC^lL>`BVhZGz)61>J zR$QEC<0q#ep~A0zzj}3h=Ef8X+N^Z4{dTA0lkdN78zTo&3@K&>LvDM9xL=%g?e(O`x_VPA5z>GPPegI z7KK+6*1zJMS^b8T`iCU&ZopE-6>-UhLB+2)D}Sd^{V^rCi9USrRJ0ib$0{qtVo6pU zzv5ie)k|5gzBwtFuxSv|HuW&6!wVq0@M;upP~1~sc@;!B2dEC$6zjG_d4{*$;r!(< 
g{xARMq^3VELH`q# + - + @@ -14,108 +14,108 @@ - + - + - - + + - + - - + + - + - - + + - + - + - - + + - + - - + + - - + + - - - + + + - + - - + + - + - - + + - + - - + + - + - + - + diff --git a/docs/resources/arch-overview.jpg b/docs/resources/arch-overview.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2acae69bfbb5a6d2179a1e09abd539ade767c4ae GIT binary patch literal 74626 zcmeFZ2Ut_vwl*AlMK>aX6x~uK2uKMKif*?QQHqd+Kp-H58k$mq^u2Y9w19wsR5g^4 zgd!!05Xx4mp$Q?h5RhIIdJ~Z^-t(Qa&wcK_-*^B2Jb!zhf2=2KjWy@H<{WF5wZ<58 ztn3f$e*ye-*TBdCaNqy{aDcY~_Qwwl7~Q^Yf8WB)z{pttZy7BB9v!|80C;)(`&tca;^$*u{&vNWJO`BD!ym{0f_UX__WdXw0AMKm7nuK5@(CwrKNJtJ%==0C@+#*E zd!EOicl{fG^#||xH(uulAL#$kpNF~sgZG76+~)C4JpQWdKk<(L#G@Yi{=moaFgjkI z0YB>cK|dru?d%PM@Q%lMKQVwGzye?h`1QyA^UitXg989GHvoV`FaAn%Oa}m{&j0}7 zp}*3k-T(k+{{R3eU4Nzh)g})ed>#I+xWl~tK^GSQU=i}hd`aTPA8*u305Bjk`#G}JU5C5R! zM~@ykdgA!WlP8X!IC1jy+0!RaojG;l1Rp=&nX^Cr#Q)RD)8_=v{UpHSfBI3#fge{M zI((dW;ZLVdoZ_YaAEf<10sO}g^&L_CrlFEA+7gg0j>W+y86_wI*C^uis+g|YPz$cM1 zI6b4M&p&)@;MF6_6X1s!e;0yx{GavkGPU`6v*f^`V=PrYwJu z>SnZRPoS~}axHW7pr%+;X9I3+Yc(#E`26RCmE&T5sKFcxY*vuWCj9zLRWa#AmzpbNCix1{OE7-IGndk( z@0SiMHqhIGgE|6LyTQ&L;&UwTJfkKx`A=IHGBg^q58o3k;6Nq~tTvS~n>DOW3tdPR zzhGPS_ukQWUAwZcIL$Y@LoAj4K&0Eh`rOx=I#l4X878P(EtrbKD7ICT>{^Dh>2o$r z?LW!gPJuxWm*m*LkK*3%nlo>h-9TLSzo4f-bjH6BVb_hBt*w`Zvgdlc)eNIFH=2lB zg0wq+-|SL9*yxrMsgn3NkwSeRzZ*SBct=WEm4}CoU>Cmv{gys|oa^V`Y;XR9<@(dh z_Pgl?%{_0|i=glulf8?n+5AFdT5rMp8hgIth}vH&AJXu_0r)N838nM`XS+5e-+84w z=jxWUiZ=Y(MAvZDkeevrARp4Y4t+ft@S*yS!F)IVuqw#wbV;FzC586g4R%jI zC!gn;m|x3{_ec9j3$ln*S0IphB;&Aiu}ETJb?EzEtq43uhf*q0td$rOxsx(vbczSDZOPt3%NQ9o-AA)xRrVgvt{%2@N2B7=~b zSxs7ceivI$jc{RhdV0QZ?#Lu(p~GnBTNX9Nt24B7Y+h3GH%Ta>_ED{?7rU;l-NE`c zo(e?4TAFF;96j^~OP>)hBk*&dXz5u*+Oq=DLiMjl*#vios{D_m{%`HF$qHu90_<{J zZpOCNjuKh%#<=T}kwC%as)P;}=U}bJnIzv0d9jMzj6Jrm$d33)>?&-CQOn0MFV>WVRs?~FeG6B%ChT{C#@bYSzsz5zY^wbpS zUYCG|ms00SjZ&2Ojobz~KM^u@P({>a!<#~oziDG)E>*eS!)Tdgr9IASw9DE9hBf!6gGfzNeP&W^f|FdEnlOhOon=~-Ky!fQT=jJxN*kK zf(kTjO=V2m+!OjWf~=>*Ft2Cdz0t8`JrSB@cy3ivBSa#U9yQ4dU>9D{6HGm$7;yIL zXW9kIoDZKyO-as#*A}8KjXjT+G5;loINdB&JVQtn&SDt~DSr34sR9(=b-h4k-|f-9 zQMk-`8_2$GlYOqw%uu=~l_4mQLF{K#{$nKth5Ub9>FUd1fR+oET~j)+58sj`QbcgLs)uc z3ADJ7)jibK(rCI5_*3X`BTAE^8##2*p^D8-OA`YAU7m9I1)K~KSA2Wud#8HtV0Ar_=*`TQQx@}ne)YXe zW$I&zI;6YYtcM38cF5%LnZ$185o|_z>+ERfLR~ARv`Fhz9%9!izj-6@cy3GKtn6lo ztjD!?ZDxur`P>X46$9_=ZzU$1CR+!I^TpNuaUnm8>GZ(S;>}lR`P3^masBt=?rzuL zD?{xmol_C<_w_4H7X>HMh_p2S)c4~(wD_XgyHi6!+LvEF2>5AQvlywBo3aM^&TrK$ zvEiAxCaXUf%J8pVPWu$LDrS+fkayu^)Whge7%I9Oe9=sP2euSq&N3~rB)1K%aVHHG z7kX2JgyR->L|ds!ifr{A^@Ft!nMAcLi}J40%9qKvoGF8YUew$2VqIS?py**IJ-hfj z%(6fGWn0zR=bw-dZWX+jy}aW^i@H`J+2}dA33A6M1?9GRzuFVcE0}SrTr<&!oI=j% z!n9#fbZKt3M$O~+%W+>UPkP#%t$+*_Lt`c1=Kn5~7_(KiWJe@1JAL;7a=$UNf9mlH zc&B=iS#z#Gi8U77dI&v0kU9$;M4m5#CP_nQiOmwk1N@FdULVPdw23MiJ_z#FJV;uK z&U!hx*S7;A4wco4OVtgK(u=WNU*$O8pw3`Y_^A|-SVzY`;N*=QnYn8B;x7C?pfAWl zE##N=kY7CgP}R47w<)Uzrsd9Sp9zvF^p$BT?~u>FmMOFA2ga~5YMHMO4{O`0GMVy` zH&a^EHVo`Vb0$w0N0aK)#fpm!4H~9>B}EE*^#}O{-u%=@v|XrN8q{DIH!Ek{d1PC0 zhS+qCX@Sev(U4B;(E9;3PQpDxzo0{o*WVbaaQ_g$5t%N*?t)r7Inync1O4 zv4p{O38NJ#BQaujOALwO+w6jX`e~zhJBAk-ki$2F7o=L;!_V9 z8>6gw1C3DHNO6~Er9gqr;{xYX46kgH^C08+BkW?B)esYA!keL9L49!#5GlV`7_Z$? zO4(l38<56(8~Q#$6n59tqzLkx4iZHQ>wNES<))FSc5n2ughKVycJ~2SZ?4`!=QN&e zNI3H?d}$wWQ(1Xp_?la%EFOfohh_Q3Vq`6+Nu1uDVh%wR5;hTI=C%2viR(NauK`LJ z?evJ(IrImIzo*@F>dD*`Ugn*Zcl%^cWCEzRT;2P7pw?wZ2&0ys<2P;zZmkBxgdSY! 
zLS3O(n-m@=5~B$kdv#qOE8pwEbF-A)nttnro8=Ac4Yjb6v!#)$t2X)0*LuL_NclH} z!X(9RjM+!uH4Y;z#ps8f^M3~&<1;}joOcH~8{ao5F_4@J27yyBdh9SDPE!0QjEFFobGANGPi^e$b`P@hMV}Lbb60Ii^hbr;r^pNZZ-EuW!}v; z&KF2yMYZ4JQ@ZrmQ2x>Fgz_!il?oK5q;JJ!#NW8xFjB$8gLwfST&i3kArt!0NZo@? z`uVL_X9t2#gzK*4#f^HqR3Ro8`f<3oBS1=m$40C;18Acw_L5tOD}JKUrGvGy9ZDaq z_e(TKjF9RvyQ!Bi^qw{Z5vwgwLQy4WD%x9sqvw<)?KamWBx%`?`XZS8Bw41(Te*+5V3iPKskCbj`AnG8SNBp(-LzC2 zk;gB?h@m2Vfo-R3&?W0I0i@PMV;VrD;|0$^6;HIkT9zpoe(RBMKBz8xVehEV z6jvt0g%E?X#Sw9@GKkExnepavgEO1OqY{++9c^8Mz078Ts&HghAuSBguK4-gNJ^_~ z?PoYiB;FSSEk`DXqy)i&<)+lj&@h*xdA=@n5*~qJR$VkZSfB0qj8GxkQSQT7Tz{RR zk!8ESX5neAP?B42MVyHA_brwG5|UWzL0K|M3#)6;rS1dbNtrV;jFqu?VYg-%&7B(A zeEymGMh!R*v53mb%W> zEYHAumFG9Mo>yy$1OnGOrwbO!Wm|GmW}Rd1HGI!!E9jUxp7**lhHrXpB;}(Ig=4sD z!`$zaFw>l;xvinEb@IK`&9!i@f4HrEXrHn(k2ii7>mK)rC>b}m+KBmm<$PR*x`{wy zdx&`lbRD-P_gj0#BI>}CCwQ6C&7weH)BQg=)tGoLJ0$T2OwFx=*i#fEdC_0K64#kk zJ#&0$;CDNfc+GV`u@?3Ne9Tfn{Ka$Ng~BgB51M@{C^NobR%jtpf8E>6(xbNi=gJ;C z4kT|q;4|g1lpb^%mF$m9dp}TP>!Weky<{Z10LY9*a3ZQl7KpJY#|e15Gjj=H-@C+D zb=)^r@;ea$Bxm)u_F3~`;&%>Z6d&~B>PYmwYSFMI(o-X_&ZUFg+y?IYG_xT1F;| z{fiwh@IM<5TrLB;K8(3`=zuj5aeg&Xn-Z>S-ygDWisGwNY;)NZDj% z*X@X#Qyj)qmrTwxi2Fr*r8SIO^R}nH!9;~6(DVMl`szRvX(m;hY#c2^&(8eT!W1BS z*G7am9_!F7Wg6qb$v;!30&S#oS<2O-0n+zgHT~Kwfdlhn{8DZRy^VN0w*pV!@gOnD zo{8M}nAD79SDBUsPq$L&-UhPEx%K85#PAXYla?iOJ^hnYlgRdtj@h+P6I9syp+R<8 zbd#ibndV&lC7)j^eV~sx&_8EaLqw-gl8%txX6+!JHlSxHu>ocE#MVEmwy^9TzH&F? zG@e-R+t6};)d6Q4SsKPzm5Z}#&er#*>lC}+tO)2D>Iwh7u?Vk|Z7>u%x zg5a^j>U%Jd>E$2;X+6*A<=&>`=9Ydx-(2K!;6fcs$s#S`=|TzVZjr|)v$5#G+)@@H zq`ryb4{ut)wwne2l{QW zJK;d!`oL&q8nz8q*B?>PE5|)$5ub$X=sMTg^&9a;s|2tw7w2s!bKHUd&tiJA9OR$XC2bi_m z!|7ocmIVXFC-woxTvhdqeL&o(0(uT?;gSc@i`&bj_=1J@poW24gsg`5qur!lMQ?p1 z43!;2D|*>;?;NhmoF>n2ODdNr%`EpBc~JUd$Dm7$bg{d_YbnW~lgv=Iw(Hp` zhu?)QRy5v_A4=Z_X|*?z5Z|qx{Jr%812ZP|N2g5J*B4#N#nC-kh$q@9OS+Y}d8(`M zG*pUMGBwRg$t3#kKzbtchD+6$=R2~kx}2!)^+vGB?^gOnOmez++jI~2+%`A6hIQ&*)wL?I1ah%(iAB-~)Kuzm^ZQ{Es;SaOIw@nM2`sO8|6@@q zoJ(jE7|zVa?D?4sg--#EnIMp=5yaeFs=}B!Cl$i%3?_1k#9A1fqaB*PoKig941P?K zaD_VHx3Fh~j^^zPD4hl}}JX1-`0s%Uni?bV<@SzSS!8ITQ=9}>Q|$z}gkhwy01Nnw}8 zV5WtF+XBW@58r)B!0T^*8p^h9beZcfHGTb}f66{mfolcBG*ySHHEbI~T$649X`?H+ zwP)FOS~5DW#i$QO6GLggJDa|Zy-eqjD{KjU1sTFK=$36pZBR%zB)qWJjF%fk~Exe ztHq3vM7ER%?41o-(7bcXv%3_gi(J}Bq0szGGLmv|`iVdL7PPgHm;#IXti>Dn$CD=& zlr{qwz@KDL!k_HK!?o?DXy0zTySDbFow8~M%ARPp)U^{8j)a<>x40!%;_1uj?Byb> z-(DHdU?)00&TbZaixLO*OdRoNnwo(Lr_p_q>MUpd!NA?)!{Ti=r?elpR>nDeD62uR z=jjz$vJj^Dflm9EZWN8=$-rr7pOML+C9@ZNv8H9iB#Tt;vm6ncEHYgSel_VET1d;e z!LBEV1q{LQeVD(IwyMuKD&bS^FZU-rNy=F{K&sC4^h#4{cP)D3v1{hheE7b>)TsaTos1E$S}|4z zCF1DFbpOeP0=Z_ptcQznHz{m_89T!tj19JOxX|Q?wdBL_i&LKD%9ky_y{+54U z4E(lat|6#BI9RM-0(_$pI-lL_%}$#L^w?+?rXo=pbM6KT&MKgm*~HN!aDQIo3!2F+ zd#=ecYM!J?Hgc1DpceE&!MNZ&UtBnu!}Se|iE$n+N?!+0jhcJ*S9t}-_PeE=V|Xpw zWddb(zw)cwt*?7}Awn-&`W9AHe$xyW!%PrEt}n#%-BZHOb#$v!B4RH$Mz({Ry>%gt z3(7GqzFB_f@?vPxwH7w+GwCl^S|Jk3mnG9&$Qny9NY}Sx@qAVda&iiR8Np)l@u_MF z8D%}v(CF$C?(1|IvcJ~c|rv*(A&w6-Be6Ll&s4lw|4Km=la4wZUzPeV;A)yaSqDyg51Lv8jm5o)tu@#u&8MP&eAl8?e5;nJTTo0nRe_gP%?SNn*aTpBd zHq_xs)|MW^oi%*q;M+um3ynG7E*-9n4-r#&Qd6h((0m(hiVYbVv!oyVHnF|%ZFUNO zRmqf{OO;Ul4qw||Q|E2~k)w=589HsKRo&PJxIe081V=Lj@D!~P^8C>}hYWiZ=61#i zj@sj0gcTHAHQu96qC~5!B0^}4vO;&IbHVPUH5HaaX=)O8`i7Ywk=3RoiKNUDXvpe- zL7}kPBggu@BIuy<)Q+Qu_fDU!%&TcTxl;jd>mH`b0&|0d1DEE8-M7yKgfT*ivDceM zPk9vD5*spZV8DXvP^eDI^A9T;MxC7(h8{9RU87%}CCSfKi zv(M`}X^^i_ec_gkPWPS3fM0bpH+@VwiNQ(V->zt*$q@Gu$TN5&64mNml3$8l@ogJe z#O`5+<2}*{u=k(eQ>IEatVJ3OJ{g&uf{C}s8&r%%D9-qe)ftz>a@5me)EuBqx)9?A zH0#WBon?sew)#{%FVJMs* zrod09vvoGMdryP4mq!n(`T!bK$l%-(nDVnm8k(EYW4k+{`X&shfG-NR?d}hLxqV+LwpZmyxl>bx 
literal 0
HcmV?d00001

diff --git a/docs/resources/arch-variance.jpg b/docs/resources/arch-variance.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..65e975af4e90651396bc73e12512a6d3926e7c86
GIT binary patch
literal 215408
z^P>NEL)SIYMEN;Qzrr#gsHkW6)@E@asza1FW@JChVkv}*NxZ&5K8L)%Cmd)Tkd$GN zJG-3#UFdre{7V@2+_|tKN9KU*cOSRzt`Bb#>(7)U1XX@M@C#WjYFNo^N9G?(2>q)V ztFFeKsf1grcHD5lg6wVnD%$p)Z*}ZhJdhYZNWnh2V)9op`JY;bzds54r{e#-=-WvC zKN`Br&#y*B9R`xAmuu%N`TbQsJW#w#liQJ0BUhKNtDe%t^l7v)bB(O&T)3rx?z;c9 zasA&c96!DN>!A@B*P(UqA$rI?`#%*=F8<$q)&EIbH!|%wOs6JaJ^wAZe74s?eFs*o zmV2N2^5^9jzm|Gxd@9K$t# zDj(VRMzWWv_1>?wi;dTkHlQo5#wG-%PrWkPda-9E34jjY?HyP6_PpBI;;73?kCSy9kdoyNJziCt zt6gc3MpT6@dLco_U@qwNrSjleAx{Io{ebJICGz*^F$VQAEV7kNCsU{;M<7gyQ{aBfLXbPPbwAz0Ewrlp%rNgj|aF~{y-h4A^)nr7abT*S2-Z!D#KQ~Y0Wpt)~-U^9lSnUz5>vdjW zL6OexO6ily*GfKNf}-YV0=Wy-{=aPUaoV_h>+Q7ek=abArlY%JYpwUQXRRxQVb1Qa z*KH#m{W_Vs>sfO&&Tr52uKKzWw&!Fe?GPDRLG8Q(F?Q*T{#0HOx}rV^ge43Z&_`7^ zSu68cLOb@g%0qydh&~RbIi%$w``sgunRgIRY`l$by@D=f;eNv3qem3vcqrbNA?B&K zI*b^slT|*P@~KsDpE~TpmjiSBVmWqmP|}Ql8u&SmT7&)U8S6Y@QHs72#dW94U@}c( zM|6c}coTdCQg@#D zuyvw!YK13r&Uh4<9`!iSkPWdYcWX3XAfl9~qKUnEZf)0(-}tB%>NcaIv;_^FkK(VC@!c{ zxeZ-1`n*~IRZ@QxNU!6Ka|7l%E13U)eujK+PrzG%1_gRyg6Ytq{~fL-{K}#)-s)Wg z{l45(P7)GwIQx%f^?MMTG0}Ov z+Oh7D|HVnkDjTQmUL=CSuNAVXn~9o7EJ=9W?c2~@@Sj}>IuyBp|9j+Id+-ww8%~sI zjuB2Dm1HEfyvnR`PeQMZA*x8_z(jwpjbjGASP}?{SB&?^0A0Wh&Rh%nJ8W(K{s(m8 zRMoifGYc_}r2M+D4IO|@z0ONt{kFCAh9J@(!eUhKjkWEQfUVbl$t%J5Z?g4g&}rPi ze|Y?8{I{ry|8NrVcMp&F+P}lq;`dL$Ud?iMC}ldK0c%7iw{}+uc~g3BE*LP&$p=G` zXpis#xxuZ&v&wM_b-{sq3i@N%7g(5iL{=EWck~{;s2A+CBf5z=xH6(5iL`d^D@omQ zkNdW@bT9CB*HLM|{af1M|A|llx~x#bDH{hI|5z!%+1^}@MF0TUX^6Oksd+6ld@^3% z^X{jHd-;)7%cH?qa%O-2$>o3RHODgPsYazW&?p{m*`80KE0!Kgo7A~yQG|yGcfW*k zMNHWBZ82bPoAGZN8+Y6GTNrfuPB;#p_+zAFKupR6tri#W7+OCU7DaFX{^6aloJsTL z=5JgUvzwjTUDr(wiue<`{%;ZVUq19*X^$L*r+F#;VR4^6qK0onTt#H(wf~akmIifv z01{dodyeR6!g-=3mbGOZ6V%4_p=Dd5=cRZb*R;(AY++lSz^n}mcjMz~_p2VRvdgZ7 zcsZpSvm4n& z_McT-mwCHqv~O~q?LA}S8XOxtkTGccXT6q>#4n+-Lu#oV@sDqEW?TZ=;N{Iz$#4C$ zK4#wX*L~g(2!#ZJFOL zU-kb)>eG*7YXra6%H2c770tiu{qIbk8ajNwSA!8c{!ieC~GZ8%4XBInK>a=KB|4g&Rp-m+Qcxykh=!F$WC#CMQCxF=%Uuow}}-iyUb zDTRO-Il&&j{d(nvmB`jBWprx}CM3BDXA>{jgTg!@i|IT}HBGwfM65DV}bP&%xcTN>iyymEJ{^4go14w7`;1fFOikm%2cTfS>^(6b%9?B$z-59qAC7l!O|PPC%;k zwoZKS-S1j^pLc)Gz5ASVKljWZlgWIZnWxM%^Ze@fON~CN7_l6h%iql#`l&0xC)i*i zc4vUL57U$kJTSGolxlaa(t99MUSoPmyLVMTDAXiwXTK`DS6WF+1k-o z`*CPi?CR3e8x}9(&$>=rr1I*c`VJ!tMFMkNZpxpztOgHAA_%cfn6R-0iME=WU(BUe zE^{ddYTwCHSbti;V`l5az8?k%{g~D&=L_tme~Y=-TMWSpLbSybEhW1FI}?G$pqJq! 
zH}QU~vTMbc+g!7_a?sVx5L>kM6s>ktSNw^>g&U(n04<^_yeP34W`muyCo>&w_!%uX zA}edw?D^e$M`x#WCYprsxk62H#VTb^aIlzTjlCne-e{+#O3bDOSQ`YbLMroHt|U~w zDL>uR(r(n$G<*9@1g^5jVp(#3_t$Qt!hOabJ=tdrugg&@%9o>kmw} z&>q&(3A@93v-`tpqHx^5Wz*ydSn#-b8c=hIeVt`UPhpzN4337mD}4n?($>eAwD~&Epxf7}Ji* z1fYsb0Wz9^u(IB-7EcD$ZIXt0NZw(b4G6s!Q7p*WVu_W_V)nEaRr%538IO-6)B43X z2CU%;#v2D`;)xtxnhp{1@xp`hcBmS*JLojPTc3*~1K@qQevDHJ$T96g!ff5IHO_tg z-)r5h53C`dphu7Ry)P7OWD}E=FN$7|NiCBl+T2xPMrsROH0Mv6YvWV6*gwnq06(m?`y9$S=75ek)w z`eDTqkp1__*V9bWOL4aA-Rr!vbrWGA!R~{o?jue}=keI`T{B2=ymZ1(b%A$XR}m3` zsNA|n#0&r~{y4yo=;9(n!*nYg5P!0K>+WTQ_x&(Vd^eHjpU6~ejeYcvy|-7xg)slV zKmV0~zeN7+{nmmWqrz5Ky&MvLPv=q7h=46om;3~3i}4Rn5Zivf>y6#-+wZ@QQRYhW z!~xxBEa>bUoB_A)ZxU)>dI$FbGz&Z#p8?>OyZ7*(6aG&I+Ic+qF_=FcSS67&vaR}B|(tLEo9;A2D^|v`OtdQXTbzX)x2#TLw@O_nTj)JC2%Y``e^}p zdu#qtSR*Hg!dUvtR*0*%`6#HGKaN(Fb*{Q^fZ8(Pt->$7h(}}!nt;2_b8+SDtt{Wp zs^+(SVh|0xY7ObfED~{E8<) zYfqGLzA+S>GlFs&4ymjNf6#m8dsJvj>A?;&6l+9K6v|z<9p`)YAQiEHhmBR%RGLj* z?tw?MLM&t4N&C6mO(9xGXa$fM6QfcDSr~%M!7>N*AgT@@;nh757-4Z>0{3KnJz&v+ zs)064p5k$3Jq7Vi>Zd(8+wbiyvJ%o z^zK9x`Mt1HkIh{&vCrMR(|g@Q;gLW1nhj0#FOk4ld0nv?9a3ETMZPUMkPTg9FMGal zs;Tkku2nSD@=%+>UT0V8Dcld4Q_Qb0tr##9<=>R|-C3(qT=UvA`HTV4pl!P6l+R4X zG`drnSvnOK&ANu1?>Tk+-|Edcad$2{L!h%-bRer5#649eQ$$$@XsX9X_nW)cmmZQ%dpI$NHXuA5@J>DO+%}rnh4VHuq(VgYP88 z^J6LIS~;WnVQdRf$3&;Zu{*xYNuKNiVW5jtK^xVUF**-S-KISY{Y_hU(_ExCstlq= zSf)zeAruLp^{1(;G9TfS#E$;S`54R$t$A5H-oa56`Sp+FUSPJw~3#fKwp(8jEmr?a=C70 zOQaxxD|1y(Oeegk+plQF2_J0Q9QDcTexj>W{MC9M1+2lHa;w&)i{^*lN94w5_6I)Z z2)Cc1geaRS@QYPcf=6Npob2bJOcJo7ZhA@)Z8eRNb~7r!A3@3-)lBQ{&H4l-=Q zB~_Hf%tG03%Z5CZxgxe3+)Uu+2zUcYB*wSD70oX|>J@;1NOh}XFTLiJ8%f4wtSNc` zQzbcnDq$?NHRTyDXKYm=l&g}WW{fGwK|%uc1*N>~7K&ggKy2rLeZCl;H8`pdtUt+i z;ic)?qIfp6^m3Xt^wM0WFcGxr`&b|_+{nkitrQhXw0GKai%W(N=POt9 zG94TX+kVjX2-XXpw+%8u0EM~jc}0wU*3pM%|VK~Dp)>x)7F%n&4z$;(F#U) zS=F747t;{U*veCW{h6`Z45{hDY+5zKUA z(vUecr#-jzuw(!c0}>yF7A$uzr%uWj=0=qL(0n5y^$NTBs1wLB810G<8E2 z8Zf10loNX5LIHzcFM)y^Esofhjpz_n9f)c2f{lDcs*8J%IH za=Ba4bWD{w(K4?R-nEuB-mPf8Av4Q&iFaE*s$+D5Jhmrid}5Y+8?eJHuNg*X5N z8V!Anj+=yyGvOP;Ao;#hZ2P>10Ut`ivbrPL0V#LLRJw$GshoVV!%0nRHSxmqC{DqI z3gh(??+RR6B$F3}rXto09-F=${93iCvX4UW_seLlsTcV}HX&OuuUh`eDO*hmH9dxU zqv1+Wg?NX#9J|HfIW$70zl&`zvxW;Bl42f+5y-3(-TIhRq+>Hy6yPXBo3#n=nw_6B z29qZuaq(}3rdnT(=|BzVJ~mODvppPq3(1y1+q?`;m&C0%O^boboM_WNek2kJ7vTsJ zfAvVovRSUB`3-e!NfKC=^}sgUXbB(zQ29O#?~~>s_`h}}*bM+7%0NYnC8C%_Cf)XB zS7G*-Qc~G@+>)1pU=M`G!Bh=cQ9aUS!@p`v*Xk9>bc5K_c3j&fmf&Ct(Sh!^jN2`BlPUvgb_gZ=o3xL_3KMyD+~FUSb*Kk9?=mbt|fK zjCzV)Vvw1XO+{#;xV9;)@?$O}zu)gHj=wYIpVMOhN(eMi{8wjpFXg` zqqZx*oGM!Qd5?eFWRFWAX+iP6PK^Z_LsB)^#QDgu>NV5ABB|x>zOq(%sb;?BpFS%5 zE`n*o>ZYqCvM_mt&FZC%R$2R1k9J6YMyuq*)!x9O9_9tn!kFE>8$Z2ZDHGR!YK2wB zregnA)qnl>lm__d$D0*6|>RzT$dF`M@c`yMlsQw4-c}Ez3!6HH`4&=A}i*<{&h*F10Pon&! z#=T~xaf)=N$KuC6QW`=j^;IJPsG_&^DSEUpMl=aN%3o;VJrRlg_Wh4VTIO_?>fG+- z3B{7fZ|S&oZG9JmHab{D4L2M0FroYPFy21E*gk(rUP)$$j@@)%7;Me$nVFce0{V2Jdc{pFv$`!lJMtQ#)-~l>Vr!r`$_Ab^ zo_=LuiZkgy)MfFV*CyFHOfehRql#AS;FakbXbDZ%qVvvUzWEa_*yJM}vV~ zU^p#g7~f7A`&ER^VUPhN&RQ)wE3Shws7~!NA|r}0i4K+o zHoD%5wU$A+0QT!2?m{igEXZYyvW$h~aN8ewoc_b3e)uDUe6dF10FkTJ221wov+wOq zL6My@Hzmu?x#N<`Wj?{_0im zbrNG{j~6G??sFYZ9R7&)BHhpVdCR=JgBQGdqGfDg? z!yj^Z?4wh)1%T|uo~aTeGZxy~#-$eGPcFH-1`#qc#(V4n-CqW@zCtYHV^V}_t+>co zgFI9Fl1Eff%Y<#tgTN#9$369uCsLp8b9>ay!{O{1PzN4R7*6|lr!PlHF3iR+%WpZf-^isz!pXzYpI}PU$gYtg`$O~+ zVLa5RS1G0nW{Ryel%=P})or7yv;AWkzT8l4U2BP{X`Mw~lPY`K=Jg(9cqcv3n(wvi z;sB&BV07U(nfQ$k*Ev%^0SQC~DE7#&LA3lq2c(!J)sp*mnq5xe0{2K+YVE?#7upYP zgDr-Ft~wh|aabA7iixj{Y)wKjj)9Fyx1y~!I=por|Mnpq#BF9FFUr_+4T@ZVhwO1? 
zRB1qgbd@fhbF@UTT?&q?vf_S_OY)OvX$$l^WAnRZ?Qh$|@{4SZ0xm~bJQMT_x76zm zdEZP|05M~m$-;*q2T7HVdZ9k-tFaeqxxwxkc4=IHpi(wFXKZ5G2`ekk5Mg_>DN$(((ovM9GVoidMt+FA%XadOv|ekiMWKB1sI!Y`%S?oMip?^W z6P}Vcciggdj7Os1RL-(3in7<7 zR5xdWtz}l@=%b>Nt;zA;Zhaxa^Su|T3Ll3c61$&>jbYdqmCC=7+lUp)!YLS>Q%Zyk z(^A0K+I!o$+SAkdSqcvKmUX?kAYb3ivg#D z?tO}x0GSwiqF)fKs-Ug9c9?h+3N@JLKrC2(q^sRTnj#MKGJ>Dd-rxP?rr&*_UE!>= zTq*UCGH6t%c?08uRvm@_(>y=3U&-)w_&8FW0*H1Rc7AsgpbCF$CFO5!i@y{qyf_da zNX};JJg-5bB7cx$-@hMkadZm4;PkLO84PPL`R?mKx0M*uQYBGbSRBZ(rp?Ta!fUt> z=zEGmp>z17Fb=3k#^BXxt5gVp=D>fT~iG9JOg!^en zmHa9jh+Fr3@7|R&h_Z(M4jwdFk&F|rG9SN_I{7HoyN$7sl2xS3zgq}ScLu`)#m{f8 zG>2kKhvZsDDqJz#Aq$?t>n)*Kq|N z!#~Z7*SmApxhhs_+*J4~142oAo$<=y!>GVD8cbBgJj6VOGh(Q^drk|uIP^X)mEQ%% z@*j+cEG)m`emfEXM$)fteXS-)_2sAU)FRfs@6!L(+4~LQD|J}D3hO`iAeDI|ODtDP zkblhy7M6HtoBS7!8RG&4K?kdlnT!day$R&9b?#_({sjPVT}Rp}IaN`h_U_ydpOPSZ zq~-tD?%@r|fK)etJzbN>j#zd@7X+KycTNH!u(p7{X^Wj}o)LiV`m&3&&5!znu+(Io&_cF25 zsh0j;Cd7IE>u!#ttnr1>mHBPU{##`SEsud}_kmxA1;TEmlpx@3sG0-L=E=tr+uIu= z5jEAR14A4HMkH!Xii5d^;xew6aklc-Z!EeHDSD#n8Mv+eY$; zTe(Ian{&PD>!fuTIY5QLcg1raxjf-Lvo3~{dcvzI*is)qJl*ztVLGAev98yE{T=VJ zeRl@KL-ywF7dJ){oqfwf-+9`^ebZuY+etkx8q*^2M^>g$W|~IYiu`NLpGy&U7rHFwb3KJ_bvYm#Cqd6bnlAk_L@ zw1@qKtuZJRZu>o2+VIncrpZ+?Yi{(@0{_xwCO2=&YZI&e%S=ods+>bb-ZADzadZ@@ z=i04fr}aes*Ti?GX+X)&!Z`fR&soGl|4xjU7x|#c^s6ltbcuiIG)Y>YvfapQ$YAB6 zk1_2E2lC??tfqkk61T-?`dxxB0MQdrNJ1+20ZpyN@Z!&LJ~FjXYK(ClA`*Rw+wS~0 z{P;+c!AYfSObt+N%43&rcbb~Ab29Pp9mZDqJzgV2dgWS7)m9bK7x8C`h7unH6_8gI z#DQiVmw-(Bo`&qSABoW}dt-Ch)Erk6DUcX6#ilQwexW16)zDnV_ER`;>z!r%2!-q3 zO~$Y(=(JQ}bl3W)RC)6jQ}x7n2xk1^@_@fQZ?U;Lc>FFi#*t&kXw$Q{0dvo8L6^g1 z(qQ;;V>5CLu`@=aqtZNy zt*_?GT|YQ6;FH!uXXZK?LUE+c)I7%cLKf~V{P--tGP`mLP$j)+da82p(;m(CS^cx> z?cRtQ8}tFBEQhY)rI7O=()ifYhf5WKO-CR`)Gu>4`*$ZBgS87uWr*=xVR&4nnUJit z1b-_~wICdz1%!}zc7*A+!3u!aq=qi!pyT%!o=5l&A7)-@Oxl^GoE*6%1(>*TkeE4I z5e5&ff!!z&(O~<>4&uD$DjRJhR5wdrv7eQ5cM}$3_Et9*i)31t5(v!_(FO*M~PPS1}3D ze-RrN;B3fAcNuMnM;RCCmLP~G^g}+9Y#E3id9e56+c66JD`8Zw zxJ`=}jZI0>J{o;_HJ%d zarbUc%0QX0{`IezUF9h_E(KL3Nx=;Vg5xHmOKDWkFJX6Yrpd=WdF)A=kUova=H)x; zUc@LoA6sZCXdmpXu~!t`?do#^OB;Rv$ByNfOAr6~IsRX{SpC13wN6}bBN?7`DjI>-T{@CgUI8T>N`t;%C z8l`rVm_ZA?z$MYTueH&Ga*ivseoaxqWXE9J@q!htxmm3x+U_#KxIC(FDI$(6dMWIB zGb+PmH0VN_=5;S)e28FZUP#s~9T+YdR0hywv-C~p7cQEX)a{XM)xj_t7LQ4zE}hGA zTOongLf-Kg;?tUiC?Y0LW~^8#+5F$lZpyI- z&?qYJ=TLd^N;!Dz_tbGA`@ru>!=Mb6vZV*VpA49`wH3A#^jvbrt<{9~swMyOW7L)U z+5p+_nOB%P4V-IMwSAZj5irXO3fBaNmPLuK4md(NcVG4MJj`Wl`25S;Dy(l*?3#FBX z(sWIpW#FT;-n7l?)BxM6Q3(@~g|LMWA`WVEA?6Kkva;MmuVQQDd*lq`Uq`)GJH=T( z6L=xk7TPJX-Npey-LdqCW)n!tS>sxsaPx@lTlQ^L`B-MQd*MiO0cV16%$RexOwo%M zp5C?n-UaZ!dbLhVm?d<;Kt~o4Bwk(RO}5$M5@f~YSZ@e8sFfsk(w|S3E1tCzYJKIV z9Dt+>T2e?jEC%Ccgsm{)9^ajsx3K1|3#An-q6QX4q0KP`9$N~}dtTC9(;#*);+Gs8 zsyL2sOy5Y3L)rn7UlyxL?1+6?no zB8z@M@$xU9{eNQP6|lw#yt?EsX}^ywxI2_1ZvhP2l&kaIw|4hZ7`fN)ac^NVj$uQKe z1}tOH@*^w)(zBqEhq#c0CPhMm;>7euUZ?$? 
zn%CB~f3n01H&1a7H6vEn_e}pPoFx6=N}Yny;)02z&AIgV)sMq<dhBh^qZnxJ9IXaIQTY zeEz$k=CvQ3u|7^5K6S0Lx_=c`YT8vf9G1m?RF8A#xBq81P=70YBloNWLO8%*gdlO+ z@Nb>q`>XUc+qb13&fWO`VV5(gUvrvDS*8B!=idhCwaLrkuDAc)puku17Pmw4m18cy zm@C~@;jaBSHUS|vlt_o_+jQ;tZPqLSSK{S>YfQ3bJX*~WeY{evV4e-W`5pZ2KN)_3 zf7JBu|IdRfmMuavTC!QaiAy$N6a`%%JB*fPOLHwO^l1)Tpf1A`Lj#l@Bk5Y} z@zuQcUWF=^vP0GN|N1fO{^KFu{onM2{`Iu&Uzh7OZ(_{DQ(Brp%By|?HaV4;-E+n7 z!yjwZ{2;r!GDRK6G&4-Q+I$q*dudfXBYEI;nUcHBjNez=ed=g1JW!{wcX(!P&~q3{ zwcee+yZo-n4(lRklJ1n8Y-(7<6{oi!ob%m@Cq+2T1(F95($>~F2&cqRry!zRK7>4D zs7A8&x~GMLE&C<^(l;WZExW*Qs9&)yJRDNvrZ-VPS$Nf`3ziM^&e+zC=iUudyQ~`2 zy~YAn_`77sYLsB{*~nbA$-nzg7H0hSTU913p6VtTD|-Md{iu(rz1t$r%+Hw#YUW|a ze?ylB3@BJrIjNqNDi1!=9%GqrHqwWgudb?_|HH6LMZr8WvF~RFnR1Q59(lB8Ee-Ye z%Kld#-%77ZA@u3Zc-q9{{qSvZOX%a!@}((gRV-9!WtgTKm{G<9wHzUM+)1(R02pss zpdAaJ_jUA$N}1)^H8~i(8cDj3v*~&7-rQkS%0JL9V@C~dNt-t{d$&nT2#>8HnTsb( zKnbgCwp=R49wC|38F1}0*X)EWRRb9u`xlsmR^>Rf68;7_g5R=S$N-$V?%SuLBP45| z49iNkpsz0a$*Z;mJl_m@$#=&?DP!^n_dpQf+IYsTjEvLb!zyC$?#4-(g<6D)2|oD{jssIKz#Y1XERD?oBD-~7Y z+|=sd|7}FO!^3^~y1A5KwX>F#fme5H3f~7$o6-+Fg+0mH8Xk#)=@DgfPF%GaSKmY= ztq9#}EWDlA3biek_unGZ;gr>@Iz(is(}Ej&IG3jb57(ZF zf@ws;FMVG8LgbNLCSG|&w;L7=1hT70iM#K}69i`bf<6-#S_NKHp9e}30B(VVp*8h~ z@`*LX>7_i~2f<{q2DNLQwRDh^IvN=G?P` zVUNsRcH=|h)V+Y0Q7@JO7x>L-+WY}knUa=Oxo$rQi*toCKED6f@BjGwe?~sTe{A*t z_5YOUy6@tvNw>w3x`PXuOpFA({g36=`{d%|h7W%ONH`_>Om{zW$|dTs*zQjjw)2=y zbp2JvYFspvvcBH`(C9Yg^S{17%gIkuBC(x5F=4z2g&x8IPDcRz%+Q6h40SYKGAag4 zrdmbFe)JCZNapJdYQJQ`fCuECJ&SdHdWq&bxV?Yv)H9!Y|&o$Z^wa&a+C%S{xQ& zBC=Vy{juONe^W)jsIplhd7%rS<0OPhwON|ok5l%x+|5MMNF`Or%xMKc_=4as_ z)wr-1**#1CeQ_>r5hs8x9W?drMBW(f;va!Nql)Vvhgr>9Mx5n8A@Se0K&F`U0m_~u z61PL+_s~eckAx_5El!y^(vu)>!+VL4`~~|85iSn}pci_-Rl5A9TmB zYQ@)D0)DBb#?1mj=jV-V3LelCH!)4#V)lvtJ?mIgR#TNP`<9oMUT!}uH|tux)RK)Z zYVq;$Q|$bXj0w|1%`QS<1JVRh%oe`@HE2Ef^3_Yd7FzA!;7nhzDHA01C+h=~cWVv# z;nu&+Mdqy4+z*+08g=-Y9`%n&`(=kQ`j1K59C&E)&q*7~S+$kSRn`^KwkoH9jZlFB zK(^zV&(h6|hmPVEaavq4i>!Rd^FS*>@m}TXz z?7gMScjh~%#6i=8C!tDfQomLBjNjS#qnx#2f)UErRO;}1?t_T7A;z^+uiy z-`lybd#yY&$@fgt94j6+mA+^TxTQ7LGAtqJDMT-&Bmmk=sO{Rn+|M&7r78RuSi;z9U9%74=SD;&%W(yzV}oZm!xa{UAr zroYs@n5_$bMY;M{f74I=?Bo?8 zUc8L{t;T63#gtpV0bX;};boA{yjUdDtPwi93@``Sny|a@YQgGNuj%E-EzGH75P6yC z0Tg=rQG7eWDC&Nhp+}qd8*04(kkx)*J~7I!HQ1(hgh%pv@lLp#7-ds*6)fb;aeeu~ ze8zt7-3lxOvCO|oP<39^hSm6uDrHWUBj?6;3=Q?%v*fKp2{)87T@y=;y2E5Ul(G>O zjy85I{(S$nEV-=(sKP4cDPY>iEY?LuPkd)_Sy6y_ zm2Hm6Mte(D8v0PSlFDpEJ6>O?5h|!ZWx8|Woy8{JYA_Fa0E4NsnzqG1XKmYk-&0;d z4ePmTcY7|-^9-5G=Qw@7zbHWVQLC~Q!3GTs${1_I*+j3hhF|dso*x?V{Q|ne3>BEU z$tSq1O<80Y#I^khv0S${m!$hxAcBXgR?w6ti><*1U6RK0r8C9Xn~KLIdw$7X)X6Xv zQjqm`3^$lIrc*aTyl+(!$HMKHJBYrU0HR8>75HaolGV=)Hkbf9gTe42B$832Id zdW(K1ncoh*CcI|rkxeANpjU=JQi5}#wvo#AEmdf)0TZdydr8(wCYfv5=s^#>wL&sY z+r+!VLbNE|F7P8}|3Wk~WU|w>x5|4j&`}h#@&WSj@9TyiidY1hjx3)V;M_mIs_-t6 zO53)OhulL5n0CUZCo*t;$_jy_QpE#>7AI0jS8cW&V!Mng*A5!s%W2#Sii!|7o7B&r zI?0CL0?t%m$2)+Nx*8D~gEXp^ui~_iB@rjrcg~kC54X)-R3A^j)C^Cu@Rx@}Z~;0C z@l1*u-7184<_e(Ws^+E>``622pBb(K*CZs6+1HThPOY?B935ssK~IJ!(>s=gyNo|+ zofJY=^SSH1@}qm*co$TS(PoJGRX;bI3@*xuki(-=*qwl2vR-fEqA_m2?JB1|%9x`T zFIM;YCs5MV67_t;AhkJYoAbm6(ka8zX<$)tD@o)m(Yb5~S*Qs^Ci%M_D~P=fRHeay z?3}pkln{=0kkwO0h$l+EMb)k&hKBqD&Rm|pr~n`UpTbs$D(T(=74k!8pHr%kqB|5+ z(a7)T@acv7s(Vc&CKSHLpkg|1F zj5JM)hoVJS*d<~f_^@%|?e~_ZJcIn0ejoJ6Cro@<8rRS@tt6xZg^6&ga|{w@bha{s zw}@Lvq-ZevuUGFgC&3B067P|UuVme^wd9{H@qbWVExy5uWv0MUhs5zH_H$hSF#Pv~ zYp<)r`oE_QB2`4(8_7NUtPyi#n0$Oargb zA#7a}c@Eq6Ca?@J8RVY<;N)>!*g&)tjFmX1+>%+&e09-;A6BDYIPW zW<#4d#PrG(JNrya#YKGzcRR4+2+Ui1+h|;5YN(yHP#|K%u)*1sOML%?h5pHcLp{GK z7TFlDu>R7QtzKMawi57WLOx0lm2wN#unNOzZ*&;NWA<<)TOZjQ6Gx5xeFg26n3^i3 
z6W9817WJgUQzVFS)&zg$P%*3j8>8+*>Gpx$Pd?G9(xap<`%(*`0%ezq>f62%p7{i= z5%qghSMUZN?oJSeoSrm7ETL9zqT1Wm)V6J4DXNGjfB7}bslgBBqbsF29BI&;W?s?b zakwYTo;0>yVjrwL#;kt@;StTVM~{K znyE|HrQ=3Hrk}eUtF4~RkGn4qYx8gP{5!sC+wXf1PqpHKnYo+IeaoDoS%&d8l?*?032o&x}XBXecL-X1aGGDiML zEkdf?)-}IO#j5!FXLEpgHw`*irp)gz1simpUDyT&w#89Poz7D@4L*nQUj4Ol!W~7C z^ztGOj2Dte_6lI;ndbx&3G!W#mAHX<(bdXbR?!DM#Ok+7WuxWcovA_N{>X)WuS^dC zdz;EoU3qV8XN_Us>h;`5*`xc8;K0z+_=z zfumq5hNvCZ+o#jS3?qXFY4w04QVS%Pnm{I?I=4Jh2+g4=a1jFOh+ce=mP6E=Dolf< zNxyxdt>t7j3(mp=wX{sGFbM1&_#p+4xG|4us--mwh0QB0svP!t)8CkzHLZ9!`zBTs8sC4DS6=!J3rvLwP;VhUOJd z!zgzd1Mk`CbQx-#%bazt>9-Z*)(q?5Pm!HA74x>X^&2)B(pX2=T5YYoYHwm*==_QVg1*oPc(Nq&wNFfx;5D5Gx zU2^X2Kxl>72YnEM4l-(MA;sEr4w|FtHKUi7BVT#%8^4XT2Ujv}U`_)q zvf_^wKLiY3q7?1y3+oY73I@UK7J$%To8m-DEP4s*CSm*XX1{`mD`&xA!WF4lvP4lK zDrZ^O6h>-%w(u$S#+;8=114KF3peiUDdcJHJxT=J2S~L0;=+mtUL!2lE0>b=e=|_Q z@+Na<(s|8)OW{yy{V`|(o!gA>cG05q3`}5M60JI9B&qU{*}?t8^jMbW&+Ow#p~WRq z=$1cOw9h;KyMM_$?*a~JQGHVnKJ7Cg&rS>m|6D@;A8qsF8{EIh>roDq_Iya@`uvaD zu@LLi&`=~so@Yx7IE)8ZtMc!+K5VN=sXBuY|LiU-kur)bE*%f(=O7RE86k+s97`Zk zkG`yo_me99;66+3x@U5w(9vUi5iywyrydEJTtQ9U+g?FT!u_6Itgj4Dw#ps9wU*rX zjJ3WYJXtBdB75!k5|GtqoSF(cR%Ddo5EI2|T^gaLQv6C}bi^UX68**Dtp0}D6yJ=) zACYf=KfcmG6WmzS%;c3h*#d_2maZ+*Zshu)!$0nUp}l{sEv{V8^vp-um0*5$PqxdQmBhn8PNE*7hBQd`0?m%BTk7cs>na` zNp#_+ZpCLhmz(xXup<1H&XM_m1s`B2I83^-4G1gr7+U~Q9> zo|DHyuMUzk4kH=H(z)v=JSPiZ*4~nUR!nU!quSW`{lB z>L(^qoAw|{GA7PBFT*AmS_O^Z5D2l7@%C9k#4H^=HZ|LeTuRq8MtwR=evm z1)<~14sxX4S$%4f*Z4Km%;kNy&9XdCJJEk_rH;KIa-k?xw4hrH2p*X~^UYZTQain` zG0f6;`B9Di^0sPVRLsP>C(hg3@E&^4-BP8^Qn$twRlOk}i>bmd46NTNHu7n5<&E`B z=M5FfzX?_MY4g63b}waC5#jA6k$JY>BE{?>Q92ZUEjSZhm&p3CSHJ0(m#TeB(35qK z1(6%CmgSco^D+YRPCI-|>D_z8RekMQ{OZ*!!JI=STDdD47r*?;a?W#dvj5Bh$NT=H zFUD+OIqmUutF?gro0mo2X?WoFC07lxLuN-8k3`$WVPte4FQF#pxzV|f0qi&yp8wcuJlzWWmd zZSp}+!y0FON41*T)GbGN`=2boa)d%w-l_I2MhezF+5)Wg@QifjZBA?@S~em_w{}J> znN|x*d+;yc=;3PWj!}+h3lDWu`A!9^zU+Ip{;IgYmr3wdggLzky(4!8aZJG9IBgqQ zCxrCbYzvk3)ozqmI3hGzzHcVK%9SLO_g$3kubDK+@x6T|ej@NFB9)Vg?RI_V+vM43 z(TK_i+;Z)KN0DM$f3hs;e28+L+KE^$Ks8^!~>f zmCX7kKDEyAuT5T_L1dzeZ4tnPJ7c`jbRZTXr8Bz(3`GY98v4e}EVl#$C-bx=sKYUu zf{u-A6W&B zTsvf^^C7!|D2%bS;NqfX(`zk@&6S6$D$ygJ9bx(0&gTKnDVR=EW{=3%Tr^b~t@O;D=86-Mz@+ zh*s+vqphZNu5c;W(B8%s{nwB!V(n<`EIjBURm-1xcSI$qPmF6vAWz!@6cB{M`1B}S z`4GgU%K3MNZKrxTTRZ{|V$R(oj5{dA))d+f)y%wAhT^~L!IW4ox%p7edG*N9iE0;bUB=r zyPj>PF{o=$G;EGbdUQM;TAxVw1DxtTI1@7p2<#v!(4WEYF`S%pTy1R_OFUVqgc8$U zsUm~&H{Zk8W}7lC=*8ugP8k*l6*AmxwkGC+Io01s^H58p0bwEAfRyD^GiiS;WhYzi z=CzVQiFDUu)aPlx%9Zhnd`wY1{KQ{t zM_1J-4(-qDgA5%EF9)z|!TJYUhM%S>3Sx|FIa6WQA+fd^9Y5~ZyuX|DjIhmaewJK zcX{pE(jO5Br|TPvFRuJL`falR^npU`;L#WJF=vQmwx`hi<4tqc9^cM*#NB~WyPZl; zFzo^D(!^p%)x zmPAO^?lZt?(O>(C4Eu3^Isp$8$h{eEcqA~bG16t>V$T#Y9IpMzqPN?#{aOAA%cZ(u zp%Oz-gS{bqG%IENepqzzz0z*<{u!XWGk8CYIT|KJKly2*XCpLrc-l*`(DkO)W@knj zHxCZ(F}^L30++w_u>oD%?GwQt`FVL;U&;{r?N1h2VxMe&#jMOIVaY`$)MF9i`P7F$ zEIdx2Zr`8|d~l8HGI~20Te^V+JfKV#INdUR%xUOd1TWG885&@+;%7|&Z13YLF{H&` z_&Rncr(_f9CPaqdF#XE=N$bmD_aKbrtvKB?>eui;3_+G#o&~dsJk{}YI$K&=u8Xe* zvsBG@y}l4sAFw4rXs5K@K7Xx;ek_FQ>Xts!YI0(We2WXzqKAuCq1T=6OqyFMiBPMf z3)flK>q=TFVi)o9wG#(ut~U-h4*WDE1%F!!|)?-IeZW2T?7AvhvL=4 z(*4Tefc*Hfcnr6Z0n;B7yE_9}rZFS0%txMxUs(<1++fseykkn2L1AC((v8m_=H+et zA^Y`lgGeXiMbvfeqsG4-e=$0!S#anmJbi4+T%rk64j}(rkzVoC)b ztQ}3w@Q}Q&Ff^6J7y3RgN-RYYFtkI8D7GtW8toM)c%p7UPM zcYW{mUGM%U_u5%|?Uj4qS^Hjh`Q0(IFlN3Lmz>4i=xQxLZY7}6mf!~(lQE8Gv$>7( zP0PxW2`ewhQ8MrkEI^$rRqzSC35#x^fk;`zpe&07oMO0f);BS?0oCjq5&YeZVUS~B zcw%TpQC}616T#j7fneyq8@$0h2M}pPu5@=KU~)#i$;CwZba^BB27Z^`NA#n$PQf{v z1*ZwIozD4H7?U z@ItR1z=fVlie^@eqEl2xcE+eMfGR}8ti8M8!1)s|L47GyGVCs_W#U{TC)4Kn1as+v 
z=Qt1;)Fe>E5{mNH=^?N+h}c2Cb*VbG6qG!wTF)N>rdw>_w>|r8W3{52^JB^B)Kp3X zj%jo|prpXtFn@6f&s}2qo|T#Giv89V^o@km^2=CI8s!U zODYp{N-K4&!C|`wMNq|$`T6-gO;i+FJ=>2xLnmYOWVR0y&)3m4am7vKYTkrw4bip z_^}4F`mp*^)R2H_HJP#kT|VwWTW+2~zLeOU_zq zh+zYbQ>-M}{Jnh7yEuo=#y%X%I8o!tJfD87r4ndf0~PDXiLdF;b9QVjD&$?UDF;hmkS6tu ziHx$$#n&3Rh{sKL0IrEwUlwaGO0H%V&X#CTyjzITfravh)I?p!C127}=QlMl1ak-6 zmOrl|ip0#-4&zAYwg%2Hu-S9!G7N`2W z#tzd+Dx=;Vb8LW$&16QbZC*I}3E`?D>@*4u8$AqW?raIB_gU1ScX% zt>vV%#p_%6QIVfiO17eZP&i^RMG-!~uFHt1lD{+VxvX}sjiljyg0-t}H!qp{bx0>k z;(ypv(@JjjF-FIuQcdCQr`a z{um&iYgkF>vqQdhu4+pgRE_5k_VPkM$z0$V`E=mG+HEiw!u`p`7p`HCW~izJzVMK& z#X1=~=9QZEHl~|yF8l7C+LDT1hNVW`=sHk!3r3cC3(Y#fhMf;-vGnlDip5*F#-{S2 zFbC(JTrA?aSh2eq#w!Um>L)u|MV6+jl4Hk`5xFkqo`>~|Y6@Flk@^bH=fJU;(M^L3 z`YEzLxQP!hJ+lyACnv<0O}<_yuAeJ9QVV1 z`kNSWK0{GO=veFG*t~1A7`UoiWgG%&#)Ba`a8MW|9OBQe-9M_*9qB#4GUNweG%FZ(hzP)6RvF`GKWbjAi|l}6Z=1bn48%}Ei1Q}irxt0D!Y~@ zRZ)O+hJLkI++3)+65ei8=7AYliB8n6xjR9ulXU?OcdVXiGKLt%!s__iO{_XzP&ae( zGHbn!{NfulkUZrg0ZOjB-2q4R1t5u~7FSMcL4b`z7W-DAyDdL57JeL;@{`gCG^Y#>;^?P|okyCfk-7*2S1%;%^G6I8yGitn@T)CBWiPDhClH5^X#%ThH zKN=A?RlBeEh32&n|I!MnUlFye4c;k|RAhxJY1==jReN9WK73ZAM<`|T{dl88XiKdv z4*P2-kneSR=1s6*8I`Y&Ia+N2Tf+9&UlQU6Me77W`ccGs4K*Fq;Jr$-p_1U;@RLCe z-!6Jw_n6k7dUzfv*dmLi&`=zxzR+h|oQEv{Atf_-DH9b9`(~WArT%%0X&(BZXlp9& z)ZfaAd|ff6r|Ta2pij8m059R9P)+8)bD|i7%sfIv$rQ-T<`h;Y-cb!+JP(QGpg71mLcN8tV9z;&|D(7nzPZ`GI z*_X9&!MhfPulp=fI~i#o%6MUSXvGf}lC%-NCxc;tVoAF<{l~ydh)9=JnT_P9zj1l0 z+IY4Gzn~NS7Z1sOV0RkoCmM;^oQ4B|I@pZ)LPGF|Ufv8&dOeP{i;R?PUU~xg09BAb zU0P-N7GkJ~n!bT>7xO~atNFe|z&It3xPZ?@!VXIoW7BLMLMSh`zP9+^rG z(46cr^SEs!2G<}ae~B358d+;Kkgna4rVF7x#n=H;YpS?PCP_IC&+Mj7RxECiBID!N!v&s9v)0w>aXfS zd>icNYyIH2!k>&=^`y*!!mVW>7f)n(8vg|s9SyZ(T29Sg8$*XckHX54g{%^EL1DZ| z>@fgtO}B+Di^0_bd3boSJiM3%jx-$AY0k%PN$%Rnuzo$iXy)(6i0U{@Pu%l5s+^20 z#YGv%#_ir}9V}kCC5g@#%CX(bN?eta3x+=SdmFp@3J$&78ra|KLY|bRm4qDye!~Z{ zyJWTOVKdx!v0SBgT}5ryXbXtQiuCdK9f!%h|Fs)R4|IHzv{xV11ol@|6;$U>=U9n- z6;?rtWtF&j&;~JFz6i#NS*NSJyASt5+~oT`PTBKLyG+1$EL)H^+wLg`A)SHesd2J2 zvU9Z^Z1n`phng!knQpnRYDO%tK*!X|XHZ)Q~Li5?k?(AgFoJpd8g zlM7VxtU2eAGc4v7N~eZ}Eb{Y!FJfsyQZ~aVNF6y`{kTGlFPj=jEUmX3@p(qMsiSAy zteu6j8k>_Rbi+i`_|3qP+)xaGpLzgTmLO1lC|>?ZJ)|@EbilG{^%NK|2ht+Yz0>H- z3vbvAABGz~Z`e#pIlmTbduBsGoX4!@Brst$oUuZ)Y7C#Y3SNnPvrxe*dDJC7gS*#JGS=pD$n3KXj;hc zNp_R)AB-K?^2Sq3A@?kjd!!6ZpI4nY`dD(x`W!kB7GiQid~dyl0#T{;p)jG(YF5G@ zJa*dGnIe_6-y|UT$Fr$L$ZR{C#c%z%jgQLvp0efJ@4@};NuOkL# z&rA7=Hm}_cmcrRTd>cpx-M)0S!$&7vd)xpjC8rXHdzz=5QwtFBrGNy8nKN1WnqmGv|e}@sW>-mN42m3WfW%|3RK3f-YYX7Vp}qE zhVWR=+1AqP{XEMdrU=|uVFil`k~G)c2_M7FteD}uJ9JBY-I^ddAoLJBZB3T*Bz>x3*96{-cupyb`swvpzHCN?6mczeBp+{fyt~_;1ql5)loOE z^3jw|==Iaeaopg{$wfRcy0SL+wa?m^4%cMf;?;) zkhW;uahR0Yu@NwWaNO#i2oaI6o_)uWL3+!0k#=VHW8pDPPUXZg5u+k>dJ$W8nYhsh zY%4W632Lh95SLGus8kr_8&tXLjX%JS)meO8ZdwtjFW~9+*<6ZlAjUGDTRyUOueBa4 zHxzY7)-xeNvpUwCKdV4hN4ecEmVWy&0wm{yGL06gtUIAzffZ&23?K8Zp3v!dq9vL^ z;edME&`^LHGaid>L5C8uk<1yJ0kxnjmvl)xDIFN*QzJO3XvJF|iL1rC*kh=?Zl$0Q zb^hVJy!NK(2MiIn&4fEkjtwK7Q7t%^lu>>sk1LmSG`x_@zoJ_E<%5h}+e#SrW!OsD zxSaAHCAUTBw(mKlL?$AnlmOGPZLf(1acKv#xUSNPEiiqql>llYldSwI9 zd*Y1TZ2Rib8&p~YJ(|~1_mT>=>#(kVl`RObgyNl-OpQtPbRVy=^W{AAfL0vF4d-0^6-`hnV*WEeN87a z6USZ$Qi90+<12jF(Ei<>VLN7EfMFe5ffX{J!1uUgQ3-5yA){WKUI=TIb7XPAx8`$t zm6)X&lp&Xpf;{|EC&Rz7@0Qi$FB+hO*-Oz|w+}OT*b;+=p!=t5-`?lEOa7JP7s%=^5+AMII$+pD)!hPAJn-!R{lr5=*9$ z@24-qzY+uOKv<)`krhK>4xUN@MzOT(+Vq7TQevbnqvI-RS>#zK=aM1wa^8TjZ32tu#1D zkRT5Z)-w{y7V`$L`fLJ~Hrp|>xEI7sbeQFEt>tBwlKaHn_pNr`&h^K77JNf6^XuYL_vyPc@L4ol|Fio#|9+j*$zLhEV5%*T~&)U(# zwSxA@>> zu8wyd>noT?vQm{e2Xw~C1Ymw zdVi&#L$Z-yY`G>y>dCc);{l3UkP@koidl%pXq04D!|+RjVzvv%cj8)ZdQ<1No}kWA 
zbtzqvqQnE9vsFEpXTi-OqSp{6LLhyQyV7;Q(8#MGD0nHe^IBZ5dVxl{e1_8~*QMjn z|6<+x%Txc{gmgq}mC33`PPZx*J&0oG4SrGGoZ}iVuR{&W3T==!=GwWV4O#Ncs zORrn}c%3@BQT; z;NLwdS9--kG-L3qxM&L)hr{eOl_0j9O8soLVEC@)Q2OaC z&HlO3JP9C+3A@39%a{uNC;(Z%q0GCZ8?yzRKJoI}sd@R{lXZ$CaCX-U8Gvf#l&|`(wFtb z_X^C;4mQRms%Yy5s3V9;ec;7YFb3W{A%v*&}M_q_17gpwR@Vszu zFQ%^3&WhI8`(Ep0`ArBn@@?iAR5DWppqqFLTbri--G;H-zdhpTvD*&6m_zvaz=3_# z)-pQst3SD*_B#_wD#{){V&UE%vm{VvG?%CKN!W&?G0< zQ<%DnraP~8A{&xTW=!7hU$wpV-Z)xSE?KG>l;Pz1b@Al1wHr0ZhnUA%uSV3<-^!Uo z@4?^&xwND-q1{|6QOATgt3f*P9=#f#UC|u4oRQP~v(LR1u;YdcSgTZ@OWyPTACi7l zt6A~E;OeEK)x`t6>iR2Nlh9Xn1JKAW?-isrr$09@KS8KzhL(Oz=$++kH#-_WH;=!XM)c-MZi|%f z-2T1r^ylWyFGl{O@bv$^(f{9BcLJZF=5Q$oxL|BVh(`3PM~A2x?hA~)>(v|BY|V&Jb#SKZ8jiuYYk zKVYe8@RRg+!vVD-1akVG zQ)ziSlq6E>S>z=fNH=T@>w@e@g{M>=*%G@lBKm{i%F*?>lbYEcbJ_QHOnhxhX`Rj9 zhqFJIRQ8YDdw?m85XT`jGnaLSh0bUm%5`|eKOM=Cf#?L>bs;Old%_N@71+9FBRfgr zSLTCvB)mU#LgxwtvqvXqB5XgjD;_Kn=jen{0uLm*;Mexa6oVfSuB!TJ=X48@;B53F zIkeiCM0gELhv>XFD%Pb@JzUU>iGvPK#QBsvdFkIx#S<*I2%gc`aG0Eq+ECNdirYU> zY>%#6t##{Cgrqx~5i>#bhM^_i+optvDg2I2BGekBHcdpSz-oNf0z6N@8x$-(=?Lg- z9WE=1i^79tvwMo53OK~c0$N(j{P`UkP$s(Gfxqd(|BrG-zWCnw5J^l33&h&H_fqe_H+Ev zmNt1K|3d-b#);YD;{83P$W2+ZwFn~}pM!{+1_g6)NU$A?>%_nuA`kD>>Vk zUe*SEhSz%7bW`Gi^LScV17b~U8H;^dj^ zeZdwwv$~wrv08k1byeW*Wccf;^TTj%-RUGq^O1X56Al_u{bgO+zDgPjq!t3S+ITOj z?i!UL_EXNLGf{fhZZMC1B&r-{WOCv4W{6=?NT$Db|f%A5k>C_Ip1J{tg$Nx?#;5JCVEnVS(=CzcvC3+ZA zp!z{az0Ur3v~+cDx4g)jT{*YzOEG+}>3O4tVTin;Y$L~qY7z1;O3NAqary&kDRNJc zE{i)nylD)Y?;sfkFD)}Tjp_3@is-&l09o|6$$<&+U9egXZcH%Z?lGh~1nHi)$2OYf;@Ro;sXA3xaid~(fc8OD(MZwFu_6~V&_uH8y431Ug%qyeW zPD$uai@f3c2v<>T6Vz#?u-S>V>Pfy=LF;6xQU-2A^JsFf2y>Q8o1_nOsAcLW6~k1~ z0~x56zsZ4)T*e8@8{bR5zWZK%8m7qO`_NMg>syR}k3(aVI(B$4jOR773Cq4m$d^*_ z^-%KMizK_*#~t`YB#(SLs8;5Jn)%cPTQ*a;pvHN7-3wyP!@^gSu$(-jq=+ocF!<@B zU|n6?vhH$1V%hMu%hyT;(9*?28tC{}ia{8h}%_%7DVLQ~;xd4qPK0Mye4U{Ke%$qUV3>X;%Nc_fihXaQYzn~iv zLe&sy0iHPULBD+MaBa^u+{OirxDZgIB1i;-hj~=Dn9xD|8%r_-64?Sww%mw~kgLf{Z_V8+4WB2VwRQQ;0b4jtx?#%Ntw9oJsY|$~=oT-~ z80-1-G$RCU<8i;%D-3#eB)DD>gJXkrprak}W$7g@so&MBynhcj?!m@(A9KyOTFccV zPlb+IX}ZA+$UiI8X!IoWk97n@t3)#m7pbT5Ii=87shQyJIfhXNk}hetVXGf> zf|cX6g}Zj$XQM({+E@>&=U1xlDdsG7ry68o)GsC17&Xog$gX14!;o01VxGI+9q@Tu zPGG~1&x`7Fd2gL7Xyy(i+b9Gnyd(aczJQqofUKNF(Id9i0@|_xD;=yVsU1s;c;-pL z9UfeXXKn{IB%a^wg7fC4!%G+YA1eB`>yKsHTj$J%0E*qx#qF};wcU`Tfr%0e@!ue< zuxY~CsGo*LX&iKSWA8y&5{zcIce1oirfMw0q*ix)L`|m$H7X4bTn${wGgusN2NN7y zXf4UQ;Vy8sz+t?A+qa38Isv>>on>SF(#AbNkqlFh<=}`*Rm?dA@4270qbZu-cKO_1j%Nsg1q87s z<#pq1ea{P@bZ}oJF-e1?auSTID&i1_8(O<7S32#94aq#Jgg=qBRB_DdTSW zvmr(a=h-~fC=fdf8VDfQ0EEXg(D1F5iRg+sZHKQ>#b)xx0cA-S(AVSf(IBShPi`}* zKMafmR_!eFRoicDKAjLEQwRH?p6TdojKwQoliUR;ZAxCHdWwpbcv48AiC9#xxs)m$ zn#s_F#M%Ff8)=hTs+a)=qir$M#h#0}^U7jQ5(XI|0UgOYOwZs^=T@-uM5jh}jMRaV z&56M$LM{wC$||x(JtMNBAUvVuzZZk~`saW~$Z*F0GfW2louV{a_x<6S$EsjE;ZaPb)7C zD$crowK)Gxy;;XBG{bw-TnA^9opDd)G>>ab1!C`o`K;2TsxdsPS39Bg_>F@~t#HHs-WjhWAVV!s`mCyVRuTn%3rwILwO!o3IlZhZU)Ui|NvEdPfM zgM1v+?hoxsXLeMzo8gu2d6+yBxQUF+RZ3Uc*b+#QD9E1E5%kF;-=bLTVKDaAx@umX zWuxh`Ve!r=GRVMCPGku)C_akXPE)_ua8+BD)xdt?H)y$k!Mvoh^2M}kOeO?pwew;Q zu+lk5ou=F1yQp9W+^bmW0W;*8vrV7ysw|I6K$lC6hg+2w z{`+RU*J~d=pE5hoi@$ENgUZ;etwKn%QSJ6g`=|^ocu!>8X5}l3Mco8uUb4q*wvj)< z`7MKxe(ii-7Ht0nfqfcQ1D&YJa0Nm_WFBDd@Wx-dYN2Dxx)Wh|Tj{MEGJQPh6{`I1T{>V-BXao~$|kX?rC@&B`-xzx70VCu!Qo>&F$3__dYw!re>W=i zov?R_Hdaqr##a0tk%@*|FrM+80@PIWR z_$rjIM~Xa{&gf>Em290*CqF#tqJz#CPRaZP8raA!Z8v4KD}{>@=i#m8`$lf}*{jP} z;F33Z#vT@f(o0Q7g|L~r>A_7wL6UjIAbF;(jBZ&CzZGsIxV-~k-WNVlv+%6nVVIHZ z*ZoNkb=x4u4X4qh^j7q#S^q-?@b)2tXm2n(KF1h5U3vw{NE>p6;J7$80+35AD&`jl%$Se%nY8W9;cTrzXtW 
zn5|HAxe**&>LVS7BKJeghrWM^dsWd{=Z%3h=xUlAZx(C;hHp9IP{pvxSA*HX7$e+n z5C(4L0}0o^MgrU!b&|v^cRyA{xFnT6cFe`#az&vXSW1b7M(=+!jlhBrh*^hE;9$eY z9+P|5TgvKhRXUN&M49V*kBtH=FBl2~&zc9ayx{Prlam1xebvtJwj>?s&IRvuB~a6O zjGF$ZOA@bW_@fFJFS+pB0Xt*cwy2xgSrsJ_=Rd|#j5vUV%;?2x@y_~R{_;UcJRRsy zz9peOp1+LSsJXxrpm@$zOsT%rEVdHyx$cukpL3G6P6Jq*BR2=gU?M$XHl9D1u_j@@ zkqfKjKZR}t!wtuarUc&>rhOu@LDGEIK37jg^|v8qbovVGNv-cFoWQwC8>+Sim0WCWK+4t*$u(X zf@=IE@%Uun7PHw*eFgdTR{k26g~q?Yvr>abgBBx?E&M2MkTkX9>rzIO zA~$)~jWQJKRWQpIWt1k-6dj9k6&@R0Jb7EgPvz1x%|Yso>)SB{8USSF?qkRLuwuXT z$75?FtVJmqi18e|WqfscxFgr#5bleq#K-qc+a%xjnDu$#;5mx9@Y<^)gg9E1@KCA7 zw)bPiJfsreD2Z}{R)h9k(jIeyT0APw4S3MO%KA^N z4o}7$T-DpII5Ie$Z^~{Gpb+@p`=1P!TXGZacDE{{Kylw=UsK=Aq&@2)pd&mM``W1U zZg*!bj8}OSp~sO7x_$P~ihJXDt84CSYLyY*!OUYS0a1N}C)zJ#x%t+5j{@p@U;!#n zA;I^0fZDXW=I>gwvrl6_HG$=`g=HCbMu7+mT~vt&2~b)}${E)x53T4m4LoD(b1J2r z=bgO4$2RApZJa3?l`HBw;~9B^R?|5+_)*)4%5#g_^E$J$B1H1NcX)H*v(XZj`f;_Z zRjlL*v0BY^oDo@(_7g*2RSZrXZP`i1hEvZHm9xm8;n&Pb@-&EFn~*S?isRV4i_-N1 ze_W!;)^jjgAMoy;Itd?4l6i8eYNA9XH)VhSL0{9)h1Fvs>9!?n(NBc0QNa$UmxDW= z!c=;O1&h9W&)XNeKlv(QZoQ~l91Od$=gaLd*68DCK56aRP|QKLEGJYPCc?wVBzagmXO0AVw=7cFAn|hd!IPC zLZQ1m!VRyvZ?6?_Jfg{^$WG?{_ga8I_oe^-MmQAb;eo#wCGDaPY7+6!{ac3SHe1#}d?|j|8oU_@XG&W$Y z3E7@lwYc(4h{8Z}+M&`u`1o6~`!GPo(_1w1xofBEQ+Bo$OgK$xbUrZ~J14ge+ z1Q;prI84Vhn?~}^v^vWqj7XhHlybY!pUs~oWSnNvq%oJCm40n;g})!J8Sc@SG@?Ix zM#`3xX0np~29q5nKa~w-*(tf(o8d29PPl{j1~kEnkm0yxxvl4`-jf#d76DaET_AHx zle@P`iPxIi)12nuC-ly)^45xZr}kioTuMYYyo`FTo{(vSK`*FS^InKl_AlN}-muUz5M>_p}j zHDvJZMZdpd49^#QPWcLJBHQ8{IA3MfFqh@O*2n=CNqD&}`kB$+i5)S|b9y?y*y~jg z)vYi+8^gg4<-hsKIJa21wdC8)p%O(VOl#okRmmUbcGWT2xo1VPoV;!U8iQYV;LuJb zTmpc95ZHfchBCf)xqA+zzsQqP)&*}BEr5%lJIk{$zhfkEK$5OOK3#7zTvwokOj|pV zXvF}B3{yG&*C_ZJiv!%!ziVG=^pPR+@px=XmMXx?eKL@U_;!@{ytRWQRWMtmW(6ED zh$r-b!S#4M&ubYCGN0uFg+F+w_?TlJ_vv1*J+Bu~KU{Nu7&70( z{`!LohN3r61*odJAOir@#3jrcyMXOU^j8H56FNtas?A4Q^i_$ghD|6W(9mtf6dg!C z9qI$e;n+A_p5H$_x&QCYyMLR|alzlXVy7~}P+YZmE9OSI6(b^er1F`fb0K!tvT(I> zw7^9dkGve;MF~Lit7Jsdo97{Cg{5VFBXfdwZb*^GyWQ)Ks3P%-gXPTk~d^v&RkiBc=}QJ4NEd7N}m{~Ia>h3vMh z`8i`iJ`*=r!(%2qC{9L^HF!5)$SK0L zz*IU=c7reSQvnT-rOe~(?BCWAyRkrVa%4pZK~$EFdy+p`5hPrFeqPHee}T?f!_~ya zbMRY-Em{Ki?byXOKu1Ci1UllLQ}pU>ciWnf@pZ)58h#e!!AcLmPUH`ed8jPnSiih? z3Wl|?1*TeAo9LZdf3o+qTJk^vNONaFzDZBNHlEY_aN11r;)n{$r(?OelEFejYWR5_ zj-$$ynE}CFx@IcgxU9&4^oo`+-4W7?lh^EbII>4hQQ*VkU*f%$(Xi3A*QG)@FY0{_{g3iyoXOhQ68w@!7$ojr~U}$GoRbMW9X)`+) z#Opn8C0dyKy(4j9Xx6d~lU-9nKRcBBRpGTBUo)(5s@;H&JR62{1k7)j%cj#u&EEC3 zs}TIa_}lNB*?Sa+!1dDzaa@7Nb-+q#F7uW-lc1Fm7cBNi% zb(dxKiG*Tz-yIFoX1lqKWz9KD+l!RN>6GAttdu**l2hR|c(EjyRNpHUHpxrpb=*pW zLZsFFOu{DIHfuihT4tR1d_s(UDPTSzs|7#KL21v%;g;g^YM=}06*fWD!uZBfuOQmv zqC7qpY_MhVijkS&DHpNDHe3z~M0v-`Ae7EKuC$Y>0zz{8!HIxL`%c4WmTEHDsAeD1 zMx|7h6T8cDOe`I{tTXp294zJ?kTz;zTd-2IWgzbGY$Q+;L=rCYE+ch=iq^)Q(8aS! 
zJpMa*#3bIgUE9mg${Hxw^2BZve1Ed}2q-hz%phum-L7}IzMy7iKAqD90>Pw+5du|U z1)}7pzNPJZ5M52kano!HkbJyGO?0*hQLXc-@vgRd86($EK?Sv}cwY#FR%-{?PjDbc zLb{$BT-`>-Gak%Sf zw}#A)fE*GhGRN$)(cZ9aEkQNS*Z9rx7aS+kzpr@xuKoQz5dVL|1M#;t;Gz@hdXkmA zw9ljiZ-=+SmYT*i5QicbV0;w<-b%oLOYrcHgofdoE^uoMPVEt=l*!ry8TP`P$|$Z# zp~&9Q;&JEg+aHd7u`C+M(XMVbJY(jqg`c|eU&r2?W z-?+Z$s_}UQ-TPdeD6-Fe&weq4AlFy<2cNtClbZ<)lK9_UI3);udl;4Lafoo*R68i| zB2r3=;uobfuJ)%$a26gAQROlxZ?%}?gZ&rcxBkYaKR!=1LP6a}2ga=F{uAS#`x{h%yE(2`ZuJ>_94SC%@~v zE~cCORPvxZmpX~#3I2s)iJ06afRku0*KJaqE+{Ws9N^`2)4m-9GL1AB>Nb{Bl-{^N1l|aQbM{c(;x%*cD?_ z8#(Z1upz}IsjmcyLodP0R(uwx;A!?6KM&@rbAppxn`MQMZMhKJZiL_fiLnsto2E1x zrJ-oG;k+#xCh44RT!9QR%D}&|dpZ1WQSj5|Tz7}#EN@0L-u7CDtD}=8&Q+=RhOL{E zAf_iEDWxQmKIJrPi+1Q7eexUEWKW(_@StioRY4&3$iq>>>3LRGwhz?W7IY;PH?Hqo z^=@!^*#bJ_(}t^u<>1whL1Z^2Q65jFUuPz*D#<4cyTP55SKqhBC5UTFn>gT>+x+?#fmz{6r{6!+h*+w?BW+B}SUgkD55?1Y) z9zVJ{_MsBIk@9}V@@{(pq)aqiGK{7K2*EgN3jGdQlrZ2VZ0v}4$1>vKhGH@)nUJ^L zW(I>|(PZ+~Xh!{A*jF3dJWCf$fAuzQ^;n~;cS>9H=V3J^tbMRveJj&2Jft4(OQR0A z)!&KJ|LOC*|D=jTbGm_7gx_IhSprBxuN)xW)-B+rGt0^2`xH{N`*;GvK)_^Vaaxa3 zW(M-^?{0=0+PfqUuQ7!eHON~xP5cYfv>@DlRk~_Rm2E$*AP|%98!j0Nh3l069Cj3} zM>xsw^!9(Vf|`448P`w_Wj=d-L%`&;Qc3=~_Z!CR&m1#4GRA-0mW_K+k^9K((2AsR^uBmIpTsZ+AABq_;gZ2r&m9^7-5hQm3* zFpX~p6O`GOh0z)hUQfFZ#>2Q1* z0?$IR_=kI^A=>kJ9$w(x0oBSDGb3xayPhtB!vle459`I77Q;=-Ckt`|8DR^OcdP2?By3e@8d1KVQg2cMIlTOwhn=5t^ zOT!*94|y}F(jD(SnrVfINE2#kBZ614%Um!Hq+rNE_V-+lpGD{2_tpQu^*7~{IBN$Q z@bJ0q)h{jo9u{=mlBAq&XeqA)kbT}4lQwv_o~5BGW4}BMC9z08GbfkcsfW702zVVg zX`3NcE|aOUhb|pykm*6L_iy!YP4N1fvum9h=5Dv&qR~vyxxE%k@&xH9PCMe)98QL- zu7-X>{HnwTEQJaP+BfSM48r*|@aNhpbA6-@=ss7d)0pZ_!ge6Ah&!2@JCbAUU9|@h zwl`>as;JL-&ORwh@<3{%T+xcYAxyOZjmkBzly$xgOfzY8?{>8z;B`bV6u&%2Grr5NyR+;nbru{L*0QASoaPC~E{=F!F=VZ(RG0 z*&okzt-aNBmyS7f=>GJ3py~fh9!@BYVNe*8 zUu}*%7%eq7^EPBjYj2~a`*<%K#K*7VtH*cTXlI{F=$Cn&k?)qTy+dga=w|+mF9}?k z^WsJ*z;2(2E=m+dr-9y;W-=P2(H}pQC)7LAJ1uc9&@iy>XU=ZY2L<@e^yv>P!GzZs6B zv3;l(j9J~L?c#XgkpxY_4M(i1tzI_d?$qK*^3c4^B9ao>{YmA#_2q;_>V}a$XCkvR z+btSd)78R;Qsw=0uPs0nH7{vCXqxZ3sGjmX6U0l{ehj8G#F*T77QQ^`W_AMibHljl*fBAkk4K9vptZ;pUp+U@PQYP8hT3$<2+V{zb%<5F7*wla`lUE~#vturR#p+TJH4 z;`KgWV4}VRQX9b3A&uU4t?IXCJzqR}iJNBt^gaQHeBV-FLjf~6)r*-&M!tN^|1JFy zcFfx7!98p7AZx*P6NHWAjs7w%i;hqS$<$?8L#g3X9(cCpv?}gJ93m|DS^SH*9@AyP znIIpe>5N^@uQOY=<3@M#4<+Gbs^#0?xPr_C+|RNEn?3!Y-@pAeI}r(6-S-*x#8|Ah zAC>a2-5Du-KR%%3-dI?6!Mi7r3Jtgfvgnen=H=5i`H@SdqAwVMLUA!*};mX(>ttFJ{;y4c}kO@1jf=uyREzaa=AA|NVE7L(@kd5_kr!zv#MZ6FFLh@g}CjA_1!UmKL7ivCb=!)V)%`b$0Q( zyt3J<6^Ro_YrAZ3)ljLWAJ}QVIyjY8^k{@+=!mO9S~k^wgrmnkwPhQe+&r-J@vJ8q zdA)QYjv9C&P^e(Dx5B5=Q93EsJW09*d2D7thBpWGtQI2>vo0TMv1(1)_;cuJ*N_v3 zl9rD7%*U-AS(vZVV&t_R=kr&~o3P(a&+5&&`_)Xn>INP7z1{)+o&_@J{;6@Qs5|w* z|Mia9PL{fC=&K%HP9yA(9FaK>zxFBDzQ^%REfyw;$HB&J}$eC-fm&d^S-Nu4- z=JV}`HISW~J6Uy#p}3&0v<5loy8rRDkI+7t{3xH98`E?pv$QEAVGL))Po?xw%+=>lETU6RK~*?BDL{b>OF4kPB!`ER0rEYYY;Z^NJ~EA z=Du|>PGXm3eK3dTaye}A9$`j)Of*`~sLh4GtBO%nwaLQfN|lbRSW)I8(!jym+D3zK z;$mOQl{!vSB+w2%_1lDh$WU{8T5KA0t}|mk9IN-N!A{BeMg`3q1*@s5Lo^lh>#bMrebyhK$$qaM$(z6glY^iU-<={2H4KuSOeN$4n@06{|UHhPmTT|)^;NRUtynuT74 z5PFf`L3+nO=YID+p10il-}{d{#(Te?F<>wD+Iz3L=3aa6x#pVld2B78z%S@Pl*W&J z`@-StQlwscBqWB4X&^OY*Ken6$5t6MARWIm9KZ;iJHJ5jhTE)y6~kL@oQ35d@lr`F zQby7?P^L^>`@GzQBxG#pm=N?tr|P$}0XcF^?J zFjLJcj||iz`h;YE;pVd`Cj_Zh4yAtelHgx^uB7O}-%N{q@=VD2`lOKB-ZN6(l$Kk> z1^g6p+jCCwT@l&~ww#rEsKJKbMq*oWktCc)vWEKN?%5GVNAQe9d-hr9irbC{YSysx zsUj)-E&byZdzKm=3QabS%a#-MB~z}bZiw8zeB3p4AOrCber^Q?j?r~G$ zZj7X$mNS|Q!p?nRZ&>V#83u?MogIo9MCRIQ=)sK?e+#)5NXJ9UG0qyo#=9Z;OFH?9 z&;!|z5*mCGWI+@sBuhYoY7;)HFA)s4!QqB`@CxY^c^1fI)S8JOXPS@&3RAuGvsuAV 
zjgjU_GIOVd58h!ah&aGAKUlF(ROfF@E;e-Xu0f~VY%DC5NjJL~z#_tON%9uvc@`H; ztzNB6p&!f05H{}_`6TUBejh>NjOs-wY-CL1V2WB zxBoaWe&0hM9IRaLfD(ruYYh%`pUyz#_OA;m!$pBY;J*U9{o9FW8N;VUy0#(>S4z0a zb~@GLd)Jn>QYigvC*7yN2CGBF6=tFe`n}K)n24A8#av)14U#2*&J|*s$>UEhIug@(jIphZ(jf7S zw$k%~nDFKE^2q=#vke0KLFK}fTG5wTU$IeDCG!Xl#euQ=yKk!Bk0SQvx7V(1bU?N9jz2Wb^~bG$yPALH z)BPr4_mtcJKM_IiNqFCp*DXhX$1e>uI=7}~v6&OKz1&l;dkhv&1P#81mArFH=~MS8 zgAb`A?OYx{LDKMG*K~j@%rOI9p!4D9$!`pFH=U{PtF5+`emWfnJ*DQK*)hyqm_?*Z z>=MV-!8MJ`H$zGF?eURsqdV}MmPB+)W|tdu*%%p-7Pzh_&l@Bw3+tcInS3)EtI)pP z(-ChnRa7=2x+QbIk_#6u{Hql@u6>k+MR^IiM7O`qy7hD|6I4-=FMlhY8|qIQcaquJ zqEn_yDs<^dGQ#6%Wr?v)+Y-|95Ko0`iZ<}#)+$V4&$ZHkHctZFz!*2mY?mY{X$nV1 z?AY7@yPEcA^g|W--$alwx6$E5e!IZH-KrGFG1=&854q8_kN$0TC8|mC?8Wm|g`4MD z6_X+#)%yZ_6l+l`Ocr81Tqoc-+2D`$XE$%P5qui?j2sGkZn(I=!Q%(d*O~-FwOw>Z zUm&er?sG!0f<8V_XeZX}un|477s46+7{iS!8t4wq55t0iuBk2MU3%-<`;7tj@rYhV0B@LjD6m7J>0-pi(!A658RIb zkmZRu!bZeSb{2WGdj1}HM-Hf9zqf#;z&!W zZ?+53nZ}m#FT-+58jLr?2(T_zHF6TQA{{#RmXJCbneo5c-SoQpf z2Ms4L;L-qZB60-Vk|o1ieWv8Xm%_4C+f|?aRQl{6zB&IAA2K)Fy8>p4S5)ode2?!? zs5`UN5N2f_Ht3*fx$N? z4`~@ti!_C5Rj`H?PSnsZE;1I5867?zi`?VQouso>ms{W*hrvvV>iqsCcMDKO(9fnB zyJ}4_=Y4$*^raCV4c&vz}ek51ei(|gna6S?mcWc{5hl)UcTgjrbt&43NbP7 znEKSqI8Qc#H_Z&7FfMEtTrdVZy&?17qjMOFLN59}KAk^mW8TnIpsQUw>bGwEJEn&y z@PV(*&eC`U^$+loQ}B6X5!JLvAB#(EN)&pC>dcH0aNZE)a#B&^B<4o%JZyhe$9Y_{Zkducm7KT)$NO+v`enX z%drP>@t*`qm@Q4SuD>Bu=%=hn=`Cexk-@6QCmfT)rS8#E7f0v$Zd z4)mXy$UjxlsuKX{!uqHFfHf_9imjK?N+-GFVUlhI6{G@z2(Lew8V`k! zi!SaA3ClG5+0LN+AYQz8(D@xYL~QZxiRR>t6-FGWy4 zyT>dU`4@zLE7!CiKF_Uy1{LNx-BItHlTQL}l%*%#%V$d)rVZzKj2)s< z5lFafag{W+bG1x$)Q8S8dAl3RW5wR9#T#-18k5WqwDMA9=>W}mwx(kWs{oimNRcBe z`cCRVhSFHRm9v{cH_Y#ZSIwQukx&F~c{9wGAg{&*ZY`fnSxVVHzu^6ns$lvc^iGW_ zp5O{M1rUC9Nq*t5G)c^J$3k@$eLb7E-GOIXcT3Wwtgh*J*mfB<3e`)u?)zbCXj6TG zvp&~U^kS{ZV7@_Xzq!b?Yj@G2vFL{Yz~d}A0qeF-5=1r!6vqK3u*n7wN~^8lOU2=Q ztLr-NwU}@jix0DQs3f1M&<}&QHY`kBt2I5zz&P9j%ZC3;j$#{6wq*<6QmU!5ES-0U z*4@e6a7sfNaUG5}QNyCfSZE;oYo2e1p5}i0t8z|HU2FYY(B1u#!&TTHa zdME-zb&3IzBkottL`#Ys21V|yQO!if9Fxx*?tlhO2~anhykTRlIzT+eAm8CJ`Du~+ zYH;?rXiB@E(A&_insut13g~ri!N8L5xw=Uys*H8MYPh_mvow!sMnRD?He%j3{YihT zb_CUltZU*NV@Swy7{epQO>S^I?Q}OlgroA*Wvk7@32TbuTD{t0Fhx-f2i*H zi6R|>iz&}c_^9MK;W*_>C%x~s>>;y-C~X z3y-k;6^%QQT7EO~>f4F~kz}uS%7t?8eSf~K9 zUzyDBWZpol16T3N)W%7qB_b-Hl207$u#sA%GH)`RhQ9uXNA%x!{43(ge|pv&@TmXh zSo3!V> zg{%+X4BiB}*Y)W4N)y3DGcg*B%@J8=hYRO=)_?n4O8>=M_fhnWKht~q$bQ|F} z(rSB3RTO}*eCKYNQG$Ubauy>=W3P&n9et##y{f9(IsJv<9$h#_RNp9n>0Eh-2%*dQ zuAj?Q1%Q^4q&rP>wzx-3kq2a$kQ8G!Jp&rHHftfvTWf1Ga3#%d+ct?n;Ugg{Ajk4A z^(9akth4#n3#o548$dk*!gsOY)fIOZ4B91RJdFvgoRr9AFmF`0_Fw~$)zh?%IYaIR zpxq68I61mT{W+I-q3dEJ0x$?(sbxSp%Fn#lk0Zu0!JY05c(moxKYUl59zL^i;g!_2 zHXEfRsE4GYtqPr=*e#=$#4Y%X-`8k|zTB?|W#K(`3SO^2`WE*K4PP=9_G2_{5;CPZ zb{El!X>8QF1!;BGGD^n%(rY|b`os3O26+pxNj#PSVh6aN&W02gmWjZHWm?ZP+ceMr zR9r8Y+8W49q^YAw zazhbBR8cr1qd~Mo7T#gCcq-7Zlq!&4JLKyl2+O|)%peu}&El;cO(Xjv`!ekduoOZ0 z8>qZzKmGd?L*JZ{g)2Wy8rQut#b#!xyde7VuwQF0Kau#)K6-aP9c|R%xyVMr`v^)- z-^#PgCFs_g$k<0&sR5MT$6Q6X;+WkG`@BE_4^-v`sby8g=6K^J?py#j7n(usr2p+y!J2&9WGFx{IzcQ>%#dka8kFo}CH^+j9XfvKGqXkvSFgqms z9=KN#NuK7=Uq5B$xH;kLUh~e54zsO-v>J+9%QaASOK0O@WT!Y&R&h0t0)d2zrJu{@ zog34YTlwGC%}T{Vjbt$x{GNM8K?N`b_y8jl-fA zSWKhAaZFeirezkmHPf&4q{}x-l;Zbe_26d0u$kmxsM5pT9-y_QP!4p9Xh2_zEwcLf zF)(p~U+}Hm&xLxAVStM<+U5!o!@W}4HU`n|eywy?caKq~qM%!6 zvd@L(C!)oNV6$|gg5 z1+LH%ZdKWDo$z4M^-$z&nb1mEq@ti;>Um&$2$r#PMipe1;_@Czy|gZWX%a)a@OEqG z{Gu;lU&LcM1nmujsZ@Kg*U@tjlGrI|0NSPe*d#+#Ue3FGel@Fz$x!F5ZOs?gwj}nv zs(y3p)a|Hb&tl_3Fddt^6OqHCCTR`ZShf=<;F*MH-{4{VJ~!{(|3E!@;Z$^{3FR9T zn_p#&b+n%EubGSICwuNxKow^+E#xKr6`63$#6jnzH#A~?45gpb87$Qs*TP(~lZ)@6 
zr=@^AR}ib%+66KNwL(Yy-1%~N*(!f;rZ2dC`r)ETH=r@y+i+UPo~EQp1LQoc(~_B~h16~=g3PXJ__Q6eAM9I}nxHOZQdKrJSmTGy6fv8EK5?X6 z9n>x{Hnj)@S{wY`3;*WerhD0do3Lkow`UH9Jh&zm6XI>HV~-P@Mz%&Sq06*o2cLzc zy4(Gl*>2|~DF731TZoI)kRUS+jTVm2|Aw*ak@>il!j${5bdBppo7Fowh3%pF zE?RySF5fzb&zb`e7l}}aj3dm!m9jpLJ}^@%@<<+6F$xTjS!&_4Y>?!MYx#;QzFu9m z$I@EMlru0r{@d4$55t?|PdC+w--gPtsOFNKC4TiyJY|#uSgQ*~z3vV8r90ilT1dxz zT4qe)3%tIRGIy7RXE|vu z*@n*Lm6#x)(A6I0Xt;7D;wWP&@SH6y_+7@+nYT;%Ny%q>l#HCqjWrkB0=S2*=COAE znnm$4DI~+fn)9L$o1*&&0HE$8+&O*&ISHre)RK!GPMK*8@7eGsqKMCBW-IS&pP^uhce& zB|ALZ00Tdk?vflZQg8SL5KRljYrihy1XoH6F~OVe9+%)-PO1n1ntw4YeWg!JQIIX* zW#7{^w_b%Pot|ZM2xSy}cB<@pQuK4X8AFQo?-La-Uwrve?QopboPnKU_rLuezIWp@ zO!8va>91a0u+zifYCTKXjCC(z4B?&00ZxW_h;}+5=(ZT%`A8N1bA2UjQcDvsj%H0W zfjxmZuC4C0E(l5}7}x4xpslf*;gC!y2T5pPrQgQl*C+l8oYN<18ih(TbYSy&!LJLD%Wtj0`bxtCI}=NB8p(3*Mq&)0kv;KWrFdDSiD zLwH}!Lwc$+f4z(Fry>^0`4NOR`Y_s5q;l9u1e8|%UZ-U;Zy1@x2xa6NzrZ3p{JB_T zM{Ko%z3JkH*_sz1ZLu{YVuGHzY|xo(WHyGJt7*l}?9kMT8dBS7dnNK^UnO)xXQYlS z5i;oM{g>kw&z|fWG!@ECjbT;vchH4pNqXEXkLJ9p32`h0&?5MtIg}+?EA;HP-0ol4 ze(E0-uO~MkTU`}=SjusgxuYh$?uQ#H0+96fdSolX!osI_7~cwF_wX@Eygo;|;9hAi zr(HnHP*G}G5!YoQ+6#(o9}{$ySo9nFd9iFf)1xH%Lo+&*Y0i0QxO)Kx4EPeIAY#_G zalUdq`isn{OBuX2;>Gy>Xhu9Q_?XM$C;ZGT_*lp=Qx(x@)SecX>?UG(@%cc1C#mfu zZT32sQ4qGe?xz+^i5|4Kd9JmzAae=Yxn!WuvjaXzu>Qu)b`*i7WxS#36izc-eDu40 z5VAP8aJ6b-;>b!1f9{xXK-qU8h`A?;*44x3Mg6?jm;$?Up!Gg*yXQnO|GGsMeA}vD z=uVvo+`xDA{i!~8BY3gvA`?}t!u$x9MkitVlihajd|hb1@IaQjx9=JsRle26tEa&m zmv47Gs(9fZeh=Z>m3wU|W)!ZNw>UeVho#VT#%JR$mqn)mj`gTzTws%urLbi<@W?~A z*q-6C=7-;<{m*{>75R5(PyN{)q`Gm_K)Pv{q`au6b(#NWEqQwp#Zy;Aym#t-GhOmi znSQsn??9eLW(#kWW%2=~oW>hxu(8&P99O{1M*|Ap zF}X*U?#U0XUW}OI6W+cOnh6Ce3rKh@*C6aN5Y}DB($X-EP(|(-7Y`kd^!D`qonTKo z>-J!t>?S1j(=V{J4nL^TQvy3hK`q{gJ>Ao5{??xTsss4(aZ3SzQMF}f znP({zZ+yAhr`h%xM=uzU5@IEe#W;O+dTa;%X?;jZ?`f(70txei)FwX^-7Xo)meL(h zjc?Q6=Z#}wfz+Y4J-q)C<)C{dP4eAprAD6jS;|L|?h%^X1Co|7mV%Sajq5~-vq1I2 z3VPRXMdZM_0nuhK;jfYw#aUc86Q7n9!P89DrXJJ7wvhx1P!Mb5m{MnzhuSFdQ1yeh zhRxPteWRnT`;XWkNg{-WG$a&t%=h9DMP3)oR1ouo-U8Z;x-MIxqg3pSqhSnmkZT5< z-hV*Ly0Il-in~RiiO80ZEeQ@q#gt?jBN8n|=Nr<_kwA#RGE;0{jhS_}u6vvA7WzZG zg^nvbQrXFh+;{%AUM9~G$C_8ZUtK2(cW6z6N;}VhJs_|#7`T5QZQi`|tUgx9(?lq# zWyys_XZ%ZU8_8kuEr$U-<5t-tIvr&V zb7n=gJqiE`Z94@XXg<(>)7K2h5R&<9n^r9@Y@m;>}9PI zruB1%LxdJ2ukzxO$R@WVR1lNenh8){DM#ViqGD~9X5DH4+{BT-R)C_~v)S@Rx#6=S zTta4NgmrSQo%7s`v!5{jC3R|V3>ZH2`28fE@+E6nJL7%#7l=gGb?Q-{I3srFFJBjJ z6Q3x2d~9l>oJ)ocS+pnU>zm{4c?uj8Y^+FMMw%?90GB&1o{ zRt?%C26i;mQaM|FZSggdgBNL6tWl!lXmQ4qK}OxvW7WuWZR$ z4decb9VGwQv3s$vZy^~N&{QirlqxvCG5krtiw9zDgzdvxrsMGrY}4HQ+^Her z1|Uf-#r0pkOCQq-O}W}7hNEQKfB`YS*I>OhJkR5gjKb)t4vdNmJmCJ1?OEjhf5quu5lRTU(PDRrjgc#}LSuresA?Dw1sNY&+wZ7L zt&=<Bp5~@D%=|RnJ4Gs(0e1PN~y2EL4(R9A|?EM%VQ&r>jzD{-IN)k%M!&CVSKu zTJdtO%>&T+TC<8l_I6xBm4~$dS&@dDXV~J)P>xLmi2Ttc_g=Ui;ZIwf}41s|Jo@eBsvCg05zDjJmWu_Y9bv7FqG! 
zb7>^fxDxsWFD)fABz(7c=M7hXlP$K+6yXeZgS(r5N%n$x09+1kFw>YjpGG)F*vX+v z;S%keomUArlF_R3MHL*zK6tJjcSq4~AiGfdL6RS1kwrd0ry)ZaHvw|Us#&gRd&yZ* zSWqv0%`1N|D|bq#UV`EmxpGy2H?i-q%#6%u2dK#L!`ei5MeBLOwh zp$CCyhT`$2zy%%h=S%!pBcvBSZ@fAY+$<`U7%>es@WU?5S?l;BYYx+h!}x&MnU|p* zk`3>}-RC%CZ&yt9Y&kpg%12KSpGR#Ke-?9gf33bml9eVN>kBd&faSRn;TV3qWov*7 zp@rfyHw!&_QqdZ!?XB~Lz*ejH8&%kHD|uy#1K{Nte!h5*ZsYuA^q($a=~KuDs>B2DpzA%0^hj(C&nsgTiw3FB z7dBf@3Vmm6S!ZyIu_omV3Z0REv$%NUZ_LZ{6($89fst%=vkFf#6_WG2tg-lA^}Nj0Q7Ij7;=|t7JEE$&8g`Mm6ZtEW>5U zo&U@L{k`sgy%GBV)`9<1f9lddn*ihq;j?o!*D#^$(V~Snw^AM-FXbIUwC~g@6)iRq zVNU{$i3QtLk53oQll~lwqW3q8mCycO0(!pl^XfW1x9BsNIz&?Aj7gx!&id)RN|o-8 zFX|>jzv-A8EUNM7vAX=+`0O`db^IIiiYJ9#cL2DwNbRH~1Caob;=3R%T_JRwyjed9 zh$roRNc;OX{zcj3ZzWXc_ksy?iwn+C)8kEJl7}g)S3idqCF82jo;2X3?kGGgNV<(5 z8o#23tH$r>tszh91ZnSxd1SrM7Alf9FFbjs{NPwU9loOFHM26<(dLF$f;bjMvCF>O zpTXe7`;Yzn5B8k8NSiCG6tR@|gtwd*D6iobJ4~#4TCO0h(7;Z$lNi*b=^E+tC-NJ& zNT0h?>QI)~x+C)rfYcZFMO*nR77?T20V&c-{P8g|D(sfw1%1CSsww>v{#E7Tm7O<{ zDLn3~keOl0!L$($I#x_A`1LWv<&w95GadXpH~!zt3G-K+52y$wNJoNK;EU-EN^rKN z2&ln>fiWVsz`u12hRg4OFGo%dlwADI@L|lNfZX^3GG3-Rg(B1`9C|eVUooCSzN`AuB~zWNQ92MsKs1%T*Sr|Ka>y@lL>Q{! zyt*Y@{a_RHC{rig@*5qx1rGSmAilp!vpjxL9k7u7OV5YiNx9fdCvSGk_A)!Ob2e?W zR$yWSLQP|--$Fg{tjjD@YubBi$*036{e^w%(&YXN2c_1px*y+6m)b=X2YxiayVdnd zvCOdUanVg*yH8T@In7+6M}wFuXynfM_1%2`-$ro_hiAPHsdpLko=wVS zU*K|?bydt%R-f$v4i#FwhsV}PPF(&&xM<;XjP-kln?aJ{BMS%0G7dlb3p4v*zx z1U@yT?`P=D>2~jz=>E|ol|S0$(K=P{diR>rc>AQ6LSSr1n%^J7a{kdSo8e`i@NdPl zCw5F^>QY6m4cm^_QGD0b@3p3=bhVe# zeytLC-ZtfqDqDuG=qO_m>3eK_-%IE~$cxsq9v6Z<7oM8Dmw0 zd{;HDucXvZTikGQ+#yYkDBGXt;!^>`621qaWoH618e3L^O|H0j>a|8as>V> z=1Je9^B~(YSQ-VQ<&`yldBzfcBEPAaq-tF!gg!lM)@9VGDIn;x{ zGw9A{30TlaRqd;PKG1powU3N2zPIhtM5o11?%MR|Hv64h^mWXA{m85Jb0s}mW3^Kz zRg8pnJXBA4P+g2robw#mRRwEH@$^RXP|L($;QSVMymNTPz*3D=46L6dc#X6GFTo3C zWo|`I%(xWxTCfJg3H~Nv28C+`J=?Gfq(m698zi$3;Sl|dp*Z0`vCsbgZF}lM576|} zcGV?YpXX46USsP*+&jlb!O;r`#y)1u;jODRaVusijo|ZLp#fLQzisCiJJjHgA>NpX zDKl41T$l+vjV661)ufeXFo-*j^Y=St`}a-!i$3iCD8}!5a`!&<4JQZNdrWaR+y)?x zk|y+kkX*i@Yv|;h2Y3*t$bU?{yoAi>a8sRZ% zVuMazG3(M_IAGNw^={dpgmaPv{Bc{TB1wqV@tM2HX_{e+)0lATr+yiEmarmAMWi+8 z8(Orbc8v3No2mKs(%J>*?+kOTWrvA6KRsWzfiGRTlsdZm3LI1~YPg%Oo4gy|rQaJM z0@ilY8Ge!V4p-b76HZ8Cj9K#)^iqX81>9DWW=<>%xakZ5$*ORJZ$7N|;-P++9{+%y zG+0vl@Yk;Uw=5@W5tLwv%_qq5~Haii`P`~k@emm>1oj95%B#u#KJ#i-TNokJv)fJT+q z^mg4Bur!|tyLqdo{adH_yLP!w^t8B{(N42!06iSHmIW#?ex_#U-MZ9w1`F8grNh?` z&L(~GtStJAG3S#mQK=$y^E*d^kx1>sWWn3S-a`qrtXe_jO^FpA;~M)GqdKFfT))`U zVK)#$g7aS^e9I8Mn$6usNyZrjIL?EP5&%B>#pjRi{@(5Xjy+>{^7FBWW))Wb?n6q&Ci-O53Ajjw#4-$!a zW=1F1Qkv28o72zF{=Sv}J^Ac($s~%dW%@sSN*q6NrHGBZh0sAvBe`n%_FyjG8D!1+ zS4}?riTLkl=vPQ>-8w)0nK9Y8cH9X0UHj*}`tR~uZ*TVtxQsa8VJrBX?$U#9{}h1p z&(jS4dcsevxxVD!B~P~Y&7W}Cg3=Zv=!5=V2!(a4bb}(ulr-wcGgKuO)sz}!jX32Q7{%E^28|&%6owEI{pyfEp9zW5W|7pSaiCwMONX0GsO*_<`Z-!%ybB-0JENDL`B`(g&CZ`)`IzJTvn=5fVS$v`C2ki-fIcB zGKjXb1`?x+T~5EO2u!m?!ZYT?nG)$q0cw8olBov3hHduQ;I`UhWjZ#-A4QQ1qqDI$ zOKIl#NkV*9jC8!3-lHxf19*1>^yKg@s{8xu#ZBkUgVAPbDOg8G>s#w9e zl&IP6!cpaqBH869IY1CZaH_xB-1+W|%7}$tt-Wd08g<_-)n=lJyRolkV3lOPqUAw| zr-udTX`FMWwNz;6iSf4dXs#N}`cjSYl?n{z$FLYybyK z76Po^=2Ec0F+Gn?4?I!ZSe;!?=SlDHlVkG_AgRTX6)mkgFe62Nk&Ch&7g7Qaf+vR|_ZysG!?ciAlp3+K!L#F+ki_T(Lwn@6)>v1kmS_j<-j4pwv!Ih- zX(*sIv~mXv@i%`N=1&l?Gm)Pyn1ODmOrBhMUw1FTA%r})XX^~gqfF2-V#!rGWflWr zy>YEo((wRpo;bMeYC#OhV0M%>Rp6{j{V}t|La!GvsZ8eZmBf7^`T#Mjg4?Wn>jg## z6sool(DB^8@-oHM65f2abO?_qSFKmo*5$3QSGN;-{4zZn)`4mgfwLuzA#k?^;sU%B zq@X7r3hK53avhtk*fd0%-D`%c_5bBmSNegGd$jHY-&ia9**SXVbiY8T5TCksil`qdyO| zr%lIJS(5Q!Xyc0p6Q2T1v<2r`rmxo97Y_P_H`K3+QERX(sVN}?9@!5IjRNWfCz`~w 
zFu&26urDdwn=M~e=aANSD~@$r6*fS5?;tP)vJG&#Ow8^RMA64fEwClf`+gM_X0Rms zZuk8(Yx?1;>_z@8c_X-Oyqu5G#lobh*(`H2T01Bj-a{U{s=DIQyKHrfsy6p!(%{=7 z*xf*O=C){os95!Uw$Ena3mGfj2t~6^*4EpisZ4pzf|x=|3$sg% zafMDq4IRz-W(QB7Sqf4oN3hifti(sB_th0E8g4J8XTo=gD#X2B=)2(inNZ>H8_)Je zM0U|37gv)TC$#_7-L0uxHD)iZ=7z*3PlejH%YN@A|8h%KMNuZtj`!94$8My&vHUs! z*2y3dOA+U>zMRFXvmK;>$|cM=vOwWj)^NOGKDc0WY+ZXUbWkEfjUq4Z|6!XCurj;Fp}*kxpMwFq4d%)al`E1WzYbw7G0Ic;d?%iV8r zMT&r1*f@D39&7kXcyGbV5wGflRvb~XO<7l2wyN*V)48JFgPwL`2hb*jvJ^6UbrkaS zy+Q8IaFmOk5>#q+);GsA9bIO+3YUhYKJ?=9xR^`5sD@K(o>j><*%B;!z9?OP7k}1dr zNa&l)656us_T3lX8HhvMxwJhl*7z?bPuWX%9UDR3B$~0|KvLKkEx1gtiMMt8+lI6e zY0qELsPEFFb{F5JG>1!|@-7wK$4&F1r7TfK2(EpAg%G0+qR^prEHY4S8DEDHPUrkY zT5`y%91(_=BO=1OHy*)i&vogn^#Q+V@M_jQ+!ZL-mvc_hzctw;(Me1*@qXJ-z=&1JcZhAluG*5Pnf9dVpdJi$}<>V_8>CDn( z5#WJBB~d%p8V=Ey&RBL2OBd55OpihtM<0TXbOxTd7-wKJjukhDvzG);a|FIsHYuZ| z=uDD8Ztmr9bt7M=_@2h;cWcJQsfOy#~9B%`xofBFV z6k1rwg@0;&!|yNQj;QgEm<8k?`-z#KT&?u1)X2FLj{ zdmvky4R%NYwFOo}Mq6722F7{$FXD8H^j~`p3L$!T6pj!Rmp0}s*bzeZi$+RND5|r; z3Y(u`gLoK?l4tM#I;zlMWHXRX=k{c*5Q}b^8mNxIgl9*a>8`d0EnO=%SeIPKV~+=Q zkFAV%TWGyY^WPaVO_oBkKh+bcSEL54VayxHE+(PLBmb@C~6yvi+Cmv&s<#VK_WVAoyO_2xR?q zl0)yClUVz_|N}} z6@S~Gj(d*>piDs0f@y0qfvbQSKEufy1a=~(NeCPp?%oBm_=mQvDI zpEz<0xx$tr5TI__d6M-GKLN51qnS}?@(CYsXJgLGzUVNkaa6zjrWTyM6`p9qF8%U1 zYkYM$3LW=2scXr6|0p^xfPBGGq}^5N{AXz5>>wZMvmM$+8syQ4P5h>oH9S_3UKFk* zG*fN~R-Y7c7Qin0^Rnv+g#zg z?e!3#Emi2!PN+1c$-1JSzGkj>7to|OhXXU&+swq;D%we}L5B$i53#6duYB@w>T3tJ zTf=5xQ-f`fYZ(6b$KL8_zvYQ`nW2ybJzG@oky;%8@sPZ=Q^ z^Jd;$Gr^QQf+7Z?j%}ptbka1pUgi9Jbr5eliB}ago->?kUIjRSKIph%-3SOz4Vz{- z_4<4jL+?=qf1mK7js!bjenyRgD4RgU)b3q>14^Po+T%`}O{ZeQrF4G$R)IikVrvmsBRa{B+SP49syl>iEKK z#-WAHG+a9p!=-iAgwMD3AgM-H_e-HkNtuu0SYU$l{sGx*xG?T7odS0luCgdIb-DY3 zjd-}-iOvS+qmUz=<}N=W>BFtOAox90N&5(~7~~20&R~e!ZE15;KUy3&y+bFT8;!dx z>Ljl30LJ`Go~M>q{XTn z=P(4BnABsQZXGef>>;aJX}DlGr}d~xIcY`1G_svYkIYU7-~x6DRIoBF08nbFkPu1- zD+7W^NoE?Z`A!5!$~81FqQ=SBrFRD>ucR5mLc3glOw}n#w=MnVm`=d$$MiRN@-f~v}o_hit4uy@M-Gq>>>mTKQ0#h!t(o*BQ!6)(Sz z2z1NS!rTZ%m9xsw{_Qy!y5&Mf1B`6AGTcssRXKM2wf5T zr!pN1ZBvWRHa3|ZGJ2+A%NX-48dTR7v$H1MkR+r%SY=u6n>Zwm2@(T<+ZIv+%?>IC z$^$s6A04fN)la0qGwebT4B{3!2@ztMEd}F^<0p$j0UZAN{;NRHi7dTZ#o?K_T~26( zOJ-9+P!rvpzYXNzfb3Oq_y!-jXK&_?uJqpZs_v48(9X$D`oY5V zU*+297~w##YaT~vyCZBV!}YrEqK53q;)V=*>w~{bz&2uhw8}BQ|FCOc#hyVtGzSvt zlKDA3NHB+fx}5xX1~v|(mKWBpT?2>hh3gcmVgHCly!c8E;?v3;EDs?KVD^esoTer*6nq$#)@feCdXUQ;A;JQ$j&d zUc@hNisF{&j(hwL!@dus;d05hGL?F5HXs^m1T2;gJcjbC02CaZqc=$BfTWLCx3ZW) zAXF0@G7|}fVLC^xC(jvHKCAPYq*d?C1!cy-As>Y^5=#qeZ$Gx}uFux&g|6eUyG+HS zHWcvN3K2;2?M(1f+DQ3kX2+TEs#H$vk@x6BEoNa~XIjaZuj@PmA1AuZs}P5e%XaU! 
z&dmm;!Q|M69HuQewgU9(2vDeDaShZ9ew!ue+(8bJP5v{HHO!&zr+vrhl%BYnePl%cPhjVW9^<_66n9n`M#?B z`QnuQYM)aemgb$WLifkM$bR#6{q^6zG2)ahYhw>vPsf_`dN=|nl;!J5fsC6+fMJVSJ6(T@0rhTj zLi~&tk56}Gd-bZ76}Zk|>u%DYX(LYIp8fe-{HOB&FhA&Dul(Qp#3rtJENNw}=HoQO z@Jn0aw4-@l9k7Lng$0RgvA_M!I4p2md$3Un9=>tuQs~X195h*3&!Hy`O^Vxoq;}Ga z9qbB^SDnL@#!Cl~I*W~@8`GuBVYBj$B3?~Xc{r!79e-bR-&UrNfzM~viF<*&@s!Jtg@LOw|LRux;H2hvd8!EH7><~rPmo|qQfGb8ym$qiR6q?zS!CA% z9#{D=?WHPV9QtkG0v|O<1cI6M8f|!uR#U}SOG;x-Er9^_I606AA(+&f9vsV5uwvxx zTr<3ta)#PEs%5o$;0gyrEvvf+*)KYNQ7dj7ig4gh$gEnA2R%M@zFstV|+eRH;aEtF6rP z@RA)O3S&}cq1~_NzI%qo+yNLBv3XG&%Tq=*H12Dkj`{U=HVUD^%Hw0cfbDapwb6RR zt$S_AGPz4+*_cF|PGk?pYD0LL)liJKk6)#lln$Lv#S9oD=qKR%UA_!ywWH=bp{$JI0XVN#k;Sx^CmGj z2~tp|7HjHz9@&Y8iMp50l3_x1^fUPmYWe~tO-vQYjq-1UOHwC*?aJk`e3-sh4&}S} zixLASE7&=?k|udAH8noCN#l_md+J85xO4P*k1kjrbsY?3E_I|V)xYN7YVrZ9QY8?D z9`ABH8|mi2qhBt~<7S=WxxN6J#3A>jxeISPlH>Tv2pa60ksWXQKIlDs5{8$g8TjK> z@$uZ6B`W99`S09S?DZhJCe;CW-;4`%_6&!K>CG9YA*Dw}&0;dYP|2ySG0|#orjwl} zaj#@4F4ZQ#Cpdgn@Jj(Sl|zl1quz_Ru&|iAgJvBLOVqwiVVXYt9A~0?s>j4WmC0vm z_nfBj-J)1{J9dMIubZdQZH{V*`!)*t4aj6G@pR<5PdK(q-HmD*RV$x6zx$;m${}zF zLgZ`+*%?2t9Du+~TIRJp>u zF%Q%8U;3uZtmj>P>YDc_lLHt3SuIl~TsMIv%t&l0Iz>u*)x(2PDsfN?$v221x&1tO zCnK|>a`{~x?f@`=?oLc_=t@dbl_4?r&k4Q!F^(PJ`-2UW1JPXPu?I1xBOAhH92^qG zSZtM4l-yhzGPpP7V3P`AVth23{c~*j>+{GJufrZy4{=2G7jEk9mfoLb^qj?a2rh4* zr{B$tuFS5etC{u5)EbYwX+GtUQsA2uEYY7v21B~y1xC|F0(ogh zADAxCSS$qq-~%8Li1Hv8_*;~-uw7QX5;w}O7e^v9c}0+EdPw_$(o@R$Z#Q=a2c8)8 z4>*+eP{~jU#fA=DuN@pmKrrJnZ3+|TKj65DrwGrx8ozb518m6d>w`1E+sCV9FJJ%A z-Blr~qG7cqO{4JH!{*4Z$rsG|$PiU2-4UN4iz?=f4ZP&Hr6=$tT^!)sc1LuH$B3oB z{q?ZPZV=A%b#GB(6i&jIsNDz>=%@d6%P&eCv6iv^!@lqIrh(K{E3# zavf5k;;24FpQM_|RDS-edTtxhjyGK}@B5rFS)~Imn0N1TqZI6at?P2V*tN_r&&EE@ zvk1OE{0ja|^icPAzW@_NUZC;L+lI<{XQqAmsA%o6M4O@tkG_NvFT#p} zUbZWC3?8srKSbyqhV2%jG?!QKB9OX|f@K8cnWZb7&X_SKMmX)uj+vGXJAgZ?Bk=eweV>wm$u$cV zgZ?7hUq6&Py5y>v=ZcT1Ex($3-R3E*M<_?7?yA23{m@0NrRRQpiW@->4t|XuVZV8Y z_}gB=)lEMr`f@nz@V52sg_!~XsJ8G*6Jt|xw*XUJGnc!3Ev?IUxNqY5)@P|_Dp*E! zQ4{O(Gn*61I~=~pCxVVs@Dt z>8$L8*88nNx6)o62+IDKZuv%uz;>wmSox0Fo+0(o`Ac*i;C{N5GhCEH$Ah8I1Q<3PmWX4Fw)m&EmQ<0j9tiLDlxga#!3lbXGZjsWyC z$`x%4>Ia1*fnOp{im!fKW=yIH8P2NA-)rwsw#hMzs~EN?|J#o(lUvAGUH&Lq7PmQSKDa8|S(nh$Bw zOlJQyWJu^9ORLMdR~0Fw0Na0#>*jVt2vpEQz--+S5QRA#pNVMWG~-M0PzdPhBJ7|(2*M} zO2&I{AH99ADHjd$ns?|_^#OZ1;Rn2xPs|F$G*P}G*7cA4eyEM?ujO< zW~zF9|DL_|MlqAe=0ATN`J$_?DaxFvzQV1jrB`t=|9y(Kr z5bw;Fn7{H{!QG*6&TmSbWK}Q%AxvrkzcFxe=M{oq)h71hJPU}+>603j^Db?ppGa44 zoZj?kA5KI`?76ajzOHifEVsFPmOV#_3(*H7qNW7|$M_`G4;8k+L)%Ef^pC#Ly}s77 z*e%B-?rPOI2IB1K^mL!1ScZ`LI-O3@3A4CSx}10OM8FT3qL;rZYt1Q3`E`Xz4<5f; z`CMzEJT~G6?ZA00_d~y8u8qV0Nzt^r_Dbbl(M{sQe80*{du?^hgDnk;KWeYo%X7>u zjP35`;SQ+>?ZL#X5!8?R>k3wN-YqD^FnaooDg%DHNWOvMfQhW=N8DeQkUAgE#s!~^ zru$tv6YxWb{G${#5wBuT!zya%EagW{=N9N$)e^jxTisWlnW+8MK6XEZcoFv687eVZ z2)_-`B8>i{rsw|ieA~{W!*%cX_~RCiytDp-*@WqQ;rk&Z#%3~BchH@uvw)O=Soo=? 
zuy5Tl{5bS`4ct~y`g$L3vX}{&PWQOJ@YlV{F5hHL(ULQak`}Hb0^{cL(s+m6RhGZE zKsMihG#;6k{S)LgKs3j`@NSonpj}*xx71@}5boZ}#nvwY4S`lI5^&)LRT+WE^7A_Lg!u2o8Osgmc+{q%1ig(xT3bA2FRkpf| zEhZrmzoc1~%4ip9`VZM$ZuC?5@!B3x z*efl@p-h8$#QR~qHHURCPh&|c_A{_Dv;qJhtYa@?*)AEt`{mro#wXD5ZTHO5qH9$p zLbLrrLKC@mXb$$k4b%07F0klqSjKF@QS%yA8~Bt4{^3NI_;V0k6q_p4bipT`sGGlF zSZb`vbjla-1h{icHVi>xAB~!O$fw(x4%VNo?10TBA^d6gx;q-%F0`wB#8x-|vGBqo zk-s@s-)=2+AJDH)&)?X5o`HEcALiK~ndzh8^3NJV(PM#(Hj zE0wL+pYGoV)~F|GBtk4IY8%}lK7BH&0!h_mI*#6?3P9qq^o`(v*$E~*0OqmBZ$G~; zhd5e4%pSacxQvt)?`oRGnL}hu2C$;G=Cee|>5{LVw}y6fn0QPrz;>&A!=iiw6P0#4 zJ;h!*H-+ZQ`NBVn=Ht$o2mslIr&Gq`n(aXAZFioGL~F=|OkInPt`feEA$IBSNo4Sl zsK^~Fg|4;9i1cGBIP$3;A6?;waVWq~$(qP&lFC0e8_Aj7#74Gzqi*YWvQk3pAVx|_ zJkREr=)08LtTCoa3J3tk1J`YwGW8{*VBkai$m}KRjq*0BQn!?mm-+G&*BLmNQ+)bk zk{IKGtr$0ne?^H2S*IME?FbpQqoF@LEhfSiL5J3-It_2GC9Af7B(HLHpE?mdWmY7{ zBjIP{6ccA^QV_DftPD$u9@&^I=c%2P#w!`@7-iTd0cB~*CrsdZdV7;uQ%*Lu&bh#3 zC}#tqxl5~$BPa;;x>_$alF0ioo!^VI-!oK*AV5|rS*}QVJ{oXZ!Y6ooGD3T9!P?$a zRS>IWNl+CxlO1&MalE@ zpnhC62>KbZX?T^{FXBhL`QRV(x7LLn%m&f_z=Wi;-#rzaJo#b^br1^xS4?_{C5enuQfpgc8O^8U~~eIy``v0iTHYNXL}_zdzHO4 zH+gsm?BhAA7OkM;Qr!g;o%cH_(>H5644%Y`nfS)U`WPFB z=<@ULP?72US*3#{(ZIs$k@YgzXilv#D~p@yCV1^(XN!a0d~>MZVtH4LRo_yX zQM5;x_6IR55fhozgZEC{KA}bZ(+p{(zB`b&xTsMsAzSY*o}|>!1uSE))L!@7q&_U% zmud9o5x#=uY1xS`tP=+k-UAixA&4OMxvlxfks@aq$XQOcB!%GO!yRf6Bj!{mf< zCE>(wkp1RP6U{>j)%hB)cve+V>#3}&^k@mE%yng zusrU*MPP371Ej8r!-VU&t92fn*g(`)GFIM(e#Em)-i&sCbHrxEt zZSarS%T}!c?dA*J!2`rstOQNE=!Ea9wH3`xk!ygYK!@5xYIq1Zkcd!6|=XuTGpO#x4mnzW&`ZXj?TL@q*=3MiwB;bDx z)Q16`FYe$3ac-X4iWSK%J`lo?JclA-%cpXnDCBJHExh;X0}*4dYJ0BN3njbZTIHDX z8wDJ^Yipp-lM8D`?Z!=A^}@~md(COU_%F{}u}$?w_3SL=%uw?T8;=jIUNO@VvawTr zELqI}LZy$p68ri2dr`7>F$Dz$`4JwPP_~A4Q)Dfs%H21i9j+Qz;^>PL>VSc@%#PojR0 zT`fZ$1o3q}tNt~M~QPa%A;b^kE`U)!~L67 z;Upk0{7r|qW(-6tY_<@#L`E*v65q)7b8f$$Sk(7JC+3yco8!&TocpSOA(VHi%S0mB z4kNu6Um5t@#u8ja{7gNggZQi{=A>aiPmPR?4tz%=AeDpzL1!@QJKiRSE-)9P1;+EW-Rqwp%=FrPlQx4TdYy@1vB z!}$0})~D)|$2e(cVg){pk!-y3O3#~dF5j=ZeHwrpOU6%U>)aN9-^3Hg<)p)O1YBlw zH=_kh2p4@z(M9%p_3z)R0$I6AB&AlcB%T(MlhE!`s=&uLij``wiHC$VL^p3>{JQ&@ zW6~RhtD3~3jKfNq*BN(VY{>?x%9 z5=VrP~IZ0x+>YZopinXRu{;IOwzGMX7V(zPghzEHt0D>xI#k zh&b}Ss!iwI6vT8|)hvh5nRR&{etrO9%vQ__iNsa)c0|eFsZDSV%N>yeQVZuq%<#k@ z(+hUE?w7{smA# zVNlB2zJ3aY++tk}2LIzowAZD~5_KMid?(lY!MU9P4tXF@UagcbZ0Xa5c!BYGglXCP zLI^5AcH1Iy94ZEZoHf;|7mgcKFbI$!*304wgI7iQ)V1{Qhm;WRba}7H5OI9%S#ee| z2u+f=3T{Bu*o8H@u@_g)FLBdF_{t;g^-|0@1K__=GGK?RVZWFbtz44D9hve74hB~6 zjUR&ZY0wN*4D#dx_To#NN*GXDGyK!(7u_;xEvdR7}BB}Uw8@3Eq_ed)4b1JANLLY zS|ZgDD`{L0^dn%fAFSZ`{@x8wmo_Ie&?w&2wwz1%xs{8==JxuRizpftdaCKDnr*$=%SHkA z{5e+|K7SblIQ*We`qDOZ)SdGI2X~v6p8HfaOto-i+caJZJI9T^1T7o3_nAa?6pX*9 zJ`tcYhYD%8fb7L3jU2|uW`rgXg?=S0{5BI7@t|8ZL9ooyzWB?_A&HVHUFZ5q{TT7) z2oes$-DlI-&@G!B^d@p1&2heeA)t?U8&gc!7LfWZS|zw9Jocs3w)AG_!2U|&dT3}} z%4|ScN6qvY0f%eA*-vRxsRQZ=14@;N z%2Zui^o5Z80bkqlOuFkx9EWP~@^0#eHrJP$^w)~h4+?Cym;s{H%4P~bVPrG_n7a0B zwhbe2!LskX4J!wC;Be+sCP=#eA9<)M*IJjB9Ud!A-*@|-N2l+!4J+4c{ji9`r~b&J ztxpy0G!MSEH7RYKrf?H;*jxBl?x6uyIQ;L;(!8&ek$wL&~^0iO&U3p!<_^{PDAktO5Pm zcq$WdHm_)uy)t+==7_3ql@h;UPHtY8mgB&UzC?ZGgRSfykyeja3Ugs}eZfs`{1pFf z-VibL+sHGp1BbFaP{OCPEP=?RTFMJ;CrcH@8|9(hbb%4=HrdYafZ&yCzlFFG} z^q-uOK`F*eq`k}|MH`d3dtkOBleMPk8&y&!G-ODfzf|x;qG!0MF=8w#RPiev_KGhY zTJGdpfnWkbp9$F#TJF_@mhl$)h7bbv&cgQE7*(QQecYQ4oI)}zm-3R%H+zX08a<|8 z_qefM7q660mIFi|`8MFV;{Hu@zylU%fyH8xsfT-VbDzYYV7S~zU3_4x!|O%n?v`h_ zhx7aKGykq7%z<0li48GBwVp#fXUj$oJk)=Iy-pf%-4^n({ALJu@+kv3_$2u)dAMqD zc%?bDt)L`tz~3{DIq4!D=CYfeP|F|>cVFL^5**5as^b8G_UXQhsRbsKN2d;xZ=+RE z8+X`0)oEW{%*uVmV4CO_wE9jP9p~BtRiO@_dQ^9ea16Wv>^p08oU>P;3U!?OQJo5U 
zLqfb!ug*m5Jgj{+^QuXnP!LQEzbRLx-Vh9_%bb~spUePkSdV)#_syie({Ld2ILEo} z98gAb0VKVB!|BezZ++vT+R@1!tb?~7W6s=+{4NOCu(^M;LDe~^Vd?}rjc@B}>Rz7m z6`!})b35}3*7ZEE<$to`#-sn`$En3!*zqxkwj@saARa=#7{ujoKcDY5iQBu-64fR2 z>I$j7>-!$ZGRN4kBmxeZb0B&HX%2~AY5tLs4{J2r~bp zNB^dx_J7w<%)VXU|7VBAFGLrN+YZZ0uopQ@=&|}s~P=2|Ly+~So*LYR;yDN zW2d`)C$P1yx6*~l?jg->n5B5V>$0F;K(peqeWI!9_M(x^ zoHak%wsi@PG1QXGfDBmF{#}uU`a&f*RKy$m??nUp-TzTDV3AHjY|6Ib>B5HkDg?3?@_8YinCb@}s97>#56Rm2fBjAO(VtxLPr=L2iAB{tnKRzy z%{`;*rcMv~bi;xhT*Tt&jtPn7XytDG&cC3-WlJ17_!Ei$>ZVN+0Q6;u<9(G#7W(OJ ze2yAzbInXxovUcLlYP#7;I>;Y95M^yyud#YNnGTiR5Z177 z;xSb2QKhM=Gz_vsA_$sGmv^EsGc78Y6HwQfXMQW`k;9h0iH!oD0-L;;WL`CbN$L3a z+@7YcFg0BvBUdjo6Y7o*^Tu6xCd;3~T-7^wnyl>=MX_zWkFA#;p!TQ7R=ck;^3c9@&^9=y!!{y$Vk#Hh@>_2woljg<#WC1@ab8h<0Oz*r!UTAk8-2c_qZumpQU@x7_O6fJxuCZ=WsMuCME4_D~nSsVTL>fFa+`Y^6TCKh@m7 zu<_z^(~-t4Fgc18VGaL%C?bNxV{o!7xN`%raeHj7&Kx?=sn((YO-io#nNbZ0P zMEPGLzh~<(4z%AzRam`ujJQ~kKHODY;qVJOzHOdx-`g6xa43CKd}}zS+&a$DW(XdT zPIGwRsvc}DIfG5{=1+eC1S8~1IXTm1I2yGBAyjQXIXU z<g*CMWKca=YOS_m*uzq|Q|s#2Rug!;$tN2#C5Z-X z?g+W2IjZC=K|g@78i{TUp^Q*vMToS#KxvY+Q<3euP6+z@ySB372_1e?$Ii1cOn2h# zIpy93JZAOI4x&_@x@%Vf93XqzUQN2o?Zp zVkS>ID>ItZ<7A01&)CCuuUb+vlF zY3Y;_P=XTb8%m-3FFuUP;hm6T$!)YFHij$%-l&?RNA<-%ejRLk^2K3s)foShf__(K zzL9DPCBQ^gl{z4ya`KS-+OW^Vu)44e&tB&zkO}RS=L}UEra>SQ@^bj9Q(Q@203l5-QDAG)7aS4Joxpq`CUrMF+1Z?sZ~^kwRjhgU?-w)_)AYs)vA~C#ceTQh|9CSSwA3xr65shbmUS#G!SNjWw1mE{Q#%mktUxm@(vy$#BV>tDX{&UTXotMeBM?N=Q z%T=juOe$<%4ElmE{l2(_Sku*Y^Zw?TxOjU3&wrh*GB^ZR75qK+K+mW=Wq( z9rE+-?ON$l3ttBbydR6+SGMpWYIn&h z*Z`ThitsnNQ4U&-&%Vqb+f+|2kfi;_xsk1dv^BBs!*o3fuN*m8?JVh)n{VP6>S+7AZ@8dS6H<-?i_~=HWk@D4#q(U==mv z?MCBGg3{X~0|;q2r~Wrd;1#r%=1}mk?(R*UoWh?3( zD8IEfiI3$?!V=p=AIsb|rF|mUJ6%zym=D~$r}2|v`}vK9^*cBbh|j0upkGQb2B^zA z_>u2hC*O^F)LKKMQiwsXU*W5-Mu-t_+$nHmInAQrDdAg-uD{wp%>-h^UM<+~;e^zy zT*7R5_Kxepz)Pm?w%WlF6EmUVeOm1XqXd$n?2B_kg5RRw_RR~OsqRI4HAu(e0OEO+ z3>qWMp{UNYJISR0=K3Vh);y;@uvlf_i^ zt-2xbpf>H})G9$dp-ye!Q!xFfKZ>*Hm&weV6Chr%8S3y7KvU=UFJy>=2!}J78d@d* zvBqz5DJKo)rSG1)KlS1JQ#cub%%C)+k`RQE5tk1C6e|%;$ZTedEy|TkpT%!2IRMcL z;pkDJC$-OIainjSOi~n52}%XwbxEA|p6%0Io+G+fk2ZyJ99bGB45;;H^%k}BkjN3w za|Qe806-$Na+jUT=tdW*r^`#q-DPXHK5~M8@R(m~ z^|{|>*H@&km}YUzfD{yFYXl}AXtz|^m~?U33EeDS<~_Q~zCI>-dm~MX>PCyW*X*n5 z(AqrzYVOU5hv`hMXrk0Y2C+T;U_D)Y0}6nK;FV!xpB~=);&?*@mwGaoe2NQpP9FCA;bI~HLI2sH8s$%$vO`Z zO)}0Sm4G94&=Maf?O6Qj4pr7Yy-orTBgT(ovgpuCT3-yY0HG?(imYTAe};D_s-JUB za{s}DgUbKgU^*TW|6^nCr=ESMWM0Pb{n5MScO(igfUgw>kfB7!I!!I^Cmv7e#TWkm z*ZF)xZX(fT+nZf1MYVs2rS@katHAZq;mFLK)W9A3^rq4b_)u$WKx7P{iBM*a?>p1T z2#UW;D0&@jVpkiX)S8g%nd?c<)lf-MDQjeUUySP|b9dNQhcCZ9hD=}CEBuxOn^~Jm zVtsKX#lJUuSmxwi!Xoo&_B5$)sNMedCQqiBWaXJ+U%CDB`w759)Dv#jH8RkYEbhT< zv#^#*m2Hr*=ME>$eL%%f9I><_PbDMMlekz_*!fbEc?J=ErdQ>yK}nH&Ry)nBLos^tHFeTn%o@V@|RiQ{Mh literal 0 HcmV?d00001 diff --git a/docs/resources/diffspeech-fs2-1.png b/docs/resources/diffspeech-fs2-1.png deleted file mode 100644 index addbe984ad9c1793ca2232faeb697c3538b01018..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 202692 zcmeFXWl$c$wl+GrOK_Kv0Kwe}F2RCpa0u?si@Otoy9KuwdvVv`5Zv9}otwSSclLMd z+^W6*+@Ggvs=BAUXS#Y#uV?ys)(TftkU~QyLIwZ;?TfUyG62AW0RUPP5&Cb-OHi0R z00jVF#6?uyGmc;E23?P|=VE<226+2jR_?Awc;~DcD`pl?)Lw2;>h)4m5-3VOq=dXo zQiyy|j>y5`Ks8aJ;Af#MMileJ6>kq$1b2qLYMI0(bx@43Fcc^*%xhbX?KPbp5p4;? 
zylnD}32k|1H93npAzyuNm&@{4n-i>L9dmdk-QyA3yUR(XOq3~5E>WfXNAdORvy>uMV{FR6zbzP5#te z_0;^;YZLo}?|rDR`|F>tZ`WVna)qu#KVQbh3i!T0%eHUJ`n>M>t_t~jc|F(q-j4em z8*;aj-vNI2Yr$F`uD;5_b+R2FWlL04R5z=?Z?CI z$5%pcS8shC_xNuILN7)CC^uJDGT#pP+qd|AwxxWw&)!}--k9HZI-d34j(oje=6qie zvJU?RIyXV$>`yleJ)z$|`0j`L?8iRG32k=xY;pT+HCOuHoxQH4_k4ai`09JQ=W~kj zx{mQ$Ep$5oJSV?h>bI|>w672NJ`ebwoxQY?+Xws$ai*~+|ABl(AJtx@-uB5~f;-Of z+aJq19)(^9-tL!tpX}aF&t7e_>0h_MzHUQo^l#MkcclJdmRA~~hu?Dwh|I~uJB|l5 z&x!gEiLu_t#vg<_pQQgA?pCEz7x>Rwufo52iGGOdztiBQd=|bL?sy(PYkxY`f1vl> z`R9tg*Y3Q99`l7BZ}i`8-k!7Hu9)90ncw>KpZoqCT;DF)-39vov3r#7xR30(Km5{u z2kN*3weNvGH;NbiHn6rCypySBx zrPBAx?sXL7xf8?dA=&q#>gB`Q%gWkI=GsH%>s67^UkmzN#=pgOJijIFRlifJ{Pvl# zrmw3e_gl4qk%tAKDeu||_`w6zidAs!{n7+b!W#Rcx&|i&QMl9W!13J#HpTaXH3H4w zq=4quNUbK6fj88`#X0M-9X)_6j4EUK>-pNE^6ati{F2-FEqd-|0bV$Q$UyDZ3MwaA zc~xsDp9ZHH;mKV<*T0n26&i+Wx~|_puVmk9)lhT|zfQciwR4tbU5VmbFfxAA*5!Lm51w;o2P! zTii`?1(rM<6h12)ApwV}>IT-oeXyU>an1T90EV-nhqvKB zEq_PKyDE;-6QCwH?DW~i&^bKf%qIU>kwX;Lm{{jV1cJVpe(9_UWIl_RcR^_ZNV{#2u(ji7P+HV5D0RFrO`gPhf1|&g3*|jR0_phm?PkQsO+4FqeN<#KLc&_aZD^6pp?G=j#JHts;=Knx2Jg z6t>xLF;tBYCgW8vZI7T=@v*1?L=>qG@+wXV$9ijXj{l6(fW-U~Yv}7_LL)8mh zx*}t-JGbmqK_Hzot_UbF-BJ(T8wgHOZ|ZB_M;RNB&Pz6#w@a>DZk^pF9(DjbQl|_S z+}d+PT8^PfHgZ&fbVy_)uW@X)PY4_$m20j^w88kj$OleYftn1w@rPy0F<3>byMO7& zEfIU&lUUmk`gvB-QsYB`U+s9`>=GFH&@o;c8rM*yWRv(n zORIQW&nv_02Mvb5%UYM`@oY+hxO0U$r@d2e6r%EbL3fF^WpAJI4F8J-<=?ty8=efe zGJ3ZLw%Om9VRG#3j$YK0kcDCE9YNo&Nylk76jY_*76q4szOde6#nQdyGDU z)}1GZvGs&W`vM{ZJ=3l|bkupB!uUNs$@fMSV`!>b^xGsKZ6ON_IsKG+M?Qo*Z%i9> zXOJGdKFLvOciCrmAr)ZL*1;6XBmwn<>&nu{?E5wSY2uPg2?jmTPV4y5?Y0PAeUOvr zdy^()@(R*yqYpjN$W6SM1=JhweR~P>Ck?%_tl-KhX zd52zA_TzNXO7}YS>dg1^oa+XFzqH`pwBB?#v(=l4pAzuTcg#BHZw5@+u$(@5H^z!3QskoJwTj z0tLlT+7e07fU=1WY{io28B#7|CkG{iLCLEeBGnM-@!0XJ3%01B9AY(5cC|nmkc7R5 zM5!vnJA8xAMIE%G^{a#_@|b3!KM0K2>Kud#1vQ4`uu#^?11UJHaddiNH6nhZ04==#hIkV@;L5y1D3gw< z&8UnQHt8T4=rSq~ASIKtbU*xj^jz7YPtAeJoZGm1N0vzw@R-Jw)g$(oTWzmKH(fdodU`TH`v2d8#?Tx2rrv2ZLQjFZ52(8`(2_(6GDT zv&>&=(F7&F7q4q~4Ri}>u@I$I=1L-!0CN6*4mS@;U`Js{d(}!*Ib`f2WKdiD4Ev6YF-DwAYn6)qr{k^MM_k^YZUEJdS@k_cg#}>`< z_s($uDOa3t z6j9fl1{%z6&rZo;)A>F^>)=FG?{A&v;vMXrL{=gfC5LKBjlo?M)~QK4=HowMCF?E) z^YVHq$Uc(V*zL!Ya+XCCm7nRSY;VypZR9qX4j5#*UJx&;+xR(pUm0(F{Ikd02ol zXez21KMogw#0!6DeoQ$Z&=d>H0Y27{nF?SfT*?h84_6tAnyVew5U2yw??q21zYL+D zVj4MnU*{47jvCVlIT@HfIeeK4`j;Q|FWH5z*k9t`9{-+m?OrN6a69pRFYv359>2aF z2)%^Z-NOTa2MAsL#&g*?P~ zgbFH9#a>>&-RHjT2YO7+;9kw^cqd)!>ULD+TOltu@h7kw8z`ehU#$c&i%1s&Y zSl`&8^+d3Pr`u9!3tgdN^(G-z&V&4pg4`$z#R`YrD$7%ViIUgkpE-5-6IhBNP?BFr z_XOnpz0g_O7>=30nbwNE`8fOhjBt!@c$y4_D9N~*@5qm}HX9HVN(fIsE$mk=;qPU? 
zp#DJ_8w}0jP`N;Y$HdTa@Uhx4+M2uIIg)s8reP#fq=ff|^>Yl!l2|AmRX9~KO^;~z z6&#}umD$ufVdep0n}2OS5tIgHrCeoOhE_uYa6cz2?i3b4pi9*(6E6r%dRrM3C)4E^ zL=~75LAK&n%DrrGT1=XHqQ-^(mprS4$z>0b@805taQnoj1dj+k1IAaQNUA`E{Q+DD zqy?QChjMRf8(VIze<~&$?gbZF8{qaEFre8+ne|xNx1NjV(tql|Q@PT(mEt(f5eZfw zRUuuwu@Hu%%LjYatWww7c!2W==uFJ@T-sylj-YEyfTxT?1; zjTW4$DKR8?tU}XKk}!5e0BE$eq(6V^k>vHhDM?e01*$F5L+ypo%E!r!DqoXzYO7&F zud$PcScl|*)Bn%<`#PWKDf~wQUhDSnj)wYW%ouvv6@f5u8m!E0O-3HccuvsOGhWr& z&lWE1H+ve9_}uMTDoiYrTJ};+c|t=VWz+0a4XKU~Gpl7iUYszaYk82U8VhSU^q1`u zq(XSO@r>xW9_V2P#iJd z-DTWIwdFptdS4&?)VRQl1dk5;qLX}YSxQ;FrC{Ft7S%p-f#fO8oS%| z;B#5|IAE%_;LYfwUus@fU?XatFSb~x`q!&eAQ!XNStZaw_^i$2`z-4?NF47f^Non5 zXIZ5B;>GXj=K{#2S0uhuyCh9k4=eTg3=>~}q{nu&T=3{uW<~ac4$n8darRZiTc{%* zm~8$B>C@_frDcLbjJ>*_{p#+!c4&*lI@md#$gMfz2Hh8;>^HCUG(l&{tKN4j*)I#( zH`)KBjYU3(MV}uhWB)Rs?B~BErShLF@a)OEhJZJtiJw(#{&N#%3ene zK1Xgodu~40`Q+Y56%FktOYJAJ9oMpdj5z+KU$puDzN-KafMeVftU}uY~3=#I)-_MpWHL+(lg6Gp=3!*LAq(=H~X7i?6*|GCb+4 z)x?7)4A7@t3h5!xcKwTf8PsGRsG2-Zu=OWz<}i+kLqYH^$X23K-&Inf^6W{pm_&;E z#|m!e{4kw2TVh6ftfn`(6&R4AKOrsV8mht&wxNUI>-{t`XR0$_ohZDJdy{AYa9^F{ z5{e(zc8iNKk|0}r;dQj+?uUM~i%VpH3JoWrZA19Uti^4>pTsDh@w~`Sj)_(@p28y| zk?8gV7fd#q28g-xATgHsuIoXk)Cid)8;OSvzbT|=H9YmBPZd4n7cXJG^N)~n4J7)= zWKE`?iyc9%1v`gke3+T$aM3jij&}3F<)bAO{ZX>2g0cB&x+Oh6zq*q?x=d;`^on;p zHU3NDBbEl%#vo;}8sqGF*z!tdsHstQbJ|#?(B#P9w*BUu!^X>nm9SvJfmf275l?D> z9$T28Vcd|wKKRS=MqT)EFcuf2ze#>lEw*hh4%8}O&y#9Lc5!Wn470Z;Ki<{o;paAPME1&`G*c zpk`HKfswff^kq>&TK$MnPw#Qe75ITR442X{I14TZ41!a%#Nk$r*RznEU{%b#Y1s-& z#Pz&f_ot~LEVeahaf$xJ+9nB0?y&j%_$tBQz`$o=e)C1C=6P|qXlS)HtmSbJ|8J9b zU)s&skbaOBg^J>s9&NS%j>6~Dc+(24Xgrd%csv=fjVwWTDos7v_URLQBTQf3ZCRhs zlS9G0XbwztNgjp(#RWeL?BHG9>x-n!L5a<{z;Y0=R^YBnYsb=!2qjiWlGL>n{?`zU zovWS^ciOa2MoIcCLZ8zMY0S}I4~_SVuX@iW*1;LKR?E-J{B`I(J$G_Mmbh!33>D+Y zybLGp&!Ih?&IW!Zbg#WjcbK=|w?p7gdXRWceT89xmWAW)+0E7XR)rUG_3oyB$<+T} zJzU1*x`*OUAmah~^qorCH;N~mKd7B@U2tU!?fd7!Ro8ZaITp&BI=WA|I=tbIxy*r| zO#P|DA(Jw-GX^sP8DBgFzm{PXf#>Fki5pWrZ?2)jjO~<1a3IENj?CfuHS}l0NHKy* ze-ff!*0lIwWWLf#KV`ZoFq|heAgGe4hGg(3Ptn3&hLcQJ5-NYzea2|$G{dzAlaFV5 zSi&+vl{v1&4+etBUfDEV?(sqIuPB3G10`;DNOF>HO-#IQqH;=lD6Wc1>{E-TVj)+) zn6q1cWfv$+y*qMt>wSWdPKv^R>?AH|A(G#96fTPIx^3ZM@h+=(QLFH5Rk*3nVH$N# zDbN#I7$YNR1IHFxSChuo^Hqdee^fh|?PsN$Ge%wFXf>m7FQfgA;RhWl5jRm>9s}M= zN4Ob*ZsHmE{#s)CLBQNlFBtmMa@Oz;!u!OD-(Rb9A3xzIu z-9Y9{-VtJq^##jQ1G7V1;g~zFGMH{$GJ*5+0o&|(;sbO+F^xx_ej^bJ_^Ey|4VE3( ztrnt*`IrPegrA&g_G%?Atz4XerOsbP)k<`_R>kCD!y-$nF{EygA@|&h=84rsorK5m z+T<(R#Ax&R??$v1FPq+oFsW>HuH{4|{UXpywK!Z|UE4$#_7#uhn94Qp3dxGPHv-xg z!>|{LHw-uD-{~hvCCK)^=rG~!@8a0zN-^X>gKs2Mr(gU%BTyJ)Ap%4;5nDvJzu*m; z6rGRLHT8>+{goTeQyes!h3QoFFbX|nj_-e85Bm{}n&U}466KFUZNyANv!GR32-yUA zTS+BZ*#{c)9MQP#w!*Vo!8PBm3O?-zC#@53TSUlD|PV2jz0zrVF&xVRIWRP zH*ePWdu3rHk3O%uvU|p$3-q6!Dz}rj8V-NoG1?$n!TKY5tcY@J2JI)Kx=(iORx!2W zJ=tHy+f`bw0oBO}`ElLY)ee_WiXd$;zT>$MpZVpL;fYEvhWC5uXD6SDi0BV00zG0$ zuDA1f3y@hiS|GMP9|Q%q+x3@xPZShESPmz$)30Q6NJ-*gZEqg3^t!%b->z1=M~8&E zb)UXMTlKO`PSD>IH8*oN(lq^4_U+#Wjo#;zldafGMyIha^!S``>hn%9^GGHak{Ly~ zO3_-O=JFsn8aEkufOjT=K^NFt&fLlgEZQ`fJgT^Q4`P{TWYSwJrvNH}HgPX=Ja#0G zL1~4Qp&SOiaLJP|iHx)&P8sFAo-Q~Qwe}Qs=wY_#m6nn=h!Yfge4vtJ|ARo;)U(I7 z^7)wTrLwLr9n9^Vz(qq*nn$2>fvy7|815^cKTQYCd7TiCRb^JZ;F|B`ef(xPfVOC$ zY%93==;CKsG3)D1Dskl*+w12}is1OGlrld(@#yHt=@$t*y{W46)V;8=wiDisteYi{ zA7E-S7X3t67JNr-AX725#h~+1+u%{elGAJM%djIWMc8R2ud!YQWjVy!pKlz&*|?u& za-~AR(qn#Q-pQjOVfeB!GEne_teJ~t9H=@+BjgM)moeR@xTn zAiKfxLA>r|(y_#kE$8n0#NHRu^6e(s@41;*^9in(S7h|LM}I1{ z$I{p}SX6{xM7(mI;#O}nvK14Q9CRynlgv!G^Xq~BmD6x6?gRV#ua5>m??N9};WecN zk4N*8;XHFXleJ+B<1GbAXq5R8KWqiD7x>DoPC#AibR|jU>+)Fv={RrCQ^|QV(dxxm 
zR%;0vxj;s&DeJ{vmj7tL6t#}EGlcz-Z}9to9=*cO!qTU2x*~ynRhX14#WiM)T$or=-PHE(Pd|q>90YUnI`^Pdv8=b-oZPV-9BXI z+OH#i*IfvWl$-mc#&<21b^S{!ZTvQ?tsBsbFFr*^CnFX8U%d8Yrxn*ursTT$_fH$v z`C~~4qXp3iXu{Mj1nrEU-Eq;P_Zm*uD^RJ-Kkky82Fl)X&WOn?w5<){{<~Q1N47`9 zA76q#g~p?rDrQz~3O6MA1uIxi?spy0n1`~DuvpXV;GH?d3~TW$jEuu{;&q4Bd(3Y9 z_%$Zd25)(2@exQk?_=XDkYObZ`Ef-kN2ThGF!KHUk~2u>s~nQHy@EvSZ!04lD*Xq( zoOcB;HwmbMqF2X#+&fvg&Ud`n&asU?HA|g;7}ahBQm`9v(Ma|!ZTlbsDQ~54Pr9(M$9;AkZaY5AvwrVQg(eldd}5us==mR zF}|Alvr1htDfQAe$5L{{aR4%C&IC7CBW#bjc^q>5`0zGIB)Z!uyJtCXUJTDp%+p;@ z&N8VV{e#ue&!b9FM^dq(bs501o@Qs@nXIVE3h2HZoU4f?CfwoQ(R;QE#n2xXaMRdG zf6tRPkVXJs%vGz+{y>;G%GU?m$=6FeQDEo*uVY|3WYl3&byD@a<^C=NRe+jk)Q=^A zA%6CvC{Th-KH|_4LI9{>?Q(A1ZgLwemPPN^mbFN=S4P3-o{+aojlZR#J}^YB#Z8lv zKUw7eS(hJo+rnkw=MzD`D?gQL9yjBFC5qjq!PqAg%-Hs|L`tD|D`i91qu(AIJC z(&$WT@qG*9P1FOEwuTVVGs|Koc8|tqa`Sydv#v6#6+=MqR^S93DrBdA?aXlG(2Kt_ zB5%V5DaX_6Dyzr!cmbWdpL(Vd#&lELj!u1xIDdFC-`k_rWkKI5rDvq7DSz&F+7;wR ztZm#&x7NVw$u^CYUxiDT?63RTBKx(}@R`#B*6(zvnCpN@rEcA;-8f^86D|gx<8ZG2Me8l9W+{1niFAhtM~g^lNxM8wwwEB%)6v&Nu6E$D?KPrtV0uOyMCgz zh?S33*Tl1<{S`ZoAWstD*_z9Tip;4qeW`botP+pdY%y?P`w5L?;2R84EwY4w;NN{M z@=4?0CKC+qhl&P$aR;S!B3A^3r{dL+h#AzTsw25{`4kT5aXX|p^2qgbtl%!ii6R8_p2kA4Gsv zpXj7C-m!073`9rHXMc-;@}rBU#j?Oo+e*f+z9lNgivB~K)X0YgfM!V_jE60DMwUF7DB$AmH^Vezq>wdlT+y$ExD!S&e+i=6R1i;w;BS97NPF5S*~?8$H{)_{ zYEtRwj`V?~+>lfr-@wvO*gVVMb-Gy?)QdT#qs<6nQ$V?JFQj60P*%b_bNWWIy^@A7o82w%>NmnP8QsRat> zMNnv1>vE}X(3hO1K>#+@boc0!vQN-VWtAT`LGpaKXI^|BS#vYvlyrS z9%vyV9B)-{(K{7$%dpbRDIHxG8TqD1WoyPyGrJ=fLR$1Sr*;sng%Ppld_|Pb@D0l4 zl6r*j%&L1ju5kEjXDh9VAoa8^Ev3`GNBsk^E%Of2sTvf@FEMYUlw&>&ScS+%(&G{> zakE?Ch=#pAOGXrC^;w`<`0M`7$79%!uN(uWkP;>+KhZkE7aLk)K4+rxBB%Bc%361Xsby?C)g88tj~?F=^{0u5IBu>-~N$UK$~E$|p%V zEGwW0A7X`1;!@WU|0rp=F#0|~7iwr~fn~i-893Wm{yy^Onfq*)ur5k z)c;^b>2@}bjmZW_S_#(dBaWDxh;#mQ`YWWtTWVCt(melMK1UYZs2&9!;ju@U92TE~ zWcD}=>PCmvw(vLwpGH4bLoq^bqF9N$$Tn6wbQLwAz-xcT&$2?F1Wlxx03gx-^%1vD zE!K7GqBglP;APWk|70K6Vw9#St2G+bgh^Z8=Wy}bbW@o_BC{rA)}b$Tps-PxmEIEG8-KI=qn;#=!lxf1CDyNfM7&l?O=o}?^0KaGs-~EYo zy&th_TFuSk6?>hYLTEqdiY=AaQEcfd#KLo(fXZ)>b#`YXibMR^C_~%k_$b`z9i(_y zhlU7wSb&d}c^u31$R5k+A{{?n;CY>Pt+}_rrFAK-w5Do^w$7FvvRepZMSrxw(_~go zY+M5O#ass$IFarjFjjt%${WC5S5iB)R_2X66hpA4uo%gyhxOAK^InPY!= z11?avJORq!;T--9d>c9Zra;8~yl)gPk|adDv(jn2=1&{&NbJ&XmtNjXy&X^kj~+Ox z^QRk@l*CGb9{XU^6z~Ra3*(z18fIUVC9L9%4F}b#N3M%ii2*AFxZHadr%Qe||_yc`#d;u)L*T=oL5AO*C|69yy2*~K>Y8Ml;bHnx$Z znv5~Ee*Qf*jEfZ{wDuFZ&@fs|D;tHqdj9&jv7!P}?Tz`Oe^T$Tl5kOk5%l0~t4#Kl<8Mp7_>e5x^I5 z|6JLiZ!zt!183y~3&z9j+$^QeYo`P%cU|)(&Ykd8^xo8OWC}tlh@!JqJ>2zoaG&4| z=$n16m{Zoi!v6@cr_{_UJvU=(h_vzA)h%!=^NmpMNeh^RpD?j{VLISbs~B4aC4d zcKfl{^_qxQqZ4{zkd20we@n$nQ-zR#A?zKTmABV~XKObh19M0+W{p;RM}W}J*euz7 z%=Krt2TwEnkj6;4E)1GBZ{zIHCvgml>sF=J^y~9Bi)lS1F4tp*iDG845t9*GSg;3g zezGFDVk~yVI$LF{{|@8_4&EgD<#`-rM|Dv%5_4`z&MceQcu4^6>ZF;PJLWBn*DcM*Ry6>O8u8cFAs{oyx z^ih2hLDz6R{EziqdY++71ixHPj%l}!VuA;@V7;p_i8>D~&Z$bMO$ z*jt>9(4kh->wYbEn?I8uCmML_-=7EBs5;t-WCH?_VGD5}6458p1sfVDJ}yhB7A>xx zH$oZO(hQo0PPjPL7uM-^0SN3Gp^ATU)PIu8z9!(|V6{*RQA~5`dL}FDUJFT*V!(ei z(rzF#^xcvqvF#PodRF+$*4xQ5MOAjDqSQ=$B+l&S7W>aUa1}K26@lX|Rdb;dQ3r)6v zV@swNAMS!2jL%;kku2;}Wq{yD}o)N?KZ6o@qZ zF^PW}i^|A&Ml&}8&L5Ft27BP*tonSpy0BdhVg?Idt9R>B(L;NRC)+9Uz5`3MB@&sI zcONXyA_(%$LK7ue4{LF~FS_wv63wo{Z<56xU2BvQ3{O5~e8$4Qh_9d@rSnL015 zho04l$XV4r*92{?_2juR)X9?}U2#WQE?Yr?&k9Z@j&NLSu+rU`b7>Su(^=dSup(K` z$2e!Lb-MY_>{Z^P$sQ^ObbGDkO^i)%Zpx0X9uq~a=m#}%E|?|14Jh^g5nZ!V7)+lP zWE2~%AIxucecmQ);I3|i0&Z5fhyE0W#zwXiGCDMEJz@=up`;bT^H0m^--Zube{iW0 zNS$+aNAxL{y&vk-K+)K@tU!i}xE#EJ0|sw2H&q&pgO=D<_^O;`D-pGj032*dzgeAE zQ8|qUN7jx2lDVDk2UHfL4yiWcY#k`!tJ5}Uc)@p_x74?0?U*?EkgAFY&zNsYOr*sA 
zf$R>8X7k*CmBRr|s!PhB#=3*8hMr-fx&-RNZpC|_f=bZFn;dTf^_S8?)e1WW;30I< zl$4WGXZTr`f~%HdKip~1akcB;;I{awplem?NmGID6a}#kUkR~N^RP_$?Hv^c6o<$u zv|pk+gQoybBKxtg7g96iofenkL8eZdn4mv$(-eErHyx8LuFCq^6}%rH?ZU>Og`uEv$CeKU;Av})3Kj?om7m^NL_Mn$tRyf9WkoqToWeuWK_@5ve)0lP zPFcH#7{8$!2^xEENX>#~V7(5>V-XGew7g*7X7GbCD`W>1^zOMd$smFw1Kye% zCiD@Bm<^e;AUp*$lHqsAUut|-FGhoo?$41Arv!N{Jnt`0k}iVSe_6sZ(DV>*QnXx? zw-qo_rE&@U0PE+u>pFfSZ*Av^4l?O3IbK9R;Vm@E5T>bw85kbr%~vfb9Z`R8K7mHV zW|J~>9dA!!F~uWDycWjxae0?=SyKHFnt*B$mpUCy(ywOh8TN60kc0M@6KB!94F>F< zb%9o50!#<~`PpywY)DjN@J7Srq5XNDS~-`2PLFEgpU(wYI|nPlC?T6c!HE3S?Z!)lVP6q3At9R?zCGM(PGM zFkqM(uv|iBVmLQ@%~)8RR3)@uvm*%Rsg{fgy+A4FILtcq1FtP(gas$*N&?(^P`|3% ziv4fyYOu8oSL_>>3z~5nOe4O>tIpNSdjIwgCy+ozkv zO~AtrV*!RG5biU{7qBFhr_r)%Kkw;Yp_`?w2}xox?8h+EwzkoCJzUb-t9$`T|@Wjh1OO&`q4tdAuZ=rb4C9?CdfcB-hHYNAImr@VGmTGY6&M z+%8O>umj{aUgdJi9^xe%-#@6wVNIHE3CN_cs3%PCPC!6BT?Y|prcqlqc|%`vw6e^9 z^h+Q{sXicX;_-V=(?ZV{^FFap^Ak`xZAV;&)l?%u5(T~f{<5+1S;-n-;2p_P=NCKo ze1ukm`52motdcBHgrh20x!~QVSZeEsinrSunqq9=PV?D~WDwYh0{N|2-dyfu-^;5YrsZ|^W#V=aFQEXEZeT!Fr>krD` zj98a`>mJJhyT!`vRuwe+nY_>qt=pGI^SvVbBIlCrj8MZR(D1rxHw)cII*mTCPfi{B zTVp$>Fb=#sCd2(pQ74B%MI1$=ejg0!EY*qE?&+u(fqocZE3D>AzBLg%N!LF0l9*fE z3~n=)uwZo-kn=4wz2^5r)7g`ofRzsV-`?!aHr?~6-xw+4CV3~QS#RYUwE!n@3?yyGo0FJL@<@3mdT!KV34DiWFHA(<~r zWz!YFgs!4RVj$nrZyB1qMp^<~EWgZOGKP#w%C@YFa?us_v@K}%=mVPBFn-1&U25`%Ok$k89rLGjMntj$#tn5}0I_V()1p>julo`c+Oa ze-#0<@(!2gV_+IP%Hd2Je)XEM-_FsX7M%(A5qgq2*qr!V-g4S7Es~9|3Mm1LIc<>k z^Wv5Xr29lS8*l&j#7YJJkYp3rSZe_P?H&N`$aQb8l`}1zkgPbjFSU^`arxeUgr-%2`fVpk4ucK( zYlWnXJ=a#((uq`_M^WK)EQ%RBCrnNMx_D&_0iJA6gOi26;hy|Dh9=ND0#(v1L*vlAmZWuaZifVZCwC zfSQ=Y12t!q>fOdqct$*!h1@Pb@+e_@p4odzly%0=VZ1g$o}63bapWVf9B3C~_ux#$ zvl%?GBHLv|E3Q?NUwBFubU^u1D$zv1$dZD?nD|KAnmX7HHuH_Q1nJ~`twnlbNYrZC zYT4dx%=;%q1smLcyyuoKI;x}wkvsd1r897w|BKd^!GiEY>fv!(?WYv64v8PXxI%{V z_7|c!j=1zb;3|Qb^8(>{8>jh9`VKa&n{aHc+5(W|;4!d6vhe|0d?t**x*&hNxCGO? zA;NhAg%eR2!g{?fZCo~k`Lf%bwrQ|p`rx?V%jO#5v258klVC9)u{8GX50J1c-85V? 
zJjmjxqOTaY7&PKJHm!jFetua`nFRBljw{rBR^(OQjXaz|*%Z`pKcbe?YM@q1jHhHA z`1|U;_WaMz-yZ6JGSz=MI68+;DdE-QM*CqYcmoh_a2u2$0z%}lfmKUr#vC`jl1W=C z38LgyPM_@*FAG@mnG8u;p)g^Tq=waHv^W2G~3?B}`8CF?Y_voV;u3fq|pL1GyBVPQ!pxb7l6_pxS@ z{aM59>d3pKJ^|6h-*Xdhx&TH|tnq+^*cK!5H@OQB=^|Sa@Xzpa|SIu4rf_7}?0d z1_im)%9lmK*K-`#hQ4!?c65e>a9YOT#4ODTx0vZDxt<;2iGM!rwkA*^KdSXz?Q*2E z&hCd}j+^yIkZhEY^oS+GYr=Z@0H{JN-^zgC|(`2byTdwA*>@R6|^+LBC zb@$+doR;?;w!wW7sme=C7^gqf7i{AX(@_c?e#Bec8X`pZ!UuUG=cfi}p0sn(~WO{@n3P{`o?I1gw=|w!MN8{1Ylii6*a) zW`S=iZ&Hc>SuUbC$X-O~B=icH?i7z?CkSoKX22gzd`F;b9V1J2zGLD(GYLmerqXW6 zSO>s2`{(8?gn5`d5>LR2Mx@A&ud5HQM0J5nWbci4}yjeB!^{R`Bfk{pW!+6tKV%g`P!P zt>U821ciW8f3-UU5-!_3`|x8 z_0!- z%oR64gp<4U9J{p&$P~pg1JctcQ7LZOp}wv5@8thIdY;k|^}uI3s02D^Oz~|SZ`NN4 ze~dP|qwjboqg=#+6Jh&6lxYCG}w z)(HB^)@SY@p>o;b)Ty58{nKTOJBy{d01@T(9F9g6;69Qh9bZ)3z(rSgbXuicO&- zg(Wx*C`?jwr&{YaJeC{YHaj=HY44xEXW2O#@k2cAfJVA>b}qovz?JQ@swV55ej?X8 zJjcGkGhRYrxS^?Ae@F^GJ79BQbD=6d$;5YP2hh zrLh)TV|!Tr5JVLKwM(EAeAg3L)M&u93A3J-o6<(G4Z`b>2P9`lZO8qi^m30(V54C) z$akhrJ5qaKCU2sV@m@;hJ}y1l`yzVWcv!?Jj;lYaqMto3N>9&;=3WuRJqrIv)mDtM z+MgAcWXh`<#}-Dj(mxvwn`V(L7OBvaM;x;6}y$C%_$W_ABCB^ z<)w-Fb3=@LH+3wE+DuZ*kxg8>=qfG~_Z^ z8hs1rkQu&ozs?2CsU+dWYgk=exEUCF{5U~2O|HSu8V6Kw*xa*7B znvwgA%tPugau{6`llJ0Yqki+e9UC8bSoS%a(%3sAou?yLtYdv{j_|9I43{+{9eqJs zakmFyKRMOc3rB{BCi?CDd|v28)o zr+*O{odLuvf?Xg+FbO{?ra%fc4Jseo5PMrS(d8~xYfdE5UF?|tu!su0H+*Hh30ceM z)yXfK5TJyr#1+;mJDtLm?|h0L>mheowq>%UE8NI9=H}$i%YWOLyig{7T5R6Wv6;N= za1^cS(rs?awuXU&RTQvoWFIqBsan=Lu1xh4$UgXs*h+J8rS7(9ORI2Ib@>Nc!Z z8c;wwgcjsQvI7)H)KT!4bcb-KS+rqEqK&UzR?_z-XnZgbB)i*oSRAoSA1GO{{8lHGF{kFj*w*k%QeAqo zhL-4;CA=F+{o>@;+EHL%a&X`4R{;MGk#sAPT2bZiGJUJ2RVHZmDnN(aryRx>*#>JR z@+^YDbVWdwxGOSz2A&0a`D@bHpNl`|X4{~}h+FCdq4-k-TNK6e^wg_b0S$LMDw}2c zPRseH_%FnkX5iKanYwqdJy0$A5XWE*NB8r)(Z#`6Ix~;;9<%tR#_PI*0-@bLnyY!s z(PkCU+@9IRTDPq4 zZ;nd;qtNr-ML5bBSI;$}>}DbxY`8DJ<1f=}7!QpK2BhifiP)Mu@p=|KWbe3*Q52RZ zR1gjgBxNFZDrst(kv;(PTGd3dPJ)>gtJG{m1&nkr8H29-lrK=l^Im?uUqo549gLZD zgjth12T?h2XB9@Y{fdYdLM%~R1||EN@l)5H#!iW6yJ)Fs;aQKNJ6(B&-)-T&5ADfB zrPZ(}r_>@IT>?~F(4d!mZ#rubD$BW~`+dEc83N&13G&1GtnJPdMbM^>!m(sjAm?iX z&u8F$FDo=cw8aVLT(r_-)sv`mL%;{kEnJiXsoivYBW^FF_G8RJB?PS(IpTP(znH`z zMWaFm;|nR`_g|jK?iI4`zaLp4DOu&driyE)%RHDW$`w?aS;k$nL_rx4jQD)T;wKoqg%p9R-9@?IN zoU3Q4b{=GIvu}Jioe3S&L($3BDUdP|IG9dL0u%GDw>-###f_((07PQhvT%y}txyiE zBnR@lSRBLvJyOfkZhmP(0tJ=cW{Fb61iAjNSlIESR)kf6A;VvzE0xJ!l~vFV5r?H>>@HK9;Y7Aj!0Q7856T~N9S*B1$=4+;-SqWpNN!Uh1HZZo!K}(1om|kR0JEsrD0pdCLJMJ zc1#^ihJ=A{i?0zQ!A6<#mlPe&@y#L9qn06wep0ZvEmf#RQ-=Ht41`wvQXiH8`=WdY z+?!gk!NQ;r+lJ3V2CQt-eRNIu7?9vGMp#=c9L9f2F*L9))jqa$1ML!X6dpb}cR+_eM`o+Cym1ggtwUbS)$F$7aZTK)i?Vt0316k|&p7 zY9>ith}xpjEFmw~&)U=r&*uxU8*$DDIikpd52$!7Kds|<(el{wc8)77{Xe;i$-c=a z*5^3~IoeW)1i`0%M@|aS&3(WeBSOR0rVxy}fmct1)v83XK~gAz`F{v|%b>X0Zfm!j z#@$^5!5Rqe1cwB7cXxMfB)C4fTW}|6<4$mQ4I#J&Cy>+6`<=6Q)!9|wu3A4=_n%d3 zu6xco#x;^afd*4r2XS#wta9z%3)w1|!G`T^8LnHKdnpqzWGQ4Iz0o3^dL)O7-s zPn+!T-#7~-$-}j!?{?&WK>PM_mwru&*xElM5_sMI81XNhRL(_+y!n6)G?dY4OJz%X zCHt@Mcltjr#PMZ_{@L6zE6rx6#lZQwwHcp2tWt)(!O$|?1(A-PNKJubt(CXsv*dIn znpIWn;h~P=mmqS<>sul8Ga>?otT)V2|8=&Xc8OTF8WB3UA1jFX2NN{E*S~Qu$FEfz zkHCVMy|fA`BG}x}@leM$W1$~r92CcwahBTN^b7FCmLADiTjzu@x02n`0X)FeXXMkR zG90D%t4Dlox|4(v-R>ClYkay#C{Ts`B%_O33W0i~V(0QRM0?D^{PqifUE5}tTSs)$ z)1Yn{lJaC|8!4LgFCQN2x8v zd-FN*?>sp^ayl0>NO)~+0qdEvx(clI2PW02(V$sUDI71xX^s>ZM6L(u ziTo3H468i?%&VTQ77NF2p19|vc@(#025{i;oW45B8rh>1xRStYfL9fluvy}V>xo6^ z*t^fb1swWICMRgje{B>`nD=bs<0YXwc`VcTOl@xAWr!c&QPd0uLHvJ=xDjyz2%CSC>xhhRU#77vP%n zYt<`Rf8NSmU}K4Og$Rk6AZ%LR!s`#_5W&+dv#Xkw6=FSdlPjaD4@&y{pstye<0?-B z-%91k{-mXr(cH*qOhAvZ~ixGPyCm%8rNnuDEXNZ9v{7P73@Sq&f%Ew7H~$ 
z`s4EKZO|n9`bxZC9`M629Wony+|4$NAvuFDy}A6~!=ftPa{~SV3<1$r#d6gHd2`(w ze(uB{2LQx21zJTqGwxhE@h6ld`jc89h*|1#M^>N7x`Xx_$5y%$CY3~aWw#k@U`I|& zMk>nGvV;-hXP$%flum4&BZPJctOO0Qs)9lq;3$_XORCQ{P)%@+YrbfO8ZVWmwc`bL zYdd>p%4e=u6=@yMdg3eX4^{d9uDc3|{dQ+_nTf~}!iMK%#Gv!4#8j5%(ROHuKOmFq zcNrz9;0WiPXz`^F^V4As;oQfO4`Eu3TFP+Ml&tGw6_Vl1O*~pELJTN?ie^LZF^4{g z+=D-SABDyl8K5FYtJuVLu#WT#{nV!p!p2{Hwzozcf@TON^R;FzeKB9uIZVixM>kCo zl#-UYYIJ7!wv!?WlW5jUz8v{;i^y_|usT9Ay^%Z!`JVe^5Kg}bbfN=yZpIM8U)-1X z_Jk7+7FRYZ6AWn(%4#`!IENOB7l;D*d+w4hEB|!w&M{{y?o0 z%_nGcdFFg(u`5(GGfU7Y|6;M6zGc87BhJF)&}?>A?UDwa=pg`D&b=F#e&_~NbOKy@ zNx)^DT{mXc-Bf-oRzsE2g}>Xv-rj71Lw;htk6QkUY{U2S+cVK?O2p=>Haz-qu@Nis zjmRlS+N~>MK$inLr8~0|RFGUozTpWA6}ZkdIafQ4~q4u_0&82zoCYDiP(e z!7J73;Cf_%gT3;)q^%m0MKe-{Hr(F_%M_6MPtD7ttyl9Gu0g>k1&Q;~mK$DQ0kN){ zfC)XOU*P-eEvqTc8cS}iGg+oKVTTo3e5=N6W@#oXhL-LRCW`r_J$l;%LzN1#@=hZv z>n8>E$cm?K-Q23t6jC+L;Y>=R**&R60Q?^u12T&4qsgtQ=iVL-^iadI8?QY_x(Zao z(VM*G9|@cy1tLEf^@C(&_s~Q&9b{{8qC*-kc=1^)hA~Q#W$V+;KM3e?n;&duqeSL6 zh{9+D3B!|b^hWnMk~4C#A6C#>zUY)lFhie`*mixxBbS!6$pR?6JnU81jTlO$g_Vf#} z1+GScpjzfwJ^t87X7aT8F@%w*j)#dBuyXf>;;|GbJiTK}J8q|oI~tR*f|Y)W{RGz* zLo^57XMMWskVK&tH`-w$UPiABzm8{&b0)?E0J3*g78U*k=hAnj2={DHqUpfegwMncBAy44^<|L|UA zjeZ$Xyj|)ByCO=P%+^a+oUOsrV=g4NC4_Pvj<)SnSc$TYy=BRV&`3*o06&q+WLGg; z{G3hno}j~*hS^pvZLKejF>$3LVa+_jq@O=I&ItSPl$_0Sa6Xq_Y=shc3}&N2(%Ca& zN*68TaGIp$sUp-o!xeiPLzuLWaAOy?1Z@CByd+F_NFe8LoVw%T^2_|_Ui>o+MQWc~ zp2EMG=Ko!%>{p`-cl&bnI-mHuni#mX61c_i@fzhH(LaNte@=kGzh?fB`~#zB{Jneo zUjJvjK45__a6#X>`wqrN3jWuF+X(|Ig_QFCm*rMO{2(0mZ?QxldalrVuU>lpz8Jr( zP(2q>J^zcO3i^RpkSY47%=x+O?;jX$N%V=~?@y|?!Qe-8(R=gYf35gfbfy1g_=9oj zlII@ejeqm}pR4|L;Xglvd3`tDu3;|W|Cn{(_WlFWI!b&y>izrh_s#q7#SX?UjJDMK z)MEV75`0(xwv_XE|CGm-PJE7~Vg}GxXEc&4j)x?1FLd@$@nB_*07)4z)E|11DJ70u zA|bXtnuecDEe52kRmWTajnhk)neX%ftMouu9v|ADwywb$f+}M;+qH(S>9ftl3+5c| zC$1cu{rIhkZDeKQInP#3oU1gmHSp=j7EJQ&DNPkxJBXJsf7+lqenT`8V5;!d z%OdYGZ&XNicGE|>nJcwL`C18XJH^x(Hn>C@6!IVMTk~FBUEK@PMvO_MCu5e8Kk$ne z-PVxIorg-@KzHfE3t27ujHrf%^3}Ge%$+om=L-TGw$7XXtdo#9apYBSd%5GQdxX0F z@LI*Crl-_ikhGWi_&Iip2S7}gn?J}d9$bl#W?gb~YZJO_VgJ)buPw=Tv2A0uVWW|I z(JQom03bnjW?jAawTTgs)y1}Zee2OW4X6nT5IUZuq)jGnp(y*RV<5t*;z?lA#FfRl z1I@R*zCvTv4+0EO5vBJsIOIN^OE^)KMZm1hUj(=o5;JY@y!S+J`BaR<(ffC zASfpkFtn0O^;gRD1tDeI>w~V~R78~Oj_=AsO-db&t41=9a^PhgMZB;E+6nq@+ITDT z_mA8eb;+%M5hyygd;0@w1{gf8=VgoQ+(mo_pHuHy9sCg&js@xdGwk@zrX4CQ&(_Fva?U*AWnw)EcEIC>MZLjk|`0UX(`I z=^5XcZ#LJF7PgdF$&a<;v@!NJxez$1r$#$N_9!?k3~$?P)n<-pk~k-=UE(k0yY*qD z_|+{_xQ`K$_(G+`Iiv5N_iS`<4&kybON0A4Se>=+-+2LVLwxFXdqgY{AbyE{Zg$-H zj?ZIiluTr`+$((XNSQcmO?X8RgZ_1cer(B&GpVJJ0GHN0^IN^{cIV`|Ynzo$CVn_W z2J|JD1<##a=DQ%UHnONqgdsSPdm;o8uDa&|XGhA|x=1g9p2Bx2r^AOO_=@T9_e$yqoE#IBIU`Wc>v*=pR0g``K{Ayw)9_@}n$qWU+lbkM6csX_oH z9_Q72nLnM6qQJv7s3UE(Or{@XY1sDfy6)V{ZGzK<9urb;rUOm@P}MRu5Wp~TNF=f(llko^Qxd}#ViTky0P8MUGQQ1Y9%)Kx|Iz~UKP>e zaJ!{hY~ebwMbO6Q=jmtmBJOCi=CRS+1>w zsVHRjwe$*(%BuxM+b^H7fD%Fu0!IgxZPul%{n1k$6i*83_a@O?Ddael#VRofP?pzN z_SOX@pPX&`K%;{?L|cOia(^wa6X}7MHZ`_e>}!IAL z$kO%@fMeYhqy6Pqg#tTZjM1r3Uw4^HXq1iYYhkJ+zEDb3i%x;KG9t%m#GsI+v#5|0 z>7RyG-eP+R(2E~x;4_k? zs!m$2%>gZfvMCGv)F~ zdAEJLm>p3N%m6SP+bWt`-t=RLu+@1$?Ug+5!?@qVR-++k;xp0Ve`}WfjF~bEiV%D! 
zqQsCZi6_*4{w3yV{+RPQ1PmJ>X#71feyuisJ1~Cx$spRnGSS5HcR%<~qUhhlzki%x z9D6bPMn&JQ|GnWt_TuI3Ujzx&-^Wp7k?-EY58lDhW1^rvqK^$j|J?y$Wx>u+pr!v5 zttIV1&A)drlx21AQ%>)b^4oV&l-+;HB%*hf%CE4p^7CKp5a`kR?^!|R$>ozs@8MEP z?hK4V_9vuN;J>O_pbiZE`ydZHUR}(*oy|PYh(5TBKB$Y{sZ+fk$-)?z%Ku_PPe*G` zpRW^x|NQ=V`5ORZodxt$J%WOFm0v4G??9qgppDko4$+6ho@=t+>lF)*81cu#+JRFP z%IsXB3EJ_NYi(4?LbN;dhfCC|?=ZAvIKHI&^*nQnv~B&(ZFHr_{P8y(p%Yt9Wzgb2#9P=3Gvf4@l{UU$ zfQ|=`{`(qrp|)k*Sd?jW+Mx?~!S}}yTRtCLKB{{vu=NGPk-|z8(dFN1nl0z9JEwED zsVo_53o^S^fz^5N2!z=m(Tib;+T~pddN`Zud3&#^kNJmc!HL?C0A;>U+z;~Eq}&x% zF31iJ=l$fGww@*$Wip|-dqsD2P%*n*uernai38lPNz%uBERN$A(^<P8QTa60-L7{l5 zlHkavZSYfZjIaATK-i8{+1&R4_=LxS_w5%asm8Vk(Q-hjXM7FfW_~hdP1jN#ZCrUF zy3BfwtJzC3s!VT7g(>fN#O@WALuBhEX?jn56N8Cx*N^IRy^K_5^4%ofjkK~ZyE@JAF=IV!nN-4|1Ng?|&90W^q2=A%bI!ncyV5-~S9&RYHZ1LE@I2_v`I%BbSlzrof2X^+9m zc0xYMlLb4eon16P-G`rg(m0BoPuLHqsEN4(epxjBTg z_B2*cB+ue41Y^8Kd!D>NI5l6R0~PG?9?+>C)&KN2uW6yS0cgWf2iV=N_2R5`y>#9B zNOvANJw8jivKgj$d>p+9XV@q@_!a@&BXBk|AC2TS|0(FH9`)>g^{4sTM^bu+8=vV6 zcu+F0M@FQ;EOd`Pb8D;6HMRFqiWzF}^vxeb=Qh3ivY_jEHN`JDb?oT>MHUr*c>d1? zcl9qN*7#Q1_%`Eh-uU?pHo|}rbp!vU;a>iI?fq-tdvD+SFYMR(?|JWQ$k+cSAi)sf zs4xN&3{F(vpx+y#{|al5b`Fh2VX&otQN6n_nPvY)`c2DD#T&r-a(2HGRTfY9m6El_ zix!<@SCHRGXIUOcy zM`{C>i*ct=^Z5D-)#|eqI58?h1Eik^i#C}z$sjsVVNK3#Adk1{d*-bPoYE{pUTl1> zuEx0V{H4JR@{Eg4K$_Q(=x8 z%3ZR)9*1rGJ;|$hui4KZq7clUjlnsqzHOc?%d&MbdsMlFEDPjwlkI>5@O={LGodHH z{~CuIEwAzX-M@}EeZz#sUB546;nZ_v3|j0zb=Y!qAhy!Y(L#vG za79lUZg*g*mGU`|-eHO>lCv=bSL(eLZzaWrPf<&_uMmO>qMyb=#K_cG#e#8b`2Mxq z_NmsQtuKg(3{bvrgiGW_Y$OGo{j`3T>c4-m8YsJmP3xahppw;C-VR#PTX|Kcky+T0{3>CqahTTXWbr$AtFGlu;pJ>{5giGkej+bnOk3jQla<>8Q;|H zkQ`(w8mmC~N_B5Otmn~WTf2#09d;M8q9SkM(tn~~xXxx8M7054<#aO6B~m3BmL+HG z(UFW=E(MgVMkJ)&1Ec5m<~J|K-?tSd_UXJ3Jn*^huCA7YFF-MIQTO~wBeK~`7t^b-)9pFl(Jq586o|+iljEr>wVcPtyt!w-3*< zdT*~{kusxC=gBMYAy`4?NixSFwSmCfj_G~G$xS*n2{589v7n-kcSzcmC&T6fvQIZ^ zSl@4N9%!s>|IQRg*iuV0Q_~=qjkaidVt@m{12@?k-cbf;O%KVLP*RHbkRiI2pC`N< z#^{ZZIBqSjp34?Fo-5+GDdv#bx}N`_N?EZUus-BqD@H-~v|8-}lv^}qw4_~cq$hW( z*g9oBqDE_K=2ffXZEojXoWXth7RORhSN^7ZVU0 zD1mw4ZxJ?aez28@Z6Vtm9DT2t3TJOWS=RAdr`M_pX;;7N;msHjtQ#xNUu?3$(JWcf z*Ie_i)CZdAUoS6K&WRl$eUZ1}DiU#9aW%#(k;Xu8M_tW14?lkX%!h69D)Vm5PY z+h-$fWrp>I9{?vZS`4RM-os~{>&%QJy*66_O=Drtx^IbvIciP+6%c*ZNuY)}yRHT54#&4kMx2?+qjjbmCPL_L>lS<9Y?-E)= z`8&o+r9EAB0LuCawOhWfrzTRMu)*?#W$ncrbdI*lKZmE~xuL-;fXEF8y!Rfgoi3Y> z21u1RH#T1_!a1Q?0U|3-rqog+YpoIh96~~yFFuhxtwrgM5ty4tN_hDM+~;qwHV0-d zAl6S;F5{|5m{%hY9lcts%nm7W)2&9IUQEL#D^N4CuRtl&wa?3eP3enzPc`max#7WlQOioTR%6+3S( zY|z8|5-r6~#$Uun-Wn-j2XS9&NS=JRb;r(H?gM77a%ra- zT%m?SE&)SBKXcot`l=6HYcu0`5Zhs8+#a-Y*F1R?(s)8YVe5^5?q(>4uj=}H(o7Gf zxCjhCVki~xv|h&*xxCfxJVeU>F4h#KlZ!3JP-U_o_u&&+nbAndq`=4W+OMdSv?II= zT|jMa`DQ>;U2z8IFg3lGoa1AbT6N)2hI5ob%-y(&Rcm(353lTL5~b??BG>562D$Wx zrnqKBaUWTmnh{3B7eZgkvSM=#AD*tK;e+ZdBA4Bv0I;@74Pf&g z$tGY7k2yZ#TRKn7=-SzZ7Aa+fJht(wg%rWg9L8tIMm=Q2R500rV3yHE;ea^LS_Ok1 zK`LtDg&K{eDpsGP9Mhaet6ZkS#i&%GN8C}TpryAB4~2^zO)CZKG6KMY!*RQ`4BOL8 zxJvgDaa15S@4^eo3zG?XH4oh?|>t|B5hVq zuoe4X>@a8GQ2>8gj+z`szHzI|`q_3`al`zXj{T_u>D7qe?up}<{4VKg^K_>*j8DBg zS8?akpT#VLla2_b&TG~)|5ivaAecmWQ%y^BiaG->EkG`)t&6 z4vKy7slM3GB4|*V(~up69=z`ov*uEW`-SG3SwDV<^>`<;spL8Ts29%8M~Q z`GV{3Yu!9Pq9OTB=pqNfk7vJ;wYO(i?J~^m%l|DGC^{}kXOU=~ zOi6$XrN;4lKD({347EZ{23+BZ*@bbNAf4O>laO$YZ--c&E3Tnw=z(E>CtRP!T*{iR zh@ktfj#F1_iuU+6q#C11vh!S%gYj-%;g&q^I2VGe>A?pOGC_;A;p;{FiKR0Q>(bF( zZq&1>3bK>gYk)@0ve=}| zBxuj4v9Lcu&gx5{;4(`Vc-8GYOBSEA&>pmfNu4LabS`WH9eWy_S2q%sX#2_EHEsq(HY3jmyqz$t(>Lr<0iuB zlw5Tw9=c56RC0ZqqA_=Uhy={u+IMAAGox~eZoev|U%KXE-IC&_2@^1YwJsHSmxIa? 
z7b9{A7V%o!-T~TZM{AkICM^6JX9+eL_-ma@bmA`JkGmB+ovfv-bF{M1X*!Vb$cQ{L z7QwgppFKl41V^HWe*3ABaoX8?Sntv6D`-#4!T~3*HCA4_ALdt1)pibq+s8&I5c-08 zYfcZ|x(r(pkOOW5*0)mi!-FK7QE zH5>iV!&l_G>mEG>a2t7JVXl2xy2w9ik>>lflla42`Y@LRBh-IFSqO%wCEe1(4<}S3 zD+uPgctHOgsPeyBM@^GtuPKeJQK`jvP{%E?Nu3n;2DEDvS$63sEBJiup(yWl#UA^eiBX)NRBt4fn0B)YGqy2viv-Y(Y}7Hi;sacpmY`=t+s3!0jdazms?^k`)8yN5DQ5gV0sw%6Kl1%*4{I`;B1Y&YN@IGN!(?wA|NKe*MDnb^9X956HY1tPcv z>QJcNrLII;h8Ns(X3L$AG~jWOYewqRS8Fdu)7KUv#C-l@I}r*#kh>TZaf8piH@e2z z95j1yGyM2^lVynJx3A{&=-H8kYIs~|$fE;NL_*n7S2U5TV__8eLu0YoMSiqka}4#W|=~&QVfZ!<%_y(C#w=gXQ;n>>=s7E55jQ*K)sp=8lrxk zgd-UVB|_}{-nsp5?62iKG!qlrh1`U6jDuvrN%vlz;9*T-Y?`UrnSjXwKsvIrl6VOE z#6E)x;=TSuo(r3Gk3!RSH;crVqx>tutxxM!z9 z+APsl%q|_aSigM>W@`hqP`LHOG7oCe?AyEU|Iy>EQ|&Jg903wGUcXAs6;S zYj5__x0UPNxm$zHPuj73w_#p%1^aU$? zbJdLvP$=<`4sxG{129|mc??NObmIvA)?qFSd}jd_^D1|t0eeuH!33X$A`Nn>~ zE7SU*e9QO6pO{{ULo<5>wdjL&hhYeQt)9tGJz%P}pc#P@SSTp*#j?L-=v3J}P=`0R z6pQvD1C1|QTbK0~PapGv$kU;5`uXKVx>eK-GT zp#~TR&UL<=`fdT+E5G0AJ!gM5#*Wzd(6|+mN#IVD8)U!dRC@3|r&2JEC)wleP*S*=fiGyCIZkO!ZvM1**3L86IPOJ2sfK>fx|HLm5+XygjVl z!-#pv32O(1x;LbAf}#U`+C7lpP17_OuA&Exdk$rt#d=bVCQ)pfFL@h2@NEZlT_8Z1 zrrM2Tdt2LT4-!8_Yn*6#{q=osyPOdO{ifPWOF=X3feui>ON&iK{YAP*a8mtan{mkSN19be1y~i`$i2LPNscDaw9X>6A(`9qCCQXF? zd=$_6O@N*orSH)Mbr?S+@mHB zh0kc}O1k0qBU^*Ho?Q%MyKPUuO$l9%5;1I`(m51c)Iu|~PRHbYQz=6ic~u6iT!A{TCd5Q(W}IqWtAQWr3}mGV&b z%e(mEJfJDPiHZ3RsR8hNfBPCFVKi-=Rl0`(;us5GiunERBu~>F87>Ckr!}azyyIK> zM7!h|g$8li!WPaS6vXVkmlZs8#^f;U$&Hl?2m$(W3$AFRQH5>xfy8Z_&iC3xYqnn& zUuyqXQxK}A%7$Q93X?bRm%x)ibxHnkpImLK7@X9-sXz&M^PW~^h@y#yyTU^5Q|DUUkIMYA*@>F(`m zcoO6$p4KxV?4X)^fbG6$A#nM^w!|gXm!g+;&ZQ2nX#BE{ER@B1VvBZ|xbhEL9?|)y zk?l`Qa-@qYR73#nP_unX3h&fPNYsr3NaX`uB17vCff?F!GhR2B;WRfC*r&7`-@SQR zuCAg8I~r1@yQl*c^rxF4mN+M~`(JsU6SPe!T!|{7S zJ^d>8(bcL^AuEIQcP2veCT^$J00`NGclv9*TVM}d-{&=I3x+J~Y;CVp`v#YZ2~&OC z&*0h{(Rp~Siuj&y2;7LQlBZS#4j$b4)ZCKPLvJtWvYw$tfF+xxb+6w@hJ)v(FhZ%8 z?l$W%M!JPhI6sjFo5|n|{-}%v2=SIU+N&JUwfVrj;gxVg2V1a^N;y)H+>JD8&#g1J z_R;a{q9rj219iT~yIkqK^Qw6F4meJ$OS@B%B=0i_k7IYv=9MZto<`X*@yhUySo`PK zc>aR4O`qE5oeR<}4L<16M;H{NX=hetaO1i;Kr!k$TR1Q&RrO|6&9 zEiyQC>w4|Lf~CerORSgKewIJTnW!>IJo4iY^*F*G4WLRvM% zi{4(Z!uA10%&WrM%{~)}_MPKFOgh2T<5ZNIENa+{^H!5Hm{_C;^E9<1(F$1~@4d|3Ow|0w`P9N*7g;;;&jsIeK4RNNDmY8-77XGedvEHYf2zj34 z8nL2#@rhmu8L~oRX#Su4w*QHNLcC6s_9Q(K0UV)B$m{IyDA2nNg4{_wD3GPXQFGY| zy5QuW-Tnpe>rDoRoHK)#Vs~gx*4num-T{6D*`MS3H83OS zuYLw?8=gq|>L?xgOm0^U%UMPjD)1dtJFTuFL-X89$#w*`=;F`OL+dmJU@L8vnb7xR zRFn|xdsK5s9V4xsu6YVHIWL~<7F1j&5|*!WiSIo{)yffyqu^Gieqxdn=F7XN{&I&a zd-8n(+R^RUEC-&(?H{UeaD|RVXC7d(gr!HL&!J2-f*#yX%VpiQ^)r_hJs4I|=ihPa zbddKJ_rV#`FU(QuE2~mAu*|vx3$2H@yn?siNo0(Bf>*kA%ui(gh>?-sRp0!6phx#< z;01`iM@e-NU(9nc=;*L%!cjTEu?)jm>aT?}47qq@p2b~~G(jpbsyB?4mfreI%lz{s z5O_N+V&zc{W3-#WNhxp6o^xD_tapx#OGokxr5otAFJ zFW7~8iOS;w7sXEuQN_nY4w2#gHum&7Fc0y@heY&CV+6>Z;k-pKCAL>bLs%4Lj28W% z%$g}ehCitAf!W)0)A{hnGlrK_EtJcB_Zk|D4?aW{<8$qD!A^sCik=JuE)l7P971@N zX6h^CJzC$KZR0%X{N0N4&FfmRN;q#Z>EKpdn{6XS)ykxZ0m2|0b`n7^&16bkOND16317S@|tdsS)iUwxrjl@3$X>p zjMJA-2un`)n708o8@wYcqlpW@Ko?1QemIv;mbGf{Y!3G)QZ&VBKdmenUGQr9IedSG zE{?)s0imuR|DG{D`zbn#7ic=YON1*FZV!AWMw)ovWjg2Ws32Kou&XS!GBLPPTP&2S;9RSaS#Ga}T^5VVBB=vAl&z@GrHG6C)F zjF>CfSJ01U@3bZuyWu?REo>O@bK)qR!1#M*hr+E~kGgCnd8Be6^)Q)SKR}F;aan3H4TxY*@e2;Jw z@0JrG!7loq*TB3Y)7+{@N}SW$%ey%KSMadNfgK}5)zEaf4`+X4sD@vD*-49bO$mx= z5HtGex`a6#^aJ?S)|wj>w>eNe8?%O6RBU;83Und3Vu%qdnADRUG~NlL zm;V^UkoL9kMSj@dW!IPeIUQf(kJG?;#SkK_Qb&OoA>2e%!B){g{hgrcoBs4TVO`?9 zZe@CA;lhJaJ9q!vBsaf^Am@20uX}lY>no!%gOzsfHgrAbnqjN-Bg<5OlP<-*oq7(@ zwKRIjG6*uig;47I8!Axs`^hgmN%eP-?T<=$Ld>vany_go%>n;bEP@xAVS%`v4~{n@ 
zw^J)Wmq4Kbp0iL1KPB|Hox?iXPtq40zG7Hjl+zRa!!*lKJ_i9A~ zc@t26OcBHu7iImG-{6h*=x}SrE%Kg{5b%tvhh7bri*A1udfc^%=#8!B5#bQJrwuqMT{ zQUTRE+q?l#bJ3{yT&hZY(bKi|3bMl{;+P+%C_=Zs%CTy8zXP12ZfFGm6sqK4bv1`1 z&XE;9tj42aVjV8a0we2wCH*pg+P*3N_RYrmMm^(RcXfO5uj~KrS*}1z^3)Y)jUU=D zMbIRzrcF^!HD|>|7xRTbMOL_Up#ayo+vB~A#vBYI%iFCe#$`eu6|= zN&4(KF(6ZMkr{|Q<-4wD*-LnaE3mmq@1Ncj#x2+0X#xoH@Pn`Q^2GdRT=OlpNxUvj zUrWWePdfW^9$SN@MvlopIKG;BGiM1oaCmHqh=s5;?Iz)y=XO}iaenMr*0?j z3{~@Hv=8dwq$vR@jfXLw_AEMYpT(_&IJJFcO2>|!kM`8dFunRFX6;{vX@W19M_O87 zmpH|(dHk!sgA7#;II){ERhw9OOfh&p@|Q1XyR;{u2U(!XsdaF-#rk;xtoQ#hpEX&8u{`9J-E4dp= zwvg=9HT}|x7_;_D<2=WyRKZ@nDKaSt=uCeNE*R*X7aOjcVr&OiixB>)a40uV&Eb*AT zNOe-OrY`hFS-Aexdyn&ABIjy&1&CCL@mEJ69lN3&rHrsjvMQ!v&s* zLon$^qMb+|9oc&=Spc3j-*-3phrp^WUMhH583ah(;v$@4zMm~)U)r!rM~n00(G^kO z*e3yZhID*n=|Z;YIR*$Sy?fbP33EIiOYkB>U0Xkp^LL9q#2Y(ntvzyi^I;ci9j!p$ zF5nW}c%rHM>z9%XO8+ZYq6CXK{NEC1Xp&uv!9bCRjR=Wo0rV6EN4SWtsp-_&gljcxbW+-qb>W$)CyU2?MTS`UYDS38i< zsMQ_{#cOv#?zSBm>X&>Yr#)!s52YhV$2=%tkjBQ*8-g~$Ou$T(V zBwegH{o~^&y=-E>A5g$9CC*TSk7A+A*pxt}f*1?vM3j za1F`PhnrQ8b=l$)$MWeyxfgnLA*Aj`nN8|=W0kGc*W`Bo8XMmkgj{WtPXKmFgJpdX zY@_`Au|fy&v&N2)w+=Ih***1pEAz4FRe&+&2p9-R5LmYR2Tbm-4tujlVF~G1_nSL? zZxJVXBU)c#b3bY>l9rMpFEBbj9CKP#aF^ z7Uf#gp`;bLgU8FfeAn@#L0PJ()b-IOPc67EX(aXe9Bi`Xw=Oe^d*@ijCset47T`Ze z1d*}yRR!#BnOR1Y36sp+-8!7IjjuSawW{$U-3;r?8Z5GBb9et-eW^R@p^I51t=GR) zxYVR9M^jDYZ`j>Z)?1Pr03wCLC> z!Bb~d;MYd`YKJtG@9vnnfm}lbl{9qH} zg;0k=B1GkC1MOyL;Hj7IIg#b;$O|Y~Hj!)(YcI|l8z^=^s+0d;h9aySUJnP9j)U4OZ!i=r zIDyyR>OS*?A*^sjgc%I?IL4Gl5F$-E9yS|fH8)Y1;VWT`z%7sXU}lp}%9ZKzWle=Y z|41;@=emb^ZC|DNYa(X>+T?R03z`4$L-$rUh|RR7F_HlvSmj%MI;}$GswJ6%QP&;i z2zEKSy)i72>dQaXvz@tKMe^Xei{`pZnlet^aCX^f`J$m+d-9E!Jblf+5y*A)R|-ea zTXok1|7!Q02j;)2uWHO*>jGmFR9dNNpj+9l-d0zo6=Zb(rjw7wU!G&qMCu&B1{8@O zHJvqZldgO9%?)7-{`NDnCjfj^jE`BCyC`^~+Z@ewcm-J?15JGzJ;@uG?zb>Mi+3{M3`2|NUb2|N(T=#8AOS6``j8J z=$||_gqM`lZF(ErhPU90SNY7J=gTyBThfn21Mw*0Rd*_{u$93BN9X$-EGM*WK}g%Tyj zV?~*+Dz6!oUq~yi8b4$_9)wRTha8B8;4Uq}rTtEv>fYym=jXa@9seJ~vw}N!&)mNA z<^`YkrnNL1h_CB`Ez471rr6gdMVr;mirT8yJR}434j607FBatU*rS+)+mFhC`3h2G z)EL*pFqr=EFJzC-WFR?p;VE6A_kG#K%yAT@kPB7is zB3(>pUN1=&7%Frz7GP!s-nuxRch22u_8-^j+;HwaFwLLqR?kaQIIz%Y}`d;)q0H%8@b}b%(mK@JBwg9V&u0dBSOlrqvXRXcqoz;yOJe?>^Ms)>U0+ zGMbft_&Nkeyac-t8MwRRFEcm^lbb_a{iciItkG?-)n)L0RiSK)6lFW3^vasg$`KP< zN1njZB7ozxO_noFddN^CgWE@_xhu<@3&JUYkbE_fIR&6k zL4K&71nb2AO^bXmev}f#7xP0tED34o$VB`e2)8 zaV!OhW6ViTvog&?mij_dWSrK8$S+K05x?t;?t>Nm3s(ssC0+0s>pZ`!GbYZXh zO)`V-SB=ZtmQc^$kcCrvj6KpISrP192>qib%*EBU24lWljY39K-ugl-5y!)< zB^o|Rj56BjvnG?Zx%N}s!Zp{p)dIS(n?p~HvD=iR-{c7y$_ISR){4ce_*g;%yg{^>@I}u^r!AlV* zq~BTL#hL7i6tJR7L`J(DD2~sUky4|=XY_!_AQJnzOD-(iIKYeu5pP?TU&OaXQ}hjO zuE~-6OJG@E3M=S%py0rO1RsY^PXDrFAi zbL=pLr%|R`7CM=szuFq4t~%@qpU6M2N^W+0g)dZE-wnHdg*0g-$UBbN!~pW;&| zasZ_mykhU}*q9hh{jw+E5L@GEU1`i5QK-6fV2{Hlu=5ayN$ebTWks@SW&?zAd-*-8 ziW@kRNq!&ag3{zE!fFvZ1i1(x?Nih3&d0We!djw+|Eh7>bRE}hu=>$Sp(iy{nS@d` zrzg^D0gWj?n%)a?JO-=gQ63-u?|ICwd=>u<{oixLuOTZVilT~WZoonX6+dhG?Wi@H zVYat`UElo9Q;VfFCaPG`AxwdrzPl6tlzk>=f_4(SURp^luusECzKGSP)6b#yD-)^t ztEPNB?zx((31I$JUa(4dAXf-3I6JW}Ke)Bp@bkx?xz2;8*y0IZlCE+Qy4&5}Y~Kl0 zFvq`gPU;ROFh!>`t)zZ~qsYrg$3HV{XxO*wkUr&6MNv{Y5D86e%#5`WPE(mD;e$R& z^uRcYhp}h0Sn><5EOG|a?E70JTbC5rohc16fX;hYe^NWt;0pbve0U{%Uz#H_;s1h_`wDf?w{4S!G?9w^q;rBO7C3sSaI#Yya*5MoKk=-VbJ+1P zmmJDuVT{QZBc>p7R;2{RBg1ZDkrqtIP;2G7mvEuX7jcQXPW@p~3*F#(hvhg=Z`QQR z)hoBKpdEaG2jF49u5#9MrMI<&6E9@Sv0Y>RVcB z-n`mg{|3Ur>RSfb!5AkdbVX>jDv`OEKc-g>C_c&a>Sd~7`3n? 
zGjw0D1jeIkvoI#!;#9UI!%!F21rx@!hJgxXTO!&IycyqA zl*fDbA@sDJ&(=<0%p?jB;amOaF}P!2DJ`#yJX(eYOip>;pc#%L1BOtP2RFDt4y+c# zH($^XaUf)8WFk?^H0!9Y1e8=GtLN$|{EW(#pHN(CU@N&CrI!B@RG(O(=Oz{g=Ekh1 z{$*2TEmpyU>KUddmH|q>GG+;As-gwge$K?kTHMkS4&|hTnz;|VT!Ja%X2~uMgw@B< zL`++lv`QXzCi?Mn(Z$fe7?-sO-%@3;3)&9s!jGyx^PAFo+PcAQ6}ZEGQli|p#ek_0 zD4{TG?Fn(lyos?Tq1UvjRUcoIAK3twoEGFCPTBa2>_NM%v;k2&@%El^GiAyjIy8*5 zu)(EIU?9e*-S1+{_xvCg7Q&mhb0Z}2y#Nq{(6sUt+nSQYK@{`le0L71g{{0&Znp1# z016SX==F#D-3E7hZu6b=u=VKa?krXsI~GnsJkib{@w5!X!IvbB?W}0rRhsJ8YC>G) zF(;MG#Y$_GuzcCu&&{L~*)G8OsQ}9aO$9k-LJ*iB9Zvk~vQ61zPO3YL-SbyT8M5(m zV3#sDC0B^`rv2(ix}G4SQ{kb`6#bk#OH)FEOP3VFu9h@nLB>qjohGIjLJ>Ct6;Ue` zM6sZX7}9AV(OO9~pI6A3G717Xe$Y5q^R$jhWq5MS8EJjbe;hhB`8g$m8xJy{g;J3N zH6!!z*`I`qV|%K6gy77I#%-c&P7v)xkKD_oPNbnHt;J+BA9vQRRQ>$vW^bO=SP%ZI zrKx@EX;8N;#m75KrE9*Ktf~X&3?;Vx$LhEMJCu9Dp1M5Ihl86eWb}^wZYLSf0w*7K&AX!k(g|OZVyB!8X7xYQcKfoF@`LHX z>Z!K;=6vpX&4H!ua+aP85+1~xEDUwCs4IHaD9XtRUTiaQsHcaVxw?WlioPY!wO{gbmNj-7DNXAIQ} zh2InBLOB&;5;-*NW$*24Nsgrr`M9cG%BHf0{HGAxf5#+4e8u>iwTof2kYYG~tC{`% zsV)QhlGTP_9bIZyQ-~U0;hkbdiT$A_6;*v%a&FlQu>WSk$@hJ#cxG{m;VD)VCBu38 zdE38q-|WkSsl8G|1_MO>pF^(1#dlq2IVkI|h zaufh>cFBa7x<{m_1O~TIja(TE;<2qA!Sw-S)17>8dy&YKFXf$0VBn1E$4}I-V5(kr zSC>spYjU27sfuMi3vOH~bhv&|fq(+H+uym0i$g+zAFAha$B|3zX|5SVt=u#cGO;6h z=2~>=1CfxO4@*%|beZQFgVmw&aK{MEcwS%_emoN(ZtvC8lkV;tAZ7jWWzE;K2#x}= zEQ6ajn8fGQkhgFM@-WyBqXg}KKV8afrX2_uBiXC#p=5W626lPHe4GW^!wZ%$O)3{VCl|+cQ_?9KP4Skld z(}gmwGO*hbUQ048efmH`%)F(E!rg^FY~%hV$LdDDKz?IL3z=dg5&<6ij ziAb~N)V#8J5^6fi|D6(LAw*CNX%y9%c$Ovf+xs(BXCRF7bvkXMn|Qw(b(3^-bj9@@ zH4v#3Di&q-q9k9(A&A#SIvIzrSvzMWl%$urMOp+0j*ft%RL)9>@n#7pUaP1E7s}aM zln8(wMs!if0q!RJnlOQzq?-2nJhe)KG@cL2@Q}R5yX|biiFuK5T^~J_tyV6|x$|frzi~uw zCKg2B6em%puLPIRTz%wyDltz83t*SW&{Ny#boVLzB+zKoW~m=9@4ByZ7eL@Uxz}(n z%thrACNJy_VuUx9_RaO}MESZ2vza-#vUl#8wNDGsXDIC38Ny`bZ;r{i5pY;tt0(lO zm|EGT?=gSp>&el9d1sZ@f@J+#u$Rjnbh%XzXGA`s@&iP{gWBjY8or&gYoV1dj>0S7 z1SuCcg7D=xaJhkas{$p{__W`7;sc7ExJDsNu`+NqKP&(ft>UnbPH=Y>m$H5~J7el} z`Oj=RpfH6ZNO~8(+)>BrUp-yfI7JPZo&AZ@T*8?gV3m>>Kh(Wn&kV6>PT9}o`?Fp! znLmI<4q2Vi734m#Ns)NBeH0C)_-^0&k%K~%9>VdCbhf8jv|sfd;iNh-dmr|< z5U)l)&Sz1y;k?EBXXozPjExXWAM;j)xm8I{A#{VH7`bp9BT0=+imHJp!kfv2O65~` zHX^_vM|fo&70wPCTN)msSkPx5hT7AwMaK<<3!%b`{a}{JT(cNxhUKe4x3x=C%jLit zPeR;uFgvhBa13{0d2idK%40lBg`?H0n=u()acN!iSZ^}29EDpv{dY3VjIWfIf2c;f z=Nq%ikswqs1it^Sw(Rw0-|Yvgm^?Sko6I1FqDQ?X*@4%C0@koFC*jn9pMLiRl1Jo0 z7QOU?kCna44d;fsn1`x0&w=X> zEAnT%f$@-QanPpW@Ej$Ak)54Uq0vBjFS!>#GQ<5g(4HYS=HbK(I5l}e?TAuvLDHSD zkOu`?UlS2jqJCeJai|J{rsB`4RQbY7@G7`^55rWGVm&D3+%w9X25)=@G_9{!XVA0? zmZ1NVs=O4KEGcLNpZo;gn9^FH*jD%AR9m+7TvB5NQ|j~w}Z;xTJK7u??* zQNst&3CeIug#K_YBLpD?TLBK^o-h2S#4W8jB;V&Ld$0k4@=pBRA7G7pWIcRJ+Vd!S zR%nv7t^ZLX=6r#1sIVxzWB8_7~*P2xDsQF01$e_T(9>$ZZA{IF!F$?33`VqRhsfuJ!c|Jsar0YO+cvM? zrwh!Vpwo()>h&Ego-=O%vUu?uu%=qc;d|&py4(l8+8-UEe5zCFEG@q@gOb4n^T!yb zX|UF5qm>N{m(>NuX*pmWx1()R9=oy>Zh`86iri9%rz`fWr7NYwKXK7VCs>#r9 z^Bp;jLjMYN0txde6ll*%GVV=2q46ZBG8ZyspcE1g#WCj_->>0>(>W`+`0Z*A)Q>pQDgv9s7<;Gn*9bycTkli&xYhD{ddm1dtx>?1=y51}pa@ zszsqfsFP-Dn3%L%F{Lo8wA#OjVl%auj_6R&OUQ_lmnNg6sh^FKTc0 zdpcmkz7YIdOQsFCCMOCHawSnbmku&B`f^RHF`Lp8V+s?Wh{z_w%$RIZN82B!qZ<4a z354B;F&L7{>INSi!*rUrV9i6-N9G7VdZ-D7S((4ID{lO|H^f;)(zmTK1jU43Z>`;+ zgh#W)-Fu4o-8p+?ghSnMAemStXfKB0YH$#S(K40ylHG8CtNb84t*X~srTt3KL|qiom^< zd>MF#?@x;h8;i@+{|2lZpj+Q&Ru*x{{a8c}Rg*_U8*WWIyw|lc)nl>rQg|G)Cy!x5 zfHWjis3Ub45OC=9Kw?ttbt*s<=RuS(VsMsM=`911iYMzo&=I=gB5y?Jvs!AdGA?c? 
zH2Vg%pN@{^x?ll*#F*jtoJv2|H7L2M9uMX6rHbF=`-Clm8z^SJ~@zo=A?8(ZA)zm)nh(mH%J^+x+*Eltms!czZ9-&(9xk zj`Cg(@uV0U@a}n8aC}}6eF}d)eCR9~r$qE*f>;A401f{jc7%jMr_Wr|36M*vsnc{rlIQ@PPa9fIlAoe-`~u7X9By zj90P&*Rr6eZPu4<<)9@9Pm8^`Eu!9*g1mG9@$UVj#r29a3d@`?@B7zm9Bs>8 z7QQU|(6$pz`Yroe^I+}%e?Qs)~) zh27K?1+L777u_Tc>I#>7%o~c7TK$2YEF63usZ`{Oxlp}*w+~TVLu998Z_0~EJ?-a3 z)gMj@K3ZbPKc_oVV>(&F+>Bycc0I(9ai{Wd7^h*=@x;c_EfNIEbJW)qDS-!k3)Zqh znEv-o`lKit9Q7SA4u6MSB7(HvqjN$qzAbuOZLPUAF7{LrZAXXQ3?Ax=pCq7D(o2m| zwU)5?RKQag=Sq#q|DHn2n$xwKj7RmP{&Wq0-#~7YeyN&n>8{l}u%*BD1K7pMN4t3o z({aTB4pLH!&a3X70$?|_Hx5-$h0Rr!6UVr7r^l3;T76}`gY_7X^kj;CQof?~k09)O zQI^fi(Yk0Q?E+)a3hF;&F`NDI;=dPVjL*rN;d_t5aO$j@c_wszI<+knWf#WxEwH9W zUgi)E&?W6ld|Z&2#;X?}9O38)1vbYow);Gg)4Gr-a=SBza^p}S!)fgy*f~q6h4J{h z?CNOKD$JEx+Iyrtl1gjcdMC`|^nB2}2&h-f?rEYn6}2N8p7=PDtufeYR~vz_mNb&Y zOga+vhm2|c6dDEun})M|0atdL+@Z+#DrQKuVf$hI(T5T@NEvoY6S zc{xt51U{I4LvcZB5iuRF>YE0sU$z9%lNmfOl?}aBWwrcwB9cp5sXmsEh=BM6*am7w z-M{>YCll=+*BzIJ?J$dT0RCkJm?it;Gb7HExyZP7-W2^UlIhb+ymU{%HoiMXL-D^9 z9y74Gry0N>PQP13)&F;wWVQA+8iLZuJbqydI5FLZ2!8U{_s!Z21PebG4LJBTQa1s-LQ^4|Vh3WLU2 zHge7J5!deKI4$}oQb41pnpeA#xace2)4Y?f$^!)G7xGsHLI~(zoKRVoSYR)rI_&Sr^&yGgtVPe zOaz4z^V6EP229ktj(~fbg7?f5@x*7_Ke{A25-3YnE&?CRC=osay*`4%Q#%j7^t?tG z{S4YuGXjeIA>Kw4CK4}ATTABfvtQnRf53; zAGZg%Vowv{Z$X^n$Dnn-jz-isvD7WTP$n^nV_vHh8y!mH^{toJLN9V&W5NMY{u8NF zrUr-Ev+qK0X@rtdAb$8Yu5d|Zz17<6rb&eA1Rxx8vr8MO%hZUZ^tcO_#s%TYDIg?R|GjhSlT$Ox*tlXMNqeIa<73q=3ACy?AfAzi}VC zala$f`(Gm6ujV(6?tRw2*Gos% zuplSe-U}Nn{dgWBz7Hlq7ZbOfv!+z5#W?TWObYXJ za{eJfA8*#@iGGbBvPuu<*aCbM3PW2yx2ibYXLz7r>9V2RG!7L&(TF!|87juxpfay+ z`0GLGe{!!RQl2+4?wB^{QRi`XyttcBGX-jKuGRbyDh9Uqe@ici%dE;U`MlFoBVQG# z86Jk|q5@cm-&V6((b8E94NHIe0lO&$9~)l*$8b`x>Rj&Q%E{0av;MaWvk_T?o)^dc zrPlUY)1@!-O3&yITtMk0oEUncQluVoZ9M)0cRD#F-pl{=+4SfmZl#&Kdvkr(50;yB z{$OWm;i`m9^go>PslQ0ddx(D|V$zkxey0{Ea*zMEGUJV0pQ-|iSMz+Q)@pn`y259K z7+LE*i5{xWj~g7Kfh4~W>UOhQ6&73QAo!`skm=VCNWp}AulSR87$r$oXFD4K&xR1t z&4!y2BY>ZK$+k~v26UYJk}b&WVb}0^e+9SEH9+ARbTb5AhiInVOSaWbgmsb$HXkg#~g(2$}tL z%jxS1#%Um!+<$tAW9`fI`{f7hP7~zii+4-VQN$^ws&~}aFdYBPJi$id+U(lfE9e_u zTC5bv?_@OGtm2iHNYSVhDb-c2)v}MvvoLjIHIaNeIt;L~YK|>@P`XE%IN-{nwvrcF z3))@V#3bsPh!4>$JKM4(-J;rNdJ`J>{XB1!0R7q`u-IGndTij$F<`$CRmMKDom;DR zKJ4F1mj2LK9S^#+7 zZ$3)at@n^S+*WjiWITV30A_ck=tB|{Ve40eV9FTQl~EuMKSg^5$M~yX)(@+8nDCeN z?3pxhth)>`M(HJn75O4Hrj$KBdQuOQCap&q>zJQW9z8t{96exbdZc*+oPqlGgBk*x zpZ$!e%YKE`|J2kjS9n|hLlmAq>gI_O8q9~gium1vAss86BcWrEO zJA83xegD^To-qc_(tiHA3V5!2h5Enz50mnZteYqLbUs+;e^V8(AR%i#^8`wPg-|C8omiIql zLz5UYXg3jePN1#|+o$31{W;%q6>wkOGXNo$Idh`ak}wooljOtHNJaGsg35AK0Fj6| zdqFNtJ|5w`aY?$eS2ng+i<-@dVp=J2SFbaN}s#D1{Dp2Z5}voeO{KVm6_wDJdzO0~|g|ZXVfk6Z^ah;u?@>3}tSL zUQ~*}J`hCI@nmRuW#Kd9vpmy&&mS5z)RoH0#zJIu%n7fKDOiAkWXfvw;5`g$!WS~a zUw;nP&Nfyc)`A*k*@TcR+mBxS+7lsNGvhO#fc`Hu{7gPygXN-&>BJfpJ{dP0R9iKh zkkOs^5`7Uegl&tVFu+;QGnYK06uxqCfR&KmBhiva`-Ol??IQz@){0RVW((waGl;|SDHp?pMW(I=;w}VAu>m5gac5!x8ODtDb4;`$Snfy>>%~{v)>C**V zqu&;cJt1;Nd?jtX5Q?qz?79#2JMo5O7C_O2!bKZlHT!BR<&+@~YnU>@6@f!&Y9vR^ zaUr*xAxqpuD@d?4-hSfNz5Jxu&& z6BbKEinXADx8+{CL9xQvY-M|ENn3wocvz%iB}k>j3N10LDxGfA^cBZ~dl! 
z-}QfTQ7ZE;op#iWxcu(7!rz+Dr>wI-;{Q9#adWf(&sRTOi_EKgdhxqVH9hNK*H~F!q#E4@47wq~k7EF1!Ydh8=0dv{VuP9r_oH0QQY?35_J_!CB zN_i^AdP$de^}ey;U{$w;W4s9u!S|vGV%fJQ>Mb(NnCj?%j-)1m<2?cOZ%+2<>>{{N1LVl(v zaR3lFddf+>jVZ)pt>K=ykGKFWKV=Ho9)mm3W#`+x?;K-z#_49=ruNJn2iRXj&zect zEf_fOWjlHqQfIJ4i2oChNQa_Bl!%F7LPu32Tb~(A2e9P#AnuU+lrG%Y(0bE?YFcZ9 zx8u>W^gi%*e|=d++Uj}#fl*{i869G{=6to`bJEBL`Hlz7B|Fmt&ex$pz2-a z0?N=-tlGC-q0<|>)f9rG@8)%sOyO$RvqeQO##3=`Fj4Oy-#z?}1Oq!>oC~r7)50?e z!h?~6w?^!!e>-2$!_}UjhwYw-ZKQC99~*b~5GQI1@`1{#<=wOmJ+Hi0si*&`Ib=~% zV%j;uSD`{%d1>!@CJubkl4!dnRT_&VkdTdu=LSvudCdT2-}xrGz_0n+-y_S#mbJ@b zt|{EdFYtSLeHc*1aJm%LjZ#$0$9)MX)WC*TbuV)l@Yz{WjYvheQ9dte|yOQAEJ(m8h39der zBO-?DUprdiDyFhiKTLeeepfEinGGs%#sy9(*Vfh$w8+!m^`G_l;jxOZsPN+HdNT*GyvG`@15XyJpPN4qX*= zCJK~@>th1DQd@FXzy8)PmO}S@IN8!HR`ldh#7P2KLnLVGO|q{Xbj0;$FDqb&$NOe` z3q9_X1K@DRi_dh@JpA(J4P^eSPIdnO%$B2Wn^Bp(zHbAYhXjzE`3Ik+^Jk3dd5M0Ro%Jo0Zz!P~d!W&Rjr{QFlL|L~*u|?TYVTW86Uc zK+m^;v0J2o0S&+l@l~aNuw^pSWFv;4T*wOwHA>xi<^sFJN84gH>mJ6mbOKO+MhU^R z8JRcpNTV|BERI&D=8CxA42Dru#{ZZ#c656jCcHIT2t#h48xru zpb1_6{)tkm*klaY7y3&(A8Z0_hAzyOTSW5AcJYL`te945!b#RpaM`_@3n|iKAS^)z?O3ysmHSJhy%A;p^W(E@Yv5!c=%Aev}g z0&i~*AYSP-xa%m~W6_or==KpvwjG<$eBsbMB|tTds6_c_1}0KR8McKO=y#8S0D2C( zE5q6v7+o^?hIQ-e{7aOTAAZ=vgVhi%YFE5s+k3tZkf$JN1Q!d7!;8gwJu;-*22-HC zqp7o?elC$nETgA!pp6z=2EwbJSp3p`8Pm*>Byq4`gnAtPJZ^|7<=o{jE#nHTP7Xt)i77 zZN6~*VOZb)UTsu?$+2><(w4j;Q@keUOC0tigTAR?1Zvn0O?La3j3BLNq&KDVx>>cZ z+0jK(>>XDld@S;q`+`*1eDs?4e>Xf9&;H@b27swp@9G?G7apu<2NECrs)sL4c>5UY zu=^wRs~z<$9+4C6zyAdoE{iQj{>_F?6hyapakQViB_qR)8E-7pfroNp zT!>zTDawvJILvrhr{HHi8$b~xPh0u%s$M8gdsW^^^@EaGSH6B^Beec&3$zi92-VuB z8K+n#G!nMF5TehsIer^-dnEFW5WsU1r{%HzX@^3ax0bCP+AWPu7cW5;2>`H{yN)tj zaiQIR#ZokNmkl-L-DEW?7(-HzLEX6y0o%cCX?!2aLSpX+aIf{b9GU8gS%oXX6Y>cCkP}k3*i+?n(B(7tU3Sip$J0p9lrZAyX6iej4pmWQjb!zGW3Q(m8 z0|^R~>=d2Kh40TQGv#XQG56&{jhMyxje)2VAas5mp=bH|8_7KNHP#rSR> z^K6gG>U>vOHa_Lh_GXaOGSK+V z_~Qza7+f3JmlPz#P|8k*fhC4DzLetrdpyhZs&h#=47sL^A(9=*t0rM1m0cLog-C-^ zwRyxS>pgdEFVFDE*9-I$p3)<)M|Tpt(ChtB;&6SWSz^7bP{U`cls1d@P8k<&n=owV zV#DBBu&X;X@a><9GE?;Ue%{``;`b)V#yd|As^% zHuJW$ur>CQobR${{5VP|cgrTO`*J@3{`5QX8N*^mv2hlqBcY5YpC(NkloQo{K#>!( zu_+A^T+~Y&Ads>BMqP|9AzL4I4#)}zvkE8z8!&w+&p6HW;mNQ+yAewWTfOhiziDj$ zn2EVd4U)5l+r))#-cbtsI{Eo^$9`OiOWht0I>4#Iw~wyq zUI>4|PO@@o;T-D{qyF542j<1mhw~a6rY;>eOJo|dOwVluAE+u;7mTV+u4#TB2Y3j> zoP3E|OG(XkE~j?v4m-J0`Z{hS4PY|pC}_gOTN))OL8w;t?9McLx$99su7tvNeNZRR zr(yiD;vWBF(-Pn_Q(9S z5!k_E`tN+{abvHv;^TFW5sXgH9F>*B_fC)~)}A~S%XS9nuD-~OI~WG_lnk4IS&$(a zA&0j$En2%i@K&4&1|BQI{m3C|t+2JjV-0xfwdE(|O&eMcHI2_DPBnhd%^1fetZi>& zVzvLze!{P}JDPf|cI#eON4@kNY1p-A&bDe(mKiQ)T`I|;dd?vYFjPSwpT2+cu-A$y z6znoV<2vRs>_<4t07D_cKuW->3<9JzHJe8BDU8kOESM7O47;go<~Zh2`HwT2yndzs z`xKkFa~N-IVq`H5qvumYC;{qEo;warkwoiCnl!%{{yC7_U*ay%`4%afrzH!Q#mZ>v zH_#)(R>f)G+|=yVOYGBxO)}_PT|>r)6J`df>@B7fN+}uN3gSV|72l- z#X2-em*zfaS?gG;WCPO@7aR^4)OeVMp{o*Neydh4U$DV4w3Gpq05$`Ez)Z04MQD6h zh9yitynM6LZYL708B8S#%qkk`7AZ(BI902Jc%421pmSgb!ob-$qdXG;RYbgWdC5}R zaN7KrYvdmd=U`lP2%H8CooW`1Nxq0t=&t>XQ_Ww2D?67&a%}L2VVgDFcaXfT)RSoJ z>Zc>AD8BundiO#`YOI4zPRQI>%30DJGej}!xq<9=U9vmfynW(1dAq+K2L$>WM2};f z^xiKgVOPi!fy9K9G_Y{9U@+fU zAr~awGyD)g{?&Fpw0#Ei$hSIWWxa1an}(}@yOl5B%v~*ks{UXctX{Dy1Wd+Q6mO*p z%rs2772AZiw6@(1eqa8oMSu@r!dT}J!sUAHTaoZAX2>8yE%;?<(04U$sdizvW#Rj; zhu{1XC)VV3b4X&GwjaPdeA%k|*L*M(Wj5(DWMa#Z4Sks*>K^>N5)ue&x4`JO>NS8| zg#djU$+m<*gQQu0#e($fcFq|CJOliE_yO%&N|9JMH~=#vvkV1bfzz~o$IgemfO9A& zl^~v90Zc3Ux@AWYUo=c*;Kui}IK9UJF0%>>sVo?FliwUofPU8Vg!_O}L3}>XqS=(i zNxJ4@94KFr9x)}{)i)iq`gx`yJMBX7N*I!?FHDd>nTzBfGnfuyZj1l5b-^yA3wOj$ zRw{MZc@Bu>J;_L>6QwxfSji`A!#mIem%D6N%iVb`b*afZJtpOy>D4)>!}^##qUOy% 
zC49U*-F$@C05~nFsz5btUQ5wYV-aEN3+$Xyp1H0(>}30W{I2z$yvfyXbT>y3#?RsO zYCL85k+5%GQTIF5F%wKhn0!xY)lP<87KhCRV+VTcOHNX>f{8Xu9dq@Y=y-H}CUDLe z9HQT)`e=Jq@W+|k(CX) zY9%JD@4}2>zFV~|wZC<}2Bl3>1`lFyNo8-pS?&gwup=q!>+7-UFWdJ(&8@D%M!^5w z4radaRX&_qr|{>m?|{=&hq-iTiwb?}WG3w5YKtGnla&0FWn6gt*Z>xlsz#-ktyUS6 zR{ylPMnWQDzfvh>4*J9iCgibj3t5U$Ue$&s4xK8rJicSh7#m6DDm7j_MRRhn5UHVa z44^Q89j&)G_Pua@`RjSXEbF*{)4qS+uK33ss2_??WQwe!4X#Q5mGZnJ)jcgExrBkX zzS&%WdzdSkm{)3`36?a6Oa6|sG}FnTV4T}QTwXoV{u$;5S{9B?;@t)#m(>MJtR-o0 z2?y{C02la9E^>;3G{NShp|vr8ZEQ{8X!FzX+`MMx~ zgP{xO$fFb%8(BA2p3_Jw9V|x)IJ<2nJhgoN5;oh=`UDOgFd~GkgU9bbZ6E5Z-V;Ub z;07b%@W^SGh%a)R+T2upN_Dn~wb|&mftLH+E-XYLmph4QY<})e)q>gC?!IqGZwUVw zXDJhm=0zEor2|8_t&0(5=SakD*@`GM$lZUb^G;2gcG2i=%!-zA!khdg8U{BAEJ>Bp z?VWdZ44hA1uc}0@7ZtST)reKcQK30{9Th`5V=1}5Oh+Yf@U;$VVu}OqEoLD#(U|0Mk`j!rk3nkqNn0%c37)7RurbuI~0+~R#Bhi#{i z<1yT1_moCh4MDyK(7Eqr+~TYUlvOSIkR;%>mpf zgWzSp3_Ixs19_r&!W?DKa+w@;JROJ59k+&PYhf%)BiIh~*I5AJa~w^n3i;89;M}L3 zubU0p{W+uxCg%e{`vdD9;s+tKvptLQ{-`zfFTFKpQSIiC{e!KbJeCY^Wrfy14n5mIB6)Dx5YT zrn~>A4oziEb@I>ObF(bxho}XMNtJa=E;OzcN_3p%Y2jz-McdEzFA^r5@RU`5wvS30 z93*Y3!qenh@pt`#FUn(XQB6jt?c3?7a_e(+F4N}}LX_XxueDiuiTDpiy!>)GcvQ?H z$9}f_zlAVq3DK(-=50LevbqO&+*qBRO0u?P`5(KdmMxB)z=m>TlQuW5ly%;I&wuNuh zK*5Yto^ODa!n3{}NUlC{*Rr)R$96_Hju@CMleQ?8zdwvcDVJ?@0_z*fYeE#H?6Fkc z*pP80B)NQ7%BSDD0xNd*kC2cE5$vw)(46s(ZCh#tZ5SsL@Rhp!aB*xdLWNvQQbfsv z=7FRh27p(8Dq$1*l=4UN*6P*Y^8)KZ7Z|&L@pnoY7M3A zwDv$@^CMG}Qq=gEu8=iI&#ky^bxgYQbWOxvKUH{j-ijvlOnQWcfz0fUpRTbbSiM9hUcY ziEowfRmm8ZrmS^?Cyw~dz1|7ef{MDsPEmSTT^2lYiA)DBa1j$~DXcQZxN?uB;U3o{ z*E8m9-RD>oe^^o$os;n1p)-xNh-wV@+UTC+nZQ_oQc--f^kjn5hAu8Jp&g8XZ$w~Y zI?AI);ALOG^1<-8hcm|r9_R89b_rZhKLEyGI<`eEK5oGmqu{Ns(PNjUR{ZF|K5U}A zMMh2nR4khXYj;_JW0>p`aEAS<9Qf z45p6tUGR@Hlrl4RnoK$16AdEJ|!pL zp(@hb)fNTEI0nA`9-F#%aFt`T5wf>K1GoE@aIPs^=hd>NAGF`H%R1Hq#W5zCZ%_c? 
z1a7R|y^#~GObsgm9soH;ol%!(ht11{%|#dF**>uY2`DfdIliu0^+P^q$F4K6Ce#&+ zLO#?+kbc+S?mPV+O=lNq-KmILdjN(kN#5(t z6N;o_NDDg zPiMZ&_>|&@;>+%91N!QZ^TU+OKk=vtp$qfpGXfS3hMB~J$;kCg|vYaI>P;XD>F_{0o$O^jfzcn zMJZ!&Nt+zSVZ@=n!DDF6U-tUm_K@FXK2v;sDG+bTnQl4Nx$phNxiO$LJVEky;r4w% zNOjr2KSv&=FNo&4|NboOYmpN_vktaxFjH*dsfIZch}tyXZO9a`B&>`u`8tTlr!rz2 zN4?m8jF%oe`tG@ntV`=cr{SzR!5$3)M@y&P_e)+gYRc8)!~0M%636AICggU&D3A?p zEr2{qzcN-Z6?+ccaDb%y>7QR(A5Ys_33KaGI+JQSokgTocf51!%D!8#E{Tij8vZ=< zh?iW{kJnEv$D7ac@Co_e0cUC!iJO_3;V{iwjqNwx@yFR{12kAN$w8@$A(?5t6)p3^2iOvov%K;%XPxa{EM_!&&!|w zb2`q6M|)@JA{Jtj?d;O=b$94zX&BK)nwH7b*5Sf{+Kx!#0&>^$^V8;eT5qcNAzx%_ zK-8GFbV*2!*T&wS?b{vhm}1FY6VRlw1;I>$h433FP3WuiPR- zY!^*{)TW~?mN~l3M@1c}V+Ydtxn1|uibhzHifP^98rC)oOB;D6zW#^BU=a&595nbY z&RyMsbngp#GSFzs$34E-RAuXawc3puMAM2Yb;teTi6`a-eifLx2VWL5?cyq}QaHwv z2Adw9=5J@wpnRMpX(6H7K27tOhZ>?N9U`36(NVQ;sBWb)=tMYQe>o$d5syvI3tEHS zm80GpHhf~s2w3=?m&IYV<4^zih2V!YwKM#3lT=(1atA}1y>6o62m_GvmoPw22i8iP zxmG}4DS8w+MhV`dkh`HYDB8&bh9OSY_Z;X>D2Op>Z_uHYaWZobC0V+kjy`z6Q?^=) z`S4h8u@-wVmpsUpm|k3G4PT&<=Lzc=+kms7a&*4z4&mhik8r8cd+$|a0p~GNLiK8= z%1zuRVn!hUJJ;u?>NeuTIjbma-BN4P5`gH!4`ueQ8)0_NyzONPfjZGa^6*P_{73Zj zc;v1vITVy>zrKvj%(oTwIYvnD-+26tH&Lmx- zYoM3LR{IIEWW@Hnke6&Lps$t(E*vq#j^nTC?C12B;M7`c2q9%^2wDMa_^~and2qHe zWu5{|JtxrvnxdIDz{jG6MlwVOV9!d*pg8Ee5xE{DL3=9ZwOHKNiDn>xX&P{=u#lH%yoDZP{5`>{W z$A{53@mbH=F6EbZDa2i_D&t`i38I1kT{DiJSxDn(EL=4NjT`B!tEaseFiq9O^r-e> zvd)^d{^V`pQt%-bI%xNr8g6Cgyb^#(6hC9q&WM5eErQ)yMh8Yv<` zi5HF5--eXU-KWEw{OW&YEDoc5XaMXqfJPyI-BYDa4K&{+&cU?DmFQrSpLD@6gK*w7 zK{RDxvK1c%iWTF37SphV^w+e2DB;?iDS`Lc=9oL+`ljFdN=QToMCT@Enum^`7FNbU zt*7YeK@6$kn!xU0b56+(e5ZlHWQQnO$yQ3_^;W6EUXmqHhkIZPP_cxOtm30%NR1*+ z>P;BzGUjdaruzgcHl69rZXWp?`8Q|H%tp|Gf>MK?6pM94Jc@b%CTE3eB03$JmX19+ zCrQ2&3#mCZL3+c2Z)z6_Hh^wc`YYDNj&lRlR2vd$z~rQU{P@!;DI{I<+`|XCHJ)!@ z0|ZN1q~>bH@(RsC3Z2jQuQ$~4KRT#4ae{eU?3IE!09ht$230FF00xwDK_FDo2J&p# zo{yZIw~1kbEZWlx2H!*Nd}>;QMdWTR(?*W(+A#x8Qaudh-~LGbWyIz+#^-G;A0tqt zL%nQl-Qi_Ee;vwQXLto*c*V}tgSNh22N$bqAH^cU2*j&x;xjL))xwN@7YkG&I zanFTEDhuO2v*hjl(R%~FPws)HAN4r=KWQX1@F6*-R!KOg2T{nqB?q+xa{eEMC>uik zc>m8cZugtIE@G%)fy>Rb5WS3ZSoIR0wfFlTmj~0IKUy)(!PLbiNk?&L;5fuK#aI%X zvdzQ*#nU(eZSYZ1g)&gVXtv5x%i-fzs*YLxePF7UnwfGj-`Y>{pJvKgPOPD!(cI+J zV51D9?hr;!$*x}6rk?dSIt?J!N3$a?i;+t&!4+kEM z%H$VPyOjXgH=7?$np{gjl8!5{k&8P>CScxJ%eA5p5AMh<38Z~@iISOyB}O8<7>gyu<|ytT|!k~3tZM+I~jb0Pbe zcVbk)+uvuUv>1}3Yib!vy39$(`$SMVThUwr{idLoQoHCHAmBezb>2A%Ckza)d6O53 zrTqH_nkhCwsMF-F((5U5`0r!|hVmQ4QZn;${;-;U)g@o)KXPy`Ep>%8LEO^p-! 
zkcgg^o|h|cso(XgPN%6rLgO~w`-*<%@IB3=spcd-woP*U`wLfiGpIc&Mx}Q+lr^1SEKf3EI5ww@?eY9Mf){VOM3LL|Y0PTcd=~lfQ!eUMjm!94OHy7HCV>+ zqi#FZSuZW>Sy2x!ZyBLEKeu9+q^xVrBT_n~YKH&I$Z zD8V1~!fG#7HWjJfGNe@y?cR&{dEp8ML4;M zLQHqEpsF~m58rqZRu$BxSzta?C!3uIOSxELY_g!9tRrc_1d@L2@5CUv5=~hnaaet< z0c$z0tu%A-l<7;dfJaIEg;cO5k#_mfE}W|!%TOpV8)uF#;i)7`ti;uJ$SOF@dFu3?OI7G4xBtQ4CkBc&;O>v>{mt&n>dde5h2?LP9hB+8 z-mQOdPYm|_#-^XKew9+%k=por>xQ)C?1?o_zAqcQEocpYsXIM+3w`;3N+cMk%1SZ32zZ`MF}o7)VX-k+Wy8(ZbIl9)so5~DS3t~*p@5ICDOzUpYG zlEm7byNlajR3;9`jhv5>-r}@fU~HJ6)N`;8AtK*bDoava+)IA z{~BXtUBVfP(xoO!RDvV`Yw6nN+f8j6F}ec|Mfa=HbV6PzU_C8GoH9r)1Gh${zN3!T zhgEt3ni+g9Lzopjd-fdQt%op3DDlw=ZoITTbfhLnk1y^Bv&%QT2nqh+z>Khp>) z0Gur&kmxcE9t!~~9a(r5Gs&OK;y5Y3^~}>w<=bn!idYGGNX0k6%$$}UBp|EQt~5Ek zkV<2*_@yE{*5@(j%!VZm6QwJLGbf811Jb6K*x%R8a5_P1Rg7j)!WOTsKNpcHPEEsA zRkH-QsWe*Z!rN5#wF-y1=AigtEa`@Ct^SaZ7E=ZT3lrGN<_pAWLW(}KMU*5+l5!gc zo@&U7io*>Vf$4S*h#FD9X#u&g^A43GU&lx-M8xDv`0q7%RMe7C}Uwb+m&7)PiNA1{-IUBld)9 zmijJrW8bYA%>WUrFfkBOZO~x+&9QDREIRX4`zH_46*k~+p`iGV5X2W5m}blQ6H>s% z7`LSKS^Z-|8-;43kIf}zqSS6q!k{VI(Ar_6&d)NRA~X~oxB6O5`Yd~E3kc|~5Ht9~ ze5*!~VHAauA-P-4xeXL(@CXU46BQkWCS8sJfA>H!Q$&5Tf`%>2cSP^=%ORYR$X$1w z6PAk%Q%BI-4TdJDOKLD83kAHDN>~VINq>3Vp1c{8^a&wHci)D;YPNn9`u|FY*uTYP zCn5sA`oX^Q>`ChjUf?I8HZcmIIF4D#9pJ59>i+C`@b&q~p*=oX_<^pdZz?3Nw8d#N zmQh}z=n#jJVwd}Rg7X>a(JzqnE5c55gLxN1%8MZz@T;)YX< z4Hqo*m|z_FMpCi7{ID*K6E#=M)*VRrjP?U%V&R%{-W|?W2_?_)#73n#vHEYuT>bg< zQt2#0_WNB0iTbzxORZwxUl6OrW&VS>_Zs{kyPD|8Kmw~h7q&OaNkoCK?k@KgrsAUL z>>@ZLWpo-_N^p992Ux?aO@JU|Pz}&01~Z0fV0RRPpnh|W3l>M=kPOIrA1rvNAXr(# z-~Y1j0BwWmor?Pfwj_=~GAH4yb1I-B!`>|8edAwCL9@Yy$VfebBwTw zaf|qod`?Ko$&rr~utSl+A|-?3?Nq z&q_Z|VI+*iQ8e%K5em_O-77fBgf+AgK6GU-Ac;uhs`(^oKsx^?5d`fWnnOTRE`K*k z+rBnrP5h1WN5J*GtDQ+W5~$)<)WPiyv!TIl^#q7ypu(s z&@ocWIcA@))@$Qm*HEKYh5H;C^}xPkK&Z3CDI`-jM+1erJz+iTwXsm6QI2=Ck$&(I z2)1w*tP2q02;3fZs;jY70Q16Rc$#$IyG^Huq8GJ@Z7@K1e|)D&#XqVb1MW1*LdZ<^ zGswX{s7()ZAGI-C}Z0LKt!Beg-NLXGm&stS^rZQJ{gG zi%(v*q$!j9V!-Ut`HPM$rJxq_NL?QY?hQ;ACPe?_NA}k_mbyht0_r7;L;a&Sw;4h3 z^6@O)mV$F?Cu<4eF#=jE9P}Iu3>L%PEVr#7?9R+xrzvUUpUBn9O)4Z`q=av@#h5{I+O*0 z*(g+8tn=PDgvJxjmXHtu*Nu-GN=|L~gv=5i;;NfgXX3ls@YU9_tm34-Nd~%JbvR;b zC6oyGGv6{c9{tkwe}i6SDB$~~4LG6!SzC7w86YB5hVDr=+$4q6S+te4ojk^ORH=M_ zy534&IyaaY@Hc)*`qTM*cjhDtm`(S@hzpyTU8g7gNpN7VvHQXcL7p0@iihl9fH2$8_cgt;T9Qz{&DO z>rFT^r6=@8z$97cH>OW7T7{{CN7`5SW-ZG~FLmhl3;~B*kXgi2iuTV-Jn09f4M=ax z4kf@_tfz4wy;x172V!9CLMbvI2DD<66L-J}eT-(>J)gT~f5fAc7=6a#0!DRJzjIfJ zns{0|28J2s6sSY&HfOJx7}iiKeuy+TVPfbGTqp{{=p_MQg)+yC%vip6=FIsf7)3~C zNcTp1?mP*WtF7(GRR|tlCP3r2OKH(so0e&~4gP+PH|(ZF9KcSN#ktC1GjK+$LOqON zAb-9Tp^cc#k^Wt82nhF^ZZF#Oq!A0cvcn$9O8dDFAQ;W2Pn|hFUYHZL%4I|K#N7 zl)-M;a6|L8_w6)q%|F~9GB^AAbewU1KvaI@k*@8orJ$zqfSj|ciZw{bBgAw{iW%-} z7xgeLdJ%N5DdVCbQ%!tpI@ix8bq# z$}t5vY3)gY9ieS>8Q-nZ~RL|G-=q}1? 
[GIT binary patch: base85-encoded binary payload elided (not human-readable)]
z{rSkKx2A3Fe%ZD=BrKXpj72&Yx>)ue5EUFB-n}T>H~+D?DteOz)D~xc)AE}A>7TT! zNB(XSYZ!l&?^0Wg;93rd5AXzl5;VVq71#; zIC?rM134Y<<3bToYstg?iHJE=_*&6Gnr}W_jTo}fh^w)Xjo9O1=#bEC2?*jiXRz#N zv3SWU4)K1Sho6T2AZU(t0ib^Iu5zI{Ar!yA{;Gf@ish|)z;6l6UXUr4TSPbgCHZq+ zxN*A?E?(S2r5t_JC8qobB*qkxYsgd%3-iL;6MwD|ffj{`^IidUQ^i(i1?~pl=?`Iq zh3lZ}M_@cWCR7+n&6yr?p8Y46UM2}$63o9e<-OrDxIuFF4^$kWQ;4u!N(Fkh@Z_w< zhtTY@orhAq0}A6nIzMx+x6{sawGt^g~Z{iRjG5RQ%N&xVJGqSaY57)l^r9w0i8vLco9R&m? zo-QaCpR*u0Bl4EYR1E$3PCR0I92zW!nww{>LnwEN^V`NV=xVFy#eTIN%;g&DiSGBm zHzRos|9@DHC;U2-=xbo;UE2FBn5w;qF6N99XQ_!M&X`3!B8o&r0P)PqCQ*kgZ?Sm4 zL-$L;-3;*fPW176kpc>_;7Sq}HBtXys2Y7qRn|{(41`QgM|(QPeHG5%se4M!;JF>T zMW9ckL?8+dcWVo<=_wgGF`gXeg27dezeiJ3m=FD~uSyYz_6XI0Ap}$|g`vHVeK$Fv z_@t4N?Z@({d>@;fY~V(;Dp&r z^0>i~tfL=+0Lb=cw^C*{Qf446_hEBNfpqNWJ0hw|5GD;$zY zG~uE}t{N@tgf|w{GR65q@FSlU6p`?Q`FdrXK7r5caz!o@kX49g4OF<>!Z_A5^dWRW zs{~PM$o;RBOM*zmMNP0P>2Sq7WwtXAxcihTkK~-j1VY-yUYj$XM)C{nKN@04cx69(yHlKG(wp zkufuT{anDGo>)R2uqa&LDyXb+LF8-81Rv|6=ScyW)ttbz9xI zdk8MUNpN>dg1fuBTW}h8f(CbYcXxNU;2}7{g6H;o?w51V`EvUQ)TlA4dez=*J$p`c zt{x61u!v2*jr7*6B$#A!>}OJw5VAeJUH(wC*TqYTkVQmeK55VcIF6&Sy6#BkY6D6) z%+7o&OUo5s`Acsl*Qnf0bBpdJ%xAwhH2nI+E18w7?OPk5&kkurEm>eof;ZHPS~Qhj z1l_s26BOJ$O1vvUt!PN`jirpkq^XO+{>UKR%;%HpWXtm;L{;5Os98M2ZUuk%aP7(A zyF9+N=oQH75nbRgCK*Ad=fW45A~Is;OwM&rvEumQ8PS23y}nVg8)v6km99ziH1h z`y9tO;qQLuI|%pl^eFl}x2v<@SoTf(qGSe5wJal#z~~r zwe>F_YMCCzObf))A`E&%(3T)fje3Cqwj z^z8*ZTCk_}{~J>GVeBu9OA|gX`YDEdONtZx)xtp9*dm?%Zw0krJ$^~ts>3%kqC`%v zh;)w+tA&GSLml_(rEuotiDTcFzmeI8%|lsqY&B+G_-%q=DvNv==_S%%>P>;>dh_v? z#XS6d>E=7jh9>dV;g1Dq28&sY(OHKqfVBu!uL?x}qfQYeYE3hD2` z4<+2s-U&)ngN*UB!Q9pc!;g5TU%L$0}-i)rGoO>NJD7Hpe?W)P0%EnH1r^M7LHo;_}tnN9-yUOCTQqhd7YqKU7RC<_>{J0dSCR)F?j_xyh(^H# zW->w6^H-PF`9I6g+%||_;Kg#YNh0sF2#}X^VjXhA6d^)%!r?L+I5lx&tzWjgdQL>p z7)44`3|)i%S;%|_HKq@jX~!Qb0h(r;4Gpn;be!^>T=2+g1ZfFk+YKU<3EwJHNpV3xPSbuf5 z)-*(mTx7FbZP>HOW?y@3#V%51) zZm^tTwS0ss|BZF~uPYA2rZ~SFzVQCat!C(QC8Cz>ELQ3Z14b7`eG&-Xtp6*<&aF6c ztPMUg@J(3HSc8mjgQ9O`!^r7lnFl~NW1=s>-&DkWOC4+sUX7-Um0IVmyGIT&uWtT` zpCH}Vnkfj-7jo(v!#Zm1Qx7R%9Id>flD9v>ZOOTG>hb|NU#q5=F( z9&FN5dA|QOzD$iNjc~VW+vNSW*kJHK`N)?4i;v9zA3icyFba#^${HMSvHv17f&pA1 zOTC2MT^%06_p(;2#FEJ)08MP79h%{|5MfEY@u8^{^iYpa@?I2)%~jE2^C$qp?Y;e* zOO3G4d=|iRO#jzFwnf;p6=8O0^KrZoKz9-hrIN-t1$K7Xsw))MW&M-NVy^>Dv=+L` zO3i=dQsHLHS|9*!n4GD7R#$g4l>nc{bc>#T7uHv&QSw3=mMHvHDXP!PM{h6Fjgog0*GbVi|cZx}{U(gp6{Kbn9qlPUre>7kE%In#P zZOj@GT|>407=8A3XIwXAYr!6A%;Kwq>UaF|%tb%tzBk)vHi_cOYQxpDbVA==V5$g)7!@DOONOD!>jj6P}iJdHpFB$8l!ho^?Gfr}?88*uw&7 zN%5szAc|}sil?CKr9T6P+WA%NG30Kfb;WZaa;~i0aHE-!tu+^l3jUg|a+Pu-*=g1N zP{xdgX$Lm(Kt4pyVpXotw)mOIb-PFn48%XOs<@C%%N1I+^bo)l5`5Ik&E)K>3>G=Ov=*4p3>Ml3FO7I-7)6LI%nR>$4{4 z0A_dZ721}X5Bf)YSf<0(L&wSKlo3s!`_F*fvFALEn*i1Xk(a-vTcG&dP6rqWp3fsT z3=0ju)f~5LCOv=N@I+jTJ1be(5&tp1_A#jt81LF&11T=A56D#&>hp_IkfH#$MemsY z%EAKs6~{|P(@O*cJ=J-c`TYFv-)SPkHn6I|)i{<_(JB+qt_oLi0Xyt%8ws2~$u+1a$qBWMyniJ~a{)o)8U&%Vw?> zap7#7qhbM}8HaBy74tWnpj7pgE=m>gj-c`dF8TahU-+wCA0RGarlI6$wpK8N*=~Ha zUsXw8#U~kr`fXuzq*pj$ftYjKqKsf-paI=iyR-5L5h%fw z|ERxovXaQ}OMKo)WS?-OJW`$pWRiYWYA5+2t_wn~QC2gzoO9RtT5azHHUz6m*%L~B zUs9r}|4g(aziLI#WN`Ki6jn`$s2#HH>-(32bbB)g)Ja-$xh*L8_th*0aCWjnn^psy zR}|Hp0mTbK&rNh)im#(#=d-tBY3rWqdFXrVF${rR>r?TuP3KJf;mrbEWMx4fJf3X| zBL^F3<9Ey$Vok^6=Tn5R&x`x#%YXL79X8UII8lVs|DB{jc7kCkqR(m@y_ zm&deU6qs9%HbJf5Z+ugQPjfzK08+XK=@l?><)^+*A5A2ISc=cpvI&DQ9`akfZbi=J z=)IkTcVz{+KsYu2Unc?qB#1)C!r!NonWu%E9$$nVWxkoa<^UDU!w~^0CqVf~ZO-@7 z_C$w*y zSb4|_LPk7U!t=EkY|NHu4kjzU=Gj~_1e5O!5><*3Hag#^aU~Zy88haRdj{xJ0U7le z5bP5y=p>$j!}2{F1aq7(*g5hGaoNT!XW}z;dCNQrPBep7Cwz6HsBU3r9-(E%t~n4V 
z(#$%LM?f`!yE&w1la;K+w|g7&-S0gxE-JbsVZTPYZ1iTjFo2x29!j7vRCZ4H9Hh3q zZV;qN>*jsF`ksM>M>K7clouTdhDr4L3^Y1~Q{Z*L4F|wcsPFr6vf_ot`F8=1`pYHV z#I2x4C7YIjZbpKt_2y3nq4+1s8rZe5cOJTc&*g&;>KK*V_tPs!0%^CAXC#P&vy!SA za6@Px%J~G|X>=_)3J|JUf}Si(ApR5`gT@F`r~(tSDa~ffVh5px7zAf#=8-P%RYfJ1 z=9>31j)-OWD|^%H+ZeN#ITu&8;sZZ$B}T7g?&rUkA#owcE*G)nA205NM;n0^ zVZf*USJVI<`o!gOt+k0&uJ{e)L>Gb;y&dg`^#FSiqM6fr6A6<7>(qu%6u3lAEqzRZ zdGO=X^0h8W?V2>N!{kYES)-GEw5bWn_oX{L7C{DUO1hh2Y?n}b7EPgE>p?rTc=^3^ zFaOlbCxYIMxA_i%Wc1zw#50px}rj6 z4zv?;CuVOqLQ}PiMK_eZw6<7lCi+0Ms=M#!mbQObb_6XSUYs!OKntejz8?Wq?l&WJ zFvQ;VcY?o8ia0F!nvwg3Nt^{nE?e%fo>-u?qNfUK@z4PZW1YJ=d@hXi<*8-Nbv3e6 z{?*{QChl)8HiLu2?$>~mU%$~6!CalQG{;fl-r`S(xPa-2Vws(;0jP8vSeN(7G1!s~ zTb&NFxtnpp*y(JQ_8~zfPe!*lce4vj%#GgYW+)}`a0Y(NhCF-mf221DW(3U16gS>h zqy>0;Fd+%=N5t%JY#5mg$e4IqGf;C+RZyG$4%xG(s?=`014?U~er}L3*;CV5(NH;b zRISS@oEydSJ|G+39+xRU&i3eB0=gIbbqaGSEBmJ3eWbIh>QY?7`f)3G%~6^XT{9vg z*ivsMc*XdAJ^lBRG<13>NU3|N4~TjUnCz11?BmaX;@;|0j#VWZZ0os9*b^pm~w)5>Q}(wc_PCDL{1La+=-pE#R|cU&Z!;h>T=D! ztN5VC43wwTd(BVvYam6ase0@WB(-vRQDh!JWl@z1CK*1doys3hO=!UMo|mBJdz4+d zi}`kc)F10xoe(ojDrGUfJ+m3jWM zd`qjSX@rAlMe-nszxz1ar(!Wj?XGl}rpMn;a(rIq)oA-xzq7&ux~u!$WxTuef)-F0 zIpR%V{1{5b1YAvoVz<_xuYbb>Ma*VMgL~oyd5sBchGTRibw9Tnm$ztt(32Uui<*?8 za-Rm{jEoH9ETbR4d+auDKvAYFPl|$JNp?g%Uf6}*S=d#zCrLz?D?(P99v_w|If3Iy z25h{0f;Qr+lAwQOsAw!dqbs%B%pP-%$-*Rc9mFT-4@$FQuyEZ0JZ+ z<%Y0}c-nkYu;W|!g$W;k|6^KGVorW@jA%>SUNThMfdRE&WcnpgQ6arVK|c^!j(Qw=C;H;d&&NTqMt`T$Wz?0wwJlZfD92WWwTG!*VWX6D>< zK1X;>Hxz&Ma$miWHvlWLCh9`nI9;6_Y z;1mHnzF3ScI(&jLWI2$YgWXE#{jXHvqOjMlFj8a!nb6^fD$hY$RTYFS1+s3=Q@ z;}mHo_@u9AU~F4!rr&jA>>&1(%5MN#{q)J^nicobl)mqc72g7d=C2xocDflZQ1^ol z`|P7^X2inq<8z7*d+qgO+mW|ZMw@Dfc0%GBOzxzHxUGjXn@bzxKm{Ch>*I-bs$!r3 z>OCZ8DkP7?2hs^(xp9GdK0jpl{KlC0(xj}M49|{5L8+AkG17(CVm$cVSWpe+E*9dQKB9>Sk86mH!F+buTOVUW8ns?1%?N*=$PfGTZ+! 
z*?xMhZ)G@xwpS#>$?wxts6Pp4utD2&*-yg9(ksatVMx7lx<|zFWiWB|!Z}`5gQ9?> zx$3g?y;Z4eJ6Px)KI}3AwJBMsmEvuAh|Y{$fGe_kgV3#b`?~$x$c>HCi7r$+Eu8%o z&zSa})nC7Jt9l9p1X&F!&<8pjUk8yzdP^nJ&Z>RTq|+v;O^6)vyj$^O~A!pXu zOYWTYdilp9V5iyX<~Ahr^jykD?{OpTC~%4*1E^It%Pw#09fpWl^5QL$0Pp-Stli{s z-(w8=^;HvI*RoD0&s%_l-!>$eJlvYM_r8^$W6_3(3SjscV*2M48+cK@B>nX#aR3^q zA9x&Q2GsvN3=g4Y&`9ZGP45A+^s+aM`qF{kNZu3}`M zETx*2blf*Fpi}RTq?CA_w%|>c?Q#9oe4N7bp3tVFgG@6;F>J}b_&n#?4<2({HDkS` z4WTxay#My#+?mxZLB1x%i%02LN0_!oM{4C)3AxokJ$zihApaGOuV|cMQD>y19P!55r;_ya-F@d_7qV z6U)`jzJ*f+SJwD(m!XHn200k#(z<1xvP#(>AnO{3|BeHzWTq55bF-~VtG8pj+5U~VDhPyecE7l>&A)jtGaSbhy3D| z*W31->YO;F1OB$Z^F#;*$~9UD7Hv{>Co?9C;o5s6edqh0Kl zGm5vu>(z5osy%_yD*j~%YO9C=R?2{Ef=ShBT~SZ!2D|_Kny}$76zcj{=Ur3(U!$Bq0F!(=e!dUl_w*1!d5-&hMd}E5Qf0bBBCC`rI7Qu>dIlWC z-Njh#V{(vA32QDLOlw(5E~>|B{uFpkoz$&r#6xAUX=u4w_PxDTdb^--#+kh&>G6x%cBEwJ3NS$KJ(VY(?=!zs6+JXWj(>U+U#$hUVG~EzcmS$_M+RUmv6x z%5*N*KC7h1@$>Khrm_Z=a%IVO9Cy|<-VO1-gYlcT@p3=O;Q z7OcnNT52b~${tXG-}##}hIm7#{wwieE;g?KQ8`$7@!|RpmeysCQ(n_uf0Nw}yd4;5 zW+?wPR{j32 zgGQbtimaj)9j1ksQpUzEkzXUyDCp;EELSr&N1z?1ZAiQ&_H>V_ zb0SwhSIvlnDZKiA?m-uXV;2hWtgP>@7h_+`*?2cRKMYlnpik!_Z|r8qJM40qrf9@D zw!6*a0XEmbPk#aM!w&kd*!H$8P&Dgbq3=TeUOX_8d%v)=FAyra#v3#^-a<3`%a)fd zG(%Joqij4ZXtc&0$*EiMGA$PbR%JG%yKq${r~Rf-$u|oJTD$osfzFB>RsjLxN@>;M z^0cxzG{15x()+VBIQ zd2Rrq1$-)c-xI2x36p$iWNW09yGjvbfsS_*4F~srJNr4@vb-5)#&auAw?b=G2pG^`&CxOkN)h7q>q|o4 z)xgd!*%)7Cl*s`}-SPQ=HHOnp`yHKgP#+=JCcPP3qo#mvV2^E;6Dg7vyS(yw zuLyvJ_nT1)qDVI$4>bkRe0!T`;Rj&1Z)`{q4}5h$b$sXKMXpQA2l!D=JC@mPxCOp{ zkF;(p2+_}cWU0n;e*}*8SN%Sng~Iku`_^Xihlt zp|zlg>ih~y(}u}+3ZHDxiX`N+Elt!x;r%l}I_qbW&ZYjjP;L~1zh4^OJ^p$S-S~0sIiJ2kW<)0f*S5 zFLoiIk0tT}c@s4TIUv1Z&YD4_^8YWr&{;I|M@5Ro0iGg4EKa>su0?GBiL5r|b|&J%KPHf>A3Zdl2^ zkG1U7>r8a{4N6*s$(6Q21|qLd?fJUpg|qAdpO_5nV0&kmYM%3U-Aa@vddZSBB8KQG zdNUdIV~Q-XQu;mEo#wZHjhoQL*uVS7=Y4jFu#1mJH_^{21Q<&g4XpZxY<=)H4X~uk z6H$XqX3MzR)V1K1injF?PA%WgpFup|*~+4Yt@J5Wu!BGq_?e^!Qq66gLa{q*e^r}^ zGNH+&AT?V>RL;OBI%Mj8JaI3ZocRb{nEt;;yI3fk>3?Ld3fAK19K?D0Q04`JIP!;+ ztXg~^^^wi(`_^om@wP4lkw_`l2os1U!#JAS>8n1Jp^Td^ts0*50b=o)xFNmY#wc1X zVc-mKqyYZ(5&QzVH)%YV_9+ZTjQ<%*kqmihbi>4wl*C_=&QZ7jNV#LKY>drTD}m&f zue`b$Dlt`qNRA&C62FFtSuT-WtJY3GFh!O1BhMGFv8gLhFg`;NKw`*R)+hXsXPR1Z z50GbtT5u7Ueu>Ms4dp0cmfoKISf|afuAeKGmHq`!n*Oog<7g?HFxaO7OK}8T(#`{1 ztSM2u*{N)S-Qud0rkEAsEx?Khfnp58a#MC58=vw`M@#9dmS*U686z4UKP<3SXp}GV z`G@E3v{8uY=EJ&E~>7|g0~z?|U%4GH*}1-=$O2W+J} zqwXIp+SMbMlv*nolF0ahMi#~w-eP{DFXJ{u?{(ua$gR9Xafth}bgxN(ql49&fvqBX z&AbvOg{WjFH~iy>CMipkXU=-!w zj@?FE*dZEWk2KYP?Bv|`t&*k9_BCYc8C2_y*0}oJhx*_8hgth)?HLa;pk+cT!$w`Z z$kOCHqUx^lLS%YHtv*~GGE)^=6k~Z1-|9q(*rpm`rr=SX^+ zYpi)a#G3hF;AR&F9ZfX@2I#W>jPH)~+}7lgM%k*C0~gn8(5yDsYQyaAT-=bd)RA|^ z3RC(s%7(lvLzAy)^B&cbk?W%!rnYbXF%DX#ogt|>-{>;xmr!?hOD8*F6?V^b2X(>m zUxu)l)8|h0IG0rTD`%AOdhJem zl5s3&4ZzVLoV-a+_JisIH87ie^9l0hPKHXyk%JwwN5Yjzh4nTvaQc0KClogK8+H5B z@$QQg0q7&W2x15wN4hrh$`}-GfAF~}%~>s+;nBi^xrZ1Qa7eZ-{iz1;4YgWRNC|Mw;8i~|__%gWTwUlCi|lxAOJ@D8CtBQ#Qx>q_KaooCI7Rc* z4n1nqDPGM+zsln?u$4V|p}Y+>rf+;=iiT<)`F?ay0LCASuAesl6d=@2+6xBH;=>en zbTp}REqDmE>mr7DK#Q)3s(}~c4TwopEJ$x?K*6r!8nK#LHh0kdMuuoC8R+CEDJyQ||JZ)5I{&3Qq&ue!QXPtg#;o1IeQ7uRD% zJs=-Knb{bx*-3u+kP;6%$m5x=V+4M*q{)*;9u5FI->;sWi7+Tg#K2dt8Va8YEPXrG zcu-~-fx^0zlkr*EY=MTH(&!EPfihR*b=$Ja`+m^kSk?TLjlh;0;OF7vgToB%D-wDG zmnnaYNUIWS(bbR}@V%9;D5vuZ_`ne1hOC}5SgI~dOq)VioE1^+IY#ZCqT2L})i-UE zjiNVjOQRkGmIPwT`F$b?dt)AyODqD;26DBv4!Lm;f7x-Zwf=|^+N#lct*pO1{{Jka zld?-t#qQ7B(;aS(i|KFj(uWg#;L`p{>(Ig9C#=<7j8eo9naGQ{BF6>tausG(y#{-~ zaI5>`JeP<=#piv7u+Sz;ceprxfSg@J<)JPvYsllTjQHezL>7!^YGc9!1X4wGE!CYx;kZOw$o(S8UaZW1IHuQ z*IAw^+4UN@j#2*Hy1Q${X4U~zcF!`g@_APgff5&$p{wT(H@>Rg2=y~4RX96 
z7yHIiEiMWCykB7cDezwQIrEuWz6C&M1+g@j*xE+Yv6h{eshkKsIh~j9`j=DP+$)1{ z)!(`KB$EXa=MajS_pUanw)2&4b3};dO02#ShG?EJHj3&HG`QfyD>5#LDk?z_qtMPS z>&T@U_Rd&OqRov>eRo1Pb_vLrGHel-kBph860fq&Ri_M8Ho@Wr=}2|4U1yCey4Z%n zL(IASBABOn$UaDJi9Nh8P6i#EJu~sss;{F%B>A`DPxiVPlK1|T6>uLNY=GIJVPjaL z%A^LbYbpY&y=`-r_MbUPr8Zn`U zapAucP73r&Z;Y+Vd=~2U%!@HpxP<3s1q>v!${W4{HLyiC-`MfYXig$ z)mw-nO{{MBL2j!_g{VMvMc<0@+Ix2DQk4xQRmuEyERZ64sZx!-DYYczy!6)3qxDZ7 z^J^+V#r&n!Ee{-?7tIQDP8z`Aqunk6Dqf@Nt%%5*ScQ++QqL7rMD7Oj3?@r$3zAMz zoOO7;U-``yE*CJSM176~`HKfxoy#GKJgQeSYq*xiCPy%__{@l;xa4&d;rtxl+KC1%K`)g$N&%S^R$q8_d=Q% zkV;Hy9A8b!0Gu#*vo7$seTK0zN|bb=xgprdoI+TkWrfUQoudK0z@EF?9pJ5jNDD_8Z*v>9bdf&gx_%WjOQ9kTIpSN ziy_V1m7F0Xu?Tq*D@T|KeRH>oM%<{P{NaPd+2C7pit z-pfu$c%b=YEY-gK#tRGSt0ha@_y_ zr}~;THbL)6Qg7Y3xBZ&|k8j>|<7FN&4&kn!ezxm2w3lBq7e79=&zx=fA6AYsYQgV){JM|v=ixiz%CVQxeDIR^ zt!xF zcYPllHKrpK=A&6+`?i8rDiaZ#o~C~`P?%j8p(IR2l&=~gD<8i;H(6OPV!Iq)nu|P~$sQ^6s4KECNUiUqOpP#!fgIk}k5wv!HHEh1YR6hKB+Ae)H zW%RZO6lwFyExT=jwqMOGPN!x`j15t7zQs*1P?jPYpUsv8kAv*(iF)Ch!oC^5?8@%Q zHXA1?@y2^~L6y^tt!bYN_~L|NEW1{jx-_GbA%mNEfHR$Df&cZJ?Y_h*`CTzs@p4ll zx0v=0Wjs&xdeydfcd^y%dE|8O3!H9kw-WH4G3+D|ml>!ZyqX+#9l9}pY)GTHVjo{M z^u9b;t&&Z*hXJN^*ZNv0isUI&N1D-H%;oK_WNOgp;9eqRVvc*gMp^KkFpZl?X!~8z zSL-W{c)!9hMvQO1vVnf2;*q@;0FpAes;49 z;WSvSf;VlI4WWhVHPxgZD>}Xbg z=D6m?`kwf(%HGmcf6Y15Pfa{E(OC9oqhNT4AOc=PBgYP0`tH-Ap;RS zoIWY+y8V12EktUIWy9wrVwksg=9&PpaeMTi{e?O7y^g=!WH}=p8PM$$@DnwWmi4ug zPS0|7Y(uHR4MWTwxz_s&&%i-19c;;OMt}qlLi1~A!&SXzCEKfWak&zP3cp zm+w~Wqd{s?_NhnuEP!lkwQo{cWh5(mxsLLLx5}9}oL?OdcQBQW#b>OU*OAob7Y9v~ zBGi}ZfW}r%Z+5S(u+RF!mEV3MD$js(5s8=*DxS@0&u%uEZCLv7VPeUfC=x@jl#1}z z7sSQ(TcHD;b)YmZ7{9gJt>kGL;nQ}ivMwODL{mrX%QSvUc(B+l)z#0)WvjqQL_b)G zTkchyW{_RfZ?N8+ynkGIKs=|v>pNw!v{uIPv*RsT3>LZ+`s|Mr0>>yb8b+BY|6tef#I}_t(Bt z=cNU(O_qa@(s14A4ovgPQ(zF@mW}_9er0Am zavQl`j;)lIie`!7f|gC9OFAeSr45>bwcGsSOJIoJWMlUC!tLZqHBzPo9l%o-BOu~B zR9~##&Bg9z+$;MN$JvB0! zmJ&ZH|18Z6)Zq$cNknp>M)ZPj8ptBC0`Me*Y(Qe&eVmQSc{j*x0_*yMWtIAojcu1r z#7piP1Y=c>IHQ^io969ZFA19zg)_r979%(i)h%sb6OsUiq;mG-!_Hvm2sU0GkjMz? 
zvbEh`d<2S0CN;2OzjAJsgA6WJXurCjHi6g|Br(Y@N+ot}gjQJpVe3rZ;9zH~Z{nTD z)2$8|om9{*+`c8zi)dM#Ti?o=CI9Dk4VN}2Gv7yfb2Vx$VnBb5IGU?qNZiw87tAv- zHKnG*p0X%P6txwb->e)b z4Pb$Gkg=TE{}6+ch%~-aqf!rpdol7y+|o^W5rFjf$kV8aNIj=ND6S}A!hMnQmg8@z zfzqgbi7)Nx^6FXXryGuIge@C%&Fr6HnhE-u)fF1{*Prq){ITWh6E$9a$VysXVR!x% z=g9ol1?_14*qf?FCf67#EaxafG@N=17*QpGlpJ9J-eokJ?$Y3St^(@b3m!Uz43ttq zTPjnhZ(Dm92}2-{kMbya>-`upU~p?d%AE=K_VxLO1@Lp}7!ybI3>_cGdD_;oMn{rw zehh^8T^4RKCXCh&rxR^L%sDRQumjWKZz!>t*V2pwMoO3`?3D+Bvpd<(ZGmB|-j%I7 z!7)1JR3B%lzGvcEgmwH9DM z5!Ft!+}t307k6=pCQh(3pPY$+*V^P;()ugIPJSO{c=6=?+~i#<@VdRpTO{c7|1#Ne=oF= z1TFlYzrG4T&H2AVMTY-X<^QcHyzcwIeD{A{@!xuTf&K=_-ew<$f6xKYDLv=|aCriK z!!b%qOx0=-Gt^ntgr9uqv-*m%PZ)FfnYMkZD=*q^0ioi7?^{`QIBFi#yCW$bBOJhk zEl(GEO2EQDvln2;`*oqsJ!KxGY3*S!LLV}X4@kU!QZ28*17b&b+taLRm1@VR8`_#`Z;!lwahwfZBqsY(g?EiG8<`<)gmjxH;f z1mYQE=H_-^F9>HwX^l5vU5MjQYetF9Mmk)jh~Px`6_|=QgRRNa;^f6SB=PlHn!r9ti73=dm5%PmG=?su`*IaeP^>;qJS=_GPy@Q2rg^EY3Ifqt8rgK_}4ZP+j zl!jC`9!clvx6FY7BezPM`w=5Jk-`P+oEaF1@UBLmhT8jG^@PMTMJpGXz?WfVWFx1{ z)u|YV${_5Pcq!?Ne${o+TO3IuwE zkmD7iE!58yiLz=RIal>tp{Ei9+=*JMRBZ2Xg1TAUCJ{y;9EDq}kYh6mDCDL4TQB`=2#qky=xU2SzwA&J|mF#&b zSM4ZX3WBcLHBNpeQIaMMQ!P*Gf!YAH-tIB?kE(Mqymd5W!0dPuQDSBGfufx<47s^} zgtW&pbx<hHAYl?A*MmZ_3KxIdenqNdKMR}!*&=v|h%!@KvtWfM!?*_y-MG6hXn-T}zY90#`<{|1h~)7< z+R}!{kTTc>b@?z{tM`GXLWgzvHJ%sA%!E2(#b+0pnla+Y{8A_zaaK`oA#tTr&!n zOl>y&H(6JU0CfJCBMy~(k-b9i=f3A{<=@k;vF_{p9}CHz1?sfPpZeI=E2%l)Ajx7=Y`=y`dgve`?nZX(&hV zY_fNS<%CoTN~)%h*{L-*u7N>Gq^q?ekWDb2Un&A1vHpzh!|4G7fpx;+{%ULxZWubu zVEcJMj*ga!)XzGpSd{br>`;VCGqI)Mo=@K^rKIDx6d+5Ddngl2HjMyGU_Z!D>6b>} z1t;qm*e!bDI?FvDw=+1b+^#n-WI9n1#=Kp9eNY_bxZEDV<3qoAI5Y%dql`e;JPnMDC~*lyChWBl(MF2m*rb zO?omH7i#it8%&&YkJr-1-cx-Svf%C5Gcx(pQf#qkMq4{&Op1EQw>_>Q8!UHuX@lz% z(C8i~|H4dvYb)BmC@uI4#VJ`$&_g04#@2Bi^kY9P&L;BLT~nWgrqkY=N5kTD*y;lG zafT$YR^|OnWw0D|N-<t7S{jO>RSi5xX- zgsV~4w<59E8%PbHRZR1?C-}C1f+2q4!OGfp+#aE}xR7>iDmeorHruMv!bMr|BHM_5 zB%O1W3p4~3?Jjx@4&jB0)2dhk^m+@SngOKOl*Nz-jV?*v^cBf9&f*I(9y&uxb;mS! 
zI^;4DBjreEnRg`hYh<(81OI}bCc5`?mQgUb>xYlx5E&+rR!#dyXVieNzY+)POhBCi z#;L5r#>5vT5(|ua$|??;@(6bAQ!0;6McFp%g z?cR_33_SPK9~v`KmBkfs@N;T&l;L)ypD+Mlc*1sr%&2^0=@l$Dv)1Un6f~9=Dx2@N z!j_k(yUHceS%%zG_Q;NQH`EAx1P5k15Rfxpb2%ZUsPAaY5ZW3X zV)}x;Y&mXs=ldPt?(m9Y0*^RuiAssoY+pI zd!7Z`t*j0(#fne z?Dqz%Xh@RIZNv*=$AEl4^9%UH-R{8lNxmwaiI*{zcC}|U08STjH8&5dS5gK;9TZFq zpizYc^9O|l4nLxqwxJ*4unA0+D~o|XFaoW3)gO}s4i(@3kndYFLKwT^T2PR+4olH@ ziaa_E{x`1L^Y8E^X$NMTmt{ov>}gq~w3(T$oU7AYo{Bb4qWOngS)!p;kHjThzgIcE z&Jm6BR;!w{vam{}M!H*kiL7|s(gzf$P zSmvy0c4VE8KlC3R$pk3`;|W@5o(Fn7gRbr-{|=ynOv~Hm)=L2JHY)Z+Kk*mR^9(s_ ze}ld*E&q94O2tZ@{=ZXS?1c@gMwPvV{2eU3eEF|pMrdK(WZuDl??7)j>d1h8ZMaC4 zfoY)T8|FvZe;-Rv_};1y?M5Oa^>f@6{*kynASj2wxZw( zOSQg+kF;XgV8L4d0(7lH1%-;vHi~zH>|RF{B3;jqJ6~7FGX1=DitRD@3`je}n&j8Q zY!6FBZ%-p6DCj;6ILAZ+OOd z0Dz2MLDUi+ApFaBdEFoIrA7KoOn@KD_{Bc4(ZDcVVYiu2mP%&S@ee%x{N(5`vX;Q) z&YmF5W2M-uOQa8(4I(_dd?3@nl7mBOF#QNC!CYC%u}rzFt`MR)u4d)=P|+;t`W8ZGa1Gpsq0mACv-cuQ;wZwsD^YP0Qla(g4e4ap!g6$D=v11BQXNN$r z$QH{&oG9$E5#yNd4;-+~H49Y!Nj+t4(-$2tpi1M#E1x@0=2WB!c)GG*ZV=Qygr<4n z8b0d$(8Fc|qF@H5Cn%4KsyFiuEjD>iiQf7T7zSMn+yV|rS5-8>*sz}7aR9YWhI?PD zVJO-5=SO%8*QRw4;&rE0w3bD)5-0YPyV7Z}GUQg3lELnkUuPNUs9?5!X+fcMQZs=tC~% z+M|wZqg>4Iw;T=BVTc;hhgPMKox{h0%!=6Nol~5$M1d3!?-8@EwyfzRa_D=V)G+Mi zFz3x>#7MVBuRF=csNjBCQW*1tZ)0WKNge0l=K^K_Hl)tD*;m0WeFGt)9N`gl;7^ZA z3btg^imqybu$RB$@QVOF--Ab+8Bv)xik4h6wOy?OCF`VGhD$QjyCkKIzn$n|kl!^I z=qMJfn>=6HxGEgv@^I*PVPH?!zY(hk@Sr+Va3xr~VVZ0LlvI|Sirf)lmh1TqU1d4( zQaLbSKQ9AcL1|!Q!2NvoZ12xZXLQs1hFtfx9)AYbuW|U|gku-A%e7DbcbuF+ZVUb= z%IZ;roqUy=hmV2T!xUnhBoh8x(+?X^3fmMJG@E5-^}%VE!u&>s9K9v$hcS#Ey4sS8 zT9k`MD`PW~DS_3)CyBSH?O(ljOC6$VV0y4*Qv_!UJ@Jp>xF-%y=tf@0yrB*W4a&N?EQV>EiO1(tYlT7~Z^#C(yUK^~ybp|HT@Lm4-1{nlN16sBCs( zm7_ZzeL)?FA2Kdi<6lb%^Tb-3>Q4dLc~o8AJk53m2(FqOc|@*MNi221_uBZBzBr2x z;`e%Ni7899!&RYeLoi!ZHUHfoaz1{K=|ned@Bng;f^&a#9rf+^p_Doae6UQpz9@kW z{e(RK)4M{N?FA3$4r~p|MgSi6{#IlK=sIIYi|lC(Xu*KlrulAZ!?@9`{g;x-7#&dt z0>_Of*A@y`4@``&7%Y_3Dmnl8l)C`)ZymvNZ$sTlb)=Bcfl#I&E}PUWh&p z-V#q4k@CP^vE(q{+r6&6nYZN9UjsjClmt!M&BIU#taA>11<`>(7PulsAb-@&WRrWvXp##>`43#A1j6OWR%3OE`k;xFbVAmofjStlCewv+;1r8wg>7CYVu9pW z^On_@ZmcE;pICTF&zr!eD5Ush*7(E?!QUy40bin4>Bq*HR1zMa>R{Qs3}PdB+6BMd zgF-4;tz4D}Rv3pXpgKwz(@q5l3p3`zgAJgebh?w)Qpd=zUZt()@f)F*^#17eFIGYI zK&OZhsEHkah@HMi>Fy>K6DB=Viw@c(heu^@QPmy?jn!f*q)>hy=2#mEoo=)FNo|IY zX^J1hxZ!bnk;&e`>Y#T7bYK9sX6?a&YY-1}1MK_6!(XB+8?3S`sFt{1avdm4*pm76 zDe!gf)IE(NQYc=v9ak>xLk+?anYqfs;y~jTt0r8rTGcMH+U0(BJDb>g-?t)G zILa@l*_KBj(->##mc&oLF!&_?-iJQZ24?qLY#}fuZAP9P>i|9cfqyH7@V(GLl-o_u z&+Z4<82;7XL9-7WCcgO>76I<|d|R+;wAa+(R`=q!pKR$bezhtvYc9p&Y2OrVk`e&A z1<^+6t}GRnr-gp##jTyb9hv*Av2Z_X(J5j4B$_$C2hwN%6jd*gr2M6%*D{_GjzYCk z+LOUu#v|fR4YEw8yHnRFO=+>Kf2R+~m0mBn`=odLGSqBq7l=GM0*PCxN-jrmlgh3B zHOPBT5t=+7MgtQ~=~_3nK`pX3ZTb{?sMq`FxAJ66epV(Y%`mP7f~f{=@*kOI`l}mR zE8VeFEH%}UJ53?_GVh=dsnr=dO5xLO?Pix!z24*J_S(qhrsfM1A$%uuU0b09ploYLtl0uNxdMy*Bu`Q$5P zkH?%8!3taUkezE0bUdgqfpPac>2{+PI5rn(D-f08af{ZBP0{;wv41L;KTk(Y&pOXK z!|>ive9iPh>aFkWOe@~KU~{+BLc1^TLL3iDs!e^`A%VyiD--Ep{NNixhX;Fb{$Xr4NC9QD z2E8rcHb<6YSrKC}gT?-xzS6&rN>JPa+aWc1JGh<&+|rWiE7ddaAD^2$)+1L~k*jxg zuL<3vuSLrdO{)uOUzkWEo|}7s$3T9OxL1$U&73%Z?i-K5?xHB-9-B2Hr8-{0o>^B% z2FNgwP-?U=D{lf{s?}GoBd6S%HzV~`-<(8dNBTn2P{^uGP_GK&-cZ;?@h2YeTuYHE z-o&4FCno?X$m4d02+$eS-i^#|UXWftFIXY1Sh`2X|A5@zB(*}HDU-v`+DrCo?%zg@ zc&x3ogj7M>QTAKaiDVr-RxJ}VQ&INt5zIBvo7DP&l$^JpxA5UwupiMdPXG|fDx&E8Wm&s~yW)Z}-mm36;Abh`5U@cYRPs``jv zns^`ZSGR1yY*&Ah%{#w&#A-mL9%^ckuQWh5o^&iVX2hkfKm;hme9XCKRFp8&I368M z3`PT%k|iOStn=nM>F3GX#Z9;sy0pj5tIJ+J*3~eEq5i*6mha49{yXPAx0h0_#9^>K>kvczdgFVRbOS zB)^K=*A~0+J6pdG+18FWc#n=>_A6S;+6+!6`7CB8C~YJiL{wR@>FI 
z#`e3Nkj1gb4zX96&(}&S7UZ#={-YQ`5sx&8lFC@lf-65hpS10=p6uESncwfTy)SM2 zqHzsh;|uaGUrj{Vt>SomO21K01A;5H5a+{&JBcpcE7Gz9&Fps84Nbefh!;Q#VG@}N z5NAbB0*g3BRK1%)_vF9{@)mKYCpyH{}96 z-ZCX7MXIXr5U=-qDdCz!?R6(s?Qj88;2h(x%BpII-Wi~X4yT7&o zhosVsgf=NfO|9+xMwMR`D5aSdzXADLIX^VsgXJV_cL{W=q0O3n@Zupo>lmomj{j;# zOL^usUFi{fnc7ypnYhdRwHSb@qABah81^d^oaE0UArM-}hC>pt|2ayMKMrj5A%{}h zuXz^9Y-5AR!BABG7H27DJraw0sryzmvDXu3W|m?F;$)XB=?ax##!3g=7O*;|lgkUL zX6RrQ-i!p;54mLozW2f{_@^OHI&(N@UT%cfAK*EC#BH*Un_c;Y6j$cu5edPde&S>e2d&z^07kMVoy)zMCPVQoc^Ly^K166 zy3Dta(MOVHjf@hj*l^-QM%{ueK0CfpzeYP=klPx0%~&k-H_%*qw4S5kV_>k7N%AdQ z>NZk44C_F)#h&WRV($}^iWpIDtl8`5xIEzTF%YYO_KDUHsE{tWSq?O~gaX`slfnbt zg^y-F@byKQ3k@)cImhS_X@iBNen@TBO~0|8i?B)Pp#E43$SWaeK9M} zB@QB`4UuMPal58b ztwX#@P~*ZJ!52px5^9Tr;9qYHJ#8;mkIFlKy>62RE*G<2+%Uw-yyN7{D;0`;94^i1 ziYN_y-|r)@>TGwkCikT%bGEQzEE;}7?yZl=+x6D}>@-1ii*VCJeIYW9LWpLu4u(Opk)8DXH_6yOy#sb7&{^|C*$B6lm6knPMCEEh zsIDa(c;+bAQ1@sn&#HoNoRsbn5A4{qtPH-pz)C|pxVJGhGU44QuToq()Vo7@i;!}j z&U63YyBJ&X8yOjRZmEjaY7G#l<~Fbl$%2}_D`~os2(18h2RGr?Kv(2mi&F=yqvF)G zIOGb~4;Wy-f2(4N|MnM7BKhL(`7y`UlD9-Clz$9WR+xIkv4>j z_t0G3fc)+7je7h^KSXh<&zL8>$(in0gZ_2x>xRQMFR{b$c`9Pxv=S1TOjre6Q}#f4+q`{d)65 z?|Bq-ax~~xm+?=M|G*>64Fa*RC}26ZhL6rcVK@?27Ij2gJGZD&rOz=2&5{b}nY8ai z^zx3#do8wlejoA;3?0{045}4Q0m);E7_y~UP$Jdzrt*3&HG`hAB zLFzmg_;)Bxh4+nlGulGObF>d;v+lpElRMSmo7Gs{0IE_iFJ6ayX^Cnt%eQo?G9c9h z?yD5f8k0>4JdQy_v48QlJv2D-SV+O#J#T?Rm)dw}Q{&(Z5^PCzd0YZ84ZK6x16 z^d`teqcd%FQS>fYQXt>^Th&kXjfiMncihuohm|8L6z zk^`L;%1RFKH4Lyg2PrXn7QmUrXm(XzL-Ry^=+Mts4m@p67%{>YL+U!?%Ul3lt#A!s z9eg8j0Pwu-(>P#|s!LAO=Zh+}TW^d1Ad62K3zmy42>_3mm^D$xs)~(cgBk)h6p|cD zTH(@VEac$mdP^}j4iT?M1a#+X)FtU<|EI>Jxjptt?B2Z#SFwo%$mDT{vmcjId3)Rj z()Pef{;Fkd0UJW7Dj}vRS-<~Pu@4_~nY`mC>saV4*XGA>*=JWK1N);i*4G=mFzan` z=l()y6@0WgTm1Z-rl4fSFfe^F`Q5^7_OnTdZy?;Sr`Zm7G-mK;MKYIsdde}#W1H-! 
z=R{xi`rOfn+X9r2OY!Ou)0BQoO@3VAuEsQ!_5Pi2R^D7QS*7KO@nOA5{`I(_9#F5S zpBSLWBoeoP`Sf=CuJ<8wWcsCadSosn#2kvyOS>3OAy%fc`I+oX0@f*IXM*S1wxg?+m4W(>0 zE|4pbEV$r@uR5#N7Qlo*b%skYx7l4%{avy8U(<|`=#T5J#ngy{-iila{;gfWfaU8S zF`w9%kiha8gJ{+|YyNwL9v)sT&`T}bLWrJh!1C?tklGBa_~p3Wi4r?}KUMX=wd;*F z*ZNCC+Rl)GzwT}DS4rb!fQ4{1rt|b$a-^7lFi)|m`6v@A4D87U-*Z4XsDryD9y^Ps%c%vJKTFrB?SE>vH@XHK-i@Mo%jb6|s8h<78nN{Fz5u-* zufAN}J*1m2B*6Xx(^6XvH1_3iA9CZ0nIA_>n09FT7heFL=b_Lk%iBCYYA4@fVv9r- z(6}T$TcU8zOsG#hv!`_;O=H*dv;wlfqA(U_Rwd&XRak~K9XsGxe?(9_6gO^6%w%Vu z#Torz$sfGjXbH*ol{_T8lC()37tu$gads9*|v4+d~Z z3&f8N|I3GAi6<<`z$?)M{){9Ouw|T_gVZ7fZRsbS(xLss_WX7HKdUmmei_C}t_^Oq{si#|*k#Pe^`IhwI8^i8GY|s_2hTPdP zPGYA+mbKXDv+T9S$ur#cui~s=sE-(ca;^#$dEeDc>m}zwVHc{omq$EAQZipylaDK2 zn@~cshmYE7R^U>ki#Tm9#&JKZJ7-qn0EG26`3g`s&<$=uK`UFTt_3H$cEx8&@i0yX z@Qf}neYBa*OkJ#YHuD9t&!!UAEa(c4EGJPMLyRS;tJFu_R&tu_3YQ<%H>M(h1U?nv zVAq*k+f*x)sKRd3WEyN+P>n1=3#!lkBkzp8xs-*=i3bm?N7dudpNM&@2~UDlkrWOh%11u7rHSkBK$VNk8hmL58MJ}4hr5=Y?=B% zQ=}kP%LArtQblC9Ykk0*=l`CZEG;r$knsUt6Imw5%!03zpN-G?N!}lRd&8e)7mdJ5 zLo<%}>8b1S(I(OdU_t^j*m&u0XuSvfoIL*XeVkZMm&)_k%~cH9BLHgt9yE0XvI)OE zQ31Z~WID(`&Is%`DEVR2O^8=XcW6m6!+P-xK5MmAWJ=bX=wh6HA1^($VVw>?e9&1zCA+zh<@bLa|PZ|>8F z0h0Knp)Q}u6F;E0j#$xJd1Y50c6;79xWf5Ex6c7b9DtB!?31*P3s#KPjIB791pR93 z&)T?t{(c`>{3VXNWIu+gl5B45c+eYhvX)RrT~P&^H=`j8iO z%2O(1oJ@6Je9Lg%5#$H5(+M>qHnG5>cmQT8ws`Qln*8VVnA9#J;QquF!F5lU4%_M@ zO0S3V+BzZBsqCrxoJ(KrSJ~p@Rir z4n-0N!S}MHl`~VkSQlE!m5#++dfZx5h-C2*;SgsHB9E!LFqy+uZ)zXWDuci}2t1P7 zQIr@8=HHB{tMgacwKU4it2U)4`GWmaa?p>HjVKh4#*<;dN%5FGY~*zbl}ySFjGrm% z?tkP0t|dg{Sp!3GwU=U>Kr2ZqGy-ePNI?yb+Rrq@t4>SG@lu|G9di zf)kQHYq5hb)@MYPK)tGIYya-AMk51j3GYIJ?(S?h8YWvGP98rV>6&~I3h*{hQQ&Fc z6$UrU%Gfm|c?wrYvVzhz^;pSs8mpXs`p+pRrQOXM=3t&(nLU{ujU97=E%aReqVy&$ z8(yi1W@9{Tplw6#B}W4%|G9&oy5Dm6-8-e#&s_%xd!>t#OacCBAUvy=Lop?n>Ww{O z2pnRst4GrfBhccbngs-o7d`-`58oKB=3=uad!>fWE22fO+C93z&7 zED*FF_MHU}o6qsLNh=_&+{3tC?OdsGs$eR&D0DFn65>xfvIm8H-oH9r>zGlrZVuly z>;!g41LSEtbJv|({~|9p0lhpS_jY+VWYj{!tfGqeHS;}EaC%7quHUNW5eY~`{q1-I!Dy<&)0Po4Sxdl?YhO z_w7&I>zS?_F+c3F;fZZmuE|tWKo9wap)4Fjdeen8j1>ZECs0<{8r@ON+gw>S1gi)6 zcREmodH2Q-3;l#mI5RS=o_7BVahgbceOezESbp~nSHARgerkIcBuN(D!gYA&{Cw^T zqn6@tB0Y-MVtr7haHNRY>xbOEnicQ$=P#@4%6csqjU?IF<8BJR^AGC~iHlVqn0O== zC#)GuhsTKT1ah{!EB|3T1^7?@_saS1!j+K!fy(!`{IiDTb?8u^Y4&^?_+BA|XcORn zzMDp)a9K};o_Tgoni#yQ0iqxRr$kmDc3zWA;4T})j{I3LQ5?V5(BiUT2PJlYe0);# z^k3FLxc^Y!=6?SfQ!V@i`lA&FiS`F5=FC!&dVe+^&2pV+blS=Eh>4Q@eU6y^tO%6g zg6>(YnS;_t{G5`DEU?mCr2Rnm) zI!rjqocAHbRw_qG+i!CZE|KFePr!D)taaD@1dKRUO#K}> z7hpsN7-TTBIn~4EA$~8fiZ4`dGJfT61D?PYL*wEUJLZ-yzMR>fDdeb{E%v~S z9K=GEHa0x0$JhX~DvnVv_@c8o)7WaN$^ZI_Pcyni}o2GhdRO|E2QRITG`|ENnRx)An?iz~c5 z-Qb7iAGFNI1VeKeOCN4xVvQg%%{8k%SxL8vg4DG3oKlRHQna%Wq&OyZ>N%CfNWu2m zkIm6zbvzX#YQLWap3UE9>ywQ?&BL;@OtLHCmsv1*UU1YP&M?Ss-cva;lN0Rg5Hkmfcu@a0IALC_#yoc$K>$ma1k+u9? z%#$l;QNUM3MG;xG+awK24>c&{0;{2RD4l+C>VmUqCgd7n708|*n2RhtLj`B;-K<=D zLTln%%yNcZr_fRG)s7S{zxg3WU)fz~-d`LiL8AHCAky%CPKD#PX7CftStS@;eB~9o zOy@d}$Ut&dB~KhsI|W7XCX7R^pRvv-{A&swa{Noq)qQ$FpQjz%z32KgK7-?gVIPej z_R9%&*vKyd{!|6De3E)}@=K=fx`;q;{tW%==o9ZZnzBUf28GPRt05;1ed@$xCg9@@ zcbrM~Z(1S1>}OC~j1W9D0Ap`O-Gn=Ke0%f?g|q{KyxHW=BObBMEYj5@wZ!CT+gj~$ zXXzE=yN;T-vyIEVY7s9h=Zo~>1RtZ&ZBo9Ttxhq!h5qpm;DdeKcgosNB? 
zD{ZM-y=Da7$MCOB?>fVMo7L@nw6O?@L?_Fye|W(GADo$}bRX{0e1Lc&Nn5&8CJ1a- zw$)dVo%YEIP7_u!oeUy`Gc#l%v_QDQ0hjbN1@38ia2v}IYO?u9AjY^WdYO0b3H6HB z&A~-@`yfb}pf{jTH4p-YlPjko?(MLf>0wQ%#U2N|)y;oIO5VngKH)aAX(lgJ*~aSM zI1M>GyvIYCmWFR~X{ga@Ua3+@-0(*>$rA)FJ1mZ)x9G-+8Z34$^u$94WrHGdtpW|jR^>1;6O~*kt*N+9yL|=4#wb zfhhk0gf}@8h}zac%Lo_v#TjIJH)xU<$M|CP4a%HYM?15MnJY&P0$m}#LAQ#l*SAq) z8SLC#1ZXkL_Z0JmD)i2KYFMi~gpU&Hx;AfJEK_yKV~j2K?&V7By$IG+l2z+NXr@R9 zcBeImvukEuXGd$bFs;$In8PEN=+n&{O8rF`_7QbYbb$g}!Cu`Us3PZ!iOY*Z!%=1U z;$q9>Bm}Qk0eN}$gcC2yi=$W7NQ6~m3q_I0B)H_Q?BNgAgNmqW;~Tnj>iDBs(Mp7b z+A)5i2Do+dtjx)^)-JzR&s?88B7GhsXt*FkOTVEv$OwFv{y5`1!T+mt(MJ}96Om4AoB6i~v|o04L8dG&yB06wnpE$szUM5(Ij47sg-m#g zI4_7g^h{OUifn}&Ch8^_Y{<(hr?=S40!LZF>dKK)Ef)?zQX&!+&cs($B$N$QU{$7d z*%`CKNevMy5~8#gn4VhU5@iu7F)Q>Zx|7+|O3rxhteXR_7G3ejVZS(<0B2IR0!!NY zGbVvl8pj)>Gc|3_sI(ISWrUSH?uswzWNM5!i3>$7p_5_6h?bk9w?;bpWX;Vni(hPa z)2%?akEN)w(E>zC6)=mM?%H?Ex2X(>EsiV9(lsN_oKP`k?K89SOR)JU(znaoyQ+%`IW2fwp<5Ej<8>xuezyPo_E=A8?LF&rhLr*1>F zt&Vq6HZ)-*V4}UpyU0<8^eNxm)zLUCbb)!uaYOzameE;%TQg#BQC_SMu1(?iKxx|E zK3@O{5OCLewd34VH|pG6_rnsWW}v&H&G_ee5=%lsgMIaQY{`8FerW)~SwQO9Kpp%Q zyogvEOI%u28k77a9lIh~GN6Nd@a>o8+QJbAn`0z#?~fc}iVLpDhzMTTBPy4ddFqChFsN>+a$B@SfjR#xBPI z7%6xi9$%6fUB|2H4RZ9j{KPe3H`iPgTvoabd{M?n=#jO#reEatub!1Z2LcNO(+7=g zc>x{g)7AHlrZ{KO48Peub)Q9mk5Wsj+DeW$AFaK-G|T`yPXHtJHQXDO@;8O*C`Wme zVxFkJbF4uQDd1Ef5p51&P1ufKsXF5>XS?f}4bI~V@#M!@O{O>Z~R{IIK zeUYTz2_M`Uk}=Fy9%@?|vCu_Nl)9{Y1=ZOa!CehL8xD=uv7TY~hL`j#XB#@Iu$-?g zOIv*^Z?0<&P?{(Zkts#7HP)RQRLlIx$WZTo|8qzj4>|kt<95`%MBwIGN<5v-++fF` zXmJz=C-CmH;Gxs2UJh6^g0q_GK(RCB!=Co>aHaWnJ46WsS7dv7-WP(mm^fntt07Ub;tWxaiBoFdYy|qZ zdum0OhsOe+R7zy=&yonleS*7kiGdP}6Dcpx)>h=&al+#qRePO0oyf*-9998J%5ajb zUpwzkX?drLz&*HnxYO|V$n@lrYki<&2muTnIX&Wbl#`cJTYG(vPx%@k*N+t@KaQYN z`l}MNAnFULz)*5XzZyalcJH+tebQRgRwE9BI!)4@1ahSUks+$-hAeVytTM9vTJ)TD zi;X)J#OqpnOt@efTCZ?iF|Q5kmDr$|fBoeT=*Hfc)+r>7&rbHz4cGotKratJggFDa zuWGA6>sQf~)w}cFxL_yO&g#ZYNLe)M>HRB^UvrvNQKE0&m}9T45LNmUD8jA4s{lML zX$QAOeSAUSler)Dq8qG^PORoDbnq6OB{VmY|N%yS6)Rw8( z{y#rTGUpVE)hF!bA-_Mg2;&rJ6Wm*)@|RKx0H|Wne>7c4SF1TN2hTTHfteB|rds&T zsq$@Pl=jw8WZl!kWWnk0u;LPEo>inve^p4zlS7cfGl$G%fic0rZsX{jY?G*Cpr8RE zM~=%0a&e8*8zF()2Z}}lBzYT4WC2QSUsh$-FEdo?_evyD?~CmS9%*+IG`Pn7Lgy!* z31lns(OyWarIYF0N=|OFrRBWo(rHk|SpOo48^n%DRXC%OfwNxCbC#bPnXv-@onDdX zEP;MLv8~d>SpjFunMEGB_9VRW43S83O`4&Mm7R)SDM~&lU?lky+l`?D7yv$k1yN(D zg81^%nPnZ0vP^NTYhD%O1=t;vIbamvreuiK;XiqQ~OZ79f-Sv z!dY;ZqOy(koIrsCZKc!Nul?|<$l_W)MD6q~FFM)&Li3DumQiehDYN@GOgr#j<~h9E z1z})LDi7$WGBz$I{{1>UAN>XBJ(Z3?D8>RUERuH_4Ha+xm&iGOmP-LN9&C%!f>W+$W@9N9wk$) z!dLBS5h7-NK3gju*g5VUwzofUykPH|#m<(H|r+nG4lM#==A8|x7y-#WlA1w z?AFOovf1l8@(4$1*u6X3dB34MiwWFzhpSmgK=p-knYE(M*nH@vH#oBP&{UdrM)g18cC63NJUPlbZ~4#d*u z-TJzxZgnoC12COYhR|h6t@@Hn^1y9j@__O5a2k^IpUrB1Z zy4#mK+FLs7jDTfF5`9j(mJEA9-Ehdwr#5Y^>EdI{sws|wDG7wcY^E)qtWzq$<2*tw z2aN}e_1B#vQ%8(0Er>IV2Rs%2LdZ9~INiN|cRf$8e1Rtdrt-&L-3XB=vbgGb8A1iB zSz#J@z4Ul+!7@Ax*_D3H|8ScIfQlMsdhMRi_fb@f9!eD6$?h_oc-DkiX)wm2}mcI9B?Xr%Y-wgUsE zWKl5T7iG7(VfO_Dqg(n6*cw*uYRj?paMg3pMb>fo&rUU&)xSy+>_9YgmOX0;J1%a{ zO0r)2+XFHKNjq7@Kf_XnO6cFJO2XL|`^lmE+1oH55(B=}xi4-a&8qh=z;KO;t{z{vv3RB)5NH^$(j5%hCFq`@ut|j06F2_$!@zj~9FHuS1VU}Ak z)S%L!DQt;y53l{NVgFwvG<0jKFB&-Yttk&; zz3$Gi*JdGnZ%xrmr4y^Xi46E_Sk+m57@cR+QM_->&CXm&d$+4j%kHK^qfq%-(QEO2 zLcHf9aXeNa!TgCdus%E*x}s<%Y~RiQ=*cK_*~VJp1uyR4COYbveu$u=06i)H>`J=r z8+0+#yskUyn>YT-%&8nEV5t716-P_QSK!4tW7AG|CoPr-(T1QAYKN~1>hs=#`~z9*k5A=$(ywrjk<-2Q-|uQoxL7!?q#s1b@& zY1qIIu5ah$sX$Zs{XB!x8_cynSH;HtsUbRh5M5M|fQywYA<=GG>jZ%TISWYu%^3Hg 
zE2Mq{aa*5vZTJf_uzTE!V*n4d-~Ape9(-8hs^fk%OR?pHkSfUTio$(c}oblC%|hV^2cjY2_^5#p|lN9y2Rot9sAt^b{xrFwtX3RjYnDKfhv{%K9UQ8{ZIrrW|%h0~kw3MuKFx5qIe zkAhJzIzJro=Wn7fXa@U;HY)3hb=mZ(*KpU9AcIN1;uMWqS82eiKzK8=4u}FnoI=wh z)eNqcw^n+8*RU)>Yf`kMgE8yreqR@#zke6zruE4U=rPI# zv)k#{1zy^T42in@B<*SHX;%r@bqr|W=u5(k2P>jOg#jh)pr7RnjU&LKl!upIQ+1Pz z;r!0=FiHc__xA~hFW9~`8Dz;IZf(b)Wfgrm#RSTtP>}1iK{@|a#de=2^^!*gv=8Rj zHcQ0GvPsvBv5~B%oqyhNUEsysFMilj0(8Lp?X`PSd2l(7c*u-u`BWS}!gk91sO)$yXUOC7$sKM~#@70sv3sFHsP4-QqrvNFUPci|g zV8w(sfnjNTOtdrHuZxk@k@)qQX82mNPFXskzo98jc}%xeF=M&X@j+}dqYeDHl_(tw z;`}EhKu=Tmna>NXI%n3L{NK6P{*BC1R^+vB3CSbJaUD+#_JwGL{$N!!ARpr+Wf?K|yyoOFy z6l~@!j5*YV-zbGH&C2J@OEli*L1UG#C9*$tDSeARb=2a}Q-XLKk%FUZC%ku+{M9fC zGs(eTd3n0EnHB?nA>Vg-cMET(EP0r+7M|`dIHe;E93PZim-!PrKPL6x4PrNN@t!a> zi2}Ako+^@_9KKnocwces%mI2SkuSGdPO!bYb}K4`gjoUYJYdwj!?&SHZ60zc=VYh% zlS}|fOOql`sZ%8oC^4lP)r#|?bV^1GHZwn@dEy=}dgWGMEo7(I?vM0u6WtO3EdH#i zVfNW1X&2btc)yk4uJuiA&L~6`O*L0*Ff-atP@7HHFQF}A=mN9HnXd&mqu*Q46rRwg zlu}4a4EJo)nb5z>OdN7Y?Z9V-NPncsl}kkiV000Kw2Ln$F~HGJggJlbT5;rAjU(Y= zi%_Zna3-9%5i9dxO3_c^m0+_w@5vEXF4@H6{Knf;u;zDGlWNBz{TL%$z)}9d{zH|e zOyLI(uzX*`As3@Sd?IZ2{nh&NNGzbl-R zPV#fa;+~MgQ>)Ri`$XG;^lqVv)o?!sf4g1H<@ZT!mFptSX5i;~8`$XAD@U-{d0+n>u+G=<-lOHQDgdtUQ`(97)y5Z?1aFD|6c9gL497`%V& zDIO}^w3PvZ;B`m@>YcUC3#c~%!3@~*Gf8+{SwIlSTT+DIUafujLW(w;r=5@d z#n7qLh<>+I0;~2k|JdsN0SrpFkCZc{1&-3zlFI%5?*BC=ciCp5?J#F6YO1h&$T_5o zIqIXDo?9(m92qWM|D)hR#pChAU7;h)qKQ)dX18+6W`9HB3;~ADtoVoeHGwv{08g0~TWBcUyt>5oNnPsVj3%Vq>lIDqF|7 zUCjwI2oVy62zM`oeA`VNO9UC$v|CHQL99EWis9)8aF7`L2_3KM zhkGEfTW~8brOM0A-CyHQ>Mp~<2bA8NV#L@4$@i|aUDW}&>lOji@ok%kj;h$-#i$N1 z!ww@5H%%Lksln6viLTWeA$iHOJyzXlvXwiQyK)@wP+RhUc2t@_AgV1u^@l=PZ{>ZrJaCf_@_L(n7ycL@;OAxMBhgF6Yq-Q5ELLPBuY1lQosU?DicU4q-- zuJ7{ge!J(}clPXi|L;9>4riDAt+)xX_u^9fFkaVik=GeD|WKO%3fR+22wM8Iy_RP;`oX0^>zt<+K}|JAJ(Ad+Thj`V$saQD17yxDF<|e$H8(v&*B` zVwwg7=Q~ftS63@j+ziadmdl){ zH+Gn(Wvuk|s2E6ht~JO3;-w9_h~^@+YShLBGef*fNjw78Y%%45YnlvV-iU??Tc_ z^n~!vzeAt*Ajhhz=v(aDm6vUra=qLg!SS>`#!%brcpWG!ivuXf3BHmes>lmbp(aNQ zI6#kofeSzQgG`B=T~O!8VZ7$O0ue_Gv?Okh7Dvu2jD2nyZ7mobb-*-kCHTsjW?5$? 
z-j3UiwZZH?HD8pcVHih1(zGGXC+2#k2*G0GT3?#bnmZ!M^w5T6^crQ+in9y};Ipxj z+x@%y7yWM5xSE^QtFNYmmhNBjafvk#5-rQ6_1yli!mX{xWMne~qp2T8INU~IxzAkSf2icjWEnpCI}*@ajrkchP6Ag^zmLPtQ; zfUc4>KJ)#RGzCq5J}IEZ@LS2n1@NF`R7)e8#ro_pr$ZTYFhg71e3WNJ9Bkawl7QcJn%DIkGq{BBL> zxeVnYYpgRwm&%iYfgI3{E@*rTQP)V(wq(FR$DsPm0jke+x(1rUZ6W(&)d{sRJL=6` zi&yo;nTbtaYomZ^9hN{=QwjmgjC<>%ZageKUG7(S_W9v;^=!8w25v8+1FPe)*{|Rp zN{!qF$@C>_;El~ex4PZ5TSw+{&X^=vy712hLhdgH#ioNtT#`?a2QDnOx z8)4J7gLtOUp@b)Cqtg_)F9S?PYD76eHu)ddXX80tt&zZJm&##MdS*v!%vyLMI`j_) zFGJ27m?~+5(Eog^GvH3f&O+qM`1d9nQ7y`AXya5HFg+>qjogdj;E!AimqT#A~ zsHDkMf+rm#&`|`tPCKDYd|K6vVcGP;i>lvPHd907pK@@7-|i6xq{<4PKGP~RlPT`7 z&C>tZy%ae#%ztjINc{Z*ZoUj-pxLFc*ANKzUwL_}TmVWqDXWD7dqM>E9GEoyh*E{6 z#(E)p%C2mB&=gf1AeOUGME@JyaVbg1v?_&-eafhg-$eyrC>+X*Tmfu8@Pro&Kr9S| zU#bhCpAqajH<#y~bEn5M>WCDu$lL^S;}8L8DiPH36m*6MPtZ61IgPNw zS6QV7ZVXQ-sw4{pCej9m9q^4ZVQhWC_saNckY97;4_Sr_lORhvt%Rl1A06w?zW5xS z4%Ga2HJsrqZ5S489UpeW^!HiCnaA05JgH&A!uo6(+L@R?JX@%1sP(4!AL9e_9L3-;CnhD6T;k?on)kDsvNpQ)*GWu)+=&%^ z8Gf&{RD&p88*<_W1GwEdvd_k~x{9Dw9st^!<_dGNLQPUnju2oGJXsX|-prcDnU*6) z4QN%O+7OJj>wJN}ycZ`F%^M`!3lSgn!pf;hiTq3tZZINe#s6uew1^E+PK9GcAmsxt z#(YDOIXOVy$cN>5YeAtCgf(eBm3LJS8VHI=GC(aK32E^k{yC)>H9KQXmC0y1#q$KO zZ{g7!OALw%>Xy8DyZC(JXnA~kwXwYcwte$^Bz)h7=br!Yev69gz)SX(@SIR^s#*2j zWNNPsgAG)cUQxoaz=rX75ZZhVZbSr$Q5PPw7NUq(HDU!+GncaZaM&9Te@|htN-55F zH%2Diy}^%G>Ln``N;>3GwQ?TU&N2)^4^X!({~TfUoS3}r=slNoqiCbG6OQ;qNLhNq zAkuOcoJ7nYCx*lK-q_3&&9^ele|Rb1lMQYAmdBeoo_iu|WD_2kXzNI(eW| z9lKVKseNs!=h|UEeNV=S?nI9FYay>OPIWKl1&-$eGO8+yrYAjyc+I{$Sg}_g z!;7KFB<198y{%*{ZX#z#9Zffab>Iu1AUhL3U}+@#7xv#_$V@6O+APuH1S%uwVh z3x+h}kSQ4@%|AlNM2g^2C06lYUph(X7MAU?Pvliwn${3gOPezw;wU0ey{s%R%*^^S z4g`1=NbWfy8s|s(9RG3j7&5J?8%CH4ySQvADDLmGxUHN?j$sBhMe z>XYefJM*FS&^kM3KxHsBf+x^ic@;fP_F4PP@4ml($Z*JPbkOWxSP9WkkH+zT$vEkw zjJZq*byzbXlW_e5m2$RS)Th?#?bg!=xZT|YNG;62vfvb`KeJ+aA2A+OtzXyZ8z)Qh z&;5@7=0tNRIz?wX_R_;jI2_%=DiXEkdPeN<+Drg>xCCHUl9P*5xU4wL-moB2hj7H1 zx8nx5T6{}{J1im=2$U8%;@gL4l15@L#e6cfp@h=+bDzArVFZuwYDGC+{xSeJeir&B zJ#tnh5Sa5xP%q|StAZ-;t5vqXps}^lcV(IiIe7>RC#wyO%1!p_eJL3DfF^cr%8wBT zZW>V_l9p-CA9AW{uU2Dyg>%y>IuWXN(X{If*v)HKkc6|ogoFWc8=U==()uQulBTYD zZmje<4Jx+aNu|l_RkOMA2*T-9uIXQH-W3amMlT_>?+-@5rz>!9DQKkmP!o9oc7p{Q zPIEWt;;SoHVEUT=#Eti_s%jl^QXvxI{2zZHm+2EBySusQn&~9e*dG&R0h2)a{(GOg zQeOmz_3lrDwNJ)~ka#9A@72 z3%r!2yB3jdQakC|?%oLz5rZLS-r2JVTl%~tUv~iEk<6y*d2KouK+WmHp^i?@jO-t) z?HDynWC@d%;hv`pYa#)bX*{xN-@@>j);}T#NR6z;X^ggUoAs*|PGIMxHR+MbQU&OxV@ck#Ftpqk@7$ROwmE-6)*R-_x0diZ^ z;@0z{JR@LhH*TP4$R$4`JX>inUwsZqX2$GXo+@Y(N|pE|UwSn1=h%}Pxu?~R#OJ15 z8lu<+lLCkZKKp&NFTT$)*_#KxSntFg!Vp2EEPofjRZ?i_1URe!C!t8pgQ}HmlLiI8 zm4Ovj-6x9vDi`u%2-@|_Ov!oHg#NiA7Y+6?gW4a;F37%fHLdkk~p0!m~)7v1r^~`g2|Fp&@k4GS3-`fs|E|*P$i?IqwWzXb%P*8<=1tA zPy$^*@gRAn)pJ0-en($JpI0!3idFp^1?$7>S z`gi-i0;K11y@PekU0q+l8IbLbJtx6UX}>?L<8^Os(;lv4rQ~)gc|6?Y;~S>&>x8;N zZ_dEV04&9Gu9O+7vq)^KT&E{cTfym>|BQ(u5fJd5k+VHXB$*WQy&VxZb12i@|-oz>X29B#nbO9Bbu`Xf!%A*>D zt37FEXpK|PWMQk^S7oY=kphW0Sr$l}#;8b5698NaHaHviivpWDE7ewSNCQ&~8?H|? 
zq7b75N*A2prrT9w964=Y1j67l@1~^=@w-JR;p4r*k`y+dtIXU34NS^(KNh7 zFh?mnYNoH|Fh1{pgvuz4$?*gbw3Y`r!@>1{(t(y-3!lOKT#_N36yJILjB-WnxHk-A zs)#6n!^iW|*4pXyTKdu~uO`fOwT!9Z2Pq3srqngcJ;J(BVpq=eqq`6q zUd)_gK6_=S=bX|d(y#!Mlooq|a8!^4@JMhxZL0m%zS>2Tm`1D^2PcM1jd{)a5huDm znf6^J6pj|u++aO^uSw4%Jy=ODVoqi=*qgz9XC;DZkgH)E5@Aei~n~ z$W)P+bz!$FgU*%)I!7y8aX!2ylEz<}lT-3P`3fay@JO%gn;8hl?nAYP8-Kj0j?<2J zDT{<_&h95!udOHKGYwhK&tiIU2)9nU%_r=e5 zUVyRbNqUSI9CowZ} z97iH7mpx;56brt6BL<4-b#2~b%z-gDlWuQjl_q#KOTPj8h8g3cA!|1CD2^Y?jnj}v z!^0-jBlgrjD=1ltHNCc?Qz)uJEj*9_YwfsVAtGt~76?KQ% zuGt$N4~vSs>x?!#MmGg1Y9hPxo9%Ca{Y5?dIj^zFI~ByRA0NlkT_^|+q_Q!^7GQjE zzK?}0@LB{tH$Z=8@9cxC-K(U}t|B~h+2o-Z^uHBp`*b9d83l8pJ_MliR?xlDXKB8u z!4kD^QsR00YujfIvEDmF)DxY^oB*jU>GtSPUYBO!t!mTE2~F&DFh*Z3OS0++eYvl! z3IkN0@9PSjYcrMpX`ANl{yusHS8VMIV16MN(-wAuPm(LAPe>kH_u5pl!w|be?NH5y z_bF|ql=*WBDGL1Lou?8$d!f3PM1TVw?}EeuJlh%n19|68eD@wt-;?U>@ggQ!xn+U| z(ei#O&e#m2^J3m5o@IXaq{v^jPI`2EOZf)M?tINsjtX8sbaGAzz~C2}-JYR+PRg*U z=41r!3Z>hwzXCv}|8)=sSxIees`}7g->)}H@D-)3xwq3NtA0ERGZrn2-9_IfdZ)G3 zVt-mKdRiyGh)6FJ_I1uJRiQo655d=t*RaMUyhUf~w1zWVgD5;_D{>gs3jCzwsx_hRdaV$eDDQDt(d1gSen5}!9g2+@OG`i7ei ziE{r(e+-s0Qn7_ll6k-IbV}K+!&eS0$7;?r*68ant{ht9F=uTTZP zrn~Jx2z~(VKg$^mv`-Ki;5Tyd}AyTt3DPkZl?V{`H1h%_c0 zj1x30=+=m6PM86Urov-O22cE6lzcPSX^r+BDB@E-9>V8I2#ANIys4aZgM*ZTI69P@ z-Zs309J(W3)ruCydr662!+QBxYQ2UFCA`_ZruOjBzteE5Wlc6$IH8sjJ!(L6k}W-u zo`kTHy)u(mFm`2CBx_;@roWsy{79W(S}vvOb4^=?e%cCEPYyWu+_dW} z$*WeKime9^0Ubn2y9%pKG`@{gE&b%S+K(2F5gZhaHCsrqAhiNQtJb=4{I#lQO)7*! zo(9Ox%N1lUvPrsl{&}M{?7E}ndLCKzz!@i}I6;$@hv|5JC!~d20BPfneakOLJVyUF ziJvJoqpu>VqV(5N1%Hgqyo?vOi{4UFVMzwR8mxyobR;v1+iGH%D2iE+6N-I?7c{f= zI{JIlj62^APJp{=1)gQuv4U|7?XEB8kP=#oX<0Oo_C;}Zr32MEm1mUjYm>!VxWg>A zGoO%>#!QCwu-?EOt(`2nI0f_XQD@|LDYr?&Atl1xnUcfC{wz>>>OtNP9hY<%-{*)v z`h$70{%RNC(^lr&gC@7$(^0cfera<_W7$P3@y&J+Ee-j^*v43W=k@@YCeillOsp7j zqA+&UpCgN7>^qN*4=xn}Kgo8i?bfx=Qz7Dmg&?ClO@QSgGm>f*WG%eCNTD`^F_59v z&G}&VF9s0jL#Aeu6q8bOtYfB}BPFN+2J8hmx)4s!8wseoD(p~-@>#a*w`PrBqP^2m zj<7oZx+iF@D>!y@maQZKK4LoY=FBnId+NTjBvi^D?@&9PD?{L5E&g-EEr^8TP<0Sg z|2fw8hil_>9tVGvgoCw6W|FooncYWVO|(yB|WR}ECxA8YFv6!EMZrLbn&5*TVZpfn#iQU|?Y&)IlV z7gEebEY*o*a3s;?iyjeGkxq(A_i*Kq7tfV21T3RoZViX zta|f6G4eTsevR;rvw5TnovY**u^8KUA_2vBJ1Fc@mgaO`UT;>%1qH%|GEyk*K9h@! 
zX{fmfjZhL=CXmb;F8gvu$i}krvSpO98{Jgvsb(CxzV#qwA+@jh_CE5B!i(<_^vGIU zoF0?>0)QexBR$9XU?)8~c6McVvFc7b(^Hy2nIJqFU>W!<`*I!)z}LsO^5wK19D6>( zimFj7n%)IRIsuf5BL}Nznz(J5fh`eg$G#N*$TbwGt*H4S!waH0FyA~l%f;0omQ4Dc zM~GS(en4$%*t?4<7cf%B!lxQ}zKYey!hrk7viUpfrV#G1xyiHY!RWskS=>ui2!La5 z9AzMogM#nD#RI4kfhk!b3oqUxr#_)i$yF0cinE`LP@FIkT3Lmr1yzD0x%}>;U!jeanjJ?WFnf%*8G7)YY0XI+%UIrLkt3 zl;co>WMj3KjgGv@r<#RX>Bh!D8H1(y>xE@V03pN6fJHvNe21zbswgj45e9-kZU~e@ zVC`6H6kMzT#9U~l&a-!db0w;nvf(BMa*3tW9q+5`qz1paOlst{L+U3f^l5UTfQ3mE zez84Rg-8v`N)^H9urg7+|78RLn@`Y=21rhG)%UYB@qk+kI2o6Z_Lv1jB{bhHEbi60 zRPT$5Xl|g+UH@eHVZ+%^YXhaCrX!}Dn^vX%Eaf~eZYGh0JvbgnZ2+r4a8Nu~4`X?S z9O=B0M*H{OP-v`LV6u;dtZSK@O}7QjMeBakWRRp%RXPh|PFde#c8cuBkFoiY=YakX zqAEe+vAdLDONM;Mk{lfva}_E*Ehbdiy5-YW_(^lr8&67Tj#9}*)f2mXz@yI$2B^kn z4pXyFa@Vg`N3}yJ^Hn;!Ebsvv;pUA?}quTdT_7JMaRJhG@mlVW8@GLe4ya!|sBfHGuD)p#re617DMKJD6C;SE|z z>d95B5_~<8+MyyBhV&FwCE+3n$B;TJUe$v^hv5pT;}o@>j)=>>f)I8L=eJoA!;56G3O?EWB>RcX9y&8Nfk+0%|H+=7GGE5xEk*F6w(V?5qlESb!^>cBW7WlMh(!l64~{KGGGF~ zeZg&ZcM|bmE7}9i++j@(+e+QnIhH`lPPCj51>ho@bj*>VOYBzLvby$Rg>pvN8H>E7 zKeSv5ajChYIh+ny@M3%oN$slH>zZAYo+8p5$y)vnkJZj5dTwA2Rup& z`;s(RR!9syt@NzalnAu_+0KNd#&QK#@6vPOlLcs3pC7%=`EOzb>2``W;L8-2#Kj@-OMEkxB;GBjwBC~J$%+ce_S)TOPM z`kZ0$D^ivqJr0_8+;fn8#6$)xd&);`Q(SCG5r9Hp9#A=a(xKH07aR)zP(im)QLQ0M zc2-LTAq+D+Fm;7uKz|xsTFeO~lBvM)R;0#M5SIn71S`}uAmRZhas#zmjJ5cQo`-IozD6DB>f+^T+YNV~VDMc;XP<*NO{&;~Cuaprr^=x^FSf3h~oaDe!mH5?%GTt!E>8K5HyeR4trxJa!-Cg=9>Y9dj zN99q8o`@j{MW__bE?Z9$VwpI<=@UU~9CzH(gT2FeB2lf?yl@X%zG!SGR~@N3(vy6l zClWa`YuF_#0aqWYQo=UM^`w8M{7mys&{a&b^$!dG>&o|KaA53S-+Iun6)+(m;BDJm za;q`>ZZwzaU|t8$opvw z+V(lBRPBVcyX38Ux$s8l&dI9{o+=_s^_I1lotpMTtK!eN%U^O_ybTfS?YOT`il0L) zMAUX)g~wgI9A`q!Ln0Vihj$usYMA6)Gaf*9{(6Ny!*s_O+6|cOXqi*=%#eLH8m=20 zUvF-Zn|{YReUD1;`Cw8xT%n|Cqj$}wvo8Xr^UTCATsXQik4;Os4kEc~7aW++iaE~v z_8rE_8Ek9+?V=z1>ON!K>CqZ&qvbuwnb5h)gcKh-^;SNT;c9+HmWzppmeZ$k=QQ}VE5khLrAezEb3 z3lN_kbD;{@$fbEgKSWAg%$;)J+3w>X*rxQ>2IhbWk;RODL)G;1!=klWs-lwTRFC4h z=YFvoK6PLzFYR)`xLJjDZI{c>@}C3&)p**bZknG@w4zk0*PEL+<)Rscj>P?X1kG`l z1OBRt))hwfe#OgN5Q2Om;XV**IOy^U94nPZ4;Y6!8+bUjF(YKvpe`#aU&VOGkB)k&)*Y7jG}2s{2&8%*H8szsbG#UfO#`h?s1I zl6!4QQe6MxTI7vf<==4G+}c75_<|Ufh^UtnOgmVms|_R_lM>_y{&LxrK!7k|J4+;~ zpW#B$o+#4=e@{`O3x2hfF9S)*iX)If$_w06kcy%9Z3tGeqYJ(ost%+}hoZ$AQ*+=! 
z0^al-qS6H?p*c%1mQ>2CAjqdpm0aecXBQO|kTjXc;LG6;cqd7Ph{wFFFp9w&Z9|}< zd%kB^t!AEp_&(_LyCaS|EmrkdSMhK%7w{G1RuujHtrPnD0rYIF5+yiWBYP~~P9kk^ z(us&^ND%&eQA>hE^?NM>i5#D1fIFZ&4UgW-Ork)p^a7rfxzY47u9cfgAvoMdO77N3 z2`!%EHNMk_jbMkV)i`iSa!)pc9frh}{D(_}|1crUgchs?|3R^Ro+Ly2i7ajiJ$rAc z1*MXG3Q)~yXjcvYqa{q$<-+o%4!>g|No*oqfGX`o@UI?vHd*1~b)z0S6-vUXm>k-x zH3BQeq9cDHDk+m__C&nC(qN9v$^mpLdO%$1rPCOXIGH*qw&zZwX6PVTb*{@_0>@NH zs)dpl$PEBrWiMlbRZi6D>c~d{$No11gE6`VUC-@D$!b`fp5?NXMBf%HHxI*WAuP@YxDl76>7v^kC|o>_+#G>`Ug&aJaqy&C*?y|HUa+=8eq5FxET{W#Zoo$)ewr z}%=Vy5BWO(m; zcF(2cS*C29a)H0hQoNRK?C%eIY$oX6nu%LpVuG$6P|{txM+rT>b!uNdSJ8h1`&_Vl zZOZOFgDQFm@n)o^xNJiu_c?4Angdyp{yqQe&<5Q0y9GJK?7hZF7Y{!r!fw~gJ$|wL zgTX3waO08K{J`bMe;%e+#m3Xqv^9LqdxQTRUGfN>KaHZDfF z!NQ8?wdRbnn02B5IcOZ!Apd+u@L2=w?Q7`aFj_9o`+s&rg)!}IpUA)F9i#utr_g`R zVfZf(*5FijHlL&)_$;1eOx3~>L7W%;&1X%CZ*!)b{eFf0J{jcn-ETS&rM}+Iyx(3p zDY~AlYdu@^K7cpwXNQts515Nqd2e@rB`x>A+XeZ1adB|&k3oib1_lQAtc78W#ryL* z|2YskE-o%ExG&v35`W#D=8Ls>TR#01O5u08@++cvVh1D-h8>QpDZ>rDKqWA`nVn)I zj%WgB*ZzJG_UuD5f$c-eron;NRD{v3F^lBw@<-g(1K)$HP5Cu@h69h^LSb8|{)edW z_vrUpDX)+%4zej`#mZNm%x zM}F~Y$X#^x%Ks4ezWv*6&$rvHZ}(fFVz+RqAOS$^xPQ0waBC!i6LUTuNY$?W&PU>-*iM$O)NNDsV+GWR_co=zJe z2WR45FaFhB+h8i6(N&+8rp8(8MazL&=JmMPHK?(-S10GHGY+s(ACPu)0sBUV+!LwI z-EY(V`%HxJ;7##FkC?|CdTwL5@@V3lr zChu*K8XBxw0Iuk&6A!nU4_sVE4Nkudy{4KLYhW<3$klYK^QuoLo5K&g-~@nbLb`i) zjm+-RPV-IpnBie9(`S`*slar4i5N7wx|TI?io0X!j_X1x?*CHUKe9+aEN=FH@=o;x z@4Ng8zNl5-+&{|!tvjzukS_c;wq~(qfOaKHa6?h$N_v}7@F%Mu5 z_XfA1j>684*Za>$AN6i^^+Qv&#-$id?rFwFvw78w@a6JHu(MwuEvTm47tS`$*m>`E zlsxRS(R#Cg^U415(-iFf?3>skE%Jfu8rj*O=-&NmjX|tI zw=^a1s`cQe^#W9?rQ?@Ns*ff*?7evMXlait$Toxgq5Y#PYHI@3`U3WF2O56U;|g&X zE(R9Z*mVuRh5vn=p`$YQmfgom9lo0%_I+rh}Xt`JvyWaXCc1$C-(gO3jazej{uPrYxr^2pM#cpPX6TnpY z`in(x*t%Y;>BZrrm~3JeD*rC>iauNoKTJJb8$I+HU9p`11l@qF95|f9|85zrYCFk% zxb@y|FekZMU0r=7cqhI>rSLffuZrK~Rf(dP?YN(NKpk-SJ$qgArRcs5_qNS@>1Gkv z53J$je}CdW4BInhFD|koc~JJh9s?EqlI=e8c8$S39}P6{p8Naz?bt6J56`GMBR_=S z+AeF9drdHiUG*9zdtUV$-Cs|^7VkkAAS56la0OqwhkGBi)M4Gsb+UjSZ>%N+q@?uQ z1tAGr-zRUygaW@yWg07jLpZ^k6V6pq1^7|ckpWp7n zpb!sk7QvrG!bQ$sw zRfogw>K>n_Mswx0$_*#LrJ@7eX?k*Sw{+v}f54S_H2~i5;8vl1+ogOP+@cPeqBwN> z{^sBTbgh5KloO);CmGTIPH{Z!W!_cY9Kc{@yIRdFM6la_m`{G{iOuS!|NUm>(QYpY ztxzDAP!_v8b@!ceFV!r*ySqEM-#;)cpK>obGg`e} z%q;WU`|USixN~z~em?>8y1%{pWqh=GKcDG++{@tZe_tfb@M5Qnyde3qwW4ZSN z_$w@0ol}Wq{`ak>$9#Q}R diff --git a/docs/resources/diffspeech-fs2-2.png b/docs/resources/diffspeech-fs2-2.png deleted file mode 100644 index e07604f56eb0acf04ea512f5a5478c59e1045ec3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 145505 zcmeFZb!;79vn|+WikX?2kC`1aGcz+YL(C8}J7#9KV~ClV8De$}af~s0{QKs8_s)G! 
znmd1wuGCVi_K|k0OSP+3SFLrTloTWp;ql-B06>(M5>o*H$a(+(*MbHASa}VNln1~7 zKw9jxn%B4USN}=(4egcqfRS&mHSRt9j$8g4TZweX-w4WwF~B3`BU5IJn@jUktmH$) z@FJsxMG%vyL@2I`O{B!?*B{d7dW9sci>dC|!8J_n^HVbtqK)WKxU-2?w`L~%ch1bG zS*M9tIEEh#OsCz2d`0uNcD#R|J&$y(2&{ZFjr-evgqQdDLqrCSDShJ5rCslzTZi{) z_ad&-|84xUwvsLpAbRq@e*L&{sr`@nKi>FX5%WL6@c)OS@lbplrRcY(f9bR0IwOEB z@-QkX*iY<#-<0>d?D)Fe>3Nx(mlwFT;=kqie(yMOf8p`m_O`s^b5~{Pf3{rX+4H>B z^ZwlPPC454pLvA!8o!^qBqk*D=6@cq*9A>8p{yzXV(@TtV`iTHg# z@MSdqWpr#T%I`Kr=pjVt{YL1$?)|RkP4WGg(ChZ)>vmhO_4Z@{5uz#;z5!|)J(|CU4B#)Zrm=B_{eRfeypDE;(W zNdL~OXK-{Sw|{mAj&pr$&j_sj8b6$)sh0Qta!f4XbBp}eC-m_A;hXn|`1gkq!TW}P z{QN#-_%h`9GVJL0_ISKiIh5O{c5p#rK_48U+S(1QlLVOR`m-Fx(50Mxrx93T`H*upt26PCI_ zx}Ni*2AA?gZrnIM0F0V)fm?sbSx?knl+))7_M(VY2(LeXAbWS`@Pt?Ifeu;#HjARe z9%!CWv_uU$DB1A6KBfBC+SHG9BikKKPM&UkErm%0j0^n&!Kl;huKb4;SNurUT_0~dn*>u z*b3cz0y+|a(1dZ{M7LO4k5PY!-X#YpBi{z-1fpu-cF|FEQPnmkUf98z5OhnoA&c5U zn<`?RC$vhHtNhrmq1p)*DGIRULIIt8V7Q086YH~z$2wDVh80hLe78S<`vgzMXIS8+d zLtVs+()K<{35@_|8?9LF(pCLy>PP-MyT90Q5-@&5+@-Hv={n*VV0rn-rezap zTt9xvzaRFzU;{IP&*uR>fmcJ=6J)!W9~n>R`Ml{yj5B-6qG^}$^{JST2P{Eh1HYdM z67YwX;cGvgP-cfX{aZ4=;N7`{k5$bA7W$Uz4Jwm|aD1mha2QBwHLBl}>Q2M~^^T^WWZ*8 zjHBlYCHy$h;Iod`^4rkB6HN^O2y9h`eeBvDHC?K?R(WP8UNzh~w2An9Z}wLU#{;tA?0$L;{{1 zz*%mYe}%IQBDFgLC(oEcO!h$W0K70ZmILe~eM|s(B2|8{YI`ada{ZKy~rPf1|)_L_d zJ3So36$*Nn8X0Z-a;@VCD;r|-b!^&Cv^+U`EE{RNC50A@m(%t2z>^+0Q2_E6oZ*32 zX&9puVpq|OQX=6Ud%*23ouz{`_5#s3eWIu+2|kq#C8179ofU|LX*a3?+&*48pph#y zrL=p#Jl%VbavNowVmapskNX15ZNSt4cA*;vn8n{V-g^mAN5l8Cz}GRMM`|#? zsTIE|o0g~e_bZ|9=dDZVKW9B}XFabQJ+EnPU5At<+2hDnK*Et>&$u0xqnX0OB}&10 zL3$gdf-baO?3T+Djq4}>%Xl}##2%-RGNSG^sc(_FA^|(8()Q6Dx^osr^L1~8*Pb#4 zzSU(%l?luQD!AxBB${+HW>iUoj#Z!NdhaS=pn2;zc_zDefB9QKQi<;T@hUZVvye6n z<1v?JBl&YL+>?7eH1lLmpBWP*@asGp&Bfu4>s;S|X2R-_cU*JI)J8}itex8GZ^&)5 zA1WN&;R+aVUscw1d6Q<)ZQu!a&*#>iKKd8=i35_Rhj4a<;c+*!c9zCFdnc3Ku;sWd ztf_H%>z2wMW--miFx&L;IXSm=(=E*7*p{j08+RaT>h!{KSeA6)X{~*;Qomkh*d-Y| zDkD+J-#iPFjbaP``AG_o(4)eoK$ogau*MWzpB=xLEbd=QO%#TS*ky!1br1(iJ3>^{ zu5(=0lhLY@0LJ7S zTL@w5zY5?o=eF*Q0jQ0xYjY|>60)vpg_^UN#YnSjzb3^CXfZxB-SCaM*t}WKBb|<< zOu2O#St#>ZKN=SvjQb7wSgTu}4&EJn053(Ecp`zPsht;j@Avi@|KFN{cba+LN10sR zS6_v0MTKrP-#?%#Lf|g5-vXEa!auMt@BK0F{R0(K0-l%K{yWdxf8YN0yk}bGw!0or z{|$Wq+jGe1=p3W`Z&*2TcX05}gLeoZso5Hh#sG7FP~}yubAJ44BsXKAOC@Q zFhUP~#1Fy557BiG(SbLPZ&O@;16-pH?68|LgMYemyui5dFaQ{AsVIm@46vp=hW~)78Ju3?-Cq zTgomP$9u$J#ukuRDFNqd&2o zyE@cH7(rof%f-AtB+1gs{y|y5({e^{lvWCdHQ1I$eGoqve7Z zPU0o`-`e)@NKQ*_XE8{3PP}i$9qdWHp<&OaG0~wK#&ty7?;cqp3;O5JrTfW#3F#Aq z4GMxiD(lKFT}y1W7QQfhn0jkrSD01Q%q$Wm$?8AbY<>3b4>bvJ!USTE5oq*(3f+p8 zI3OK;4HkC~WwPsSSRA(y?0NnHD$5CN3ErLp+LB#4aI_)j2N)^GMrWzMS z{WC17z=~T$E$|tg?L@e$Y=+zM?aRUgYs39OzlM+52H%=3fgvXb=Vt+tuUxbN9N zlZX7A9}=lvq@y7MlhZe6@*ZaLSAwxKJp1hA{M*)@eM}XO-Cb6kak?dGQIa`UB%YC zSI6YB%HzBVGgov5(Ps%n>>O4vcViwkFVK84#wTH>lm2(gZbby$7 zGXwLvi5S^&)Y>#<;&2`~5{#^dJ8`*Ck|guB!cTCbotX6P-t@3=R0f`3C#+H)u-MF+ z6V2qk87oXgtSngM>>)U~bLPT@2Yx86QTOv1XUFnwH>+u^f7Ygo>Zc}9V0pIMSDG7? z;{f4lZM(G;m!_i?YH;f08TdGgwjCPzCxK4^c%lrJDfBj+@N}9C%Kc1dw#;)3mJO9@ zl%F;3Q~970I*>+Y=2yEvl_OWVX zA=N`5tG?V!o7JFGT#;ksteJ6G%A(xkEKAZSBtr7DiJTF2Cn9i+ z8fh2LjmD@l`3=i#QFcrX;pjj*b=V`hi{-p7KSC~Up2B$)$Fn)D*o1E%6MdFR>Q7Xd z3UFg~Vgq8KQZ`xy-o$|e-9SPU4_My6*7&2U_r^Nv` z5Gjff*ZPtSIo(i81w2tO<%|n2*Ul#<>LBnbH_?gUBI;LcVy_S(qn`b;A zvUXYHHpSJzBD>3OcYf|om&{R+971%501_*O6Zn%gAe5Y$7iz)(FpR6US&yqtt3UHu zCI>f+u%51mVo>p`$=GjJbN3ZE1bsxH?DR=h7X2+1#Vbjz zE7q0ObdOL4Tb`97y>x4s+_SV?Zyt|UBc|0_YNr1-RG>?kbdynk7K?3u_Eeg}g`h;l zi?+AspmMWs@QoTExI0BZh2|Df2`$0i#EURX(stj&e;&VE(jH2o! 
z@3l#g%hBhOjmSGXYjloyh0r`W#4oh+lW8o;kuvX;ucIl!(oo1GP`QPFsy80`?KiRN zB|2rT3R0d?&b+u1TF(Gc|%IF-&t>t!Fxk&V53myzaGM~ z7Z<^U1}AOUcy2E;F^ZNnikfVlX+;$;a~qOVfP=DGp({ zB_E|7g&*vLE?9bOC`X(X6P-&d>yk(0uUWW37O_y1vq>eBOjK0BV`De>#f$Qig+~F!r>X)A>o~vX2U2yb_}HhdU8}FM@O1uUZ~&MrF{VVLP0rt* zt_2mc(qgHMVILFBd-}BFLA60?8cW|#(YOWGVrK!z`m2_fj25;sadaLFH*CRx*h7te8vMa^k>kz7BK5JBV4U@grHl35qC@C&9xEY#)r_*CZ+?HEYzRwCbqW`=|C@ZisZ5*3)-5b;{YyO>9@q zOW~Sy5f{s21FhCY=c;t-Vl@L=SHy@@9Ex~=xu7-9Pwn*QqHi_0R6fzX+uT!>65FvA z%PueG=3+z*QGo|Z-@DDOF-86ZVIOuBT;u;2)5Yx8S}77r0VPtNYai#Eg|HY&-8Hi< zmIk-353XuO7&@*@@j;8ltdhsQo~tK!Nkp6^u>neOV0krFgSy-ZhtfERFWGiFclM$G z&fPI=f$nqw5)G&sNJ}Hlmsl6i6lig1{z=0oD{w*#2#0hLfqmmva^LD(-BjPBxfNqo zXNgK4W_8xZC#wZ(#;T}nbft3?dE|Nyggy|VuTFI}!W;c8iZz_3>{MFcQufOfR3YHl# z`TIMN{Z@0@vsuKRXfi+#&>cG&<7C0hgge!Q)aL3`b002#nSo0hVJbp|mG$a^19y0S zA;?%%gaF()6MT9QfG5K7Dz)^W?}+A>D!NZoX;b?0nan%v+XGxk(Pf%U?qK8*lC6HxtAJ^r8AdX zAqIMnr&>aiQs5F2eF@!o;U}l_B@S#2MSF-Bu8&L87gy2%%N|c}`MV6zVU~tqQk6>8 zQ;Vd`SHOp0m?+4nTWO)qI%|Fv!TJ@X)V*EK;WHBvnnf947%lp%vpe+vOQ3~4SH^~# zSfg2Sz|eB`;xrkbb7?K4Y7Tg(Y*j8$t<4^8t>5CC4)?;Q!8K)n21k`>dfZagArNvbns{C?e=OnM{1oPMgqO0KyZ^~ zUB}imeCmZL7+wz{qV7dWRYy~Pe2jRry5+na3<)wMMAurQr&-sdinx647-e$zm5!NK zb7!l75?vd(!cuTOHA6ypeIFqk_XtqNn?8O_4`gV}6NDvIH7$@aRBEHY&p`Cnd@G5| z?|i8PTq4A1C(4oi(gwrRnzI)VXeNzYD8!RRf4Qih1VvjanraGy6j7v@3HbnH$69$G zn$CWm+T3Hai$_%eWdy0D?D*3tg^{x;EeWFihCZS?cc;=am*i-NSS(EtNjhWW=yFU} zK=p{ik7lcj68K$N0kMU_^?_fV_=axLUKaaI=|ZjTc{k0_lFS4)j5aFYIM^c=qYGt4 zxt_C>nfjoA9hCY!0>%P&!L9R)A0H@Pi5C?m>*RO$w&4X=lc}nV%X9)INu8ckI9W&8 zfs{0ZRO?%l70v1EIys+n87btT#>_y@^UTL0%U&{);Cdx@=bV3xxDwp4yzmAAj#E6%}Vfc8j{A&TubndaxpM`oe$ zs~LAYrumQ1fgXR--Ttb%ahpD>{>sxOmF5va58yDF9ecE(tVI<(PPHg^ibQ5{dC$H$ z*%~J<^6v{S@*l^$3R=ehI;&!cjit;>66GmfHjBHp!lpmcSGcYQ`|H-T`bO0GlV|ae zsYH3y>P}*1T%t*Xgg7%5OqJ%(ba0^Wk+ADjWyQU>4(QO>3j26UhGYrCF|ULGFnz!% zvY%Z$Eg9SOgKBh(M@;aw?rGsE3v*cWc2HJbZkXb|c{%vPO;q98yJ0n#j8PTJRD+I6^1+|SQXcVraUf_VUyzCcmm?4zm4h2vY=`3`9S`v@^Ctr5S<`j&Z zFGBU+Hi&BE4IMNiHkw1$);@x% znLfiJBl~|h7nBmPK&i%_RY|ObC;s}l%hMK{GcGmLmbSSxr#_v{;!{f$XC0!9LwhBk zlMh<@EHJZ}V_cDQDc8SHhFWtkVB@OMJ|0Yu!fh~% zt4J7-9uSnLt3Q@{P9`O&cr;~d%5{tUzwt=oVg=PfiIFEQdz9cxaGF%PkaBkG!V*;G z23^?mjf2HH2w)-+Lq2Jod+5ppY*|4ATvNGg>1Lr_XdUFD+Se9wjtoNGa^)Ey=Iqc| zaxCib;&4Qcwj*?@EWxdFWWb3A; zFyQlK1y;fJnA<8oB}!&^rHUYu(Q=Motb;{=#81b98+9R!*fhzZzOh;`GIadxQ>`c1 zBPw%;dRd=US5uBP#@A{tps#~+l-Ws8jkmm(k)M?o&E6_S2QoNS9(>ZMFt+^+_O{hQ zSW)zBpElx9WMs3hyvzdBp*e!SN6{pdO}f0wmLmZsw4|4R2@`5a@U+|_M+aC%3dhE7 zQ!0iaf#jsByJ8h_o(|LPEO4xgqVsc$SXVqPrWJ|T2F+`#du3Y^E{E2n@u45!K}+Vq ztRp%|mib$O9QJ}V6NaKT|CMARS1`A;FxPAqd-)LRVNqNph>+WZC@-EQSfLEbyKAMw zR7$HWOG|G3JN;bMo&LlMl?}XaO|c&qR!XkhtmpxnMY|&3-HL6rNby&1xRF0*ZVkUy z8h+C&QW7|Tc`G=WTsg<v0nafwFE#QuUcVEa4Q4Oo(ZWu=7;7|r*(<|r|O^=Gf-5xF$ zGo3DNmUc#Gq&b2Hh8L0`c>i6)S(2YtXJi)Ki zXz&`AY({(G?k~a0IDCLJv62Y3%0uZqDThUt9|eW64B%svf~GB785RN$V&FArmW*>u zD<7N?4479jHD6j4Z1%yKHc7k-jXk{|TZZuDG=0fXl;51@|7jWW0Dt4kefH4?1)R8H zTrN)|d%@zkZpi+!UCyf{3IK9E^1pEQy}?lw$?}!*vao91+cuBQxlVgUYUA!uNJc&% z$}t}4IjJ`6U|c8bgM}2(w_wUK*YKzv8vyvukTv!w&7PFKct*>M7MaNxlp4k0rWRP0 z8-Mi~LCv-DB!jh7R8#s^^kZEjCSrK>33+^Qu0yWr%JP-Pw2b2QC*=a;YCr33%BTgz zjOK14=ZVMd6w9XFXnsfze|85iJHT{BGy-(jMI;ia)as~81lQhGnOXOucr{d7^V`9% zJVd89+zY*D^d#{#<%p(lBbafAj?|2lyAwDx#(LpdD!~p{A;|@y*IAi_a*FOYxLbjuld`@n$f3Y=;`;|C3|~YoF|bK{R2{)JAGO1I zPCANibh;Kx+CtZ+54mq?Zl*r}xClt1xBn6yznn23Vux15q_~_BoP1NPI4d>Y|AQPj zN$iENjB~@&!Wb=TIkE}()nM2QV#tx1D^8~)v(0{{k;e(U__*VLH52E`K36nhrHt@@2eB6_&mpI3s0_|sL-wmt zL?hIZ{)7tDG53@Dm5UTs%hOye`<45nRo4sdsH$KVExd`^#6>e%Da;KS_k}E;{3hUZ zm9vUGtPu}+pxvwvoU2xar(DFOT@bTe7h5pI2sSZ6JV#)yB|~grqZl{TRAD}7Fctx6 
[GIT binary patch payload (base85-encoded binary data) omitted]
zXS7b070~N)b5T`%I^h|!3&?Xd(H#EVUQljWQA?WfRN7ku@-Z0kxSwkr6EN#ghIJPi zJ?(k9{c9&v2CYrt9X|Vv)MBm+5=`_`)!9(GMh%z{Nd{oK=*ngmOvpnRiq#wD+&KiF zr!R0NPynfhR}@xNvUWx(zKp|Dx|d3O-}?7cN(AWIe+Y{8{di+qyNbI8mag8Uq=4pc z(4Zb1SAq2^Nm1j0YxzjP_Au@&1>@x}!UI1hSLvA_?H%y9ihNIatxi}Q-mF05M{sWs zU?!g`cwrdV?_(Rh@S90E4?d0&YGJoq#n%`l;&OHyjoi7Av5!)7QShCu(@m($fQ)H= z%{rtF`WqUPZW7lpN*8!F_t&gD)8Fcvw|BO z7z}kCN)(#EJcud94*dVx6-!ylP?XHIuc1pWU;+yvuf2$6-yk~Ani1c0!^M$_B)t!P zHMDH$|2hK7$N-p7pP!NNxHT~mh=a~EF;JqWgEM$KM+Xm2J*3!7UB(dM^INnL2=SPQ29f*oem z6HZkdMKrJzP5-A^&m9cIb**lrF$!7I$>4M;`mt*5@@NZqj<`zPgpuE@RU?Uqac8t# zXql++Hf*7%EZmg*=)RejgO36(KPS7hWgTjGd)FVxksQb(FBevlERn>U-j;IcZp%t` zs?g}4_d^sNW?R{p3QhXY1Ni@1h7k-vHT|{hmn&aqIshm3@FSIUzJprzUx7mP$Y{ZS zUHQGM7@W)O>N?$t#1x%zCJrq}xKm0xl$LQ5W{jyGDu}Z39@#R04Lc3q86(xAG%xWt8>f6Gx z)v$`1XEq}^R(-vb)o>YNNkG(U9morftY%UHuCVI?e-8x|3a1n{Pgu)wl#J5+wEN6v zSb4);U1l7 z^q}@58Y7~yPEPXoRZG{EJBc;c`ZVILfs>6w07nF&bU{Z_!|a8AD`d^OP|LgVN6>^P zZY@M(FlITCV9o|{KJb&he%RyGgZ)R2(kddpu>n&f%Z%5^ubueO zF6^s+3^0wZH1xv<*P?6uIvtR!Yff1GocbJngbf;D4$~7l=fdRoT!x}SQ`Fr0$ZdK} zZutB>jM2Z#c-Oyc&YWPIAXxQS{>s62 z1T=kH+Q=PDxk8+GA4n)UH_ys?SCipRQ84J=KB>7gUG@KoWXDq? zSb-ZU123c0(yXm0S5N2BoJm3LNc*j$f69gnc755BkaO;>{8?BD-tW4FWE{R#l4NU9 z4vy6+6xb^O&0>AZ^CkytXm$t9)?riM_DaTzu#5nWlp}F?9u%%Z6&Zv%ZKVKhPS9v~`-QKXh$)1Ow zGXRmU-yT&z2!%vHWlEO#=sq}=D1Cp#=WzqAS&Q;fUq)pywE_B}x#LWO;Xd36C}dXM zbsWE)?9@?`JqG>8m2+#WtPQE#ONEx%3KJ#Qn0gm7;6L#Jc~;4(#)pLFQ()5Y#yI8+ zp|5B3n3aK{0;UFToAnQ~jZtmdDCp{rWm2W+^5<4t7z*eleDPr<75Z>onsCF7p*t7* zC_dSxO2R*2<6!m$q&o08F#(J)Y_Jd} zBCTeJxav0C?L;CN4{hJ`*3RzWS|@JcqHBcHD9;}b7wJsGOFQwq{%DN4%3C&<0mM&~ zFJncNRy2G!pxauL&!9j0{E2&82iy=})9c!>rM6f-|Et>W*SM{&Wds56fuM3VH9Q&{ z@{`QIz|m$0WEV2XO4Wx8o^iC29dcR}RBiw|4+p4mg&|W*PGqdT+@&;Peg6>5dyYWZ zu3<>FKMNCn5pRl;k`2RDMg3eZ@ys`WI{lLagOP*zzX5l?EFu{84VR$C5F-@WCJg8U8iOp=HC|K+^KTZe?? 
zjirJZRi+zv8bquDOz7IowL+SiY0usNz)|r^HCo~Owbblsl*Xg55qm%}MoUEvVVI1~ zYBX|}yR{*~-8$Y#4)Pyje%f;Hv6P5?e9kecy_Z*Dee$de<@3k4x%G z7942inYd3WxB>9v*R@wk@|4hV19|Oiv@qotKF5#yh_)flZj#O5%!)nszE!&0PNTn) zvr%$BXcZvY@^kqp@`_*;N|o9A1RrU2*%OA(g&{8)baD_g`W@Tc647>ZXxZlOLaHo^ zm|JsD+jk+5=s+xS%C2nyneaiRHIP1?ypj^ z%uJ#K-VII~w?y0NArF1Th#{oS3eA2wN#Q2ZAPi1Z!O%b~XSOpO+Wo}#TXu(1`=W;< zc>5qQJ{t@5PHPS~*Zw}Cg&Xf$jrV+M;$mHbL z=#n2=Gxq#sj9|tNa+eXGE-ST)tJI&Mu^lc!`U^eNxW*&Pp%Y>ePJ$ ze@;|nU)e-LDqAbQVJ9q_PG&V0DD=5m5@!z#61GD`ex(4{i*%;~K8d|8@Pk%;QGVP| zSfLJ|o|I{TZAx3Wr*^%fDX;^tU|ZD6*A97uefjk@yqTCbHgsaC4~3d*G^1KmWgss- z7@((F%xZnN6oOteKz216$c_Oh3X$}^dt?5y5*)LI-&H-uqc))VvIM6x;s+SF9Pxcx z7$*L-dUYZLGq$F}a}F4nk>klgYvoL;+bzVgS{)h0ZC?M|xr~<~caHby;y_7DSGP>p z!+b8HjhnVAbuGjI816-4z~B!+z>gohNW!KTh~#&&Dcq~ZgKlSuFvCA5s|zB_BI}?q zwP;9nN$jbi$!U0MnNpsizN~G?9R2Jx(pbXM^8NmQw1WR_Z&(#aNNa%n%a<+KRex_$ z7VOC3&B}!((=R>c^xcY5PI?nt?|e#*^|~pWRuOSh-|dzSuocDM2Nm#J%asBSS%cpV z>8w#lbIJ}j5QMTC1VhZVLZ>F`;vfM@1QGrLSFuB%^tgz5DKEuo0uN+;Y z+O0gUD2DmabLEY$nQa-7+ab-0tILYBgcje7!H<7*^RV0Sbtt7?>Mkt-?=R1&sXklh zWKeAM5r>?fBNzyxE||ciL*NNxUm`my%q3p-jF!=W$Jn)}%5%fgKKREzf-u9nKR#h_ zJv#lW&paMGTXtb5W$^^)`FLA-r&q)?eUPix7%O57_Ri$WFJ*(ry5_2G`GBDU^(*ut z9Occ>CD!O({=rJ!CaJ(`M2ilOGK(fc&a05w3pe`a4eBOPbNL99wSUACs9uqaa7~%} zPDtGPhwpQ6^iaPLkJb2Bv0+wkdQC?}X~m^sBgL>x#McQX zY1CNWpgEXSg+{6fT;gG$a{O&ysYNtA-E9*|=kR$L)l;AaNpyt?23SWA>b(6=c!GdQ^!05mzl0uIBy2x0`g)|C#!L#}n_6)j1U;R^ZLs*l~|* z@ck-2E=_~|7xw!X_%j^xSaK>DN#Oayf7z`{-Z8q)dj@B$IPt9I746$Hp_VIoS`L7} zdd0BS!q~ev#E3qda4?M4)w)*Cc&)?aMqNPr!1@}=S0V~^QLjl#Q}@ zcpI6wv`VKXqLTlY*WkaMwW_rkuX`HR)HhTNsPhO#*~uD8OJuDZ?49A)%xHDWvxU`3 zL~(Zs%ob_ME_S7X&wqKcTyK2ERX3vYno$Uzf}cdhCGMxm)5K29RO%fjlpp;S#)RF& zRgaPx^(hH&XEtO0l~718s+d&mAIO7Fj(lqPLT4|;0jgi5m^nX%g-WQox`~5dGUu%J zM0N2bx+P$#9I2oM>=9JRbN)FNFIyLfeL~g|5ps`T$0lmf9AiNygU*gYc+`u*!bbXVQb%4l5LoakV+fyYt>X^6eS+t#zP~;hX<~?Q*)1k-V*s zeJpG^0nR(VnzJiM1=n`#FIVR9dJsj&=e>dgE{Q?6gJgyewquzYWF<`*v*EB5#f&Ak zGlV;TyN8rAQuHuTxXh=xpR0Z>V>R1*Q;Ze;=H5$pPZ~gmZP8}ha*fLCkrRQ{F21_U zW9Ap-?NAFA^kokp409W+yvL5DIPCly1e=vgA%;rRB%LkeHOVq_TNj-k*;3y$YYNR) z*s&(kWSBLz&v+gh1W7%YsSigk*sxU$O==738u)7D4gRs`J+-B^vl#N`{&qOWORq6k zJFkbWx%FdXB-?1IGh;h?ZRH;Qj`OWRpebNPI$p_vQ#Sjny>8wOtGd6*A8TY(eJ&QL z!0C^cN}2ssJRxrV`}?W|lH)pM#C-L5coU=-PS`b4+h4ne>nMttvPwlTcX%PsQufl7 zR&Xa+8ny(nZy)bsImDkU=o9r(=+KE1)?_sfmM+$$DLJ5)#9Uc&O8}`pQ>BPkz4cJ2( zve^lzUVA@_M^@lpd>}13>r^~sx#OTmby619$HXfO>;wftb$r7#Rw_ioxPbdS2{P*jR#a>|RONpCqV>J!p^ zLg30eQD5;O0!z!4$bzQ8RLO0I`*hz7rX~sz9+fd9myH2=ig3m8IXXNoA;rdq2WDeT0QB$QPFsMp<_* z9OaYKdq;3MaVXZ+B8)54{y;O&Jd@zux2iF@OWGUjRSay*u)uzs`?SZWc(}Kf`BPTS)Va8q z{Rtw2q~3sYS|Mq>*wui`l;C9lt2nnd_yQ%PEE(C6uhIL$!FURCd-jUp6>Acsq3klL z0~MENIT1j5HZJiYXVuk4l!N~RYHbp3MN$$SA&@C#?(7D5{BhNk^M)f`4(-lACdY}i z>Epnk>hd-{uPPN7kL5IqOlwr1RL-KT?e^HElzto?nJ5dlu_KFyw3)QdmYBeZFy@xi z4I*&q!=6E`?;ma72R*2bXCdzD1pxo$uU2~c$cOthd=!~ zGQtL-+{LqJFnSrjwuFsF=Sb*DdPZ5dn2wW0IQTTXEswj~*GA(P_gIUU8?~cxMOi6#<8VqgtDk+ye+%YRzR{I|ePoik=2T zAP7fq&lZ6_1GNpqV(O82H=q`-=bAgm1?h@Jnk`mgV?thWR^3x-lWq2eWV}K32shX) zs0HwVjir2w8uR#xHl(G3+H4+ieKr@pJvN?o^Y#dZt$2T|8jN1VKk4OaJe1Ln5wT8S z+Fe;!6Co@B$j)_=AX$b3`Ta!f)F&$-qk$2rO`ragjBGGvji>1qNzx)nS30Zdh0&VT zopgLtX-&@JaHq{9^VGk@iZ`?M(bc7OX8S_+e~ykwSLxIuk9=Pts%_z2lZ|t_b^5li zb-~6G1CJ{@7!pBarcfYFMnabpU=ROi5@EKF7}?n-A`0Lv)j|@i*Zv~|n?z^)hHnff z1V%(*kV7R{G87OL#cCX0$e8doZ<>Z7!LXv7IImc5|@PGNuLe3E1-)E&9lg$&?JfGrPuNrVC?KkEFPTgO8AtIZtOU z&ysD7O?G9l)zZ2ptn?L&9bJ+&jn3S zbkP(p%NnXc=07hEFfo=Nd2KQTE_(Ny_>AnzyEAfBB}A`|Y5kM9o<}*kpd>=*tJiC_w+D&hhv>Z61>0A8|*U9z-MV#uniJXUbJ zL>L_nE>|-js58<2u85NW*4D9KIuKq)Nq1Nf;{=WO7mH?R)aIBSz$zvJ=f^b7PTwR+ zi~Q_%tDKGN= 
z3ujWR;Yztx4^4ca{Hk0tRfML9+F!#W3VJ!Cy!i~(c4io|SZhyLsMc!qGXgplphlGZ z@8L)I(9TSdxwE0UJ#rT1*$AYPW~IPBPTTYmVK%=zNuA$f#GIf#vSL$CY+|$T)PCgI zn!B21_u3Efx0Q$c@TNI?UxZLqNeq*w8BIJzEBg+XbkELKaSLv-KU#1gS^l`zV&);b zR!RP(++-GQ@yw&bj*U00dH@D_94l0B5P%51so}#5hp3Dkn~s!$&SJL#cYU?lii1@n zuWvbK+HvEhq=l^%wTuOUOuo@9f-aClc{tdV*N+bshUP#1hn~Om5lKH$J7CDEPAvOi z^xNqv3Ne)t$Xm$WBPWChI#y%^_R1nFfHRo-uWe(?duv@U9Ja99>(eWKTCOLDN$AaU zJhC6&d|aoTJahJOR6Ig{<8`7g z&_=CTqxTMZM}^H58s^Q`=jS?OD3BoGaeF)#W+=!6RXwam!(CbC53PO=_=?gu5qx*Q zPk5vo8nU5%Wbi#*_nZ$0%A+A||FA0v)vS3rA>~Anw&4cUe_Ww2#}y*CBwo`Zw&31> zDF37l%lB~n2OBKP6!n8jBa+O_?%LloAy?QOOLALJGJuG7eI$4}GDaX#qQnI2Yw&M) zlsaC^bVEP|eZ8^DJQjL`Q9Bj(rHha= z$t6Mq6?o#o1a`_4#p4jQz5B}jp0B7zOaRc8cTk9MWbX~x8tyM#>BlRdiq`F3^?{}? zVzP6!;$@pXCnwoM~!$w2f* z&t*f$MS%IUYh#+v*2IkcYiiPE*(5?6t2OMX9&H@}{Zq6n%dbO!Ugq1hka7BA_(4ZT zI~)99IfI*0#4Dr>U*5epTYHrP;_C896rTpiMt;=F8HR5S5)Myi9pPa)Sf6h^dM92& z|A&S^+g&Cf$t}$xuHw{e#4oE#;fjfDYhR zx0x)NF@t1jY?-6m7rPo%8T_F8eURYyt{!^=os4VJ&Oo#Yr&K5c(efxS>?)1y?EVCi zP|bg23!ji<-5b)9m~ViCUL)7>z*Yg|feup8t!`$3ZasX#v}C|%x-g~>P%4cptax`I zptqwB!I?DL-z1$Dlxe>M`HWRl4P&qjrKB6~>y;KJ9Gd%y)`H4g41wh_*12c)%Wzjv z|HgwF5_Ac_1Mx-sdjxTyco^*ZS`btZ>!lWf$FOMt^QW+Xe+@2IXuc9gxloPe zGY>+xs(*Chnf9iH0Z-$z$y#RL_a0jjrfbJIctLtRhrIQlK6OdY!PiI5U=TXFi-NVN zi6HcW{0s->LGYLuW$?7{ZCx3FpJF;FVXZj^SI5FOd1=G|#9vc@IcUx}N9Y6L^_O1N(ZpTGRu}i(O6>R7tRfe>8DflfA zZmrNy`eK-IHwOFo)s1a=fae8Fl<9JZ=>_x@nS&6hL)@?H%qdboI&&U!z#u$TXi#Mp zDsHwehY%kN-Papag;YPll6zx-0-dc^_e+O2zbW{NyC4ELQYE3O*=2_4BAb={zMlK1 z2QmiX-1@}!IBxd+Eu$Gz8P!g7=%2+lYY+=e?|0+cpGy`U_{>H%PE_0u8yr5^-G)b3 z^ZNBQU=WQMjN{-aihAOu+YLT@pmB$cVW$)w?~uU{@ov#6DRbg_F*Wnfo{0c|UR*sJ z`AV49lsOK5C!OX{uRk}9jQ=V$Z7=%*0W`mIqX5S@9^wMNZ1);skwIm1eyR<|D^{QTnf_o}yJ? zA&3{KRjyxKyTlH5$6gn>$miv7P<1 z3WT!0p_^t5%>D%J{bo{wrteajET@Sj-b{3F&#vFI%Pt-n39$vAbRhTqppObzhF0FM z!~WW~$_vE2vS0}J^1uWOb7LL4Ie$EkGM0A6i_Myv<8}lhoYXsX5hKN1|9|pjSi{T_ z)MEP{mW&(h>Dc%edAU3&_v`9G`oul@iw@EU_hEfwTTYyk%S&KarjUnK1VJ}| zfLgqQ*9-6#(7i1VTkQr;hYwj;fyH_w6~e^ml`LN%0CRP7ZZC;(O|upx+(*L_pWOe! zeuE3H$_ehw2zG+NKpjQzSG$N$2adZCaRUk1fAQJzGHc_Ou+FdSR}t}o;Me8&w6!{z zixUHmVt;3bj!mEAYVMRVOC7@<2AHNy>a6Xd--XL#tp_4>{6{?h5Pf}q9ZPK?Drs{1 zAA{>i-u%vLtqpGOgH5mPm(nIpVEm;3K_vO|8upiaa`HiNb^JvhuJcXF-P3WL87(D! zBme<8uKPK`3ZIIFML%InJi(xpj|{Zr`Qc*C=~K$P_Rp{nn&E?NI(D6X+wO&JXZ!q5 zH}!_fjtsz~yViBwveFDb>-;6psXw%3yuvj{4ev&9>Tai8pX~y6P8GE^6w~o~DbFb2cr8<>bvxT_v0~T7r`V*dq4Q13_n6c{j zmGGTJcq$V`ny& z#VZ+hrm{_$kmPKt9i}*!te4f$RsH;be2X8Ax=Y&}{T${2=~SImQ5J;Vl@~V#3^tBf zpwbuI>6*-Oj4$Rq6i}P-u8Hejvz~fHn(LSpEFjm@XSti`@l>ZWn}uN$XxHw;baNX& z^$IK@_5v(_e<1arhrBYXP$4`$RT6A&R>A2(B&EN1pCk?UuyD9`6{X@$DWN4FNunk8uMV)Wx^MBAD=fn>u17Vlnl9-z zsv4fRC)2L0ND?2eSkaMPe;&zVnh`tPTJ6G4^Y2R@X#?Jr&zt>t1%wt;Y(LnY@@vME--Vp5>28~2zvFW(r6t3N!dL4AYRC8RwL5RdfycXkV&^TK zVsLFPgEvnJ&3Dn`w3lEZYD1x8hF2-XgFgFn+BvHM%xIBz+6LW_9Ou)tG3mXcA88t& zE89$7JibUVh%^Oq{f`t7shV~*-&L0!%Z>H#l~o1&78vXP`Q~Sb^9&e!Q>R@b+q?gI<7Q~A zEXEmruIhmNagR(s>56&}G&257e?8N(LBX(*E{&}tnWUxY_=!_V4T0QM(NZ{7iHna} zle?&Iv03X9evl#^iHwKt6|X|YS*9H*&g_cV& zP2IK6%M1#PoZ98eF=rW%oQ<;Hn*DWZ`H>AUyEz$lRSqFnE*56na

  • &Q4iR!{qXM zt3s;64$vVaO-E(Ir`O#-p(jE5OLRZ0G^e+Ylkr_me6a)_6w2R;r!?fnSiE)yh_?LV zZ^u^0_@mhF2STZ17D@uoY0RNtDR&8=_g~K+p)lq^;{uU}DyuWmToF_If(j^I0QCLK z<8J#&h!=m@Y0M}h;}pxI`KJVVhd^bco3D-j6t%JF*{8gid|qxEzMDz2g`4tlpy*_mys&=hf$e+!68$;zdS=T0Y}-?-PgFjCdC{?ZS<7*)PMGjS+NT6f0NcJSPMiPL4=ElQ9mIe z@6|`3-0)F&KXlt_IYbT_7XPPt9~vETbOlp{oKalcHj0KVPan;G=9FhC;=f_>a=&r; z=ye**n-op_H;Pa;j2Ju;=F7gdX{x4358U_X9_Z0Fta%4F=9I5&hnCK+hYMF$s{ZJ& zz+uK%WUlev!T>jLh74rt%5Zo2@FPg(C1oNg$e-gdVN$g(_A~_QHva{Qa z$iYbRkV0qGn@8|{BI>2pRBoTeger(>9xT6s)Ybf(}zh7964nhh4BaWfZ9wuY640n5{4BAFK0?H2r z|HtdhK;KPAS7hzZo*GyAy9c0rZm}rv5J6u<(li!)Z9$>sg9?)#T1fRfia|yIlDiWN zkj0o`-5|Q_GdBZlw|8KCK~qKOeH_E!n#gR;3h`QoskQ-(yTMXYvb8J(pdnJ1>jE)2 z3&aUgYlbR`N$!sE`QwuXaf>%QiDBzh4Gupzup?3qCNY`G))*xT*!iot=b*KQMe^3} zI=LWlqqVrCCcx`8O&8%B7%8YL*;dk6D36wkq!2|;s*27OJk$ndqE z6vDFH8sXy+X!u)Ru=uOv2ptO4fCFxS!VtgEi22~maNUonws$2kyVt7PD@%M!Y+Ak~ z=ina0bED{QcXQgG{CSRYxmiAlnqgKyVDVV`<~;0{>p?cfjD=xVTc}iBYob#~qf?&o z4tOF#N!xL857E4|^UzdFoH9k2 zjNk_aEdYj>Sah;sPkMda_&YNU{WS?x*lgArLXr5{rY_>{7<=nmWh|pIohXT74aZtn zzLApgMPcrQ9{~2YMq!V{R{@Y{Ou@s8y0;~&+TcXOjQW=ZZ8SeN&N~E1?*<(Y6dZ=FT#R8Q~NLogl;hXAXP$)$Z;* z>|+4JWL9wAhin&^oCP zbI&ZE%_=pd2zG!GIFf&OQv)##ouOe|_*gb+u1eiHt~?7ZrrN&@U8@X=gm^gc5V0F# zJ91GlQ&3;C3phiXBOe^52~jmOlV7^&iWW zmz(h!P(BcUL6H>G1%nLW+OXJVXXfv+oG}O`D)r!;7m$7%wD)xabP4J~IaYEh&W~Rb z3m|~1A)L9Qm+9SBp$$)AK`_WRWZ4PHq<#hr(F@~vTtGZR=%8TT;i(C(a}M?s11zkr zuC0gBvs$_`E6D)c%HRjda95NJE>*5trMTL&QKgADFr_hsoh&bMm4A`fGn~jX*%YYe zBf3A*RN*EIr`_qJdKq!O{5q0j@7@V}tJ%NYWuwjS@&S>`iksaU?5gH=2Stw(aCKwb zii8YLjJ(GEVuEQ_mkCLZ;C32PB}t>bP(R3OEm~MJW?8TOJ#2>VhjrlcZxgKeQoCPk#Kn}c`Vkz*C zR%jn66FYc5v@1BN3xN!A{KNr$K8>jp*HIN;gKf( z^G*Qnd)G;~C?s1aC3NIfD5F}Mpn}wP90$vpqi_7{H8}LT(?_A8-C>{>veHZht`eGc zXcDMpO)EC3b$fWQDNP6Y$w}+1wHd3Fe^-`Rmeh~+F?cwiz$6Yx5-}y+;4<`hGtmqC zq&|{_IbU5jsur32k9k5+#6qNoWJBD{Y~S_#R@iZ?RQ*x0jFvk*M-y- zcfV`@c?BToG@%J#nuy?&4&Z&q3T|b!@L5Bap(VlNwSKnK;Qk^myQ+gJrI;b`j!XUdm<0}`$bTz1sBgHa*@4ceK-?Y%CI`UL z7FDpl)2|2;+y@+GS78nw{Zgxn@JiHa-?Rj3VV^P1sq|3nGZCqq-;CXAxH}?m3;ot>g-rqf=%G{JG0Vvu*Sb2*V;RF);+NLjx1BK{UP8k@SnWjHI< zVNd71I+>ze@n3=C6i5RMXX$(FD5<;Nv=}QC*#=sc+hE!yBIm{?H4Vnp_Q-bHtb8-x z&3w+*uV8bJJ{USdj_9s*0|m{ZTwMvNAoSE-`7VL_fh zrj)&Uwo<}N0_Q2Rk}D+g{L2+$^N2}U@!wmOy+#rf@@+R4ca5D4baF~wyy>tdZ9#l* z?_&ySJtdT1_%4-C`}eY;mg7rJK1s1 z!Fz?0(3!ypD{`|GhnCWdXL#%l9)t;s;`@s$e?Zh`FkW^@WEGyETnjv;CDa_r*~u~A zNYj3&iZc;-W^UkCu7;v>|6}xr%PEc!)7`CR(yX5LUc;Y9*!mWW)3I!=iD^1+QxawQ zr6I7dZE*HIiZ_AySsZx(GlD2(EkBkif~pJxJ~XhW4Ufx3-ajnyn41fe9$X0HQ!%JE zCA2K%3Q_!OzT?;`&a8cEcPa7uMnXJ-D@0`sBxY^!Sv_51@qozRI0-GocRtm@`#tui z#gwrov7`Uy7mmo6>>%xTWN&#aoHeMm)Z+wc>7+u=%RvafKFWK|)sjMGc?sBf3487n z+(R3((D70n{TBTc8TpA|-rJ$Alk)Q5O7#2PaQdQ6Dc9ndqxO<}&V{T1f ztZTI+jAM-|{*AgELaNYM0Hk&FS|8Z$67syy530}ybF;(zX`T{W zDLW&{@S>egu9t|`{>XLjO1^ZU%NY1eqxEVf5630;dZ{Ufl8|amr<$EQ@{?D2E&2a5 zvVu}*O3hzNC0@V$JA*(VNL2n;%kOLEum6EaTtS}w-~WTRxjCG83;YL#OPuaQi50jD zng83)dJp-(o#(SIZ%Zx#uTV4=6hoC8bYK6jyZspS%=zzdaJQiT{||k&QSx7?%mLs3 zgT9jbcVqrOarG+k%=qu%8blKuP5v4=8+S&7 z$Z52aeF%b*(HzChG$KS`FFd>}pVFtFdBHUAIk;$S`y|p8db@blFcn^Sv$do^AKT!m zIykd43}aC&{@HteO84i%H@K_*7n~KyNEhEQvZ_0ZNg3%QJRU2=+&tLMW3QuA+aA7j2Fo0Y5sU0h9jZ>1Pg~slVB!ZoReEBvRkE34aj%KsVc)^*WP_`>pC}49 zLz2@`os^26WJ7=vQrLR&p_y+rcer)>q8Y@k&bEIWNQoJPUmyK|o}Lz!cs<)2eA6kgM~xEk&d0)DZKh?KX$fCN4f|DW z^UTC0JW!8d^_5-F2)l2WHr%A?kg80}^vp|wbkcQ=kY3mv#MgB2DP5p&H@QJ%pJ`un z>bOX9w0t$uQ`GG9F(nm7N9TP}>Hl&anKxZrTm%zwngRMAzxFLZzL~$M1Sy&S2cxwD z#c0LboR@t319!@fI}zPri$DVM-oq|BZFVyLK3f*PAXpZo{!7xbd>?|A#)YtX-- z(;6t4$NcYi^CyJ2z@VUi(BHY+Q`lk8fY`3-UoCjc{)juwfXgmI;wMmim;}L49KL;X zEnHa@(2j!HsLWAeqhRLJuI}4_Xv~+{Kk2yTnxVf>DcgI+VVwA3BUI4Bu6{BpIuvq^ 
zBFvgID`A^Vlf!t*sDSXN07>w)=zYXALf??Yw_1pR3lHFO6T_`T^JP`Z$GqU5xQ|(A zK<1{M>LRSIKzEqwS45u<34rdh#x~s2$}>X@c%5&F6@T$WuI6^8RvnH3 z*oESZ3f6W1&cX_^sy-*xHDet{Fhq>O!JBc1acdYAubMJ&B+ec1cF3K884~0%cv6aB zdvCow&UkFtiMP^1wSyJe*zd1zKmve#`Z7QjZ* z!DG^Kn{vY8-?6Atf&74KCylEy@_p=xwxUH0h%&7dx$8ff+qA_5-$-DUl%`qUNU^j%-U1{{Xwq1w_u?%^ZGaS8nyuh4s=GkBZ4IehMb_+ zR3kLBJM4+cGUmM!aH-LY%ZB+Zkv6xTV#edRpm$X5;3}i;iH9P`Iv8;MlfQf&Sg+In zvZIiNLr*ka z+y5~6^dN^4Q?D-+M@Bwe-V48Wj$oS&BUG`%80;km_<|HZF-jm_U{{ylwxcUr_cm9` zd^Ki7wWe9dH_^A4dD2_$n&)-fghaXIP|#(JBWalr;%8NZddE(t@6l6k|KNwjmo_@G z8fLfBa=o;ot$Xl}K6B)Tk6={tnHxmUBb!e`QHQ}3<5`Qhn>apDCFcH5Ttez&m3q_0 z;x+Vn41Umw!%pEUAiW8}X4|m9#-_tLDgkhcU=clJ6qzqWz*bXxfA!m`Aw{FoB*oKm zJkVQ7;AgK7G_|jf?`?AC@8o|tTYKAYT|tkrF8(t4{^t(nr=1rM4+3nsfq$<@p|q~$ zyqxy`K(z?|t77%vHuJyZH+tK=Kl#s@`OnaDD*r7TO6m%{QwZYofB*76nQ!;MC|$Mx z-rq0(y;HsKQoSE&y#HT}y=71v4*0DZ!L_(UaW7D`cxizacXxL!Zo!L7krub&?oROH z?iQf97x&F?@66siyE8lgOeT}$bLPz}=RD_cYSQTcS%sSap>3w-I0ihfbiEvPy-d8` zi*>cOP7`)pPGs5hrNdvJuM}UeIG%bq;AH8ohldBtfE&wze~bbD0&KX@l;eyWJuU_d|{+TIo$JPkWqJB2yxfT5G6QZ!N z9pqsA1LOo{wOttSIERQ#5Jp{d;?l<5JDpgM-j9QTTYir~t(P^pF*YaP=+e#4Q^RjY zP(!k*cvKip{CS;*h?S=Lyv_uvkhfhDI!tilsRLYi#5?bv15=uaQu3vWYs-oA&p{be zF~@Bvg;?|azu#o3piQAFvXi&$zvCKrUWyiYtMgrAsyh&A6<=IT9qMyL>$Ill6n{Ji zS3ZhlXo{QPIQ0LZQ8AHf`ar*VB=zf>xz8yTKa;3)pY*bpjKTnlA%!Kh(2Ma?s5 zI{LgIrT_pNfzEWZC{rFk!@3> zzgs+X3{I)c5hN6)i)g-h*o1}j*aJ2qfVG&I;uw==M)xGbM1y?Ow{!hhzqB1bKS)Lq zUPk}O^ZeqS;1h85Oazs_%vjvK=uh=9Mllo(ZbGxd8kpB%Y3b(2w3QjePTst;=OD5_gPq+ls4D zS{q`1p&573p+(;cnUghK@@R6%gFr7bV!^bJjVVgwc7`clLw2rgt1-2vYlFEje~d>z zr+@Y-ghbnMK*^%_i1(bcDlq&RveL0NYnUrcnb+SmtQyafOTo}<7qAGbs?^O{zk)i| zGN_9T7`q#1x6w&Ng)q#~2~|ML=2jt@0iEA?9crJiZyf>$%Ltr2x)2M42@<@snRcFU zK<({b7)w0qwnfH|^5D_rH`>W&bnLXNKMVd#$*{1q^qr8ky02ZHulK1F0X`BDcuM%!lrpMTN9D4FCQ>D0;UYUmks8 zEaf&$1l_j0ssGk83-$NtrmC-3{u8h4xEKsMuLM4n^hIpB^8)rNlmZmHi{A-@%K=xT zxaCFcXKr-GMP>{xylki9}5}(H}B+VO%2WwE6XXlL1{;&5jftS_VM6(|M$lWT2b=nev}O25X*8F z@@il+1P{kzh7p}tvc~cGVFgF)j00MCLIS^CS3AH%(usX>%1{TnPX{oagd3pelZkJ_ z5-&nf@d)htzGP4naUJb+(tqE&boz+gVUdCKYnNg&hJbnV?G{&ru&&tmD0Iseb!V*4 z8g3G6SSM(3hBWT$fN^+k}6_?~`$WJwm2Xjf7UMlEq)ha+BP$*}Z!@+Kwu{sBBjZ zG)XEjkLi^7nzcgOv_e^o=#E9-J=`ou+&HO?MW94Fi&^bW+eeWTFu^sek6fMx3|HoO z&(dDzTr`Q|9l=Bfe8*t{>gWF)L=pb0=$d%g-Nq)TLp5F1LS#WIo=`kD!Sx*q*Wd!; zE3`{VTkT#m8;^;lm8+^%q+ zukQk8v4&V_d-0`nRmNkQ*t`8dc-k7~iB-78T!(DmhCcE;Xiyk%^z4Ow3_Mde6#q82 zs|K8@0~R-mtm}qsHYSAEX%Sdx(Zl@+P zFkG_tOLAJ?vO=URaI^_?(iTHoK}4~6km*$%mCxrd`rTXRx2pLv#FrdJ>gw?szQa`J}(=pO!G8UsQqQAo6L05FWQ+vJ2!atUUPlp-ef7ptUqT(xYCsjWK ztF=kW*wP-Hng+g(SZXpFEwGFM28VGcdBB|Y1Wq__MUryug564e@XI7!5{8P8(z@0e$_wU2fSIiDs`6#%G>Y(E0=QSd8`tbij z{rfNL>OU>gShD2T=@H45>#|#sGMo*X=&r`8$EkE8WTTlYXgkNbe!nF;XJ4*9IKw7q z^|tyu6~XMO5Nv1eKPBI}llC?racU(niW%W*4{85Y*H>S5x~%QIb9%ih)tSGsLrxy- zbWc`7!!>vjw^gO;xm2+*&3dlC=3V#b>2R^oD~ZBiu%LZNb<@bP&f2L-A#0TA3avpH z=tLZ7K^(Z9Xdc5j70ho>J?b0Ci9W;mM4dW@P|i>BQh9ZL_n}7Wn7w43_i*gO1{6l) z1w6KG*?IRM)h>AKw31%s=m_-9Zdz|#r{EV?mWS1>4Oq2uUQ_d^0|_-8j!i}B`#HhZ1KN*&NZPT|fMod_9 zXD3n@S=MK6;(=djefDpsQ)#egN~MeCTV12cTy5ig6@I0RyL4|WwScRqyOfuF$yMRq zi`ItCM>ezJR;OB}Y@`r`)=!w<9;pjD@BQ_5_-~WmKv{-L)NONBG6QcyKRD^xPlUK3 zUFQroKha0$HP^H+$opDW5oli{A}*otqUrQjUDc&QoL9i9b1X?VdTL+Eg3;KuO;baA z&9xtOc;1a@ebxx>UNy$6(x5{}w}w{Ap0S3Ek=!|51K=z1^Nlo0fm=)`tF@8RDyKn; zG1A86LaqRr$X=q3nPdE+3e)$A0@kWq=;ncs%c$892sVEl6x*KaoT>aamG?aJF+H9$ zht2fO2VVU;L0gNS>Psyj8H*Iwl(Y!5(%cuxbG9j)h~dp#)RG3cE`-n-rH&HPk7>*Nz8oz^lM2D4rS0#4ocE*U>_5KbvJvaJM zmAEV8l_4h~srC(?NNG^&cbt1|ih7lYk%nqG($7V5YZqAk&P+d!=PYeffpl1by?P}x z(2X0g664ki2awhM33^0oKFTAA58HTn=S+c`AC6K;O=u*}I4ny}{)K^tHLnIbmI5*1 
z22+h8I{DTC4Km4K-~4(O_0RY?}36`ir#t_l+dar8xZp6q1BC^b$Uv1&%{6CW_ zP1;dAqs{H{xx|dwK)J|r&Im>vK$O_5u<2QW7P_9?n`)ym3jorR>@42oEmy0)oB8PI zLe>Vjtq|-mK?a-A)P$}|VkbYuG}P;~wMwS_PVYI|SN^#8vi_L^8C~#}_%;cd^Q%)3 zkPbD;iaciq;y;g}Hb?HlUX{AtizfXw@L`99_%EUBM%c1lu^3Xhv_|x?-g^#V2-GTI~OA00WPYO*_Jyw3zWrZ|bVcn7^4n zW*d7ZNSSF39;F&A3K}g+LRc`7`C~3Rsi|%3E(NnH(?x6MwLTlka*93jLDpkB-nOv2gYuNcY=FiN~k(q(|3}30PbiWcM!7$CnzP7vAt+o zjt1b@i83+JOVipQG~F24_x4IIT;zro7DJOy$?xgO6QiIB_v~wGDDBk@B#iTiV{%p^ z!Bm=zjJUCWX@;ZiSJ5iiU%zX;v}+NpB!k7B7=jITH-H;uj*v4serT^zxSRfV(XtYQ zP|SF#6dAZYXSQdLZoYU@BPJwYXy(sCUyg%u?-5QhqFiLGEBYJi%zu(c1{J6ZvZyT& zge+8k0xFaI*P`9w&i&OL{`cBpG&E{kKX7@j9s&hc@^*9Ajcodm2e1z>sgs)PFEyGyU3qp#36qwzvMK6x$vJK(HmMl7l$4||3gVs zOYQYT(lq{BEK!F9yq}Mif{D zuN8B>?^bpE9a5@|R(`?|2p43T3Nf%LuDMdAl2(aNW>pkRc^(A}KlU7G`}WXhybY1+ zrwLQl>JQ{~;6hH?5qEmgi-EGi1JDzxc_#H>qVe*sz}+Jdzx7k@vq*O@p1+kUa6*$2 zCcp!A^Xaa5<&(ny1-$uXQWrr>H)3HLDB zh~5yaU&N#|!%T~nTk_^I=eS1RaNX520-A|)ABSV^gE-Y)A zNxm&EdqK#+A~<^?h+U{Q2tIt5#pHDGuczjxejnBtj>4K5OJH6W#ClJ1Qu_6iY031t z$%w;y`u!ix@9%*rK|KnhL>aP1#ik=fAuZas(Cu--?>5p``Q7@hmcYp|cBNL1t>#}) zEn05>S;+Z1WV^(3WPzA|W@M8ENe4fTKu=2Dd|2Y~Uo=@HTBe{6zNptFAvSMd)e0u` zbT?#>MkpitKoI>@a6rlBat@LoH$~vYbgq=GKL?t-L~UNmgp|#ZAa`A?>!;Fk$`Lq` z>%uy9f%a>FaLW`(ROlD7KUmXjJ8lAA^0sF_)_c@>v2eQ zO?zH02r{&FbX(kRMta$QBENHZ#JD@I?zI=&FcbsC+;&*M3!GS{FnBELM0Fz)#$f+x zmafH(?!q9m$ap?k1!?mKw=al-#M)c>^;P0xqiy~k7ozQLnA!LVIYJzW-?m2UY$l;j zq6nZ<&`gPpbJ3T%1Loc8a~q=s%st>cKsZn{s>|lNCM(c|fFd#Qo+)4%jkpm}Hrsfj z6&q)$xU)hCR@w=mg3HtAcSrBSuQAUPEG(1HEdcKx->1}JBc|Cn&Qtv7Q#mvNEqhCv zNcwc*Y-c7-vCQjivA$y=knruIJKVMFmSxvOTVh-}QOSSqnAQOAVY6q>jiGKBI2(9C zw|hw$srTI8=lA*+Ulbv>lwcpxg=Wc>9WkebPut~x4yLC!y-A3pE9Et!exCq;$7B*QzZQHd?l$1+(t*dZh%M@JkgOT>bN}u8%?VHlb1t z^6je{ll53>)x^q`QDA;sYp+L;TGY6-SSr+53wZlXx__G>rJnKkXBLa7+8=*+tz?0D zGJX^*!?2IXP#UWW7145{C9ZlO(7u_JPM<70m3wWc_)g;Z@c({zODDg+{XarXhT&f% zOjQL%4iW9x-j7Qf9m8XdJ8QaT688Ks+XWpjJZEaz@Qk(4MlXK{m`V@gT7ov(1Ywig zQF9-|*KWyRr|sR!zu>bk=O>)eVA)@i_6pyzUQJ>*TD9}KU1%`z3w;TgXWnuVNc=-k zCQjiVP&Cm-6=t=79kmJNtbER%TZog=)7sav{ysVYk8MsUvgo<^z=hj0U{D?hmG`09 zEN_&@E*}O8$YUymh+?tAuKOL>8D#$Kz*Pz1myt#^4~t>7n27#_huoi^re$h%AUt`0 zI-UKHeeGWXt8Pc7~dlh3Bq@ZUS<=0t3`NGWF_Jo=1M6Gw)rE;v);T zMu}_QPBMVDbfk5wgo1SQ%ZEz;AdXGRU>SX}(ZjM}W+A~+h!NSDFP`VEq!}cTQRK_Y zi!QfhcByKYqWDT6twX=iG}$`ROV}CI8jGQzN;DJ0pXe7Y_%2U_dtr@^|u_O=K=)g?sq!LYS9M zuEF~^+UJXgstq9khX?Yiz-{BDz%D4R2y}a{Ho2#bNcp6=K+8!;AKRa`LfDMBD&eH7 z`$I<^e4h6K=J)Ea1rB#dY(Lmi!dPX&@Sp{>0m2%Q7(UL9`;UBEAXgeOE0!zu*QTAQ8*Fnz<%^nQpI+jSrkmWxiSD^R-|EF8zR^n zk+tY<1@AM$zUK|essGvVH^+Q=9s@Ra*g>`@w-yWD3|hu~Ff-uY``I%?i7YL&*~t=K z;Tl%tsrrd&?x+Z~xS;WQVPb;1K}Agc?rk~4Y>WM|Bwx{%Y?nX4Jsn+?^DwEmpXJJU z8Sq1VDMWu(;O&CB@0#q2xD9ky?YAjuD~yv-x>1 zg3G*(TDYhwGl+eL8Pco=+7!PuTI?B9iPda+ut8cIRGwjnIG$P+kO>7|PEB>DaL6^~ z5y4B&zg%(+eoJx*4KK>I!^=F>9hCu|l?>3j<;A@OGO<2$)V6xygm$PRUQ;UX`l7$5 z5Atfg<*Ll>X)TfggC z*x$6r!VWPl5T8r`3=+E@&D=7pFkV!c1?S4gW|ain#5$R$*QUM{YKsy7&scR#07rG) zFPfCOG~^OVm)IKE@WJsE5;w5`4LIFE9&P|#6AGk-=9@eyIj5NaVLlWuU^?G$Fa$Un zoI$*zxqK1hQQXo+P>$6!#J!495R8O?sx>)Zh<4RX;W(wfJf~=4V%e*4nhAxA%i2p5 zTq76ebClVhc!xB+)9!1-!&JeWIH}xdZUyJjbWo9vrL@rw2g}p$oy8{;sc>EYidBdA zl8GVitJ(7cenf1$YJPO9Rj(P{1f80tD*PJB7ZhGwqiZ6pa0zOp4WZ=#BHP9 zafIIniZ%1^AH8~B8hk0z%G`UTjb({2SD7?0I+e|lv+-^H+$)$6p8L+T-04))c|e`Q z0@4!w*C0i)1m}|LvPp}Ek3H^d@zICC@1=Fw8N}g-MPIhf5v)`H?BRWO;_)ZBX#e|n zwpZyE?c!(#GHZ(4CE6jinL8AEi*nE+lSf<@gPQX&>$AT&GK&NH;lnIp6RuzGnbR59 zy(RMHHUwEcJHz=<6VnsJ0Wa~$Pc{SFl`cOsOg&pQv_SJeD{ z0}!JfsBAhjC#2hfWS49bYy#UQ<-EQ)IIA-!q<5!lq<1gN#Ihe7&kV&t0b5I!&?xw| zS<>#*l=C+^dJf*l(g~h<`hgI(Ds1BC2OI%C-K@^JfUAuUH9a5o_@AQn+io^X 
z*pYKsSzxcvhaF06CMQAGhlLSbJC0QRp2QdU$Jkw|`VrE0nnjaxn(&3}q8AHUdwvP8 zDEYj&$3~bQY)0PC71(+6Lea@9?su813ye!86v&JrYT9&``|iO?`XxGm_v`PMQO8q0 zC}S_TkxvH-YgPBhl3zT3vsg0>8c~jou`;{M6gB;yoe;|_ISS13A99ewsbg8_3=*K# z@<_T@KJBSB7Y3*%kS@51*5Hit996mh<<4r&T5thc_DI2bif%L$Ntb>8%loPVxe?p^ zcLf*yZs%z6<{wOjy$?EC=^d9!Zx|>((DC0W6|B8WTM%6pf%%9uUL!8@6Q#KFPa108 zU9nHA9G`kNCiG>I?1DVqFTe9AZ>tAm3k84rbMsea97>0wF2tP3^7g0Q#Mi*G=`b{^ z`OeO^h3?yDw7aV84g^szKTKa9VRVm(>q-(;0>PA!rGgl2JmbZLB8Lz7;yy%-r!7oh z7TY?lIHF3 z;c-^wA0=6R!m-_w=;lK02BVfMr*#kuw2B*DqCp;T$jjbj5&1G;iVqrt$4XWOW5w#z82=gXx1N-b=K8t{=y$VB7 zsTj>Wqu{m&D|;5&zM<}(yf{>auf(qu8D})j8l@#nZ{V5RqWA)hA#M1PPi*TpEV~Cp zHexNx32)M6G`^9@Pb$Tk>xFEbB*8LRCOYU~KBQTqZ+W)5;dJ+h;mj>U=74}ll&8sE z%3Bw$t2F8QW!laxiT&pKrp^WqE1HK z#6wzp0p*5zBQ%oH_ItqhfpL0kTS&<+gwNyKm zZR?4Wn6y|IYizpUmtm5-Zw>G)T%_jJiZA50REn$_rRpA{**ds#_{1DHkERWv7b9OR zxF4Zes1k_9OZRsjL5V)1YJz?}bcT5K{%#vO6aZKb&F8tOa;N_E8~r<#ZEY;}A+Q9W;b_&lu!N6F?z^;_F=QZ3MmruSiD zu`n4=A2g!CvwxV#Xns4+$l!z<#>@dli4LAOAAom;ja87DS{am~lT^tWw)~w`bQ}1mwOU@#z|D2KP^YwoWxMgD7fQ!^T zL$CEQpHdY^`Tvd05qdU?hPo1{X+nd)MAU(;{Y19tg7Xy6sU8ZtbXK34@{38V=D}>B zhNu{}%tl;k9sO%h4BJ37SE0n85fK^{?+BDAWI@(ogtvV-p~j5(A77(96GLI{M$;JV zi1BiW=ZCd}pSYtki`ROU&VN?3{u)Fz2)36KLn-BW9)S?BAX$((41?Uyz=_c~i)PE~J3GBj}S8ECsl)KR0=Dc+-m?GSgS z=hY!X>uxZ7{d~$y1DaV?k0|I1LHCi2p8S!MB#jf0b>@NA{T9P@~ii$ z%D<-OxL}W*Wlj|dKkY3{q-@w_jH7s)y?VK&l^UkqX`Fk(26i#~KWg^W{g50UG0&$C z&;+pMn@5&M>SREx*}IG5$uB3%Hfqf6^2XLNo`<%_0FgdWy{eaOCb(NSt12bcEfwPi*5v$uGkXP?VkU(S;1{uNtZnGG( zQzH@=WRAtfIdhO)vn8UE2XO5#X~yQqCPZJ^6OJ2a{cceaqnht6OnKK)!xBdJC=|rC z?e>GR5DsvPQo})}PTE7G;*rUg{x)45XJ@P%=hrOq&n`-_^L zta-UT3CbqFfj(~|?NjQIr)T@6L!LP8#lLzWUBL=dbF~m0#SAmg@Au^igqj&leE+C> zm43CF35iEAl_w4>o0MZ+0~!iFOeT}U-!k-m-diMg;2>*HwN^;z;u)VeY5_~KABP2ZY zeGROUow)37MnUgX_|L=E|2Gsu4CVh2Gt{mto10yg<%mb1EV(u>Y=?pJUB<; zDM_fhvv$V-R_6Z{m#{-rMYdvHBSB?3Ws*j8hgSX8LvQ@8MMGRhc>aEpn;Nl1{!jR5 zcSI>o)cI<~kWCff9YbcrEN0DS8W%-|fsBg$XJl%0^6(C|TUfrlUk}!q2H$dp%j{|G?jalb zYHYp4n7sbAXuP?*Hva}Q`6(m(_u^=q@W0H<%XzGn1(Ug0@s@+YO}%Y_nC`sT=@Pn5 zkLCgPaeUsp^mKlKL5c5gMEbiweu*H*F{JQ-K2LF)*TqoC=ICLEOtz*<_SW?{;%3*# zqGCM$eaoR!=O(h?sm{Vvp*_@KkVP;4LE7JQaTHn|WwiGr*!-J?f1PW)1q!YxeZtWU z1LHfjaLimIgdkKrdzyD;x|l2VfOdpYeWeWJc1=hozS7+-l_-J(hi!)XiVG5F(c6zm z^RoX!fTf>3fONP2gmHJeNs7H1&21!+-lx$p1}M*#PGn!b>KFq$+Lies>seihlI`>I zwtFvZTXaQQcGwHTdQbA#yx&32G98=BOml9fSz}S2(q1DNuIPYREHrxd@2!lSP@+Z-T~k+@hYpJOCZB1zMjj?n&<)n>fyg z)&DrUF6)89w=HNqu{9Dlk27`DXz#C_<0hm@iFrRJg^tOl|4 zxi#)0t6BWe1^4-HW0vByRtiO3%3EO%qIU2`x0@+TMdh`VeO`@H%rZ>B*JcxRff zDL~g%3HK6<2!q3%F;|Zjpd}4hE!`0!g&#n# zN7o^jP^wf-=z()6-jw$)a3j~r3ci^}QWy&%!$HPZQ737H0s=e(g2gs{FAoXNRH2t} zk&y#jc;UgU;k5u<>%QeC#>X^X3G(N#fcbb#K*T(+B?fI$2P;n?@@w0F>joAl|IF>q z7%PGEzue7yTKx;+XE=FLRVZmqu);9)^-=!t9cX2(rkDa{nnm4K-)E8{>sALz^&r4X1Nhr6i zYS89PnDw7C9-DiMwN3F*&o5rsqV7mSBIFAKi_D1{o!>KbR@E@~A4&J*5IXEiDepGN z*c~Poh2ZQySDEwZBrMEC@M3)5Z^Hf>2PBxXz!E(y8HQ9qD08O*lqBk12&_`i3kD>T+ObohOqW>v?S>!Y*x_?nai>0 zbD_ZfwD+q!FLnf_e9Q{!?tO4w;aPfjkmvGeywONhWNH33%G-t?Q5;H)^@&>rRl~qD zHsI%axfu5I4Qvj`>&dfX;Hh~iw@>~p`md@;Ml~C^XGXc%?E|#CQ#b+n=K3AJY)332 zj)b)?Y7+NpSqbi!;6LEAinu!^_hVi1Sidh}Z#A+?X4yO_zBZ0%ZC7h-7A8q1;%Y0+ zOM6aF3!`r{B*r^flZkx@|MK7Z!odyPZWRk(FTC-8j}Y(2-~0A}e{ z+~!$|j;m=;y=uM0;{RNHLfzfP4=NpJjfL6Ig_$s(jHNb^UhH2km;IAovAoL%oipBK z136ZtSG~V!dO*vraU4xus1PanY=sq`Ge*57n?gc(8*Po${7T4T5h;&fI_m_N)hW(v zML_9$BIno76!0@p|E;ZX#z{A9npeEYkK_}aOc4ACY7 zF=PG0sZE%u>#M>+`N!@z0MVwfU%v%E{#+A8sar2G`T)eay%RT*pHs7UlzjSi4OTLs z{(s#qG*P-(ra}F`>6*<)0gp#`PA65D1h~>?(%0kHPE8DUL8+Fb-5&S+w4J(weK1g~ zEZDB8vGHF_y6&pMmhFj1=T*_dkd8?4*2oD{$8i@~6V$Iuq075QY)h!_+GZ@;CX%@I 
zu-<-JZb@R8MH^KoOfjt$7aN6P2GlV#M|S$DiAMYG(4Znvq3oUdRW?&X48UcUcUtsJkqD7Eb2^hCVBcD_!;}z4(`YJaLv-`zFufH zD)yxQu1UlfI(>OJ5c}w9dh`KJuE?b6I?u(d<>#cg(NH)X?h%qi0(nJw)PciZB_b+f`cXRUn zaKEYiE0SGx1acpwoyA*J}= z5yX`mqyn{ntEDklU&{VYg24VEu+TThI+>|Nm7YEZlz=8L+Mc6!m|iGp{afZYRcU-7 zOAINmvOY@ev>*ZoiUg=*j^ynKbk5Hw0QaO34qHL&e>vt)Vl`cTy{ zk@kGNhYjmlMg?4G0gbpSfC9@@;BysoFl48)D*-FVaCupgf?%4RW2uwfx50yr$M11c*W1PVCoB;FwSZdy+spsHOD5M_)mUTDoB; z%{3}i&?`@LXu>Hm!=&i92y%fiNb&|#+lpVdRNXm(i?dq zfI)h8NTy4j1L}A+GH1S6^>5>YE~amS&cq{luRaYKAmXZPK>b5uQ7k);v3nGRF@P}i-{nQrQv3|GIkl`bMjpP1^BhJ*( zeF~+I{Dvr>p3L1QL%&uk!6epEh=dvajI&E zGgMqJ4J}3-gFkO;&b+%Aki6u^*m2ZiHUJ*~wUgdqs)t^w`-DHv*v>6?##Ls!Ux971 z7ze;evm*v1Ww26h^dDKgeL?ZFS``>$+>HH{PK`FQxGANVFBEcy-};FuMm4PKh*7b@FLOyb)Jrj}33xZ+r#orFXp#g?$awAI+9! z0d!*^sAmH@E21%~5Fp(q3c!$v1tG_QPnM-8$tIC7a;i?qTT{66*<69>|0L=vNya{(A> za~>)mstJ2Wp)`rbh+~b~L*^(h4tz`FmhCTvAK*>Oxu03qed9^NC4%ChFxNz)UwN*y zdbPUAXVkgmgq1(0D(@p#v+p8_Yuj-{wb0kN0I~$M9$v}r_-n*~G!jU2M)$B0Q;pT1K#Fu z`ZA|<#00GNB#)W0*IrDs*xnF8jb%6Y{3#dhLy(7h9ar$#OB%Vtz#87M~ zdu*Uu4~f-pG%}QYETj=68}ViOar7E5dF*$1F0Dix#D9aJMl{H8=2W%DvN)U*-1^BGazPoPjaJIrCV~cMpk^l%pa$K| z4=+b`iI#G*Twh0@mPmqWQU9%^`_NilZ&M{pEwUrvB3$MO&`o1})66tyLb{Ftpko4< zeFw-TwN+mdK>F*kSoiWfWExUHKUWt6ug#6M#k8CS^hJtQ(j6D{d$&BTYIl+(`yC09 zy$3;n=~g^eZ1boUYPDg_VeLCXeJ-6#x7Qn*po%PHSW(PZz!91K;z2^5hK!STIA2qm z?)*;*+Z!XvqT{3A_eg-naIA8xB}EjHK?_DuJocV2rWzkVk+G!(8ikN40m5_E&P&ze z6-k}~#|UfnWv*;U`(e@1}RS z`vf50hDL?TlPyJmX91bE7Q@VbpWu-}Wxi$hliCq{92PZQ$sMbqgz!We+wNdS=&Uf@ zB^_CE`)Pfot&*;ACzwYVNBq&M=i?j!z$b7V9azG4Py79O(sO`|`(Pd)499JFmu{3) z7MRTn#Qf^E5q(H*xdz0(<26cFUqxn5PwOH`N)JUl(3QNiOTJUQid32!3b3_ z+1Wwqy>U!RY?b~HP4`Q4)2L_rqQ%ueaxbQD0&$HR|9@4C?>Lg^67U-G-zQ>aIua48 zFRg#9>9qbGqR4=)n3{4eY+me2IRI9?Wz_>&Yw}V#Vjk3$!VqEajzhK*&u5 z5(M7pUef5+Pk!-YhG8I73U~eS4kXpbl>F?=5z5y=tP#=8q5@G42<^-iURCN4vNR?p zs?KuzsfARnGz|FGY7Pkff&F{EJMOTm9Jkv>v$onL;dLP!Yc8yC#x9`NejuKEGKjDn zBWbCa)KaP==eY?;u;tBp(BNjif3_KC880l;(R8nQJLP}DOODQh+1)xR> zUR2SqGnEDK%@G;LC}Q6p7h|)8{=3!T98o4gfmxLu@cpyGOMd68-A^vw!lMRD6l87v z=>l-^E=Zgy>4^*@S|!|Blp258nIFsaieGW&q~@%0c^fYH1!O{;kDoJ_}Z$QZr3Qps3jH zC(Ml367nX8USAu?iBXG$K~!DVLf~Usq@6l{7{P$0x>%cFk4J151}LnaB|!RU|>GXmCDL|Y`zdR{mI#r7JS@pDQuA`M+KOU7=5P9 zVwjs9H{>ttukP92c8mrKP{x3N+TH-8=X_A?rkRfm3{vrECl>yyk2LY0lmP>i$#W>$^lp+7%9nn$cOWAXwojRgxVTleUwHM9{3D3%I?CIyW#8p;FB zfH})o7Ex;MA82_^Ev0c3Qg@VSF%}GN6P1TQgDIHj(D9)H;q?XBi^H&4<|)xw%HYed z6d!Arr8=;4H2;A^*x6)0z|gUd<5Ik=InZggq6R?Rny-3?KmD5I)__Rp4>jRd}law~ou!-eFpAwg(mr341+ zPG?l>)4EILt-fhjVliXPsDN0HbGNM$?qNdwYP$sq-%#flZc|H{5*I??m!Ljf>QB5|g!$nGGij+lEo)4n zn5@yd*XfRRt|aCO-(=pDj8!7td`~j#GTLz+$z-v!$V==l)T4Y@(&-ha?LkEt`bLoD z&}igG0W2W8>%!GAsX`JSpsN9ip{=Afh?XCwPZv4}y`gVKnq+f+!?fZVpHmL06v;Jm9+5NseyP z*0A1cQ`Yg@RF-Po)9s^lKw@GxUHY6T#B5QbPJ^^Lhkz9$CVU(XfTmGagtUH#@pv3c z@i8KE2^MYU3ivXJ258ufyu{m~grr8H%#Z%EAFpWQQ1_Y}hBn00myVFLC1Z$}7+V}y zU-;HSgvjtLZgunke+FCt+Q~&=tp?+gl(2A{>oS19iwQ=fFn=%Q@-(Oz8Evm$fdFds z=5Rdx^Pf$~Zy>YK?fJ9-WP2WaCP`@m7f@k-joL~ z(HK{mIpB5V0G*(T>UmD_TY5D&#<66e-&9Oyc!M&c^o^iNUj{drDBiS!p*+>PW8NbH z$T_scJPvGk@ufJ?nBQ1 zPf1XY4YH7DHfl&<8}e&k1Dxkuh54wYzy~{^Rmwv(h$RhH^S)d(V81M*_nNcmHDU`OF1Z{l%-Cc2 z@%AR}OvU$1X%%N`IRFIW<$6g2rWBl1dzr?xl2UMT!rXK9k*i0&yALJ}RhVSU6uq<+ z_}SYw&{-!?l7FiOEA@W;#Hsm^g3DZby$=~)o{tJX_a1%~y@6~g=N4>$z%a759tOu$)QQ~D)GKRA7bAS$e& z__>bZVs8-O;fH8y#%ky$@4C0IDes#oL336~j~2(e+;e+(4HvdPy`kaiXHkRGp3(++ z#7!R5SQE26g;@lQq2kpRvI>Rj%$}l2JuIl!YVep;p`k%v^|OJ#m>JQi&B&;_pu2l; z?Xg27ivDqi4yx9{#-CBe;D>AOZQ0I-D?^I;^4g_u@9W+2kgL_VjT>f+ffv6&5@@U{ zjM;aYk9<#kPn3dzT3%&WrHd5baI*6r#l$@i&9-BOg9c#F0=ADe-zKr+Bb{PXY)$Bn zn6Q^yR2@pJ+K`_zH3y=+-`LM+^QR4PspcfeB#rN;V)X#pWeQ@w%{<33z%eM?D7#bz 
z*u;oJmWCu3hq~!e84(KmB&v|gzUvm6ETO&TL1mYG(K17VpT3RdQ1dZjlwLEn?-9@$ zzAvhSekl0hME#!Y*sJL)wka|3lL~a>BpJv?NwIA(=;@Wm%k)@$wq}bX91C1Yt{;sl zBY`Zd5sCj!>L#tX(D*J3-s^Vh&-CFruL~4nV06;aR+twA43w?sr_!1j*@?t(voiw2 zqOw{=)PM!NwinA$#$B7vja~5B%N^q6;8kIcAI47ll+w=GnX`8&xS>PzNmo!fsNu$Z z*!i(mU88F`Eodae$ZkG3yE=J;e_j```^XE(tj9<J<%qnLlpt6a*+O#RIjDHGyom9d!>bRY6d}i^<(h{5ka0(cJtF?G7`m5ACYAmw}%SX%?g1y$z-XU{A@-o2R$MD`HJe$$l zy|dezg8mwwXeR&?pR+&*kymeP#1F zJ)$Ca0LiWXj!r`vinl+}*KVvhg^T+qF#mtX1f~YApCG-94RNJL&pcEk>l(__0M9&d zj`D@%+^7Gmz3Yr>s@t{!482Mh1Vj{Rg3`OdhX@Erkxpm=(mMg9DNRrWQF;dfsnSBI z2}PQepeVif-a=18-r?Rm-n*}k@&3IZFF!L**=Mh{=A37rHRcvC81YHF-sVVVm|N3e z9G#@Cx*Ja>Z)q!;?-n-+a#F$|%!O%x8xjxmw~vv;(_Kh*(}f*+UE7jeS<%z3b%09L zd~ZYXlShR6vbPX&Le@l(d(g3AkDS@740tU-b$C z2LmLcSejctO@p;Ojmsi6i6S8lL}35!*5NdtB(J&baf*-*k2^QF;^D?%kMb0dRJvLf zxOprdM)1`5qn^+KzgdXSxL53xtcaqyB0AP2$Vca}YGm!^$GU|-U-BMCXmF$_sisMn zm4EE?R?YATM(<3mH9X%fqwr<~pk=ik4zb*8h^(;v??X` z78ZWO##2RHx^f4v)}|y+;NkDdZb(lpd29Hux?kZ!B7>j(8m*6We!p1-86Wt!{0=`> z^G%(hZFiDVNnH3u4t=8S*u}c-cSt?0O|J`5ld%QOzKm-RWcl^^VLrhs*V_~$s9G4E zewuo@evKKyxEzgw^x}~?DfO!TPioC*OtoXgTMD01qIrtX(fl^IRd@H#E_q#&?WvH} z4{64&J9OJh$l3%7`HVgNU$QwM0T!1M_Y8h0u~d}t zgHKx9{F6&Hox*p_yOkH%Mupl-xyx1*XxW3pBZ}!-;AG- z{XLe{&+4LnFre{Gltt4Gq$0&mx>r6U9r#a-N>6WxD8H#YJw6C-#oH&m}NBVFJ$`ghDzgc ze`nXkIh_HK9;d4GVwEUQ8BtFOJl0m0gkHKFo+N8oO8s(I&Eo4puJppEQyzq&FKK@h43XJ!AM2)jPc#T-uh=-YGR{R=O~woFF5 zuI*v}!k5WO<$PZ}?c;xkvqHaDGH)(-hB`mw%>o(Ps*AtXjlL06S*EkG0jPL(T8l>B-l%+)#*1igj6jZG*=*TZL|hpuMvL7N$$TEO$yHO>kKG zF4(}}02VJsP?w2xpAcwI3?%2`vjaIjx2$0|T>a}0lbPlNNUKbk&?}32Zuqc?kE-2^ zYo)5x34wjh$5F9%vFp=A2CgRJzIA-KpJ+R^i)vLWd$j@(}7sjpzoI?kd&7Vdm9_twklugBuoEN ze>_d-;p)PNgg!F zRqzy4%ktXtL2)~QexNi#lIWR>00s+%R&x)`=GUo>wI0qZ6^N)mU%zJH% z?tHG2?RJ(o-m?LLKi#TgUprD=xEETP`eX8#+{GqZ!-{wB3w0X#rnX)r&2BQ}-U-#nSoWNHM3L+=cJLj@M7@A^)_k0yq&ew5A|or3uYu?mbMm59lY zf`cuADDcaj_`Pc7+5e);(-vZ#}tsku|cDQ<8MOBE)E zn#1iv@dupv0}JSkZxfE0nVFZDR~R#ZZ+}6hjZY42y%tkZmB8Ki>`+NqO(XmLY`N3i zOBZQd(1rndF{SxoQDl|Oy)D$*O!rsDn0wpoqpmknq-FNN#7V}wPg^)U8QX*cVwh*J z)9Ps*N9qJ-&{-E9tV!F2A)Qm@y_|Q1e+m^#5}5^Y%3j#ypdZ*;GqBcnpGy!N=iOAO0X=_?a*)4Wwyy;x3COwUm1%W2Ki^ZKQqs)fHm z0y>}JHmhAkKSX$0hxbI*h>0MB46@1keI!-O5O-dOA1lkEo=Y zxWi#mjll=C2kao@_o;O2->V7+?oDL!#0#LNUM*7U)!%)5e+?X3sMuf6qITt=v^mPZ|-&5=bf*TwIYW^C-&sM-|QmA#*P#z z@;z?LT-|@qT-xJ*tvh+N9almLav`FPc+;Kj$rt^+6qEACmPyIZW3)#2!*e=w%%T!y zZp%7v^w20lIn$()u9%h@+GS;Uz7}-e7I{B`Ro`TWe43d^OUi{KyB z)LAe4Ik{CYIHkhI<^1Ps>QS8~0bw(1{|W881|GLq(#0|<>lamfABV6zQOrF5h$<1p z&@l|5aL45g?!nG+bTUEa{2{$8WYsroRV*uQI8`R=s52pw!})5142HOVk_U+z#JMl% zW!1fmSG>~$W*#25rNJBri64~nq|SDA>t>GFdOvy5N0Tp-ZL@T*!9#W>)U%WW*fj~R z3DQ8@4++D#jTLt;zr=g9_j(e1o(%J)mRvmP*t0@*QwC=?u2&V4w<#Rk)~&SezQdo^ zQczIP)6*BbRGi@>vv0}0TR+8MPWOSafkL%#y%@&QXSVIXN$BD8kK};RuFeoX;y4HAZA{y|RO)q*)iS7sS8<71CThdD z-k%MdZki>Lw4U5$j$nb%ri)T>#4MX+kk$(5z_?yKwv%tOe?y}{!x2=B3Rg_a@=`Ls zMxl&GlI`4!qDp3tzz22Zl@$3%P#HSpiDM*mAISa_5I&INLz(yBV@o zsXuQ-Qki0p28BpRtdvr7LX=oWb=NcflzMN;)p0GevxG?d;eMSS3}_Zx(5CB?GDX-c zm{fBK8r~-jJ|zh7iv{QPp(^n*6d`+X)+Ap7wta4PeI>|>UuG(Y^vs)Kt)&eVgtiI3 zR;|eQAy76N`yQOf8Xpwc(izjcOCQ0#SnFCiU;JuAj6zxAhN(}3O*Q7r0~a2xw=zT^ z-3N7CfAJ9|Hxasf+`h?4$^&J*)v`; zG+n!SO2)>40fjQtm^AJUZLtY$O^e}AK7!tz%$#HH@kLsDa+~EdyUvgh26N+Dt7i~D3~HhvoV8#7N92RF?B9@!Q}hT0<~JfjI=aQGkbe`i2p()WAhAjE;t7>z1AUw z$8@2P^gHx_TZmq~`NGlB(aX!L!pt+f0KCxHb9LZa7WDA+9B7_4t7n(-d7`r7THDZY zwhKjKl8hi9xfXWvK$RR_9>@2h%KjoSmyccG*994&0+>4U?eW(dA75gQ4O-BBP!@eF|Jh>2KHB)TK0vA#4a_ z=zN|Js?0|7MKvuP3~{CWtbMCd=(PhIQ#kCsjdjmni{cuxuwEp=&KB=J@XKu+g zH=u`P_ZpTyY*jYzUTl5CFES&?onQ(_c{CNi1%D@5+BIHN+H8wyzbRMumV<8j(`ivj 
z$y301R&(xR6s{bf_Nm0M>=a;Wa&U>i=D$ASzaJS#*RvLRgXm{}XZTx>8RY-##(;lb zk8t>Z{hU@O9TtTF`> z*w^ZVJv*3kM+V#2*f=?D(%|=LQc_aJa%UW5#cr@kx?zus6|;&qOzUti3zQGA$CmhG z5gNsdpYcs844nzWq38W$=H67AA$#N1OBr+6yKn)rIi6yQ+=u%+o_O4;n(k4bPDmpD zIY;atAXyuK{7=HjmwNvl^6cF=GX!R4&ub|M+n0mIAn}+n#0jkR)hXg+(wXzDjbya` zqXS!oq4QL|D~;TKTkMYISqC%Xs8ATUGk3WIIHLnDG*`iPgZHPnA#o>bIiVq_Z27sf zZb<|>YcFW|HfKPOSum4l6Ns>3qbS2%Ib=n^(sPQjEdjX8?rbaO)f{FvTmBVscsrHk z^sp1`?AHddd$JvT+Dj8ThtI*E=Y(Q=LWB0(N%k7ovSG5}uru|kA+C)*-$(VV3N?oKHktJ$&jT{Gxy-j^R+ylppne`Vb6#WzS;hZF-S}^vUxS# z8TbU4XUrHWCE0j+u>~Z<;9RZXR7(Il_;dzduO)+N1QHeQI#qA8gvACmnqN+iA-eIG z>uQRGRRBLhn>^PDK^!4x)|c69=iA%cSANO+NHiM#$LUzW@?1lgK@qz^Lm+qXojzy- zh?PBqsv>SfXDq+Zf!P}LE+42QS+~~X(N9o)5DJ7pgaJcXd>?*#W#POim7S5Xq-lA1 zn8JJaZeZ)IsW_&b`@OX;!{40WV|0QRmuDC{MQhz=nt+HQjjkhHS=y9Y&qhs5O`EWX z{ioX#OUlJ^3}sgWTKxidd5OaS#M*`L_G|(2HyCqL;aWCC1_|6K9L$ik^y<3aqD^v&_NX z{_IzrtlvGd zIbv&2qbloJ@;Hz?Lh$Lu8tQb)u2Q}VU!w0*3+q1%Op+THsI2VmS0kAr#+`Sw=v{zU zhISIpmc>{BNGR?Y6};<>yeLCngmYr8+}tYlN}g(IHO>G~zPh?PTA8*+O!+gGXSutv zu`!NKvT3JK&k=WVOuo@K?K7fvRSxxTGZ%;b!l5U%Ij!fb8K*l_DLgTR{xXMh_+?(j za{=U`fRTkSW0)xAEN~6RKC+Cu+e5J|gnx7M2Qc4dw&i@nAqO|$pnzU{Cw%2~E5OjH ze=zNh+8M7dnA-Ue7bg$6cJBqCUS$WYvff$9SX%}Gp=AnK&x_6u+$`bE_UE4d+V`yd zx)5m>sV90+5}o2)*=P^UEO7brH2~!Rl&Y_|e!G9~a(C}ArR*ExA{{v5=$9tvtL^G6 zS!g8?e0#sWSsXA6ZSpvQSz_LvVfnnk<3C33lXZ@3Ls`N|>|tRFr0Fr0Tv;P9cfe56 zA|oS1&r?Dhfy6!8ooOBwlIf9>4!auY=hyDpMnJ~U=bU{9D%lA(X$n`(Gy+<;v>(dL zXYr`Nyybhgzc4W|A@1@^y65-r-&a1q>{1?9etz|HdngpDIfY?Bl1Rnp2Tx{J7M0M4 zUt;nh*aqc5`}>W{i5nXmL?qOY2;`_Z@n=2A1Hhpe*mD7~-$Bv1LKzPA^Yc^n>bSde z&)bXAs45ua2f9zg*vV+XpVWW9d3)0NTk9SI1H2QS7=<1iX>`E9_Ys&^hdlHu#dngOhlfBKAX@-V0B%rk ze!tlYakls_n);@mNa$TNxLa0MmanIoxp@%ap3DjdvUdQ5+M7e>2+Lor7X)OZ9|~*t z0FepY9Ag`o%4>< z?b*9D@<-2LrojN1hMskDh5+cCmY(j#q_^l3w5gvHxLw^t%^`ONY=be^?6;Giovq-t z{6v?56SuA=+_at#bg&-v@#ailRW_gynhG75H8nML6mdr*I*L^DSDvgnAr~7t*kxW- z-Dv=h_4V~F@fTw`Kxe3#d#mr(XhoKGw0k4K10g^vogegY;sNRN>UgP zR-3i0u(0q74QAl(bM4_Ic%VkMT7f97q*wyHea-|HS+?cGj013F8!x~5eq(A!!ct~t zUTfs2w5+meA&TuPTLE8KGd2mA27r3Ub}2`*N4oagUK8cuv1>U&hkfBBG$FVxOXPB* z<}4r&&_b~bn@8$ocOB71|2uocwziC_EOWtbSG`Z3jwvU3N?CN zOG}@hs!19vS8{0f6agO{lqPA|sh^T?oir#hm_4q9&Hz$~gqnT+M7tQR7J8%xOn|f4 z4dj)q08aYmDvOdp;C9@n26t~*h@xVX_}sq&089`NR%(1v0VpaHTRaeY79BdT z>A=#W2&9%f;(Rr*RY6`JSE3W_B*IdzwS}VAX6{sprf$XK_AHSh!7$U-?V5fCfT)B8 z1skAzQMW?(NkXga`ja&ZYpgq?0Pye_s=dHkxm;wPmbye-ZDwVC?{0n0jiueEk diff --git a/docs/resources/diffspeech-fs2.png b/docs/resources/diffspeech-fs2.png deleted file mode 100644 index 5d5a49fabbc85f423076685246ecebeef5ba34cc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 218998 zcmeFY^;cZq(kI+F!3pjb+}+&?1eXB8H9>-F<4$7%g1Zyk9fG^NySrP{!@YCAvu0+k z=Xw8tch>4(YS-yqYu7ndyFMl1Dqmz#kqD6h0064IoRk^>09yqBKeAs5E15xLK%C2TN; zR3N1lbhy-Du!LAxKsIIEW+J&KRc`iMc$lH;YnN0YIiYGUCiRDOo%$c07rIW->w1EC z<#!^_`WLN@!$a#&wc0PLr@H^*@XL_3Kd|=`cV2O&68!ok|DWGM=&NF-*#DxhIVBA+ zX#Qaesb2@M{X^`g{K5cj1ixA%-}qSrOy^5s*pmA)fpXajiIn z-2D&n%TnS0iTaN+{+|N>fBspJ;fh|uh+d}o%-BD#9*D?Z+rLJ2R=*u>fLnbZ(tL01 z-*#z5pXT-Kfp^-zcQN3_n5PReGBU^}*=u0d)2QXT&&!$pYrp+#c-CWhmjB(C|5)Sh zQY>B&87`|~$LnzA>+l75{Q`LX0=dn4n_2KVTkv^G@_iD04TW58yuM43g&dN#x3=Dh z0$&<^U;e#R`8L=29P>6M`g{QacS7zr-d04f>;KpG1mc`8DF&f2{GTAthLDrayT&&y z$ic<)x6U6c4V|}1Z`1Kfz^h2Z*Co*xOGskpwYJZ|#Ulye?R~EY!z&!atBJRF*1s9O zjOqU|hat|KH^s`X8wt&S3-sNMd5yDw46%P)fIJ&Qx(y$ZJ5O3WMk;Z94?}&dHXbS= z&y^3_|31utDCCLP0K!iTls)WRw>71+2g$r>vKBIff`Q z77y{qxa)Y)r}&HZhxM#SEZ^-I$d%|DEo3+A_3Z-k_I_sH7tu!q$fh>r*buxU`nFRv zbEKa2JnMZm0d5w381Q`^Ff<%|@Sb?^esJ@BkZqj)kE6Wi{Ob#@gxno~+p-=w+i}U9 
zt^+Qqq&1;le{Wptb%0koNxE3U1)cwTA->PRjR#=o^ScnpU9k6p8*stRd%*2+!tiw) z@BLU4kd2ME0e3g;3OYQwK-|*{u?avn?aY`B=bvE{wrz%Ma=@|I>bfKl){ph=m+M1{ z_QSz^BCBC0R9zC0ppqDjYfU@=1nAl#z>*2eZKJ(2XHp>Cji@0KEY2nrWa~8}1Z6{J zK(4Pt5W-PqJ|CwxlHRx9ZLekeA~1lLl?bJBd_;r%4((x>VHm)yYy$FqrSdu*? zYd|&}m{eDl)WH*d<#YVnc3+T|*rKx4!zry5)F;A&S82wWLL%Seq-bMr9xU9L1X-TL^Ffikq$Uu389lMpfOe1Fq;B zBwAf))^vF3>$TTbd)r>N&Wr)@%-#=p!*6J8BGnzxVc;{JfXaN$I%Q|TiY2MY)Ap4{ z+GNXc!GdpJviv?r7xsc6*whuZIluqp;MB2qEs zG4MSj6W3c#?LPaMU4r(8=Zz;cBz~9uH=Vc7!P=|_?Oy#O!!m?Rf6+EZ4o&3@COc}CFI_P2=Gc4Q*#G4?)gKw6P%5{`-Nu_K7QxS=|cVMdLCC-^!P;-m93&7 z60`R!7pX|m>n&WDp1t9CeImR2ocr0`+>WbS%?=vSBjB#H!$dyP?vVwiIBsSz8`o&Tg z+zZ#`dR%YCtXm9=;{LGZ{Y3Tv*cBIB6hwvX&=?738%z|@xHHoogT)90L|oIY;30O! z(65J(vt)JM#ZL-GLj7Q$6B|NXQZ+R8f-@RrBWbzX8hLy)KYXw-(Mp2)V`S+OdyUTt z04%Ap6r(R1q@3=AalRx>d>UaNL@LAZ)3P!b&9k56EMp=eHIi)3Ht2#+O(6q-CHE}e zj%e#R%UBpoj6$8ee%kPvP%_}G7q=vhFix2Ukc{k9Fw%1hy(K~nFPWyV~ z)8&yr+C}&01b%HyBI`gaFS7gQmQ(i2C@a>g&ZrKk8WgAt$C$;9{L1?x@Auo<&_=me4QSz=%fi6$~-uB0@H2HCw8?l#UdD zbF;|ABzPW-L!xJ4z#(pD<%O|V@Y&z&{ZIz1wrBq9M*_K?d+J$PE9usOoe6Hha|y6@ z<77TkN87-__PHSb6jZpd!>98aJG+BO_pdKA6!J$;(?j$4vwxKBX>9bh-%aZwt&lf? z6(11O4blS`w5yR!eZu}8Frb{IodxH2-*99B*i{gPV$_zCZH4hm5}kNn$ho} z3N~s9MMANJrWHbENMTYi8$D zMCD_I@3r#V2fqoQ``U$*J7mMh?l$>HEYT~hEbyu2yZ3+c7Rj18vETKkb#n`*^TVzZ zoA8sJOGhbMLGGqBfzIaoP0}}^ajxf)v)3j`?}{Mi+)Q3(_HYAo5I@YDV?`8Hh@ajJ z{fLREV&x;1Pq4PfdbD5DE; zlwKyX!`ef0HQb$@Lh^(nXXw*`B|d95z%YSP?jqo0 z#tZ6*3R{S5dkRp|)yMc=kYRQ3DDjB}5jmnS0ta{lXUzY(Q%|HCCTM-_ogKXD2G|Qu z#M5e|PxaKIn_?7G4LN(C%Q~^N8 zs-j$G-$H)cj+<`Y8K}jh_I^fionDwk7%0APfdw&76KK*V)qKoPfTnP2;;969F&8&6LDy^XvcyGr#2pB z=zr)St}{;O)|JMB*oR;t8~z!OkqzmYo@eZQbeQOUEg!#3N5@dWn;#NUiSkSc4bZ+c8310N|$Yr;b zd?P;aV?0G^<^(jcUgsv1H4sT#+4wO5D!Ji1{L(KW&Xr%!$~hEC`u;5k!WbS&A#PNN{igHD~{}saRISc&HQ2cc;W_08A zJ%T3ld7FSd0eqHNU(8n;!0nw64;@!TotRxQ?~lALzQ^nf-eU`|l}uW$awMVtXzU3C z$r(!@Zq(`7=|B!8FrCDNmb>uWuI49w+d{t_>~h}bsFGF?*}0U+;hQ-+6jR+0^;SuP z2!4MSsi5_)yz1W|leJ<=iiU62^^gL>0d%dss!g~qho3**X|7f<;U)Yz3U|!WV)ru_ z9smsu%*LSZet~SqM>%X*uNy5a@PN&hF7e!ldTYFH31 zUqd$}pGJl&@1w=(EaArH^YhR2gqYf(CSy#z8)zg&BZqMkGCGpNoio_c&QN%a11KM_ z$;K?q7c6UOwO*M+(7SW z$HEcQpAVUtKt9N_a-cNn)t}@0v0*>I3y!^-dUHCRHzzTHcLz>To?p$(vwR0U=DXPG z4o{R1Eb&`IV|@=d7%(T6I(Uc#1>Xg|fcYwD9lTv6VS-cy8_b18vzbLfP9>z46{mII zc{k)q$XxYN)MZ0O?F3_-@W^)H)u9g=)`E}*9Z5iGAHG*WIFO8Lfv9Y1W7{X0I@LRG z<$dcO*HMerID~HffWa?}btW`Z2J9<CL9VT++R@ z907^$U>BqOfK!xW?E<#{8MG6S6(UhV7sK9fx+aq))WN9l6&dGA9KKyEu%AJ_Va{vF z@tw)c0H{i__99x4!j-^y2c4nq{MNs}9BkOumk{j$X9BYStkmEA!af;7QhTbORp~lt z=h&KQw%Q>=6E(GeLK`3>(TlXxw>ws_JT1o?*kifOpP}n7!oSf_a4?aXH&U#7U+>1e zL?I}q6Fir^N4|4{E7)O?x^Tw(qp7Shs=O(MR&a1ix!;(2`CbW2&I+ye#}fPDKJD?d zc`Qkiq=Q=BZ9MN6l__5{o9$ObC|Cg9R7|HiYOBvA1sNeUTt(;6)s&!*(kJLcp-KiAK00j= z>t2v|a3>3SlJ&in^?eNWeVoa9yZsk#ym%eIc+Kd%aeFH@y!+E}e$sLN(g}Hi>~FlL zz1#PjBp~Z`m$q|HuVYWJV@FT~e5CBVjg|E{Zx5cYd`hhx3Y+>Txg{d>?j(A)5qvKZ zI^Xvh_q~^W^Le;D-ugD~SA13;{)P750pmm?aHSEPYj_*^wru$F!|LJosFk)Sp6%nmC59sv?@2E~dzd@~hWW=U)GQur`)%vDl2GWlOEOn{Ksw2t${l z3l?|n@3H9F`XJc@>_a}LI)*XhC6~hV9X#mYO?|yc>PW|3(ik7MJ8%Qbi!{|77K%4# zcU%|spq^K{#%DN6muTSpaa}BeS0&YwDbm*=+Z?&N$A-iHsHG;?YwvXyJIqh>z7xVFJ5=88lR zZmo$oZz9g24bGGAQk3s)Vc{xHM8uL;5}t^|75v1MZ;_NaVI6> z(a_%|Ohzlp+N#Ad3h$AVqU)nNHqEq7)DdQ1ynkIvbnV0%VL&{{D4Z5)IH!e~ln^_Y zE~W~J@sfSZF7aZZB7N9On?3M?K@Y4Li4b8|-?$_%(VB|>()>g-wut%@i}gNlxmvl% zlT$yNmyY}5hcLJjMZ`DvDA4d(_w!p_r0^d>LeW-khC2SnUSI2DVB{M%HB-00z7`j1 z%!o8oM0x9wd2%g`fo8( z#!+YvRWS(dNTxE|l;s~xK9P!`&5jWW8E$k|Yyxi@)hw%{m)eMR|)L$rx#z$i^r%>4v+ z8tHzz@b3ZVtl~T6B9+JzqlOQH*$z@Sp_s2ZG8aTANVBqnzED5uodX&3?*2wg{t_*E zKrF8l$z+*K`!LC?PPjYl7|zD;`=vMx$Ep09de^ZP#7yid4 
z<^QjAx4il7ju@Y{`t%RF7*!#$%2*Pc!>G&%R>8dF%!SGy7Qs~uy_}IjY7=2;n1#s- zih<6RspeEBULl2RNZHtT5ETu67gH#_1$Nz20~QT*n!!I@~p_%d?RMOVTM^W|sHDxY502?E|J>E6)qN17Z8JOL?$}EG5ZKd>J?U?7dga z=Rd_#w`2H)7P-Y+fTfs;8I9@|5@PNo2|9|ks(KPgpJ~_1BwgK)Jrgpl_R~aRq%F@^ ziQIx9HTtHg-XHS%a0_mZ-fAVc34u|lnvy`{z|0)#+b<7)zV&M6F=&Q{G|BN*8#+r- zjTiEWsZy5Mf0DeXqd9UV6B?a2iPCI-fcEn~c63dghx>zSp7&*_?o{jREwG)VBNN)w z6!T=tn=BnWg5KjApZg?hy)72~=epJpY~!Rc>S76bcLw{sQ|Cq`zq&A;pr;ifZGOTG zs(!WzjFuE{PM1f~AxtSy^#fEgzO{u$0lj|_aD*z1zESm9a!kS!QJXVbO?HBSqMz5; zPlQfJuQ@9GCyf&F9Tz2a(yP2-_GeIqeYq+5G@j;u2(g!`T#iYs>QJoux!Omb+Op;l zHI^SNsC!Tx+Zf*r?x=}9(*Atnd9XncREk#P1$YxTH_=HNmc`I%$6J3CSwoO@po%V) zep6r=&inn@k{6F%$x31kQEXybxn|#6%~UjqN!JQG;jmWc<0c>m@s$mI_kNgZN%mU)aF+sMcO+_B+$D#1xCQmAx+etKz}yL1RwE8x+bq-PKQMEqI=$0kb$+ITI(ru-R9Er zm00Yr5g2iHex|K{r+uN+;93u0F7n;Nh|7~KF>NgIi-rvBpOM9ZJTxwe%>jMF7|q=< z;?5KWbuw`ApkUB{Ri?`4cy#W1&t_j+IRTd;6|anu*^&V0dF1H#O7K~aiX?iB+DAzqgl z!oRW~_bD^RryDt0Bct%udKKNBZ1-O)p!j@MNAeWZ0W`D2KDgzQi#0l_L%$SxL?kM< z-aPKViDKs6URICHj?9Mu*&%|jlDdl_Gj>sqqsCKprg&cSONtheRw4yOeXgpi5>iy2 zT(HTe2hpA#ycXnuKsl+W=B|cgd7f9Z#_v!Gf0oo*fae3emcxz_O*@@BfA~XvC^_vM zhCb-G&MRpf^W|zVYRinC+D(tHJ_^^(5nXXR!Zjm`+UdAl5-v60je_Cs-TKoKNV$%O zvew|%Oi@1w22meQjcJvS&z%rF98}V`lI-j8dHH$eJm0~PP$AH@=Qmf za`8QM_zvEisUs!}tG>L5^3I=0@B}sMwxgv4xM{fY`uisz{}09&E^}k!YN$J{q>Sgo z*WMKO=Co73^ol9kyvcg$rE_}O=2{UGi>G`|ZG>Mtyu^XCO3W@WTIeI^a<5+PB>0?L zTVm0U2P=%6;y);Il)P+xv`{KA6gDP%*Q2iTiDV-ka1+SdQ`rL1GMvVyNF=9C3Z;Rt zOeR_AqK2l<1L?2Mw$XD9m{kIeaGdgsQA>YKE$-qTDpa+rZGVdN8ik^e2g{BBil^t zSmr%uoNT`Cr{x0~hR-dSQagwH*{P=mWycwQFK@@Ol0=`+nv@*6wSgzi1sZmIHNF>> zlEpAl%4EU%tT$ZN<(UUbBZn?~hL>H?>hTJa3ih^bzUVMQdh1svsIsFR?_~v^)5A<= zCw?cmDwc+Yax-Zq3B){M& z4h+9(JV&4NUH_^uGCUf<4@5tVw zbD>Z?evjB@G<3k1IixX*s{M*Rb~lgW;=e8t4<8Qr`-H5E3zPw3s%o(9>*Ge}yP&s= z+n8)WwJl9Tq~sMe0YLQ?{(@|7G-syylaRE71VZhpEx+2PvzJR5oJ$xT)1_`CuR2F= ztJ#u939no$j*jQ5qpqd{6+12W*&-f*qoy^(f{IKO&dgpMgUQ;erR8v1}R>nYq(k+-py=%&SdhCQX#RewH;~q+3nBO`0KqN8v7)b4(;x7 zGicw&(Dp1*^@zU5l4Oa%5-p86O+TMKeC8DdyI9(@e{PSrZXSC|cZj?>BeP-XhxcL( z9@J%J(4L6`JGZtLX$yfkm@iw_b5^k%Wo*5y_eyo?vwr!AQa%>aa`N8uq|U%%l6kt9 zR#z)akT1}9GB`yGAD;Z_K-h>$IqN^XRG+xP%}m`*+aMY9-Wgo8nK41}x%6>w|I_Br z3w8wVoLqSJIBF@yg{Wfi8wl;P=o!3dq%BpzVHbR*X{B~=6!OWZ;E@rgR z=+w9P{SD_Lq;{`F4Q_8HTDQL?N%RL6MnyqMV@uu(~LFH%5KQC^>8e!YO?H zWi_P8`yiy%R}{bRHIl9haIbu9Q7X!%>2q_)&mVMS&`SJ3Wpd^DsUt(516lO0V0)Ni z>`wk#N&Tw$%$JInCk(`Sz6z3HvU=ltev0GsY^{{1QUc)Cek?-LiOCsp_aJmQnl5l> z^xzz)MK)=6O+UdbZj2D)yqyh!UYaJ|9IOStSygeXjkLu$pB2&2#uNm zly;B8JeHjdEbF?`S$@)GoHQ`q^bB(xJ>_xj=kz*qAzFm$%j^-+2!P_Jck`(Kdn|dpo@U(Lska%A6$? zcMvVJh3!dIi<(1hYmJgkW73db{(=M*Ryww*EX%-NU-_=KDjWM;CBUso zF3q%kMt=djCF<99%u&T`D(KnW5^&9dNRE*mxpbl&E{9cj8f#SWA=&N$}haEG+5$ zT@EqM45^>vwyD;CstSPB=aodXex`axJ`=GLtxyaQwS9#;F&^cerib&Sw93#+*!f% zvn(*ro_Z@M1vdgTk15tK#s2ck4IXBA`p9l2SXroXWmb)hBFy?gYoLA*>UV}lq+(}s z$!Y}gPAZk6qgGM~RL+gG#-_7W(j8ay>Sv*^MLVH=zoJwOsq%Yf^Tk`B4)t!Cl}&ba zmEMl#DRIBM-lUoTDavzi+Iz!m{vaYu!U;AJpE%*s|3+fMYwvDk5Ik3Eu1pf3t<^LkDl4Y0g16L;heiV&!z zxS)mH;z@K-#Y8a`RwaLw!NawTnx29VSPSR9^13^=EQ4B}L|8T{H@!u%Za>$06MHEH$~o= zKR%bpSS&|ty$#RQ-+XB-?`!eqxTCa(7%=$RN+Zk9PM1~ac(OA|992azLbpIOg}#@! 
z=KL&^qdS6Z4IshTf9eSuSfB}djOT@u{p0AN5a0wEZ}8md8W=0FzMhJ8`!D-|-hq+n zUk2f0b@`q(Q9MyJZsenPA6bxHjyzK*6hgyiWzmp3MSf^u#};8Nn$uElflFF>@!*ZtAM zCM+ny3wBb;diZ3Jx5j}XPkI)GlBQ2*-qv8QOvhLl^z~N#Ri3dRC9xUi6@rTNN5god z*hQB;kE}Tr{|h#h*wHe2NW&-If}aB+_(rjPc0|m)^ZAxCw_-pwa;dZcp zc?=*S=3{m31)8Y3OXqz-y8J7v3Cn#<*c+1Lf45YmdC65!GB~3r<|W$y3=D6%m3;Ph zz0uAiz{akn{bSgxpgN2o6Ig_tCXX_2aY%7*d#tHe2JG{n2&wo(F6f+qnqh5qpkO8DQ@(;RO@>6GW zTZJKohw6;B;h}vshTdjR*T{gwaL1@dquZ=S2NEZQH6}~`ve88~hzIus82@Z08s!JJ z-_VvZsjSn5E_Cfw${O$QsfVAO5II?ox|%fm;BYzFg`MJEWn5zRKOYJ52Q+xnJ=jS3 z{fU2^M|kCg-x?Hft6qibxr4^|S;rbu)Pp2mt?W)T}mHS+;Acyo6r1`Mr=IY3kV1#&-m} z`W?mJjSdzEthk1>AFjlC@#>UIwBJfQBXi?9q52hKm7L6;QP zWQHy;&-hI8OmYibo*9j(1&I9#=vT9`7<&@^27&1<^Fx}G9#U7KXC>FZ6`V&%7}#1) zfy|-#9{>|Tyk_Wc%8kxb+V^fnKse zm~~1&S6-AbYUVMS!DJ>J)2KsU88;=a1Xa|P9+YKR z!KEJb$Qau4po$~WUID4vG%vyqI~x%B-|=8&LjD77O8TxJ5Nn?rkh^Ye$MCnvopRg;u6 z?b6_v@kITC)}w}{s|B%a9I#Z^A9r*Frd$0wgHbvY7Q^OO7aQbuXuj7xQ@FzKb1|sB zVP@XH?gX@5RO^A>AZ~gps0sXln{t9T(SyLcdET~b8d&>Rr@N9>Z$@cp=P43n*wqc> z6jbB38&u$a6_`DP=PUr5`$ZPI&@Iia5IA%&xiEFe*|%R56()i(4bUFp~lL(5k&v$z%c7 zbt7#uPsSw5L`&w54EaxoRmrfoW5G3+kH$7{ge_r23xXzDri6`mXZw5mMIpSdp&yoo zS+jcIJ^&8gEZ{J1JU3Jl7|lcV0onR0ED0p$ybdnDsu{Kf^c}TzGRTJ^@0~Le_di8q z5;5_d2>9nRc*ppCjsov@Ck<*>Ioo zZ^9BAs>2&`9d6Svb2THNRZK5(mDq$9d+ai(XAgYWs+}Rt_()~Rb~CZ06Knc8I9mQb z=}J*Kz_8o1=$I@@zk419Z?qR*TDG3iDzeBC>4wn& zcbbaVk@QMu&|u4AA^7jeIWIhi(yJ}D5IfVc2 zJvXa|$qw@xNzGZc`06?FJ9GpFI%9hA;;Uh9hgmbUAJ?GWH2qkod@KEjyM6_w#!l}I zZO5m0+*A9TXL-Ql279LY5Su&U%8O$z?ah#F8g?Bx&CW@L3724B1lu|SeM0Ph1i?>QOJqFpcht!Ma?MPo z)Ioa_(+)dt{MiHZeMw_D;PF~-9hRNXhf~6ySe&lIO~N?Rmzxjp2T^u+%XUI`ic!#M zq(m4i9p0TjT!hgXg1!VB9V=rC4?M@G!e0jwuHGKtv%HmGZ-4CYenE_no7=5qWB)UM zVRzw_x`0s|#;4mkDg)YJHyC~9n8MUG3T|0jJqd%ue{k?^ayw$+iH|=`Hp6JNHO0zB z*$m4`-8g*ljK%}KeEU3ci!O4WhADGM0H}aF%cRne;O#Mva>ai*SD{3b=#@&?X|QD0 zEp5m{AtZ2x+A&u!ta@-!|UH&8i<1aR^z=dTue=V zX3@y{AO{{N0+hQ-1urS)iEHrQ3E`syOAamT8moLIpbpz*=pr{N4^La=LWL0D+ueY{ zj{e^cG0ExDbGUh16;WkYCI{A;+?ZmRv~T3a2F_;OZ<-2l8l64|0v&<&EV2@7!)GQ*rNqV9-)$qt@)dB_I?CA9Y(4y)(=(0+{lN^0i z6cdj`B)co(4GY>iigtuU!O#hB9bid#GG3^lAQ9rH7xX3$T#5^J;>>0D+@U0F+Ys^H zE+W2A68ZT5aSv}IMkSY!o{_%1*N8g0uMLay1>QmoddCAN0 zdyVXiS1nA6XC<7TB(q}xhUf+#E`*K-;dOwO_>QzogQEjjJMKBSDL&eXq-6VVsSjW4b? 
zo4pja9Gem%ir5nO7M!hA-{c4QF6U5{+d zP$m2$rreJvq!toNj|yB)`;2fY<$t2*2t-&=&dGveZ~k}z6~>`ZZ8zZ6@5_x%b7aH| zDKp5EY)L2B9z&77gxK3PROwS_=kaowdbwBy%DlH+Zr_nWH~D-7lhRZ@co)sgcyt55 zeMH<2#Eq)WasYp-5FNQKiz}Q7qaz_8)i|v=Na*oifUjHQ;j6T=&%}C^_45TICF%S7giiUW-okR>|vAC}u@lH2%kSehmjRHxkCq(nmi{tG+kf z739;4Y>%SSewhuW2IA9bA)a2v@h_9n14tnw8CO zLrNwS)47Sta-d3|8-iPc0Ga^OiEQbrjq#0#b(H3bK?^|*`beX07B>S6Ms1KJ>g_HUjzDD5#WSnGU)C#R72qM^FWxjYNIeXjQyF{u2JC`7D*XDW|do39mK%ONzk!GI;d5EC7YDwH` zQB+}_@clT^0q&X~Qu*dlL-f}V=GfZ~d3p6#7N-oQ8hP|yNgc?!v!n&$%N){&s;Jyr zVixQbM92$R$@9-5zaaJP|XV?Ck5O@I*M+O4#gLmQ;a# zax?L{(Hp+)1>xqkHgotM%+@viJ{}7z(+{yyB76ztd`Na__Y?y}6xEd_C0M%Gtm?DziJ% zL)oGYOr1pa3_YQp@)mgzZk=&mPE;kI!rRvK{8N|D(kT%i~v%lWLiTn}iPSCfL{ z!SNke^V)%m%2UY{M>e7HMfT)4r*OghXz^!@pIVX8igohh?6M!oaG0ARBA0id5j@p4 zgRPF`^(kciD3Xwtp4^|%vB*WOSda?4jv-Okws*aMzZWXFx7cWOyg5*R^Fi^{0S!ax zy8ycD6@H}VO#G5O*c%jd#im*>hEkd%seW|KiBaf)IGkY>_ecURCDZ(FcySL~)`cR_ zpBhCUX)%>@YPGeLRS(#i+)3%_-|0e%r2mqb8Bavxtw>utGmx_hKE2&7%J@H{@&995 zjUkT=!J_@{C(itE?Rjai3=EYHWriBEPET?8o|N~>PYYOsMm!o$D4We+sFLHivmE?d zG`~vhF784-I3P!AOK}W$;yD4zr`)l;L$MVTM>TA`X|RquGKFa=C_sk_<)9L1k{L6o z-&})N^uuzj`cAUOvtwncb8K8zn9cne8gwCWNIFFlXzH^V4te;vxS{E$yb=U!s`?}Q zUu=~Re}!lyr&*IsTde^O&pk_>aU)XFgfg*tBZ@*rXXpfn;ml*;XKk^MGdRiy=p?6!3>e+w+hl){^niip&|eebSSXMx(4jA7 zk4dDpDE#4bA|=j*w2@q2Cc%4K`;~;GDO&Bczhb)8z2paeJ*JZWd18hQsRaH~rc!Quqcyp&V&SKOJ_mcaLjy2y|EN7y9C%#3VjG?}pB6I=z_ny7t2CHx0Kh=gXF!@C% zscgjq@qF#~SMZetZduSWxKn4#j|1+Z@3oTDwHf1PepI1m+=>%&R~Ch=QkXhfh@C8A zq%N{2c)S0;>JNdYGg*#14-f{(g>uE?W@M3^QLl{POSuNHfAlNLj_z9_CKA;30VOQ# z7_+xtxhEKsiSHqtU@vw+!uXM6aK^NVW7&lzU0Na@RXzKOyxLGu9bV@cZ79-xgj zeqgSAf1st|!-WPNnY<=Rmd6Mjy$=;BgbqbzxYzPSHz@$|)OlZ>2NBIT`gGmboQ7+s z;w^vgSJ!a}WYRh@s51Zh3@(W>q2YppgBL8! z0Zq%$y1h6P_~pU1$-lT$7KLXOi2ao#ASpdQ^gqR&Y8}Z(&BC(aIE#1UN4EAxo7l16 zcA`-n@W_Q8@SwID2#-)+=Iyams2-C2vQF_|j-Dn?#lYmyVRvz(uYda4B7yOiNtD?K z(85~Ix6YeLRZaG_1b83`^o^Y@^{+Ioq4-oJDhaAR%e=ooj~}cay@i9hwz~y1gi^+Ag3Oj@5lszA zHFt{4@J%eGMT4=qurvA?4L-3LuH@A{RMTd*^x{7g0G6m;qVJbEl9)Mi2INv583bx= zs_Dh^@FV+q8lG}7?IRa5M*6IG&sf{>pZOCyC8u4l7NrQwWk_s~0NEM+L~$0!PSr`S z3PmGBnXCBYi_wl(0-N`6Z5}JV}w*oEf`g=J~7>*TIZ4tD#0IN+-#~% ze|I0V^`&ej95a$paOT$hjt400tt0p_cqzS>ayys#w<_-d#`Jkv@sBuwoss5^#ppSg zidUyhT?pOjcA6FfoX$6e@!ljO{JB_03^r@|IkIBi$Uf3%k=7c{tL|+*iBwr%F_ODl zbjDzj*VLRWd*_Krk%$X5X?l2n)v4)90(~SG7F}&m^!x!F>~OL@vjg&E@kotqY^%NU6lkm-(mXQ6~wQ)N;3@i4Z}bk_M>G7DF3QRDr~^qIQyHa zANeCKh1Ase3lk@B8Api+F=^)%pAny>dZK;ce$394cV!TsE45cZht|!J7I0o?Qn6_g z5f=G&>@|!udT`9W>74R=XDWo&^^ZHBN3W^OVZp>FQ;rz*NKJdv5i&JO;m|f-YfUMO zV(QE;7pqNtXtu#GOZ@0+0}dWv79=C?O=EenrR9H=Tg4y#>@yLamN8)YI?6NiT$e6@ z4PQ~+0w_z4DO;c&b=fn7M14>MeX@k|jIN~;L%t32iQsb{%mL-5Y6e?=RZf9K6`27N zk71bhT|>fJlSc>KZ#{2KjH_K8(X#Gzgi-Py3#uJ0@sr7 z{b+ulJ!XmvzpLV<<%N}UzG^a7Nt}c(vYQ?i{@v?Xiu&N1X8}Sb_KomGa^AtL(H?7a zZc?KFqc9v%#1npv^yCmGI`^XY4C?a$YX#HCw9FYRGJo&Y$LX-ZeexCLhhY0xSV-6u z!VVH$YzHzUR&ML!Fz~r)ML{3Z+|qqceM!k~#>yosR|~cK^kO?)&@D$tDOBYk6WSbS z1)qTvyWo=6nG#(@aP|a+fq(qcLyeT??3_}L1@=;$|kK_;45Ixt2FU+6n z%nzbPY(l-cnG2K2gn}{58;OLigQk?Q7GDlqqLi<` zdIIx9BS|A``gVlN_&FWX)IngX+A@zA)Q9paXxmT15|=vsoGskn*MO=LNd!IaRBuYb zM0vrMNYoY+;mYYRR^K5N!DJ43d}Z3LU;#IqILKKE6|fhbkPWCybaF9lHPwL^W1Q~= z!lWh?kE|JR2l4x!m!!#m)H1>WqqQu7G)UIEUAUnOvjJK=aLW{Mx=j@g&bL$0#PSR5 zQ>fpZ2%87!NaYvs*A^p!VMkPNNT^6>`Xi!tu$q}{69j#ix2VW|4PZij{*@oCyd)kq zfNN6QUjd@q?9h33*yJH|JKZ@*{!CS>|3@|vd7nIDA_zM zgHke-j2$6*#2!5+S$vhU9XCy4Ks*rx; zj+~v0vn-G`mZF|?U>@cZF`##AhEau|=qUqZCg?o;KDoG3NE?VhjEQ{VMKKPdbD%?4 zPwLSMjI-ffYdj`G1y$RC3=KS zs&yq>Cu!(vY0edSZ_&3;z9#hd(DJ`=&Hg=l#N?ejwoRtzDl-{X!bnVuaXYm<90DxL 
zOcmR6&DfYwl%OC9$0sj$?^JThwr|Yaz>mD!#KVG@mteFh917HPMY+C@*?4n1^HLT_ z=4J(KXINcOq6wbEWtUXAK#VRP$f=05<1c}`@>s%@EA1Q{*3g6Y@lwpmLasg_dS5&# zgV>;wvQ*9#`h2iHA=iNmHug~k(DG_KyeB1NWaSqVLvX4K8uRZq?p){VJ}WHrT&G@(nSm5e4hppdLi&z3=@Pz4xwKI)birba0r+eQyu*U*sl;cGUonc_ge4>%>uI8Ci!0+^y7s#jEDG_ymGGWlHti;4l_^{5lR= zMjrAJlwPao9DA=YbGE+@BC9qM#OS`bPe+~h@~2ovf*9I<&<4tBQz~B^Be?SQm?J6Z zeCsa<+&t@$D<`H1`A2((7_Y`2J&b({XBLD|bydQ6X*W>eCl!2#9R4HH{u8Lv z#+Yo_jAdw#9mhjT<$_|v+d6wic}u?rhrz5AYEMr(#6SR0PrCe&X>=YCSYba-Qr!0xmO zUb|P-h3cnYA_u+G3RL@c;+6g&K|-=&5-9N`IO@^>>tk`sj~8MQjJ+SQv1x(Ei>Mt% zyuM9|1HJ|B(%wT*7EUyYT#pd~xj)-&(o*@Z z1AQPjfd-0lbr)?FZsE$iUkCC5tbymlxkX3DHTGF&D^^{vfdKX!Wu4ekj%|czoSrLz-*~IB0(`KSO&tYb?DL1D$TcY`0d2844`R!TQ`jRFiM+irGg`B*9Y)Y7V+J;=Sz+M!(nykP9hK z1eObh)4%S!-}@`c1%CfSHiuS6JI)HhMn1=K^*~J5zW_$9tWVfCI}J0ALZ;u|RjdCl zK)d=*FjGQDt!WW8_G<9c=m?&CaJlK{zSCYWlL!59R`ch5MaKriEI(1lIsN@_ml4_ayjjvpuG($tuEbEuHwA^Fva>Y$ z+}dJ*zWO7T@eZLJnLNbP9T1Ksh07z;mk8W=^x2g(WbJ>qh=cGnj}N63Aq!=ar71!l zH(cC(sfTriBl9Jz7#=@G@L?nR&iDKLq6XmIVHiaD{*{K-<< z^7k~LZjg)RJbrivD#_g@7R{ZpyDljza9AW%iz2>F z+1z%?Y#>MaX*_IK0*Fi(J)E-})#f|pa1@boMdPbct+puRU^0|BsHDTe`S33mMNOX( zS7B{jmbiWp>V!fMGINT|C-W7RMnNa~H$8m*iCRAm9bTa!n%>m_5qzAgdtr@u25e*8 z8WlWShzju0+gS^uRNXf9N1YPJ@ZC*h+mtcDC&iBbjWs0_flPKVb&=XBwGMHod!E#w z&1)QWZl})Cyj{jKtQVeKd6sGNcFPj1mBL7Tcx=^hm~d)%Ble-Kn9uo8kYXTtL9Tjn z%^7okDoyhm7LYne1+U#9T$7aH522A#b*YfY59`nAWxL>#j2!!{Kq_kCCKg%%ieU{} zJEcjEyZ&4#Sk3H+BmF{S`W+;>QlkX-2}ByZ;{hd0uD23VWblVK28R#%=RU#T;@mpq24=Xy2K!Jn68> zuJTLY;9MYqV%c$jZKl&RwsGj1$|R-DHod+BJsv-)_KgZWilW=T6mtS)=*WCcinhgP z4vM;hYIjcjJOm}3W&nsy{|biX@*GNeJoJB*D=R;!V=6Sj@kaa1&XfEihB58<{ELDs zLo?UhJ0)S>vnP8#B=F%tH`m!GpM!|~oN=)!39Zy4+tn9NFtfS5XmOjBvLn+K0SWuoL zt{tKXJWmYUYfpap@Ph#?ys(H|!6?5al?H|4wK(In98vDO#i<}P;5_u}SZF<55VR9+ zP5#pD(c!Goft3eRx+F8a))NZjHU){+f7zluWU?^zZI@8@TgrN#Cn0{Ml_VP11SxA_ z1>j7n_^U(4_#P}J(WM5c%N4RgAc;>e5h$vW$lDf~0k8k^XR|h|Ljq!Gv=9S%DAHny z1)j)Nw~@^0eIXkdZA9l89{{hg;yCjn!v7ohzXu1EQlki@irMUu+Q_ID>>^MA-iVJJ zK%WMz>inHj1HSh9-f<}#jwCK&T*lLGW`EYMbnct-`a~qhmAa+Kj ze$I+;kyXukm3c(<+bH4#waA6<5d-DM1}})-s-@R#OxR%@WOuqSs1*3*>}2}{etTg0 zkj!3BNkTv-I|_;W;_4UW4N6keW1j#LqoesGK;w*x&+b`hnEhL9Y8r(F&fb@H4*9xu zNPiuSL5cMarC;a@)X_pHL){9QsfvoYPn@>?EHEp@lkM*!(cZAh;>%{gRF*_5LJ=ES zUZg)%#LiI4pawjVXE(z&`x*xd z7H~#ZW&T94#^br^2~^?Zyj2ZgpR;3OtQD60xRcbWU3WRwdoX5y$wMbKp9^%&nP^8dKgb3jK(^ zEXZEoH1mXdXVIqaN1b*AJtGRT1u~I2(UL?LYmKOw!z>yUgsTy0ovVsCpy zyyl-^{tN6EGIS(x9u_2RG}VqDr(svJEIgjTiMlwmqg%q7Onu{9FxpYsAC@vm+!^k) zU?idvHh`?PXHbZIdk9|6dfoxRF&?(Z{KEx_;ODR6vkRP^_MXX1UX_%@(c+huCd&9` zsxyoLNKGzxK@1YoPX~V_+%nXFLG1Z_C6Y| z%7-|%*{f9$oe(RrLv|HCz5W6nSQY=sh6m&2p{1)&z2L@Y+A2y6J~zH!DGc72<@(O( zU0JyaY$%bsI(49uOU=eMUc59*@qjn^B-V&~(GJn`D*tjc7249qYWWc%fhhy&yGQ*) zOHgADuStzJ*=dd4vR$UqmVM1U`NGE^pva_(O~~SgGZ1;vgmxRTat+B+tT1|^*oi)e z6<=T#*F2ZO66F1QP`2-~ZjN*c!^9FM^|}2q z+$kwyuD>H5#{&#wQNw3_V{Qp{YxkVMKmY2PXxF{@NDTSe09)|xp&}E97 zeCLA-d=Zgu>S17XqdZc(;41?{!AfN2I5nSy5Wpnk%IvSotR{Sufq0O-an6ji+pd`k9*BmH0Y z*_0X%hl1;cPX^p%0;Bu?`2Z!s)i4G_n&W~nu8vmWJ%khZ=v=iYkkvBfAA2KV_@wa^+?QqTRYs*}+$;Tu zoFZFLr%9J9CHtGn=#?3l@VzhK!ofw#kYoJQSI=#JsgTg-*)D7u#>`cQHw=EVg^;2e z*S}BNUZ(kcl3n^rnAwb&<8n|(EI|2NL1}(s6yIJz`XB!ikWSRa{s|qpVA_jAMZ=_7 zB)$w7=&xDVNaUw8vDJ8Mpg_tTmM#S|!(z9ujhWi?O-E+3%G#Rz0SVRO~M}u zQ<|`rATbvxN)QUmO@7+$>J;8t=^o#^uba^}g%oVfGG^*Jh*%LI#Wm|b1AnNqtDA!t zROx%aq|6BD7|(Eoi?>jKe2J5U*Is?vwU!TWZ@ZmKm^dYcHP1Y^k7Hh=y`t&73#CNZ zeZo}*rg@Q(-v9k2dSmZ6wX-k-nAs|YI+=Bo%f>W>$6axrMIqmkL|kZb zp-D`~d6$amX1Gs?QP>Z}ch1?GB{XxtH0w|!T4tU_AxKoZcI~!c`Sg5|<)jhPVLC^q z+7uGoS7@0%HTz=eMK%oBs!7qb`PS~DCrRb$kaKYD5bA7D>DD8YIZUDny7+eSD{vsoZk%zwi$O4jGhQ=%J@P$A#k^F2g^nuChSK3 
zr{>>sDaN?LPHb1yq`7pVGs^0^H{6yDBx|-QP@660h?op>uPMx0LGqMTwKZP28}{*~ zO5LTbAHs|f$Ku*H{eS(Hla-}^ufst0OvkQOfHtHsyS#JMV>*=OxMq?a7vz@EmE=4k z=0hE;D_d&ZNh7l0bk$H3u6I9mBu3xutQI2&{w+rCn96>f`g&cL=G% zTpLD`afQ$HTD1YAQt32^8cf|jp;O?`hbW~RC(}OQ# zU(ERz_07b?s&n4_FJTE(eG|nGf=DK&PlM=u5wO?IcYR&VZ0R0Q#E+t$Vvjz$q`<|; zN|tT2TGXLGYy{+K5n8JOge&+_B_BXoJ?{$jq1!Q9V{|N%B??`(N*F(>N;o{#XR6f7 zgTzFV@J3c+JmabFA{eHbhX5z*_CI&aNBiu)g zQOpvP0`Gz>v`WtP`R7D3?aY;Eo6KrT73~-|&WRi2xql#L@p7A>SW@R9h@KHig||dQ_|YyUAxS(VivnI0bQlzdX-x&HLnPcf0hYjaO-3TBOuNAOl(5*4w_*l9 zg%lEhgexPH#=2fZJE#r7e$6I%#HYwyP<&4Pl)$wu01HNoU+Kk$y;0eH@I_>{>i3L0 zeph;Vbn2JfU7;CTUOLhab3x;LA*}d1X@u^Vnu$kU;7ke=HFU^jZFT*nV8>^!!5{C$ ze>*gbzr^Oj1A$i)5sjoup>Hh6RYlHkS>?wikP5i#(Su?V`&nax<6W5da)K7L?wd`K zE%7Gk;HcnMHaO&=>Qj3cv#_##bC_Oy&8V*xx15<&o;+FIh?L#$wVf%)7J(46NT&)O zKFN6JX5Dwj?!%+RngjMA!0+N_;WK8977~G>@d9YiQ+Nr3D~}_3YS(%qTq{)qONAD{#MxryEQt%LM=LA(5a(;PWFo}|tS-Q56^#1oeRWMCM* zoi%3x65*Xx+g2Sq1t@*gddS^ruGYJo2(t?tbPP#yyq}TV{^gS)q{#L)CK#sYL(U@G zN_VAhxT$`!@c!8+jndctr<8vwYunDRpa(g(jU{cFj=C|SdhnXcx5`W038DNEZ%)on zzKgo|x$xpS&hdZ7bARRe540~h?ysyo9)UJ1=@EuK;&jvY!E!ZrRay2OUplE0Z5Zqe zZT-}d;g3%ty;f^QI#o07T{a)MxBj#fD2?R_ z$oK?Tc{^?<^HrqiGn3k8%V@% z9QXpUQyZQnA4p{y;a;3bw-rOe=da3T@OLK150g(Ew2Lg9o@a`kFr9Jup7D%4M@$uH zzrA0=N(#aOv{4tnQr(KD=iIL1*-ZRa|AC6NOB=GiS(U>+{2iEwu#?`1%S9`>OcefD z6OGp3wPPi{?P7J8*8EH|5wd%>p}~-oF9DIpjWDQ9~m}pkddFC<@})d2?bKnG<@3ULTe7y?!L0Gb5b%ri=N) zC@F-MDrj21g3~EB*?Z%4^Jlx3u6f}lnqU{;vJ5W~La5~8RzuMFt0$Y0hduB}-`R6h zO{;32`9UQgxjb)+7_{X9`1d1tfqqWfj8`S*iOpSCKU*E5`aPQkaMxq~*Tmh`arPc| z5~mrj9TL_GmsxPSho-|XRGXQuiGwhlkj)M+qdVgg{5PPK^`q(Gr@ryLeoNYzQjhm+ zBl}`gEwRYt{8>o&?A3rvJf#=p?7i9aPY+58>maC(Trj=%%?F+X;2u2z1QOP$~{TR_a-cWtAXNJ8Jhvz=d=9|h;2t%K=Q z*t^laOQc7{PfnLTg@Xf<5xaX8IM|Zqy29hvgUF`Y@>LF_CnCn3mozffIQB;{PEr&1$g?pK8(g0UMj$9#`H ziqG34e97o5D>d)Hh})2YrCTa5K+*P<_IvWz@O=#9MC&_r!;g)Rf+HCPe5GBVy=T^$BaDUL)`5@+J|X&^1^jlgR%A4aXSPbFz^r zXvxTo&2*plEIv-Z_!Sd!)qX)8Jzu`CxJQYoicnE9-hm?g!**B-<*XHZK?t z5S`<%l9zmV)>77MjjJ=vm7s~RR>Q%Grvb=hr@k^em)W?j_G`_`cMkR2wBJy_Cl3;< zD|>t3juNgx)2OvYkg`xz{mptH&+js&?`VN$l5A(FpL&CCvdM*7yV0LK5YF&z`=Y^QCcwG=uQ&D@ZG&6x1kmUc=Cyk>DPt zSYy!y-=poCB9<^-JOfd&SXVT=4Ln;e91WV!Q;q$#`f^pFEx#tEWaLe()$IeFb#C26 zRVi0+-4DF*;^tU)dOkX!o%UmHe0|uP#OW8I95r%pfOb8j){ug%c)!-e%sjOj({4+m7|f=qBhU+7qUB>F6;_T$1re)>pQ^#HhAc;U&on?2mGObzt(v5X<4J>>Db3#L+sIoWK88wLF4 zkdTm|XxYCRA(<$B!iM{Q!+)3#2&XU2mQ6`0z4pbS`qYM{BQf$t7e|Lrd@5r#<_C?g zkk)!5twVQ&zgK)X2tIrsLG)PjcO&nBW<8@S2??&pG{TQJ*2bR6{0=J0~* zS2BW!=L~`an8Y7Y0+Xb#5nAQnecut#>8bRrXArYSQ`?1xC%A(DMI6lHI1s?(YDYPM zvs}Cd`<4C6beR5CCRd;<#KX{K#hu1Y!VxzKte6ND;XrA~lv`m<;*fiBk2R?s*Q=m3ADXz{eJeZCc>25IPx54d9 z-hX0>%;2TOZMZc0^kJK15PIS2xE>v-D7sd@zP1-SlCgYbZ6~m%Q2$fFGBMF2-84WG zmi(EdPP5+Qg)@&pPXCb~Wzj0MO{Sy+>FBMjz>5BxT|$a&bbBS;&Zk5=#uPg)j zQWnNa;CeFGV0`a8PsD1}8+_*Qw`2YcUp(*&*L=_w_CL52I3yepu$|6%;eIfg^rvWju?!(T3%leBzCz|~=MGt7x?Qc>O)49L|jR?o_&32kB=4__^$bYzvQ6`+J2 zGQW+F=N*l*31v{(Q3%fg&zi50WAq+bmt`V{Bo<{agIpi|Q2Xdbc{@a^SZc=FhyoW2 zkK%p|JGExp+@(fTwva#7Lo6S|ZSM@!vC@)+Zb@RR150y>(C=UDx(I=-?3CEx0=b9o*d^L4vyk*Fgfo-Q6KT z&>+Fx-3E7ew*ZIxe$QLydFrcE^;PY^W~O&{PxV~0_FijUztIs#wl%xVTbQMUXYrCQ zIpsxfPUXKXNgBjE{oC_+C@)|gMz2j`q|T7P$Br-hXV*GnV@(F*9O`D( z$L1NgC$6VqeO!Qf3J9y#h7-vb-j&a9n{pgRvWGX9vFv7#)Bzpn8g26Ry^R0a`kv#RE`Hb2EgskOvS8KGN}o za(j|;LRubDFpSG(D9FHij~?b&phLiuldPMt4C0w3@oe8qBTSiwGU2&`bN*&7|GoAf7U7?`T0FxSDN zWjIH+lM)iSuGAWl4PVr*Tka>Y2?0hT0@K@&oK)rn%wTi(Y*L>Y1H~HUkdK=%2X{SU- z{I7?AIG*aLp<#v!bPQj7FGK_@^`Gb0QtgO>28{qG-d^^q5c$vcE(~EAecVJM>X(6` zNM`(QCT1(gNx9bEikQpUTPzs1ZdGEORAIl)$8yq(hT7QeU}M*FgX<|zFQ9F1DvZ|^ z#eVQ-S@!N&-c0sG|J(y8LT`XRZ*0;!NMR9d@H4jpJfV;Z?Knu 
zos{5c5KZD6MOv}#_+|WAli5;Cz=j_wujt8RS9x(12ar0k6`(I^y?`IRH_#5{lW}aht z6Kk(Zm2u6T$&K^oEP5F~Z_f_yFa!p8mH}W)3dd1#348VV(fpscn($(Ydj(4N}Pdc(R zds%qvdYCAlW;>=6CC2^pj~x_-uC43Dze6=RK4%suW@=c#=LSDPaZFMtD!5)+@WyJ1Zm%k3mb+_N;*sQa>Q_=0j-x1D#m zDsdM*OP0z`g*3VfzFO4?lz3=maHzJpi$@Gz2 z2^-AI*Lf$85I25`x;0rF^b}MOM|0Eqh*(xb!a!5_&`(3tq*8E!Vf*0<|I+f;7(184 zJgkr#^NMAtz}iqE1{YP4?wBOayX|z?d)g}by$g}fM{_P#|}}$d1rEREzm)XQfRi_%>Sg7 z`dC~9o_xI@i=yE&_LCt+v=-MqyH#D=!h96%G{H)k%jgllSNtUg6X~1Q4{^0EPn6jg z7@uSsI2C7qPyL@sZW)O<`4ty+3v2dkb-t5hvFMcSnnh-h4TnUPYkh3m*FKZX!3D-u>?adXG zTDE2o8n?2uTB#*60aJVc6Vm5T!JdXNY1GDCDNk%L+v!uxEsR=wSwb=OiVJdICEJ>u z2YkBBU@mqNkz;i3di{j&R&Tvs*RorHIC%t_dHJ?r8opo@h%0@GuW@u^4rp~Uk`Uax zeoh^j`nFtV7a2?;8*Z~MT4Hh8q$^2Ze>%u(m-7Locod4QRrm6*U>++7*g4XJMqWZ1 zQaO)>80Ne-!dBU@f{&hGfb6m%sEV33Nn7akxg@BCK%DER*CG^KWhZ^oqv)=W*h9~d z!|R^IzdofM*IDPESyDuAbTnB zzp?4Kv+1}b>bQLOwk~;{&Cy%94Q=`TUty){SXksM#?}4X>-yVa&g(nFMfuI@@BYN! z(TQFWs+Xgx*Q2*v{};o5UkAMX&3U`dc?ipSNO`+5e0>yo+kYeT%ZG{_riTc#fywQj zg2YX~V>XE+-Xh1)KHJt?TW#w!t{^}|b;1(PBGLwL_>-5yx`%Aw2SABQWazfGVU&k1 zOS&wf@f%sDK*VW=2`hWj55%R6L(Qdlh%virbY@n+Vigm}aMUrtTVZ^*v{+Z13rQ(* zN{o!k>8Uq_8%S;_h+YV@$=C58An|{yqXF2FpHW*1!;oN7JaV^aQXsLHf zqaO@Z$c#v|R1Ky|V0s*9;Wel8BovP#F6i;U08)h|*BAKQAH$t-!1afBUx)%RLN#~J zquK!VJ~`wz@|q{Ilf0%V;p@L@>oa}UuX(M{WA8RK29K*7s6MXt(+!K6DC54O`yX_r z^H)rW3p?$j1i6Nd9fuj8UqrU&itv#Nfa~iMTd8ly-9|34;z@qNnN-CF5L)JhV?=Hd zEx~ylcph>?tbP<4YdmKu8&60P4YACco#8i|c^l{reYq?xW5(y~osINK zDO3{Q12qsGaI{SHoyM@v=PTyf^F(s@E+~dkqOJZLrzNZL&tmR+igF;K{-$k^d$eEQ zc5pAbnpn;NT`kX#0YH3^fx4JsVzBQ7pf!VcEB%TQ*gOmH+tyzqSFOxb76kuskSP?S zlLW{RJ*o08ATMr;%}vq=Rcgq~9Yy63Lg`$#(`tx+ zL~-;GODzJBV+lYG34D{sf^MUxAS_pl#wt$cLyHZt0N#k&E)3S&$R|;bq9nkL8=KJ# zez|8~mP*ty%V}_z8Ko^5ZQFZORMQ6lZev*}D;0VF8freTT;b3a1Ubx`^V5B$%msHr zRJfRyRF&T7MVG=J?=djilE`kL$Dsvpr@V6A7?5cIph8ENc9PUD9`qe`)=^Awv|{?D zB*FT1pr-X>Jd} zcPH%6R+n77VMoCS=wCcFLf$(;rbyZR`>tq|gQMn9*({xygm4;zyeEci@kgzg)*&wnV?=V}%a%wG4e~H52wR64y)$mZKiZ=f&TQBXkE6aXJzQ z=y-luPv!ai%9}TfS3dxw)nV8jda`Gq@JIbe4GT;l-d)&gdU#vd-V3;2193V9ekw;87m7770Q4|2=zb>IJW;&oBv zd0XUlTjc(m$i4FGkVsg}Tg=;~$U9ie@Ax0x-WCygKB*JA8+e6uyuGVxXk*cT>+Sd> za1)*4?dAPl|G)2tPQ0E>y#6EhZ^MRgcAB3lDl1j|G+;cm@US& zA^U!G)F6bGQE}t1$xyJZ58n`XBZ6bNKv|{4I-!L;Zq3(xLSO60;FL$q+m~B;<%4ka{Zsl-z4gISi;BR$VN~ z`#>ZhdQtuByHm8^i4ZWn7zLObr#+T~t=BAkdD-ar9R0-~^93s0x=GYBI0ws9vIq3) zVQZ|P$^&T$#_Eiv_M#EE;?B%DNw}@~{SFr%rfooXqy?Lm@mh53;bWe1;5WMwd{*}p z*O7XLRft7_RX`{@p0X=t;$aw)MZl#y&m&Oa9%S?|H{6IQDTARaf=S(LII{xlOL%)Y z?2jDtWlv4P2DnCQzI|pw%RO!GND82IhaTibLn1WVz3E@V|2Uu}6`-S6#Q507u)4L#P;kpI^uR$F@GfGh{+-piOZkMvJRFzAM_Ab-h%%$Vb8bgwv}i zaO1b|(#ACZ&Cl%U8U~Mi+52w+;=%Yqf(+YF5;lRkz4*?Tw{k8)+@6#q_g>;S7L<2| zusOukJ$gcXZO9=>E63aZ7TDJsZg)H%RhApf0akx`5U`s$nISl3zemEj;J zdSW{T3gi&B_oAiZDgDG9#>=P;ZK-^g&|N@O>aQ`e7=6lFSA3}G+uS(Y%^C>axUkSJ zT7_S=5b%v7bR5y+r!$Wc_|f)3+StxFnN`@(T8UIJ4_1C{9y*-BLmu#inILEfEP?8v zkNP@i{NhSJxKC9?0gkHKj}bCIv?_yL(Osw*&uvMX#3C;yO6;G-JElLXEAUWhqL?k!U_fgAzpQrB@ zlr}>g#U3zICTOtpK*g#S&GNYnGikmv3NRh+TcWLAcNPD4D?8d*8pt1d?|M)cXO99A z{&i3vD7>;T^}@$8_xwd*uRgLq8c)WcpFHpSVt@yE-(plmu4a!q_|Ndgvph8FUW4$* z)f4;N-&s~L-|vhZr{k(n$~=5bminJaCaR;*w%K{MZb^9~)TGVE&|T9Q!DsV-2yP#@ zD+^!$jJ4Vt`g;K;%O%6Ou41FDc~9S@nZLH)A#ey%HEyd14?G28b3pDF{4D$cpK>tX zW7&VAX#WTFx6Nn&*F2G%(AWL9zc~QED_Vcd&VPsj>ms-RunsS~9Ny#cv5v>f!M7*F zw_{-K(0{4whtYqPy5j=5x1aFbrb*J7Vf9(kI7gBFBCiPlXSe&zP^JHlMEk|MDF)r zSN^@;1^<_S2MJ)P<8I+~>7S|ZE&T5+{NK;D=kQY5aSMFyBzyRmj3e?2a$7;fKzg<~ zh{@82cJ%d({598_5M|}htG@b)r6`sWN$E#A35X#pm{*Fx>qZM`2?f-9M=14RyxIMH!%67e~PR4MG_ z)KhvA9+PcHtOUXvK4pfU>q&-dfa+kR5Z$_ml(eq+qY?9CURB@C5ti(!hYz;(cM1`| ze!=254*%+fLmvj+NU;iSRNjT|(>H660shyvf>#6O%t9-PJuXl?Cw>vOmllrchl55FUN 
zIBY$(@m6Eaz1Y7W*m467hp)m<1x~!X)#T3@yR1P!9EaeM3_`Y;2=}*5Q8}&gKM;v} zZ0O57T(hMvS`S+WYxWlxlQiYw6^B3lc3%^m3zy{h0511@~L=NSV z6zwl2uBxZT*64u30$u6<%cz zBY!FORkWHW3`$K;;BP5t3DP)Awj$t7n;>SwSKQ?LH^^?(`OypiJ#VlQw`NLmqV`q6 z=>%cds9iLS{0YSeW<5M-FD=)`ng9O$3pgN^bc4^fAYNzD5DDDgdst{~PWuzC0*5o^ z)K-~=vQm@^n)q`$3gr>7UV4W&y`Yuh)cqy*7S3Qt>Q@>d>yep7)uVbhDW6{UYq+QF zHp$nuBKVG2 z9hlhVszXq&+t|B0-GBH|?94aCF8TPGnru(ahw#bLCg?^<0RD$}oWCj*+owzxYu+5D zWRcDo`lYzFND|?$k+zIAww3p%E7hU0ixkYCtNY`GElnWGnnSbhnIY_%G3vM)BW4+P z5|3^K3q@_~6>2`M%4x+E)zY*Cvvts7;barmZ;|y?@2VuRVOy&1(mqXzL~CpW>@YLz zLNwsx)%CP(I~F2^1KK9>ZCtfAS(?N%cIJkML5zeMM`PXA8?3tVJb`0e#HWFziY>04 zOAjl-D5QEDq;owTAbNg;UB_fj&X2W2Ny49d=Db3+>bZ%lb#nYVTpoDlnO( zR7sOTgTY>D*GT-=D-yc0vTR)^wAhn*Ztr)}!G%%y{1S$l z{-h2R;D;N9FTfRGdBAy2{uQk0`?cLOLs2@YnnLdjjCPC=3Li_;)FM$TN=cr+io5vN%9%t+I@O z&%hnr&1~gO5K_MiIVtm8fm%!Y3w>$Ye`HUack^p zNM{c0a!(1+2?hPbeA;JK1@ypp5AY8Y&ROJZn&YccHv(;&rf&l-6py|59w%Sbmxyai zTeXe#d^KdMf((K-BptV=(@4M$vOrjzd_k@0!@h07rEy~y6iin#pJEvENvRKm5+4+6 z4%RXv180yeO~|@n)PWE-Uw())viQe=IIC)w90-|^xh(--G?C_xWcn9f$EA|U0`6rV zz0lKYU$)NY{R%u_Y;D4Y(#Vz2qI!}#D;!wFa9zB?WE-bOxm~L*tDn>#T5ILd1j~}DqCg30iQ$kl+{r~;V*zvg7^Vv zc(X<3zQHj0j~-KgqJO%cCT-FD*7{v7?1(SgEn{mUdb(mHX%xGo|C;a4fQbt3`0Zik zHvEhdD6Cqk6(la-*NJjRt7D3QVyklQ(?*VDlhljob|L|>sw@=Wul{Gl!Yp$qZC^-9x-f z!Zg&Z)?~yf_Ish2pPaacWA8*+^tc6#2F5K`xGER`sQ#$aC@n9<^=j_d~^GJGiZW_BHR`qjDYo3RMDY9 z7k)Bp^^{|Q{??S`@>m&tN&RzS&FpvP#QDIsbxlDl_PHBwt>hGD)gi)>mohkV z10zA49dDnCIQ}!B{dCPd_AayksgUFE$v4UZ5u{6$Tel4 zQP#b#^-3@Y$m9^z_J^y*Cm-E}L#;X*=!1w`dzJ>F#aVV&PY@dUdkU@}(QX}vE3 zz7w|dh>MO+kt~4UXc3sIr@pRkXxEtzW?9Wx9P7Ttd#c?WzJ^K~(hy0gJ*nZ4{SH`( zpNADoL;(v%>78Nx3HixVDf2wpzN+6{DUQ>YE0o79-UI~yYG6}oY*-1PMjQB|j!r^0 zWpUFjDh6*gGsoc_Kuc%z)7!fumSF?*TZv5|ycM-+#6Fp)T5Puy&zCJhE?!nvF3^Kk z0gww^;6+pIWacLqq7Fluu=B>Q?Py%YRJd9_NxwpRh@I(wPzjX&a2MiYBJ&XlJ9n=1 zf{a6YM%w;PqiZ^==GGI(I&Z2?gA{*aAAC8i zam!bLjAcs<2{a2_lkMr#H?Dz`DS2y2QOj6hmx3!tjoJd%D_J^d@Ys8e*Ws}# zP6H}tE$g-zJWzC>z)C!4pd>L@ej;X0!Q6Rj^(a~04Y07F%i8Bw%b|0es*GW_&PWak z@#(vRvyNxZq5*gw73!(2k0IVs&7|@Yf`$PJb5Jz{^!VSRja(I)=W~hOjxNiOKMI+6 zbtQ)g^Y?#iuQB{VrA(JMwAB0KpP$ejW_my z^KJr=EAl5i6h$!ljl>=W9x@-0wLIo?Ci5e$IO0B5ePSKr-c<&$e?W*eC{qnFmIv;e}W5$~ku}HgN*q zX`Swb9W$oC0d$mKDp~sOVRLCLrav#ubAZ_zuYKKw=5bYB;0uUmy!{;&Yv*P=$0zgG z7Jlk{u0*U^0h3t4xMx0g?f8g!{J*tR|5IwPKK&lxW&dy4!N1G$MlM>8rvePci;>*D zVd(;s=jiJZ)K0-$u<`WtAF%$gVTlXTjh>MzHU$4AZzTZ_3Swg*C%-qMp)X6<#xxe_ zE%?Xvh0ZNSM}0mWkShaCFjh#>>z$&*5M%yG?iu)mK=%W!Tuz{ZU*`BHI_^`K4;HZ% zUT|Yh?Gs7^#U3{{CD3G+zT}8tnVfoz!Rm(9 z6#hyv+Uppy4}UZ$09QJh|FGLUfEfyu4EyQi0BfMrD8vW{1m~;ePg=lrQK^xMO>=X8 z;+i!wvQhC>8Ae2ykGdwIx2^V*X)0S`*>0I)bEEySEA%9L4F`e&SXf^7{>Xyosqjf* zxok0rk#uw?{0gZYlG0TPWf^n%cBC9N)`SJ}<6gw@h%Ps&JoUVz*S#)zYdLg(xP23V z(dm#d+`yZdXi4+pL^vRhwHA2IIoDhWz$yVrF($@pd4ve%enHbW53#r*Xz&*=u!f#w z9ajST2A;y4nutgt$7_X7Seic}y=b4Xz$k>=KgaH)=A~G}t zYG+(G557X^L`SOoBwOv4CKJbwJZMBeH__Uk1YubF6>DrR!QxN2}bb z%Fj4^MT0^eIew)$)hDp3f4PtU=I->&wVk}xy3F(EWYN+WW9oD>){T^M%LfZR?c&J{ zLKye{{oSar#&|2eoborWx79}g=tjr{GR0ZiT-^G9%jPKusoh0zKK7VR3_0jSfsO=n zI?jaJT0dWV|3cB1|WRVE5aSBT;87h6FauG6~0sL7fV?NRIm z-?WX|v^F{QL^SF_Ig-jxkj{zP^!-?0!Z#GYgOoHet)= z6M!?@GAO&+P(H>O7jDaUa)g6T7SXmksiusb6A+sxL?<^Y4+A#H4fc`zWsUb_9Ci2X zI{;-ddgnyE#Xz*t1|QRCn7`@klft1V#sO}6?U?PTc+B(S1+<%g#&Ta%y@0Uwt+|R5 zWqdfjmPuvdc57AsmAMsog?LKYl5h<`#D^RzVV)4P%J?XpWQST}gS;iYodLsWw8RxC zTkPv0SsYTX6_!8du$pLc2=c9PwRtqwpa$FN;K8_|2}GuoCvaRp*p!qwJSvkP288O!KBHpaU1Ct;R;BZ>)YE8}kPVxkHeRXv- z9;>@u$!F-KcVBa$9Ea1B35o7Y7z>>~a;{fQ3S2BIr-jj9z;sB1(HlTnMGUwElbBQm zJJ~3@rypwJ6@n>Pzl$i`w?y`>o59w^OYzkV*jeZ|k~2?B1&pP+w{}wm2##n4UiBpf zTMMK5a|499xv#h23t)sF9068%09FOyN5XsL-_K0`d-uHm_jRWpxbZ&lC{|{^$?+F$ 
z1RyvF!B*|bC~x61d33%uze=VX0ifhZES7zl4=i9Xtpu(6*|DgZ(?WTOti)O+#dSZ~ z=M)*b$VCiPq={U}kf>z^;}c6^Omxf%(-ckzrP4z61U3$&Jitn{RJ78}nM&TQ6?X%C zx}C3VnAIC@=JezP1O(WM4x-;)m*~r`N``Irq^oCq`!?10lqp;}a=c93iNZBJ`5c74 z@@x89)Ycva8y93K0X)fd&s85lxxH(~~q9@I1m=hFQWos#^+DNh~%fo-anrT~%8mIMP5UOe7ZSbCTx; z^w`c@JixLdNljS{OfxsT2dU>17kK|^N}t-)T?u8r&N9t6y?FnP6)whyGB~nS?qrX3 z__frVVU|(ZM7~5FQyy^Sz{=p_ex5vxD3SN}SsL|O9a&yR>Dn^&wLE;L?)U&sahGR+ zru91mXw;oLZH29Pew9 zt-l0hZOp1WW6-zoCiFLnNn74@=cQNS&#mEuwW_n^dU`$FW6}QTK-#gARzTbm4!7#+ zCG)*8hkSTlrqQQi=>5i*heTT9p)zjYOSt$CmN}O8#oLzgiWTLmX^>7SpsHEipiaJ@ z7~#MX>{kImpMKMx$9!2O7xZ&_{=2E=4SjL7Fz}fQ00jWZN{DJWi(d3l!U{^jIV0V1 z=f2;zv>rj^4WJ*5Scy|7Q}#%L_nmBF_?6s$H>BRTz-Q=(FjkH724z8KRQS^M3oKA^#XF{(=x_#XMS2qu2OCX$UH!a*#%&`LxL$V zclh8q<_A>NivvmtHmZjW)GNz&Ui+?Z)$eYP;Q&OssPfuwEAehY_}-hIqeP{d!bML6 z>&0*ypJS9$?9LCpGFZ`Dg5PbHj=FhC zwUbsJqa7F23lWFRZ@?KCKfiS; zIJ4a;c41yg0!tekFJHZ7G?V=mF({*8hR_a1VB~n}qCMugM%hR@CztXB#hw7ac;bbC zTs(!)1SK>#LrKMD)Ri2gI=5V@*o-dd_O)=YlBc~SMhO@3RhV@#-?>*E#NP`sEVqg&nS$9o}Qf24D5;i z-Re`2{Rl*Zf3j?=0)Du*@xXw0D2%x#4vlK)2O4qGS$Zr_A<6T;rSi0T%pydA9&jke z_FEbltA)n;M0Cd+mJg?`gkW-3^7)EbLQGw$|_B`$e*pwk76=MnnLa3Y$Bj z#g%>esGnYV`_o%>xtP8_u3V+!7HwW=1kKl#et7PrYF65AfsI zf3vAPD9ZGsI9fuG zm8EEeHM%`W0Zx{VHKdPWkB`pVt;*RRMceLSu6xFLM?68DcnS{}yJJ`)KlSF(RRGtd zJVaY^MeaB5eTprR_cMN9L?X&kQq+$CAWPqGKV(zDguhFfUpw`63~LJCh;Im{PGY#S z`thp0A!{eOMoe6wFp1WjJP_QsKj6z!*>Y>|J!CLb{g?*qp`m?mo&v!uVL{aX9^vbl zhu7#i`={O=3)>k^n3dFa=ks8El=k&v?MFqo=tYiAEV18HKYHA<`+00|X9EScd9QMU zmc$~mch#B}97zu;9p|}LkN@&o+jxVlu%|kdrF}Ow&b7{7_X8<-HtN6dZ!+8_R&+6doO(WY{JlGIo$K}${Kf=$4h_F@Z_ z?(*`-Ru~ml9P2yXh9&=k+I5IS6^ePpc`(?>j*z5E1`NSV$@tyXdo~diUKkVY;G{A< z++U75t3_8;|FQ}~&TD0>l#X)sTA#IJ&Lg?g4+(2Djq37U>~EzojN_@7w?(`nS}WKL z`|XDEnkp0WmR6}(ez^SvpiG)-L*sSXN+!2-Gw*n2x*V#Y?4VJhyehv%ZxuY-bbEa| z24Z*~H6HJH>t#2FvaWS&y<8pxU+0N|`%PRRvJ;yeS2~@ZB3Zk=))*;yqNGW@$x!oV zV1W+2oVCDv)=eGf+J;W{5X;vMUj-|onkbV!VZX}RQY41tBlrmx;48Wo%i|HqT@>{= zca&&DD9<2wNZptzNbBD+?yI~F;g8*w9aJD%!nZECYbP7qd!gjnV@&Ed=h$p4@!^A+P zB4Q#m?ijDi07!C0p^L|I=&!Y_6GpQOz;5ECUJl~~yKmg6^3#3$UV8?DKA|E9+(|f0 zsTL2n*yKCKzW)T^Bky!`g_<4noiu6}3**&|@-#Ohs-cH*n>sf5Y)`4V%QT}8>=5IG z-ubHKI96d=;%`v0FO=Nq!7I<=wEk`UW%J#%uZm|kh5*!#DbWJPst_m6eT5(oJTp?l zDA1jGGm#o!xzkpn$W3oKIYQT^g!<^uL(y)z+)#zas4AYDUV@sEh+!3zv*5X`F$o8b zGnM$yfX%HW`mVj|sUBrqy;t`cSN}iKh`1RehmcCuJv0WIJ?H9GI}txOUcf$ebmx?N z>^*3>f~BI5Q|i}unG;;*&{oUfRQ}T0U;#LD*%$mx%?nsv{Nu@cg4;3WHoQ}p3I!B6 z@u0e8$s7d4YFFDlE)Xr8Xp4ca=OOsW`M=BWX^W>Id`AdLcQC@T*m;DVFPQllAmxG4^QgypChtP zs^un*>TFBG*Vg-;y*SO*ZO&Hj^o_YaZ*ZLj8|kB^KBQy!gExtXjEYWOI$J9BH>X}w zJ!m%Jh8lf)GJccn|`Cc>13r8jIxwgiqKO#+<=~!J!fq?ZTdd6r+3kV`PRb6m9s- zni4UzZ6gO338Igd6pYZZ{A}LzzH+{mySv+PW3kxhF=*mpL!67L3kT!j{UVWA|tpO>}-1~EE^fuCoqPQhlz$=vDy68vjVr% zcIW^n)hv+%8lO>Vm^Yf+A%4I())&eRr>pNoaDaT2i5!42X5Jx(<&5 zBCmJ5&XR)J*c}~24HA@Gk_y5as1A#?0SCK~^?r{@S}{aN>$B@3rJ}ZWW}f0E_He)r zfxYfw3}WA|shWb8@F!BQNKbEhyF7oM9kK>BS%chavcK6M zyKZa1RrzmPX-tM@qJ8`3MlHaNu2m6U%}34I3CS6+jK4*I>izVR_6{7E3}M?sgQ{x| zZGf5mT8anTF~N{A<7a&63T+$gTGJn06&x%Ok71JApj{jn2wIMmvdII38V*U+O<+C! 
z1p$*v$SHgmp0tS|se%KpNP}Ojg$nqi9TTgLA3D23Bviu2_gmw77Y2Uqx_kSKa5)$@ zMZ0hPg60#oJ9f%m%q*6d9v*WkmyXG(FVH!Xp;uiGMxX001qJ)#xtl^_kYemohj7K^ zC=X6J11??aCq8#X2$LWYhq|tnL_>5Dnp|iax7f?(7lwWz5{Beue`gvMAAkm>tT8T< zi$7OLPZ9IaNR@-peLx|?6dnD$?1jRI!>e6Jr^-V}PAT?A;u3V$H*u)bd5+<)-rzoM*iFu&=Q#vwpJ+XBGE?&7CZR zH2PX(J*K|%CC8k^+tJ-5T5FfWSp~CeV*{1C#hm-0MfJgoW16pxO07A$JZFO7CGpg2*oEc2Os;v;u+{Ug!9B+GjiV70(@oWOA zaEH)%3zy6`-?p$UiZ1k1_GAnPbx^z!@gKr2t?Nud#Lk9L3aC%F7rQ&tExMV-4JD;e zvnx9{+EL>Zj3MsUY2Oj+!Z-rUG7= z{#+y@2X8`YZY<4eB=nnmq~q<-LXs&Fg6^15Gxz;9=-BUedjwP8O9i)5QWlKa7#$P}QcI1lAAuEwBFCg{m2?=IWuU_*zBh0(&*bg5XBm*=nipI6CXjDRk` zs0`GtZ?JgnkV~F-ZP!v-^E&e0no7f4CQe364xo=F?0>)VS$*mD zFtnU4wJ0fWl%`)xkVw85-fbH>#-n}Gg<|DxMqS-COa07`Ql7Ri;8zsDPeeeSqoSj= zn|@_$FI$N6AaCI*0PWH53(S-u(lHF~Eth3yX{cG~YDLY1tyX5yHglh>1SICqP+@0x z44IYhMk2PYnLl&tYIdlQt$$jqM)qAp(bEf%`u7ADz?G)c%_Cqgl`*!V%kr7>huiN` zH54;6QX&lPsKxhM)Qj;$IR0?ssA>iq)%lY8o*589Wr%jat9D}A^qAP)Og2u6X5#sf zN)GUsXCttP=-`O64I#^zIP6z*rl7USy}l}mhkMn*5ELDvssm&7RkUO1@(>=-s9f}c zCL~L{kmMA4ISwlJ*+N`O42%BU)NCWyJw%sawc7uOnkYk`2zH9KA3LAm$Bwl5X}sG- zC^{RjHCB;^loje<-oLmP^hq1pfOa=f(`6No3w=70Ol1e#JiXCAB#F*4dEJabO0HM0 z8k9y#-Cncv?JdQCT!cl6+`+m;h{ejWsZbv4j|_5vg8luOAy_z8)eWM=GrzdKy*!W> zfHX1Aq*3^i<#J1Jt67h9r_o`HlHEX%0TC<;2& z5dTO3kWFQ=>b%T3w%^RGe!2=!P?xja{_m~gg@tGg{efn;YY{D0C=5hRTABGJ=u&X;7y|W)*#vX(JkM8$3^8f0NN^`JM zGQE+e3`R)TIgI#Qmpr;|&k4SD?YG_>;)=5GgyMm0tSrGisRO|&ut8%a;IzB+efrc) zE>`c!7{H%za!)?Z+>5R`Yx8BlbXS|uvsFUw$oZ#-H@_hZmlu2i%ioBSB#Mb{Z zM%RxZo1MV%$Nn^UH%o+skYh}ymY{9ag>skpm76UiNnlIY9v~sB|6#yKpn+YaV^1|Mk{^ts>GLsy(B&+>_tKkXv;8bJaG=9Qym9;_vm<3aTk2l z+YaPLs-p|`3;W8o0p@j-VO=b>4kD7Zj)9Fe42@Eh$k~9qQ*zNt!%C6u$RKtXVxr%4 z%`$7RtTGp&E;NeKawc(aA_O_tW9&VZ4zahavibgiDaiFDR%VWV5W0{~v8HtW%NzFyt(*y(#Voj65x zeA#D@Cd)9yf{q~krhCP-99p6+=_++VP#`tI)3fh=KHsvba(%1o;x(-jA0m4jOI(cV zsi{xbbE8@#eM6y(>Jv9MS*nX++*7oIr+Ni9rMN_zs0CknT6gzIC;SB}5LHfE+7Ld% zQE_-{`Z4`FqR+CBvy!gLs6N3=N7@r^4e@+xpWx1%m5+GTI4BW!LU}IZc3oe^CwDwy zgV@89c7BI*5#ES3zm@&ZUPaA>eUUdLgLnIzV$p(rO;sK4KoNT17BgA*`-aR?7ZNL^ z9UI_(U>Tfe*#8U(_z7Jtf1P+S1YfhVf#{CHy0O+dA*jk(!Fb0W9m2y9G~V8H+CQ8M zb%lKGYFV>c9qsn%*H>f_o~J-kWNIWfPumHu-}`1ClScKG*YVt0^K|Q((!DvA#?jYo zj`Xkcw_iOf>gdo)H0xW?8^lSnLwnrbpaIYn@Z>UC@pu@qk;ahP2EA9G9QZWakXpzQlorrFw*Qd`a987k!~K6Kd#k9pyCB;0-_W>2aEIXT z7F>h7ySoJU#x1xdxVuAeCxHOL-Q6{4@Zq~Nv*zBJmpgC0`r*9v>N<6*_TE3o)a|k+ zm;@)~cXu0jNJ?*6f7xTzEv2|B2Sm1Zm^^5g8r;NrAtk%!tnfe;=ElQKR<_gBhqMeA zf!9KAIpnCBk8D9ZEFSs_3phwsdC=8D;feE49;mi|_mOkfr9?jaTwe-&{3PTMe;u$g zTDswH-_vs0$ZtdsDOSf_d$?)+tbJeL#H*dUyW_=H{WXy3`u|BlWb@KvDdRcW!P0Je8no}Q0xgN^xUYX{Q!TG>GnS@aGO zYV{(pl^1t)o_&;Lg1>Z*d%(Aap^I~;r<+t^ zpOJ5c-wa0@veTYTmSZ1}&$iASnw7hve8(ozt7BN4kj89qAhJe`)uA(1bfe__r`%a# zX9ctlE!osJa9aFzq@_Q-m*mcQGCLbDhV4x8`FT%^KwuKRTx4+JSv%Wr9PE6oY!Gpn z(@fF=m#-XW3nRiYPJ|Y?K^d(vYje-w_?y$=sIf6rxo!LgZ9aEhkmA{3k*_a{v#>?5 zsD@z~*d5UBhKpoKf_|`J*)Pi3uWkk#PpB5+Xr(C#VmtY^zbWHe7)u90M~a=N-J^Hx zqxR-8>)rdkG$EIOA>wBcrJwDV3_^nHxrC5M_9d<%UE?571{&TFFM}#_ExZ4!cr?1{;z@qBEV{pZsgxXqz782_; zN^?ealPv$VhsZDZCPMzYc9+kJaQT$sIIS%AAdXgR7HIBnnzGKtcx9x0uE+BzV^L9{ zTuc$KY6QyyVjOoT0@&A4zmtKjbW=TkjJAsFq#opr^fB83pS2bRxcCIxsO9_Z`VU^N z(H18E)G(FqcHt6`ZRc0wd&a~#W{pU)nn5#M#|iYUkQz*k0iH=>rE6`ij2Pu;hCI|7 zB+q&sth|3b^0|r(auD)xZ=%qr^7i=(mx#8MmjRZtSS!62Lt~5pQcX^?v76!#})IJ2~GzlD1%44EMU6nEQ(@C)D0Zu{6l)p%?(ugLo~cBGTXW zE(?HwgB{oEawop?y;kZ^ZX#*KA~`wOsl7C(G&k{k?m2*M zoFFiDKIxxN5sO&ZCg5BAGZ$cvRab%|yBK;W0dL>CoMfiqxO z(lkxK*eefs4d-AsWEp$t`#5#xqFQ(njIVqv=k-*Duf1Ty>;zynBdybU_kk&#O;^IN z#R<$Pz3K!I@1@!xM3y+k6(;!grlzE)^_g{5A8RiNPo*zxUFvhE{qXb52^CfD7o(Oh z5sOQQjRV#UPss9h+^DTemq;Tk^|(7C*WJ<5cS2E1vPO8LdfG%%b3wt4sA_h`$3Rv7 
zL|&iA;7Mp}b!$jAYuL-;deU4%eLR)q9ya?8)uKCXp~6myJxb0NmP8Ta1o~7Dn^E?I z|NnlgNlxFeeRw0GenMv>H|480duAB4S z)~=I4X=&@hUe29_0ELX+PV;tG8D#XmrBdd-N3OV2kS3G`Yin$1{9E?+qos%rc9|UG zgyXQ=VD)?kU{^zB%i(=lFj;;U1G;QV`|sCXQIlDB8*9E^?--ebO6)pUo2>Re7Bmj$ zyzDpimwl;Rh;SbZG4gk;{`hei%x0kQu5I`L2hi|~-qQ5hWZGPb9`fvai*ra8GT`*90}(nz(C{-C=YAy+GS zTi89%-wB~^YZFj-ghgGzrE~cjnpZ~)41d^fl6Z4o0`!e=GN32ZbcP-2`%mQy!jSkR2t7iMPt3$0h_o1_!vvEzE#FOHx{peOjoN3)MjQN>WF z3XB}mkzH>Eb&?W#d@n<$q8N0w#+2^)B+@j&Pm@b>H$mcLaF_Y^6!Vu(Bs+@;)!s!V z9y(L=4u<|}IQHiA*%6sAjTUCq=zviAolkb+DvdZ<22{2`ra*PjlfPS`Q2!$@;c!>nUPSYY$iWS=1W?X3eKw&iPX;b2&-KVp_cjwHbmc%0sD|-;ytWI69;+GhcVOX z%VAMEkYb?o_bbd8(nC@X$lb0?LuM-OI=MsfRzq5>o_sEA7rcnSc{j-TX%oGwfyFU1 zjG;kjppmw&UMQG&&z*>fD2C+m?irM5!;tll_5)zd`4lJ40~D>Mk1_{MtWe(=o{w_+1y|ox z_2QPwy_`oZ%+Fl-UEqtF8m;CM^6M!YB6BM{ymj`6S|0x7iXCPmPKHGr$*_)(8-neP zhTfd5R1%ueY>r9UL(LruM!Iv6zKr`K`L=uvcPj{TZYghC;Scj6pg<<)hb8+vGZfQ< z-GR^WRA_d$j<7kGK7ZCR;Y8ZqxPE-otiX4lh9$%sj31!jJea*q)7EGAd&LAhCyyOA z{wziBS!&a33j2*VIK&gDCHWUNraC?$`ArOZt9#)YaMU{ zmh_5FY~Bcs>Lt`;t3VTsY#pk46O4bb#y3bbhw!)mAG`67)ZVJ?Q#95*+{dHDB5j;} zD>=(KA2+A3W>?aHn~hBJlDI01ny!`h)R!4^ zIhtZ?*n4PvidHwoZ$#M?E_okJn$?5PSmDB&=$rs_Wm>HaiJ+WGHr>wK_9!s>-HFmTL7uA_rf;PQ5+h;&DFx%8wVAcGvW zHIbrkPPJI+>K8P}jx8`fC#Fm%9_XB#-BFGKG|Y|(*c_*mz}z^)u1H~dghZvoVq_E0 zYZejQNxHVmv;SVoK8H2VSZ>~X(X}2?qnS$yM^%L!Hnv_f^8 z7iZ6J#nRUeHeJW*_5~?^D`4s}2+!p-$s0>k?fdV!CeK%E3tc4+ z*^fD1oOKMN+|yGeVyPvs`Jm0+WmUsAe=Ukuf>VQ2eCP>&Z4+!8u6J2=Z$}r?s>^Li zNiEtjVPVWF!7}?eHL8G3uE`mMCdhCGHsQA%%p}uRaqn0^_i3H~hnW_ns@Op30d}y$ zIl;1C>9iF3Q^b~g%6lLPJ)2iW-KCC}lDy}aq`bPo&EC{AM@(1&W!SDis69MIltf7p z70l34dkTHYF=hh3__9j)K;!&Kmp2c`C(Ek6b;+Ij}FZ( zBeV8>3=z0ay>GTUD;K^Ux-iU5KhtzI;HcrV#HIoZ!a#pyS>>1(;+5wo!dqQBbQFq{ zMH{UI3P?-mcYlGblb94r%p=>DdvdNyLGmPsrf?t{;F+t9OS)j$S%UPZFdHwF()VUnAh&(Ln#22W4hyS{r&_wVADD;9XIQqrmyNJcnrP+3<0vW z1P?M=pV+s0U3ux@=XS-G1@8^n9No&O&ur`3V;)~hCn_Z0#s4iXBul6l=eFoGOs9P! zc|D=HkV~j@2`GW5_l%KK&b$Ueen!TPuP0RCB*XBMj?Mz1-~b|`eu>=49CKm!F2+=K z`5m+wp;Fj9pJT`RFI$?U?%6=fYOIfMwl6(dCsw*gL^PML(IN;wXLx6MXwB_qHPO7~ zC)aSekrsHRRm%CBq<@+Maqo4+N>+!;+CxWea-emY*;tso5NRd3P?P_d&wi40@sq=7 zo=iFQ%2-T7Sn;plAV|5|X5sEE*ZsK4Q8$OBr3JZ-PuYEguj+H1R;~kOcCg>P&@c{k z!d<<5;Y1$g+9VA>IL|#3G2pauVP)WMPAlFPw)Cowt@ITAto||O^dxCrUf?MNTKVt% zw3GkK7W;+L_3N)4_iBp`*@Uv_FHejMUd6i){`~DMFL`95I=Y`W{d%a^$ah1@g<{_| zC=c~w@Sj`t2FHaM3~RNE&+_V&E`Oz(S3WQT0WR;VDJ?4>`>t;h=;=_CNU4HdRJpBz zBDyN;lci?Y_C!rbF#aSW<^TVL?+H`2*?Tc7W){;s19#a&JiwCQiFfthj(Z|+`g=@D zEev2q*LF}CJ{OA0`Hmy7mHX!6u-A+Wq_^EUZal470bZjr(T2twm`=k`nZpq>u_6?j z(248o{+b_J(FzTEBbo;u^K^VnQm79yk);C#L3v8zjy9?6yO~!N-uriKphNfWfDGe? 
zQr$Gzg6H|0bH_hWiMhv|3%Y6{o%lS^5JpU=g|@M@q?J{FIXFZ~_y8AwXZW(TbxeUy zc+t*Kn>w~N`Aon0DNJN)C%xv{ksC|HB4i^!NItQ)SPi1wIg%IQPpLp>^mom}nK}hs z6dfN#7b~iP)AP=wGaidbhP5Wv{tpjq{}8nLHK%OA6BJ7Vxj^tH$}o*@Z$trE^6w*T zV88aiXO}HK;;o%GU%>uPA-Jod?&2F!<^cWlg>(!1&paNsmRvMNzj{Env!M$?MdOXr7YbN-lmIIk=FC1>xM2iC^ak_*l&Yc^sR8H<4kYs zUbF1k`RN}w;*Rp_pa7qr`+2;2Z)GJVTJOuBD|9hksSqVwB~z=1pLb=O z>0SySA0y%hpXqcs@cpKzYUEZx1@G~L!v|ylsJD?dCf2@5HcVm`&e?(x*8Rh!23ZOl zKx7EZLqcRT!d%~t9SRxGEB9r`TY@K@9#xT1xZy(_DgNbMCJ_Y zF~ZsVbqRDWOMV2nVMiCGc7k>v-r{CS7#`s5o5`TB4tvHPkf27Yyw2&?w#j*r9IEKpHX_>%jo>d&)u{+8|~y7UOr1WGB}!uUdQ$u9}uCp zTWu-Y56`F9QSlfOTQwhIvk<}E;mASPJ^1 zED@*M2xx}X?EU~(rEYE`G%Ila2Qaq+qTL8B;C|AM>a;r(A+T}kjGS?nF)44_8C=ye z9vKCpRB=Z>i%Tq}fr1;uYu%}VfbpW;SS9w4DrII6e`hUuONvI?2BUuvZV)_w zZJ>_M>OzX4n(w)nz2>KYlA5;fOJI+p)NV1XyMz4XAK4TMTqWn(onB=EPh74FFH=({ zvG3D+vos`a&bo;NhmpaXG9vAa#U&DAlk6_q?)uKDW4!kFV6i&?xH~X#3;i5^1k*+2 zM;R31Vm;q=B0RW)i4{E7WydTy(kb3dxu^Y&<5;XD+KoRTeR zhzUFe@EfX$iWq{#UlgGxJAOEBbO<`OY`4hwISvkX^FmStzhCGf!0LfLVJ8^(a$b&H zU@l+Yt(`?8?@Q-hEyDd>?HjYY5tC_0Xp>tgL|WxU`Q5ydojKfPlLq%9Z+m%U%+dAx zB1LWjbn$VHW|maVW@%}!_#grQk=^4zt`}oaFr=`qB1>(V)i^VX{AK&~<(YRAcrh*% zn)x&Ak0B%f`|GG!+i+X-eK~*ayCItmU)ak`Bf+y;EL7;N0KG{YLECM7H;|umYI8>W zy8tmE=fyoUwt~t3F`H&w5L+o3`q2JRg6ot{qyJ}i9pE5RIx)TghGZWNPOT$})J40^ zCZcWk^9CaJLK!A#?Pe}W%MmWBtOuj1`8PE|Cs_p_a`^ToKot5{$$eVR-22m@X2Ae` zPex>W=}OLNWfH+6>hbU{mQPap`FOx+s`m>3BDX&acz<1 zM)i|a=Ycm2m5$)3zOewj{1h@mi_?pt7VT8$Ay6TE3}U59{c4$|9^Y-FE$_mdR+=2& z2RAI$0xj-{{<(;Ibtp0wxlyq>e7u5WXEHqhLOn#^ecU1M2u=#BhKQS_?%zc7=ruAH zvotr-xQQ#u(Xrz`;)eACepe6PayNQRLDo;1yHGB%07 zFCr`z7gblNv0BbAvdzXH4gKvvlj%0?(XoO%)Jtb3=D}^Vm17YvLs{YKb6Hlm;^|_y zy1u{YcJYd_(FvBH3*!cQ*?YJNbb>2;&!k7^0vwj%BrXJ6g|PMFFP+_~VkUoN`@W40 zX?H7B8Pl2GkzKzmhRmf-$Qj0CaKiar%!zp8?pr>)Ke18%U{9%cjDK&2U>l*>$a@i=^kZ;8d zPCN2b;A9J~mafv=*V7ZUnMdCfO2~!3A96VBAawMMaU0tFO@>VY!mlSigntUIfc#;u zZ0^iWQ+k_&6IL{o`G{6?%p=80d1zC=|37+A6U`#50OF)zqyFA^V7f+q>vMk7uUTlk z=3cUFzxvn1j4Zp1S5KRXXw%0#Z!3#B50mxed=`R;i~r=FzH&!{kEiUrTF?m#`3|So zF7KJEl*?4i=CRz~;w(W$B!z63jlblM*1Rh%=pDQl-_w+UUMcWdCw1kl8YNuEI4ahU z*pntf{v5(GPIth*M2X5_-{{=t++XhX#XIZg=A!4WmZ~zy&kF-|Bdzs3xbsDsOYhx} ztYD<~vZ=colOT`(5POkf0Ug;?!D7$R$HpEK+A?yt8#!`{M*Qiz~>jbMe zXl&u#fq_3lXfDGr9s82@j}bUuHKNg13mkSLwapMHGv1%!a|h9&g&X?KFoYG~HQ|gq{{iv*`u51Ym$axqb8F0fPl7*Nlm;Oj5$UM3!%Kej5+H z+Pr4l1-;vKqL&>CsckUhqkgX=_j0qmc-N&g&k(fGl*lEtUbYMqe466V6Ag#U!28Kg zXkuKN^$pda0q7&B5{Ff?Mr)9Jib7)nn2T$ecv@N9sy6<`ds60(VfY&!1kBAaNoCxVf6@)k)lAX zZbn=ETg$srd5%ZX7V@lGassUwz6n5>MP#(DKmQ#Wv1e>DNhk==&PFmeohG)^f_)O0 z%OzduJOz?$)?{n<$3zyVAIIGqTiv%4wsj-E$`+u_a2+roTs3ukle?e}G*~hUFOH`jRjqan^8P7!&i5?__hgVA{kLeZT3of?K(>wrYK1a7VLUjcwMZ}4-!8! 
z;BlCujnh6PHJN@Mw`f%GY(M&b04M zBo5Q#;oqhM3-rVSF5DJJP0~(LjKq+zs?hLpdxaaPX{K^wvf=beOme@K@Fp-$Xr7{5 zm?JriyX_sltb@h;jeNN%60AEp(C%fj3xq6XPo{Q4L1IrkgH+JCx}BPBW|=#W2gS^Q zgPn}Vv6g&>i>&&J^^f303Rt*4jcJ8R3B6GJyh^k(E3_`84Iz8)3X9L|R+`~bJc!M2 zav)ed3q_0~evv8#I9zb6Eb=)4xKv+pfF2oGaPZ?F-^gMCqEGa%63)2MfQoPyI!-7E zlv$m!{4$46_nXsUz_^c?S-3-JX`K24x^e4O?3qgtT4WyvG6KXscokuYzrO?`wKk1Z zQluD6jFUQVdxp7nS&(uL?Pm3LTxVw0ZayTkgv_pM3sVT@Bd0G(r9~wlbgzO1HaugP z5(%All%4I*6k>Wjw#OqEArw8fF%Ujf@3&)jViIA)^oY)vXR?@-A^0estgqodBJgD> zG97`HGQ6T?|DHTs#OVP?i+>L%2)#b|EsS_XfMu$J-xne~0np!1hszK0{1l*Mu85UE zuxxu@$!6PbeT7fsmIh5m0}DDP*~}`Q9N>AZB-)Xy;3Lhs^r3+~M1>;QrcT$Qix`kW zR2d?dr_e+xd{-`)HP*3m9xwe>cCRaq&^gLAiU(FBG&NSol0oZzqcWFZx=1kLWBAf< z{vQuzu5Kufp_Cs2zyRqv*M>M;_{!ZvDR+EO99|u23W{nF%1T?Htqt@qG22OWL^|iQ zb{Rd)Snj1g?f|b=rZmG^B_qd(yK3$w_lfG{usG{abx4~;Xp(a-t7%j#SEX$cQpCmnG;d^#%?drqy)mUP)<{?QS7!y)*nm6H5aVM4^8_#nl z{k-cd$+eBVA;IT`DH-tWh5h$Hsni)nreyIc#fFidMATJtM%68o1K@2#z!UQMXo|yomIiRj-K`w0 zDqeFBHM^Wp9$tRP2SwGr5)dk6u)vEUTLcroJdqlahGX|u?{Bxyxe<+2%GpD~>SFPe zRsPF2b>6rcy}jJ>^W|k~u>g$eS4zst&)e>or%omqpZnBEkVq>}-Kj zVgo0rsc~H^xx!SgA4vF18_zO9Z4?QP_7X0c-$U|5k;L*`GY2*^NBY#hSY|5x!2~x% zTfvD$IS1wCh~dfxF|V%QZp@W0mk7Tc?Ksm%xKx%j({qCo48x@8dQ)##hwsk+8a^Vg zmi)c4er!sdBHQzs^%Kpg7o-5Q@#CTVSgS~(%13MjMHc+tB5|-m=8=kq|IH{Q;Tg$| z%=Pqladi^!i+`758l}`l=>vsJjmd{XniMr(QkXoX1A{HI+%0i*#FY!ZmxlL9d_mv4 z)1^kMiJbpFJj7Y{i2q~2fQ|gESSY(|KAPm>KpGo@>2TCp3W(ENKm!v%W)4WTr~|IV zI+u^nL;`HD_+`$kie1|r&R7(0;I~+p>`CoFS8g;a&hm$4B0{Lm$``0$KP18}d>bj` zJV_49-ktR}x1L%&e?b@Y8!{c5>BsnDJ+G6pOeo~flSJ|~m~Oe2GXG7zvm%k)aezdMjBoi5%-5_tq3kts7FAN>9NKgB#@nS?O=-pa$ zOmU(XXGm?X?V!Y4-_J*w>)4k+Sdb)p(BbH1<{kBFnR8L~##ot^jA;8h3@I3e_1}qcNEAPnpIX{0dBY#f zhk&)EP{q#JN;*G8oo`O2Na^2Xe!D_JJS}|mC73McIT>X5<<`=kmYG3R z`Cj@o^G0XJSMY4Pcg2|ejWD; zbmUYE1r;BJE3C}7@;g@vVe#cjF&2Cf%GqI~d)b|D!|-rjk{`TgGlSe4NjK^@ZkwB< zn%K%OTEFBya^_X+{HKdxH%)ZIOqfa3T}-?t^|yln?wJ*Kd%G(Zaby!`+Z#|qKY~$5 z19PaU@lA*M84h!-+i$64Lmh-(HDQV|$Tt3!BcZDQniSB&&N;H(j=FGAZh&2nDbgwlL`}3IomNcW|Bc(Z@joO2P^6=H=nFzBinNs{XYZdSOn%4 zftO0+-bNXO<;)1wtMCZrKjkkg40v96#CHP zzS(tQA1x0`tdD8H*@f5>9F`x!Bh_gg#%j?Hu9fQ&0H&|EJn6H693LyrA{}e@Za! 
zQziNYd2&Ad*U^jvs+Cd|9MEQRqs*>xu~)6^H3&F_?=6}9&fh3%1fO;fh)96@WhlN} zQcW!-r~vRKVf-^g#ums^d-#y5sv%&25vE$iBMedNs(l@{d7Zm2(-+iFUz|MMA&J52kx ztj+~f>{MRg`1NzajeTM?Wj|sJOc@TNS(hI|I;pey5IMqW?qsgwpCE3!ZJpE$URAa`IA%s z!@qM6f|d(nme=050B^rG`C^Y8L`Y9%Jp%nZ#xs(hxQpp)DHMgF+Q)74kL!@*Nli*v zO%X~OyRQs@4043&lI{1A!kHyBizmsW?1=5iujpRwb{x|$O$1dAe&fx|-mXx;)dYnZ z14_PsL%=7~4be1E{jf#t{JmQkt<8}NrEUH*ut-jcPQZDrLAGW02B=us{GpjdL02H2 z8lRU^(s8t+ErNjP?iIS8iq~mdxygb18pROas~_2~CT318l~@jWju_#7w`Ytw8@5M4 zf84mj2@`2f8mUlcH{rg=HY3+s8Yt+uqokjLxf|(|ozoRO3zAIz=m0a-^fiS%yc}0xNe?d7^CS zGQ=~bi!T>h6+ODn@dZQ%^AOfx`!KaXzGEUFztXVpz}JL@FKYW+oz5h4klJT1AQIZ8 z#sgN!f-r<`b2Hu&h#%q#PEU_AY9Q4gr?P0Q0FJrpWlyw<8gtsU3m7Jmt8VB4Ha_`3 z5=9)*SM>RH((${^h_UT;`BOQf%DQ`Wgu?i-BMU$7(U<#P%&TULuL~R&j;aSHauO`d z;bvWd#dS&And%wHE#O-OG*51Rm`@xyJ&jxDrO z(#m|@H9W6h9g*v!zXnNcQ-2a|u9_6f?XbG_!3y57kALTh-IFjKG0%rxt z2|s-iBG%%hLv_v^YOi34TEgh1CWpA$>Pp+`RchIwVk^4iF#u>a6J{sE5)ww$&iBGL zEMnkbAy`1CXe;F?-0kGHLREz%p`nX;=Nob_n^R@6%86!DoaMN4GnA6j<*ESRjJo9Z z9`}3a!OIM!F`*1KIKF3SMye5`7O&S+sktE>S!Kb=VIhPwk|W8r!j}O@Zc>RilJ4Zt z+`U7s3_qcyp52y(>?5xEcXca*@4emOYwW`YR1d{(J|kSplcJZW?gfXfFvTAzNZZXg zaWMVhH$^W9QU@)TeAB>*`miXWHM0C~4z+8>SUh*|3_WojZx0~bUhw`ij5@PVy`Lfu zLw3ApkrM0SCctY@- z*kLva%ie`;9;V%^95?&JKP*6@T7CkRIdeDt^+S%zgozp?a`(eG*l5m-V3-x2xDb@m zv!hn{oteGXfE<9;w{d+AZE!cmSmn?1*SrQ5dV8o?%t#_3H{+@r0#RNI3=^_42J;u$ zk+A-u$*^{1Ei!(&WSm(_;1E&i(#}vS#+aehr%m2*nl^joQ&zkkrjvS6nObP!1650D ztCJy6rl!^I4GS9&IK7zVNukd{qhE#$TX)-7`%5#M7C@i#D|G6Suu{(b!A&4s4Gt;YM~#56kC4vzw5-wR>z16Yrko|| z7k|9;UrDS&B()Lx%@qsoKthrkJvIWp%AxeTpKtR7FQZEz*5GGcoK8LlCJ_-N9<{>i zu;271@4Nj1G7wXP_tCp)W~^K*HH>(xvYNRe{&=2cAzvX%M&@98ltdvfK!{}y+}pmt zus{hvjMF#B=T#jo)F?RS3-R+FJOLKH7 zt2YM};V2SRb3flP&l#r4iAXS*?)(C?k2Q%|lVTZO*#D6s}v)wY9p#vwklD2k>T>F$(3m2|9a*L0(O*3 z>pQs*EU~rHb#^m=#VI!YkMGgST~iP!{SN?Y&N>T5lasXOJgI%sm=iuEKsKM2MkXjW zlBg;VHhi9mS}1xPISggeh*rK(Tnj>*slJ;|MNFcp;ZzB$eX;nEWzPBed>ug~Es_Ww zd1wq(BM2>ifo&nYBe0vOh3Ye+pHs1ytYNb{3x4&Id?6P)PME#DzFQ;XKGFhP0U}qg z@zq(HYse%CPa}B`t%S%3MXTCk5ARf%2oIzz;ym_rhXy^lQPoj59`txA2|7d;xRV;A zCQxW$PGUk+K?}-jwp>G$^{QL3j+(5CkVUC#KpD=qP&^}o3!zI02IHmVky${q?#m92 zN#lmXe$l45=vG&w@QE$8cN62*1w<4rsrj}2tF(*bY1fqog=~F?i#`;^+(;6|b(eC} zrEfj(8=v7yn_s*weuQthE>HnOs%;BFDdgrjYjytdmQZ=C>{cigoNinH+6K;?w2d1L zeWk38Z+Imnp%PeB&$FD{`{GFKe0Z5iG!28HmlvKYG6cIK>uU-WZ&vf zg>G*j!QHWU;j&tkJGP}X^;iDIhgOm=Am_F4w}bee(qRpKs-$-6`-Rm}VXjFZ0wj^e zkdS*%X+pi^BCFr9Yd+r%JL2Xrz*j%cwv3GhJeg>;`gVbD6q~mTawK+4$F=xCEq&1M z-tZelK=r&dm6GHSm+9P!PgZh7a!gZ1D)%{5d!C=L-wltix8CTCu9Qt#fYDOlD>W#K!~5Ac-r;%AS@H~B;n6GzYz4^Dcqu_h^pNWqYii_vV*tWQjlRa ztR_d<2>-H#NI9`Q5)&-oaGH45NliHb1hx2HXEpBxLIYDCBHk`Zlm5%Kc4~gpPur_~ z`O$OtcW3DO;aRWeR!{h0Sm{-!#qocx(>>cuZ)DGuqZhB|7q2sck4kT|xsPg2Mh|*{ zCr&T_o%H$+5?JWD7U;Q#3%rE;ADkEA{;`$+d-m~Pxv%x}Fu#eXe#eTjyzTWoPrNP( zKRpG$U9{AG5*B{D4t$;X@9XryyNOpF;BDLJIY#*JJDVWz`9C(z^LOFvz&+4#9vCX= zlLg_%Tzn}5MAV^rSQMDJz8Q%m%aFM?q*fx&j*N5Uh9`H7PG;ki-bqI$3%Bl6Cdxt$ zJ)|ECqY7JRGO7dDVDbga#1s*VuS;Y&2LbJ5H|xamUs((cb!q0KOz~6zq55&pw$mpw+88$6;|2Zq^7iVQ`jV~`Kro7h$;b0}rx8Akrp zr!S_~0G{kMyCpBLPuOxefUlm&t=U3Ig3^S8M?M!REQcH-VX>2UYkE#N z^V8*@=%y6)CMK~T=EE8AW#qhqQ4FXM9b)fPXiB&~sV$sG%a0{6qC1(}$V3fdH6%#2 z)pw#r2YL(5$vIvxo)jt@*W%NCgOaL&TIUlp`|vAYR|VKJ@V)r+c8Z&0II#8#cDnOd=NUd>s`lcK2s?P*F>4t6n+nG-=a>=%xfcE%J)fuLEDVk%bIo#%ctIc&_cylU2-4p`{5b%u@6)FBC zU9W@#T}FkJ7}719ff0QLar<@S`cDKH<%QC2D7AMVE(@iM79n%+mlnaTqhRk`1Cd!v zAOaq4n)_NhxyMHXV+Q+55j7{DX0L6$zs*b3lG}a$^+Vl(dQQqr7d!LkP+6+iu%06D zjfn0PPl3W9UZfXp_Fdvnh-WPWC^VN+3=1+q_{o`^MG%iM8ZejE6&lF^WZNP2w2X1@ zrzO9Uwwh5gh1Bm*VTK#k-=pSy2M8>%XU#YmdJax zP)b3X_v+-3jr+oWsIi=IoeZL-$O#==UtKT8_nPw38uXL{uZ3=>hTH~wg@eV4`Uz+K 
zN#AM`P;8{SDZnd~!DV{3YTu;_1^dLxjWN*e#ni=&lEAmImw&2BBb;Jj15TpxRS?D%?i_k7F=W;>^eL2TXgq0CT_iTB| z5k7q%w7gkgy<=gFo?-&uAqfxf@WqYpE8m`b-{O>(+W+7)gSBsuwQv7jG_R4||4Yl? z^SE?@{)(|{^immkoBQ8ovbOQEX7s!rANagug16q~dw-Wo`n(Az{Q5Wk?ZC)r!QJ4A zeI-3TUHA{v+W&H&e7SpPP%b^ZY`nev4;n+yOUs)_M#-xsiuiD8i zjs7U20p+&~Qeg#M77O6AlgLpI2f=fFzmMnF9a3kO6ukD)5{5ia|56@&QG&fmC>v+q zrc@ghph~-kJL_P_kYfqGc*Ws>NZ9pmn<6p_`rOZPh4e|B{_uCBo+Y2Van+Qpd%=gu zA5cC8<-?Ip;WTL+7UH^4L0iR6;)0tf2f*eRmZ+Rn*P@uF=(Dpfo;}MDixcvqB$#i? zX+IH$uG_iZ412k6sq&Q_mu+ha^I$mJj67{0@K_J~eY#2G^B6-V;0ffY=LLS+;B_1O z!c9z~sBTTAdX9l2d@^?e@Wzso?&yA={QBS5phM zAW$_FUV9!%SxhUOoG{U57m>dBhC{|!`%g6BxLn+#${4hLjtW!cQDfGzhq9C&+7W~U z8CM5FOZN+r9fPX};5m;+q_CsOp|qboXK;%>DHNR=6bfiAIFHO>H#NwJ(fi#+Zq5xp z%@%1^SpKc1Zu+u2P1AD1uRNe25lo^GxIoGcAQ(_#+JCP4dFZsE8l{Kdu*^F@YOzUi zt>feHlBG4gekRM(Bl?Kz9nSnwlS&KQgC(W9b7>obSj;2p@&Lj0=WvGHrkfy43u zQNkpyLz!MgE{ZCKy*|TXS%h!p6U^S9J^skhXz5E{2fn3FWWX>dJ_BV0mrzEYIfzajptH>`uRKw^Yp1n5#C+>soy+0NbTP(Nc^H9h>JYeSF z_j7KD3dL*M5>4y%biAhkB({e;ZhaqKlFMI+zAL@XcB4c2rkC z8Wam^h4dYk9c<=9k(sq2*GF}4s5YN*|4yGp74>FsigtjK`W~i;S4Xc)^N%t1X$}TC z(zDX4a?w(xb{lSB^NU*TX~WMEnIH(`_E+?ORK*=Z_(OTH%)irT<4FS%83peUKaud+6<2W3UGpR{Y1vXKm zY3Z`&a@b@uNwHm0@3*EoueJn&Wwzg5uKqRBr@&kBKEd=xP1%0I!RKbOVtrf&133Nx z#=USqOPXy0FaXm8?1A7f5;NbF%(_yyG+_bZPhHGhD7GYi27^V8;sS*AOvB_}W61w@ za}DEkm|~dhV9|wzeyRn*X03vyc!lsmpSpVliPDQKnKg?wMO~HUI>+c$$yN-i42npg zMLrfW6t9jI+()%{=rLWv8kh+n0ZIlRQ2|Nwuvk#|gpKdGPqK-L+3oQXhPjQL*Q28K zwhAAWK|1{G4kEqOhv8)5xa0A|@jnvF+Ldwt z^`e9OVnqwkftV&Fg?2=!9O&0MF-Gj1CkkB0Jn}JrYf$elhQ5&$d|8}^OH0mjUp?Xh zr(qG>aenqsM>XY-ljuIAPeCn>#uKC-?~Q$W_Qq4!#?z5f;Nv^&F;|hUI3?h~GvI*q zZ6`2G^|22vpw3|>oyBPVomKY!#6S!DH}Lj=@p}HwwwSnoe!X~mWpSGMpXc}2m8mbN zTPy;B=Myh&Z_l}J*DHaqsXbR(0XsYaI~UK{J=a5T|41kJ-rm_{7yn@|k$M9k{(D-y zF1$Tv_B>?1T~mJD;f>p2afQBiGG?my))FtNpj-AyT@bC{u?e0%GT8t2l%PA(Q%r(z168ekJC=~%JAzw zv5IIe?+Sq`fjSFFc=o6F%?-IsUJD^~*RlUI4tF11DA|}nU$o{QdQSsea@`oaM)Gyl zRZdQ8By&1w3ZrXkk%O=VrWfmm`gYja{!JlZ#K|qz%B#?ixHZPS zYjtWSvxyI7fU#Eq!b?NDz)MJBsf5d~!k;BGkwZdot9UHj& zYwixPCqEnbwBl&($ejX6ikPOg7z)Pd+s-D*sw;U@LQ@r@5> zEGtblIVl~RX9j!-cz|>Z^eDgeksmf5RIvXCWp5P}SM>F1pN0k+_u%eMppoG27TjHf zJHgVpTd)KI!QI_GXmEE(fZz~ZLh|*0X1=O-rsm?Es=heYb*@gG+Gp>zp5L?X!)SKm zS~P8iy86px^&PBc&eeVfbo#HGbjWtRmZ>Nj_Pv#ydhmFYF>TdIJEDAS1lifSwYMmG z4Y4ilD8lguPQ3dnem*lWWY^M}YT*PQ_7ck(gvdpz@AxjYz zA)Q3!NhC5P61FaY)FM?A^N4YZqO|$ZUYltoH6fPglTBuCy)7w4>Klo2ETnCHBTs`} zvf5=(n;+~pU7%NA&wF4&$5nRx6SDa z3VyFVA-qm2`4tq-)YsnaDMko>_;`By6ztqM?lbY)j$5j4cF%a+8gNsM5}!%#+!R70 zc8Ont)Y8am!0ZtgZ!D#};Rw%0(Mg-@yJ}eOY**r<*Y{h0ir-mRva`Q#`QJ!jbk)26 z?8ti^oKRxbcK{xcFCAMaS}$6w*z%6R%vr}c%5{DfJ^&qfY*2%+aw6lN zOnvJZ4!sBK8Di_d-2u#7h)|>*f{94BOEMZuG8ow_?3dVP zgk@0LjM$qnlcJ-~!Cu@N^rCT7O)STSg}pQuf7ykk_4e3~!Bpq6XQWFU{jo&1FhKK@ z8K_ULWdek0l#G|s(cLw>12`RO)od&jsvprLS7T$BGplrW}8}1 zNRTizx9Ux6^|gN_0a|=5rFR&l5er8c8a0-lc`FW;&3N(9a-P=*TFP^ug)?lkn(L=O z2L!+QPi5&};h(|}H%T&k@Y%(!OWrT&A9lI^q<(-8R4Z$>|KAExrT z?Rn_S-G2sY{^vLAJoP^?oBzMFApM^^bs!1MX;SLA{qO~Q_3YGlLnhTIUC2)GZ47~{ z?AHzEY}!7ha2Z~A?@sz1rz!XpI%{YEp^$KXV;wl>mN*~x4XN4z>P#Qw=0PjkImb*D z1$`wxZM_y+)Zh5O5OtGQ2BU?PjMaV`dBafL+%h8tOt4qNljsjlO)DVshP8vX3B7g> z^C#01ok*%|aUfwT!VD(A$BRVok&W7IH()Qqf$&?KzMat*e?4tlG2TS-s9KEdjNznU zU4GCWsEg^bw_c&Kv?}bK+rS;y0H^9U^F45I=whi6eQ)(B1K-d{IrbBHnWq+4pl!V@Tl z7Ll&8`(~j=;Ek2KCR-op;92dgOg!Va(|*@RaYu6SS5# zgqd3fp+XxVAuB}!bghWau$vS*LQP?9E4ssEG}qoT3E&v`&cMW*dSc_GP+?Dw?MU&C zub(b__RHXzThoC5dN}F4dnbqVXRRh@XJc;(UH*>-39eCXvs01_h*yN9=6NB}XPq@O z)z}yQ*4YVJX-UZCyYw7xBVwRE<&~kkf+herx5r;*B89pb0eA2l2;6y;sfIikLEU;^ z6J@BB#YG2$l9E(W{C1AYO`m(TD4+7GGG$>Yi6X=%_CVs^M>B8K9p<2|H}gF~t_QRR(!e;-5EeHQb5X!9Ntsb4Qd793o>duMx9&3Bo 
zTY?g5ZvM7O@~`ps*u5^dn<8}Xh(%+oM9^oh!N{@yKMEe8255bCJ7<8cK~kduWU1TPTvgz!yaQU&>}z=J;MAzg$dQG8cqqOQuAn zG_aFny+E)lh;~U_2@*}vz(O`Om*3?1r$ETBhj{;?CraRkwWlQ`UI(|LpA_(!^8i6|-m0^!Q_4W7XSwOcNmz?#l8_5IWh8U~ z3Xc=#wN^F$SXl*p)3>d|sy`AVe^fJkp|tc$Kqk~bxH?zk5p;?jHK{r<@H$iDB{Qd! zqG9767%5{K5MI|%C(eES_sc$RUd?;rM24>d0A}Si>-zPBZu>b((kbvM^v5nZT4#SDM;#Shm)sUl{E3XwW3o)njI;jLgjiuslAEUAG$$GAo8I+)d+l<=L zPGlV43B!2))lk(FtLBlfZQJtgD`sJ?reM$e3)#|V>ofagrU#r3oW7fD&NoicaV|XK z_M{jRS?^3fQW@*gMt`1UUB&)xofXL8pyFK}gTZ^Wf$e4zb>gkEiZQjqV+FQ|CTE!X z7*|3Qs(a`Gnz=1Hoo)}n({H`&h4 z!k=FL?#f%y?Z;BvcNg--*~t8?u{$BDurW}c!p|vM@1E%UrK`x|2x`26=T~bt=|hn; z1dqowHyFx+_ykc>?_e_=Qs?KAh_L-I;J$VnLBgAK_pHp-&+(B+(2DuoT&1t{TtxQ zp2dbR38WTipeEoGkHcSWCcHsGjJ`*PrR}Mga*Wn8kRh$GM|E_v)0@G0#9h8&oW&}L6kfp*v>LFqMySaYLPkeUf zIWQQ(=%(UI->ioeWQRC7>-cSu2V8n$0*T~IJFCQP!qhf-9k_IL7<~EV<>%uQl(Tx~ z&-(gN1BT*@NDcPjT93y%GBBv2*oGg~@tHb&^+~4+vt#x%ZS0OYkD<1KnLNNltj1TP zs0-S)s)MCZnyi}IGPg;PyCw^ET)!d~F?Q2o*}KS#P%wD6k9n{)%vRT=`8|LE^_FNk|+2-VceVUG+DKmTC z%6HDLP4DO}FhFwP-Q;ZQH*1Ue4O@6xGowmE$%VbmZgN;TpE)5&;9D?RPZs?y>_!8x(8eV-XSmet~su%mbScqKrDT$t^MVY;cpSYd1=P$(_COCbEDGs<>PO z5(_6;&;a`TfTJfewtu4k`7<Ck-%wyr&1V|8^tGLYNqw7!V3% zTJbH`CNazaPMtMKyxAr<3 zM4LaMomhG>irLO5q^0q9S-wIhR_B4GZnY8~FfF<_JTWeZ{(c zwxQYr$J+07%v{`u3dH8Zk-e4h^n4TRHOY;V@~<820^8?tbg-u3I|=w;lHnfLZM9<2 z-thk(r6@1=fsmH1a4vFZc$o_Yb~G|b77lbZ&s9+DV;G85LE`|)N+ zMbCj$eYJxQ8m+N_f6RiDhUo*TnNuEyLzlw-kU81D+Q`>cKuRr)u39q6rc1}Qx1J3% z8ZBas;w^r7FvpQkL`E51DOWct1$8tIKaSLeTOmOjzP*BgUnE7nsO*U=5_QBnKF)t| zG3*hKPc3j>?c~c@hRv&YLwjk_xXFDbz+Rj`8cs?W%W>+Ms!^*`JY}Pw+n-R6j>gdY z<3&KEpC=HA4wOWs@1Yz=FwwxCK@x)|Lx8JBew?P4KX+S4n=DEvWQC6KVa3$%{xwA< z&cpk867ZwR9OljQ?xB~93>N2H?81DrxWaYX+E8THAp+!hVVKN5{*?JU*MJi8R&YJ; zx0}cy{$7&q7&gS*%|;~{7MVGdJaw;^_}L|zF1*V(N>ugrdWk$tvRjF)Z*fq5WX#JR zSv1_ebASG)rtzj?lASe0Cik0*efa;283+A^=C0dKPt;0FpLAt+j6z zWAua7Kh@0q(<-aiJW>kIc&DeXD%c7gnessxw+>0US<d!$6EiMkc^>0>#1dryCB|gR> z0|GUkEex;UJDK$>Alu*cOP33v%6=|zKE9kAvo^sgC$k|VW-%I(cg|C_{>ik>tQEqf z6r%v4L3n>|ZmFzdZ~fsl7>EoHJx6-fDB(oAPctX@Z4?-X7;PXC8QY}@3L7)^K{rY&)uhmM4;E z2x%e-qc8LEQb-*rsqf;XBhlhqc8+Fslk1odm#d7a?E3N0EBrMPouq{x_{b5vpsFv+ zg@=Ii#|PNCh{HGtFMFmAR%WMgcS8KQ#PnUGh%C_M>Gu5nRBfJgYo}GJuk=M^V2sL& z0!yAeMi7#8N4`jS5>3R>nrzj6y#G5-tg6QVL^1Om>h+)@tTGgg+EJoS%7qaeRNcg{ z(IrfnONdSqK}5SV>1Raq&Ddcr<}&DWvd8U(4^VxV4%ad|m*{j%wp)yx*G>zB{6IKo zUCO*2q6Sm*ce#KT%O~hPO=}Mdq}?IFE*kza#{XI4tg=6`yds=0t&7=YTziFCoimex zCPU+jQ4@MaDHAO!jO-RL+frIjGz=fSra% zZ9m&9+=tRqknKTVn7P%2Eq{%-Kk7kC@pwR^FLooWT;0alOG`lQB$N6fnx?lPS7TZM z{VJUw7IJ40+-Zi3syk}8X~{aW07b5j=$Hyj{NeeN58A zX!CE2H93N~pl2U00;(UD_;wsK@A`{Ash6X#Fv&JCKv6Yrp3eW4|7owpbTY0#qz`GU zi&-@=*=O7C=?Yo`@KIG7+ZJtWFZzul+3RCGwR1_zaKpPIRZkxBRw;t_6iV5|E-Y~ zdU!M_+lz|5l>w44(P^~1ZJ^J_SAUfam4_riTb{#x0Q4)i6Oxl>d#m!zJ%aXai%ug? zTJK_cW_?`gI-X&d!3x~!30?g;%4aYw z&Cf)?G~8le8SF-J)+z}cR!*(QO!*+X|LH$l77q+2U*bBsX`Fap8Eejg29qJJa3x(1 zACn5(KS5c7(xMP|G1xDJ9T>ka10Jq%L0`!MDm1=8#ZMP!K4`)cc4=B8uP$}P3?;3q z29%yPym_*<{u-PMIH}`eM}q~6G>B#lSDEbRbkq^3cF+u1jCvPep*i_I)J|T8_yd@2 zjDVOBf{z>63St`tCZPrOaT%EE@Xzg7EztFtRuQ@#E~$S&nL`8ody|i1cONVHO$m z-7b*ZuZDwb_wK5nT?fnq3A0LIB3U0S$~T8y-71UP#Iwo->EK${h@VV7?X`d($?4%- zi5IGF2^m@YyiX!_9c~BBBs0_K-QwT5ilpbyTY+!x!wBaNhDTdx_*zgDYiG43oIM=z zLkU)3SE>((&ZA>;uS(d4xg_#(cgio=$}ed$lByTdG#yYzmo5%y`p^VPbtDto)}Ia4 ztDF=(T~*0V0fp?b^%rUPLo}AR*iHh6ZgybO7fESDfAyWS05pl^WHd;2E%VBIX5bO6 zJXAsyB|&ID5zDIcUXvZBgesXms6aAu9p513pl7Swh#VomE}l9`yxDK}iMe~FRPHbb zivFMIx{U+Q07>p3{D`Oz@+oU@{?MbbtfsXB)+=haB36yr1!LwX%aWt2wsQLn;Yt{! 
zPRCRNzR#mM>vK{zIZ0=@G+m^62S;H|+<|W_0Zo-OH-5DVFkU+`NI<$|_Aw)QX z9FrAs2SxtG99qy&`}4kM+X;b9dPkZG8<&Em3D^5n9C?68^!}=~Y4|E71-9mx2^T=@ z6b!aPko#kg8UqgCYU2rk%hv_T+2J?*;w*n;h1Gfn2stl79hQzJ>Ug-x22#?j1>A|q zeR3e7^tIfYR5gKebr3aGrMhJ;N%p8*bR<~LHHz+^xtN*}-*0N;7QXkmO0}ZzbQCb$ z=!|K<-E9dC2Bwz80W@(ku{Z_)XoIx4_ez?E&`k8H3S#hyXCfRTYZN@aI8qV&o>pf>hbfkHkL0k%TGo9%{Vf z6z8gA1VIstwm$sh1(MMg6fbnLZ@jU1AsQub7M`e8l%Y3egxjkC88ZU_CrIWGKWpY0 z%~;TZ^lj($6e4P7N%syZzFa#Z?w;$0B~5u+h->Z^Hrm@OSkRSA%@8s(u4hHoXK-w) zBu05_2&<97KEDa|RyNgfAyp@gT4S9knt`7oSZ!z!EhTP{>xPZ_uA>}26nBey9Cg`4 z#-&$QKYfYANqLnzL38ExY`A~CTdew0rgsV^5EM#;MOx&%qXdz3J(WZNcN=>vQ{FGBj zeY;(Z-YFXHlJvK7RxK6XdLjGKmBRy1`Qzpbhoa%xZs}fxWXyzKRU?rS_lQK{*!OqJ zP-nuKp*Pav&~D!ph-3hbSh%VMsRd6alu#t_KedPR5d`<} zBzX=oe)&wieg3wl{kjsQKofT;1%8Icar#xFz=(U#astkD!X^0aL#E2;+M?gv-nED> zpMZB-FxEeIZQYtT7SL=|A{bJsCVi`P5*OdmUk^oi`A92YIbT(3>g;>Jf`neI!5Ycc z^qZ|PPPlj*%U2k8@2{!N6M?rS+k>+y30cUYr?<93f?9a)5dLw9^Sh6j^Xd*L2rIb* zQ2crr4;?f9eXSU4ITkh{NMQR_-#$jX5q;}dgPfqlJ=Np+ z7JmZG{-)-Wd4;&l2K+0)Z#=7xQ+1w;thuCP-xmE4uCE{o$(B1;P2>}?3QF>DS>sp% z6MNgdhXSv|w~AC_pnFC$JLRep_dQB?yoZ(UZqr;0&r@6f6_Crlu!j?J{}p}sPbQLr zPsF2nNiz>rnA{fPpBePBRP2VA@Xnz-e>qrdX!dWX-ymY*V`nc~_H ztKv+@BxaMU8^mrvLe$)C|ByG^5pQUQuO%-=%0=MRqJ#W)8Wcl)+Shs@tt(;9lyoYz zIPV~wKn9?1W#kjwvSx97DI2n05($vBMHy6U%2r|^R#h1%7wd4syF%xj?`)sIH$*Z+ z+;EGA0O_X12`==xK!9aFFO~hZa0cPx6mw521zq>BR45s1vpmaG~ z#Q@K&Uwc4mHbq^K)@O>iCHc6@@Zd8K8R}jiIp4p`m|ZejTesaKcdW;+^=IQE0pUD8 zg7UCLuZSK-z-7T8Qm<}~_Zav7Z%GfE{w|`3G=?_<{hl(ih6*sM8szoC?x-ojYH|#j z55R469&BNF%jNiW-Y|rW;bHPvwjQFFj|&p_Bcn72J4DT-<8V`R-7ix$g+2GvMO78y z!&^0S%SgDW>%v9oI8HnrQ-(>G4l&{HV%9f zZ}nri72jKdx*yR(lD`@6Kf3T&t8| zOTiCwGNXY{2FD`P$Cfbn%U3_f|6J(6|5c3_wk`m%BbLm&+fpVyVeoXeBB0hljr;%s zlGLxM0{n6{Uz*4_7}WWY9Y>p7h186|)n${Ycp4+GpFSHi@W+fRNdCOr3c zq9L1)hdYhJg=E&zjY->+@OX#IQlni-SALMO8&FARV~ghG`}PWIH5l!Fkop?(uqQh~wp4Zh4W`Y|02b(}+|P z9ux%>p>v1EEn1y9Hh|0k=<~N#U&)So%8+mcH+hA_rQ!`A9%M^g-xeF9Nj>8x*LSI* zEgYGlLU%&*w#ds!+&8Eq{va+ueP!z70W(vi*MzSbAMb!%+nq0k`Jqk}S74>83%clW z_!2A0n)|k{nsA`4XoHwcn2CWf@diiXVTS```-!r2I_Fs3vsCURd^89cjSQ6RktKWU zY~GZR52x5*s}Rzw+e-e`^!*uz%W6vg?E=w*GnAlMN4O}=P4@}sWpj=FF%H`7N53+P z?1AVefsvbX;8wfFb-@bn=!6KF2`Q_pDbz-+cmoMY;s-XF(|K4 z4Kk1j-`g~R-cq=Y! z3i07@rj($=zd~Ahx+^N4&sPn+UrK$B!^kE6bN(1Vf|jQ{UR70;AI$7u)#D(*ka*5_ z^u1N|d%(B%0mB~0ol{X*R$W08a&B(1^7eGW*`v5Hc-2;n8rBpPVd2k+?`^y|qHlzB zr<%=PbC1>9k7s{a=rx(+W@Sa;jDYa;kyf^G;7P8!+h`S@X3wEOL6q=TitiP}ZvQxH ziRHrJ48YcT7Idz)QTOWQxHWy}oofq=Vgbgu{=qR-Qt%MQ#trL|z-ZVjlciA*qY5Ngh!!gLGW+qaZj);Vt4gY}RcFlI&@EUNQ zV`*<2Z8O5tfrr;hxc_0$4-awoj-Ota)Sq%1*H{JbOqj-Ag7U!~Qg|N&D!cgxd~gwh zxm!4`+P{ug9x5&Ms1FI{6U^DPbmy(^JQ682;SVKe)R||_EM0EAU1uy_;BbNw&z7gn<@8e}PEmQJW~GF7UM$1=x)b zNpmZ7CRP3@O^CTpI)LdEiX6(5=TiHaf!SxV5n0d9G|WuR&6bctvG&rGyu&J_+>=M? 
z@9IaEAl8;>%W0$dmV~PH`n~Eqc#hiDQx5i{hrT8>|0^HP!wy_A(hxpp2ASh`I&w1~Wx%Ux4yM)g*r z{lUf49e_#-Mu&NTlqj`kAEbBwK<*1`g&QGzC;@YKLlm50SX1MD*S2Fo@X1tQxP3u8(R^{tIl|9L2)?|X-9`T#7Jc&>X2 zkKUOk3!GRlhHCWGb^p`~bBVP#0r*sO(Z7`e1@w%2u<%liW6{t@;S#pu+KnUEEpUDr z34AW}{sv06_5-9FB$4~9{b~TGC9czsTVt&udTl#w5%D;TX6DX^>J|BI6Y-tA)G5!7;A#v)fmVRO7<0QLPrZ{VZm zxLH_Vyk)HZMX{wUYvk&oH5ehIVD6Sx^0~f2#2=AN3K29_!FJ7-d11uX(Hw;ao4Tp+ z@lc7R6_RfG`w5uC$jL7zYUf|zIA0o%BsG+KGs`;{iag*im4%DiA=mT1@;&C6_cRSf z<>5;UOWj6ghWsHK@-b?|=;eU!+E#D_PZM6;Tp7AlJ>erBIQiougW^>nD=_{MC%&?v zoekYVMWCaCVpDy%pRA9ha-3ESnHUQsrMu?GgbbR94u_Znw&6a8KiM9ZRnifU3uVtU zW_R6F?tYSw{{>C)bBUpZ@AK6dbK(EkHG?f*WAN)J+2rA=)S> zCx1B>6LmCbDG7;Y5@FwO3(*84-SrAFGi=)*vIDDSq_N^6leF&biCJIb(hRaPY~ z%2w{c{*H&pfS!|`#7dWQ3NDLKPC6~HdPvd$!ZNs!(xc{r$Wru@T9UyJ?nyQ{qiP9I zUA+B_*KDvl0@Bxl^aW^$m!0oYK!pUf_SiadagI_99ze2e=p~IpQN)>yW0bxtg_NzV zXdXg`B$^Nr>!$zOiW+EqhU|=_OkQR_*%m=EQ>cJNe?9kCG>$$E#}!A$JpyncE6_IJ z$uxWt^0*fDMO6}o?Zw<}oLyD?;nrviS6F?vtyYd~*^KKV*F{PF-L{D7!N=t{S`+|7 z$sn@12{Ic(xY~gZ=rTf5CUF#mSnyNd-fbB~eHGet_j;y{9q>NI+?>=Qep3dDfeHdB z5h**YJsC|)(EY4M__r3`X6GwnS6moG9TLFS0~G(P?>{{Hf2kcj>iWAje@%UtdV&b^ zo~bT6s{5>HZNx>VyIX(vc9DBF0q4mIX-5DP6MmLC<2EhKau_9Ak_Ijpx)UwS-j~?giV-1-mUj(8 z^|3h^_c3(Uim%Fb)JAkDt=ci2oG+Nr>9Fc)#*`dHJ_Hzf>6kTP605udDY>O53T`op zu2SEs!D;~wrUyWd=xl9)XeWL;=;J-u%a+7zL4E-jj<}XFp-2>N$yAA-u6u7|sag3g z;CqK`kFdgS!$zz(WYEH*2{Z)*GoIfsvYoD<+w9y8-kBmf4NQZMmN=ixP)%9jrYI+( zD&%M%*ar#EkQM`4f^oIM79s|u+m$05G7Gx(SUj?PGAF4ZsQoB=3JxhI$s*=0K`@kn z#m@9Y!l37e+PSQ=b*!&7Zxfqv`TLhP`JR-=gQNbpz+C{`vGI@k|J=mF6B~N8nE9&< zTtb1&1Nt?Hq*44}3T7#*u&0~hnZ&aE1`zTL#0t($&|%_mqsa4yY~jKFu&Yk#PSr`* zN%3cB$zbU|?K$DK7|_XLU?F5n2zf>eAtyws39X z{Ov}%BJ`nwT+@Sx){l7N#96A8GSxZ#2>yUPb9M|taI`^-H z>d+X&QpZ3mg%R(9i?L^^rz7|C-Yk(z`pLy#($FGNZ=M>`5avEr#-$o$+-#6=P$5VP z`@Odp`>xx+fxUF+_Y1pNK+N|tCSd}TF_XZBhoa@@Nb};fnQNh52)>S?qCnTO&@+u2 z)b4O6tb?$hF{L0!br%$ct%f_zx~?a~p$5y#&EuWJAj3f*>wI53;eq5Jmvv#Yn!u<8 zyiL56DR;A>2=k5l;#7xPlOPvn$9eInTfg}y+1xJmaE0>7_l^gF7%}|&N0$vT;zU9i zeXHtY)+r7RjjX=KNEtT3O*uaDb1`gtOKno$Po#EU17M~@SoXdghw7?Q_E)1~^I0}P z#%cZ2Tdw!9%47TG(mY$MX(i4CZMg8a>w->ArmsH!*&)X4Q_rOeK25@nm&oDOH4tN; z*9TOj6{8lzijSdp_#aem(*%IH;;-FbK!Ml0S0hxtfv$l$&JSIbfHj}miLMqnmJKA% z6Iy+m{o}Qal?=!n|Fs_)u-{!jgFPkjhBSu<9Y(yiB7p=F?3uE49>*NO_Kj>oW%WE% zJv{^-++Tif|GVlH3H+Pv+W70=^0ffa_)bk`~J z^Uc~PS5-G`rN=8SvyU%zP9>L34qXK_Xusf==jN2q0ELu@z|^hZhIXLMlJv{N`IR7t zS%x%zQ=(PB{yQCCCAr6Dj(6fiRd}SDt1?yjm*isYP{dNXy%o)6mwDzTzK@D$zq=@} zmKZrL{M>jsY5>~9{&x*aO;znh)`~&SNi0C7t45M@?C^5o(!ucp+3x%8Q31v}RmR(X zOGqN)Sde1cGhriysc}z*W;JmB!2;waRGSpF?{19z8r!ADQMHvv9|zVBv09zGU8h5W zb6~?O6*EDip55iz4YYVWW`siyC27GW>!71Z-59LA9z2$R{T9 z{g-=?$uJMSn$99IgVMb&NV$%D>#|IJHABpUUXlZiIy&9b6=aE3pcun~k7RlF6y zMelp2r&;Ozlxm{0o7B7Q2OK{vszR2m7myHsxSc(>E+3l#ylEmq*)Cz(oGjwDJH zD+UDeuH|-9Z8XeiTEn)$X~SoJ*sd~s#E`;SRjA#bGQe!t(qIVS_=P$tI09vK(kTa! z5W8dKiPBA8qWmuu0G7 z8h~=nP)e-2l&f`6mk>j7SpFJ$53*4Xe2K=^Ctr(!*Iisa`{14 z0hXSI!Jch@WozMSB=mgfv0DvMUQUEOF;Wz^R1d;0@4(9YWNIWzIl@gW&1^pFyUBzY4`bT2n9b}*cOIXn zPTbdku7IxeiiwO?qsR{W;^->7Ey+C_*2hT{q(gUUB}`Z?@#&DLiC;T0DoGpxb!rq2$_KRLPPzYpc6QrWlyv z@VvMq5M7e`9fTZP7hSa>wzm7cjQxuY!2bK^(KwbGHXDSw67&fK+8Dod`GVd4=l*L~ zoX-59ZY)Asb|h~FHSsFQX&F@6;yzAWWqstedMVYtslEr_00|GR^9vNcg4Gaz9rV~f zIJiuUD@j7$3DoDhZVS$8!)0kxdT}ky^hJVEzMCpJR^%vWLzDoUw&M2b&bJg(1qJ>! 
zgGVtZt}{Iz!q$IIh=6cVfmV0`dDy*b-kysqYD?OLu(`8*a1@9)MD6pe)3l8t_veJP zVbk0{8X!u3MF6o-#FClLsBN8z9)~r0{E^8f$8Hwmp+JNEyM5mND^B_0?o$TW{$+1> zu%AIGJtYX>x`xslwJx-f0E>h4Ss}c>J_HzJKHt~p+u-QPzrWX|hQ)AgChko6m3t$D z?6s3}U2{Fk@hTY(^M&3 z{L8pjJVr_DWrLv!vZn%l68(3PdH?U?{UxpTa3}3CfK@uFlHq>|B34Dgj?znrEKic6 z%W2?&0v?Bs_-i~MAZ5oY9NBJHiv(UDB$$lyr_}RoH)i@UnQyTQ+Tklr@2C^$aP4T{ zb1HSHG(s@{isD#l`)XGX=eM~CAz;zuctGn07gzGp2RbE-*VVsSAxUsdP&o5I2L|m9 zLp`k5|MAnBB_Ht1?4tI&Xt^%l@!$Q-<2A^B&v&WO-&fV(!;`yZm$X}~yqtNF|1~M{ zhJ_@5)uRDVNUYju3iy$wVfZNQOv{M9MHEQ@7G_72w_fN>ekEZF^x-e;mg85DBf^~G z!RpRY$&v=Rb6lQ}XcKNo6o#~BkUIv%>)Q8UF!A)ECf^b%@g5u32!PYmWQDJqJ+Kg* zzGvZZsBeIeXHjd8qVz^Bd%)ki#{X`OI6Zn^6(@erM%5I_cX8!X*6H*+BJ5zrDLfKY zf_n!+1q*cpE(SxEC`ALv+98Q0Q5EXv+g{F@!m>G>Efs%0tUhO&t+$+35&bbdN&20Y z)Vvh@1%DTx%c^f97h}XFG@0VWJ(|3Uj!l+5~a~J<9rwnK{AeloPz#% zQ%%~K>d&DbM#^79ZvrlEvyy%vcDF7y%#5sAB=cw)wBc$FJrKAz-J1rS@g4Zxy#96A z`}aAwn;b1AKkEeiNDXX^9!zdBB*QgYJM!2FrHgT8lrE7*)G08X>LVgL$@6j#EK*c} zDOp1FaTPGa0>iwKY&Smh$>Wc2RpF1H*2FKmqns6oGVjHYrUAuM z?|=4L1H*Xg<&U%T?mcqKDw08ICcW=CWECX~s+5*jGL)(F*QD46PPNHo83h_KDx5^- zMneky!f8V=iTwtg}l-=O(ifkpsxhNJiAA zq>GZm0aJf$dToS4jyV4&4Ij+f`FO<)Z(gMD?5~m)rn44-b-|KNdGb8H1GAjXq#>Twn3!?_M7?8PMo}=XT z2&m`@cmZ?5B-e;`bn_;UsW~VV-U_vmaDpi63ouUDZuIOW94d)$gTqrZE!gv#+(;%< zNmB5y8yBbiWA^MSM+=l;pOs5;#Vo^b^*=EkH3#tqdk6~j0D>IVB<4GnT6E}2{dI1| zayo)>acVAWN79`Wr5ZTJRoZoTQW$7+EpLdT#&iY5G;N5!UG+55wJ~i$L(nC1mGh zQ)~JtXzkzZ<6>s1v=Sxk9-&W|kOAkw6Oo@t8-209wFd`Kh}Ts)YHD)rII-*e<$n

    WY=2Nceu6XJ#ygkrhfR9-a~67Ik)*!^Se&0_V3keytPe`hE?T%tJJwjynkB$ zQnS1rqJ`z7VIT@WV_5Z{vV=4;<@~ok)D-z|ZCUmd^WQTMIsaRe+AU3#GenRV%;Wx> zJN^HgJNgC(VeH1pEDf2vD5LQL)f)o5>r=Vg7C66rH@X*o7Sevl;eIrn{DvVie+D|u#G3B zKzijaZflC*6Wu_sgX_EovCy~AYQVnOt1+Ih5Q5Q!s!8a_-WrAC4iI7VQ8#^^sDq4^ zIiz1>FV6ZWjo&hTIDMa)#v#l2uZro<8eaYW;l7ovaqj^Uk%Npv?4!z~t%!ODfsFd< z1WYO|gby4hu}iYDoqsmS;WI>Ci)#Fg*vgz0P!q`_*G+*?Ap7ys+gh0L&*1jWmAMx@ zC^B6^=x4qhY^VhedFH(|tmC_A;X}G7B<9@O*vYU|XYD}hK(`<9MGqP2xeTPA{+*Y2 zJ39KAGy$$?o7A$Rf%ql3Og*nDR9EYtY!qT41{y*h3?cuYLoL<#K6IUg_{zk(H zk==jy|2yITBhdfvr2jplBjGjS;$gOkt;1Q6MiN==e-rgt`DcDVLslweeElD*`~N!5 z$|2u3gEv)jTiZ9Asj*z<4=3H#XOsAO@|sN1J|1mXXQAuHg5EgdrZhCrvP3=5Md|)& z6FfPw@Nod~&pA>n_pn{^A2faJtGbb@9eQWn#M}3udEg}#i1$@9Ho`Sy zoE+Zd0R$bA+aG%$>-*IMR2Gnr>uA8hGyn*TQLisf=8p6Uodfa8j~q!IIw^)n;xEfI zh9#6DN$kM;_4^aK&%?eZu&@3fI(Q1V+A=p+p|hlrXx0-t#5SY2H52KoYzZxeqSmFL z&~ufCN+3q!hLOkKLB#=CCkkAA6yfan@OPQZKdyCkQ^g8%*WBEm-zp`tnwJk7=-7rT zkCS!%Qp!DnCB4lnj>xqERE=t^>BC8AC6Oy1;r~y*KETJlMARh(=Q-G24VCWQR z4ABulb;UmZkjL%|+{bkXRn(SFvf#kWrXE@{0<{0j!H5hcQdrx*T7=?Rx1?KDh^Vpz4R zB6`e~7gj(O`xX+^qD&6E{7SKnM?@h#0A+Pdm4ncb=6=mBNP+_{1L7OT0dNfEq!1B< z2|!+t9(byS{{eN%tmnUn5}Kt?k|Mdd&4xMeiDkmW?qnple%eESC|P=BKTJAzOAtBh zWBn;$5-u#?#~ExF8(+kR4*w}a(#>_t9BbY%f`YN6z)YxB1~U&fo;rs@3DkQf1L$G@ zqJ=5sI3lb6Wn%Tyb$vPoG7=%hIJaUEs4wAIGWr!AOieKYq>yVlpi-*n0U&((ne`2F z7k}n$g6>4?p?FIh-JmM@DG!&tAq{{2Kg*A0c7_KC@f$tL4JG9ziamP_c(X=iIN2n3AEet=86gavhmT|_?Wtq-b-+jt#s zO(?5FshcBZq%Gb_gpsTeQ1$Iu7f1o3ryGK)EUX)7kV0^vY3s0okSDnfnJK4py*Dk>)Y01T$IQI` z!s3ve8rL0=Mt)GUWYq6+a?u?tE-dmj*onqT=yBG^_G;9LhI9Q7eNtTKteJ>E5KJde zhZJ7K5a;?N4I+++#!`-6RkUo6*h*I8H{sk4UuOP;97?A*Nh-1X{p4p{4$80iB+Ao~ z7wNr0D8q6b{~!aB8Z$&hAqo5Q9hr}Pj0uZd@XuE%pT3;s_;pWHiZd^H%WF5rak1v< z)n*f6WTPiC_PEEu(@<3R2CyH=sU(wLL$^8k{IQ9$hli!X^awJg7Q&)PfbVKetLmjU zund}EL~++3GJ@6Tu>U9&5+)58ZBM;m^a=WJTt3i7r&s{TvO93I_hAQ$LIz8I&yij% zH!bmMv1yU%gY&#Z7M-)?=ITybCXdmXfR_$oOCKh7$B$b;jd}vs8g67#x=CS|DeQD| zjD6f_Rx8uC3+#x~F8%&b;t+kO7Yx7SPYmCuJw}<8I_3WS=w$We95?)&V)!lG6yOI^ z^Z-bCAQOh91;q&fGr*-f5I>-6^yk~n%3bl7`#qzlrDGh38F#2NH{eNOp!ILV*^ z{9{Xg{UjS4aRXzY`co1bIA~jvC!}fFKR?(c5wV?)P3UMEo+jDdRQ+sC>9peFq)=we zWUqNJ_PfC?l$O0ocBrqg1STQtG5Oh65(CU;+pe!!_!PT~2UxJgLrAvyiH-*xok4I# zlxwh15rhF;19)NBy%!~&zihCWL>)dtG_VMc+bZLi3nHd+_YeQMYl1sCZpau7kFRHy zSC&V)$pk{BG*x5M3F7k7=IE4tbWCa`(WZgF^e_8>=K1te8KzK!e8_R zxY(6iM*aN6_v8s{{_uX&NB+9CRY%D+Z06wrjXBCH<;3-=byd+pgC=wzR=jE;EW3$O z?u${Jq*e*DbuOUT;EoK5@K^Jxq1>^NS85P%_>X5g6qBl zr?63WID&L4{6q_QofiW%e1g>dxik)UCIZRfU&b3@C&S~&t5+kS((Y-ET&+D?x6cUh zjD9i>9TW~z511h?i1&B?QD!egHi)hDVcw1S}xN4Kec?KkndEQae; zjjYN`b$u*=TX*oSb7ExPqUO&@=5#AvEbc}jv`ZH&Yda|Q11-v`MGZ#uAhgA4g}mZ}8dPpcmo%kc_o8hJ%#t0bR|9Ob5`I z0s%b$D!w z9BJsJX8*Wo{musB{2AxpwWEfK_WBV#;PBqMrnnHEp6ES1aF8XDYJML34#sMPe)=S+ z)Sh`mBN^^|QJd=F3S|AwgrJxP| z&sX)wa4Sds8rFlzik~wnobr{?hd`{}&^dpnFpi&W?Bkeg9Ni&Po07)kW2jMMm=a3< z_I`2En*EC&lU>#;;Q(0T>(el!kOi3*?0_ot+T${_3sc1r;t_M}-+P703^rt3j#N|I z&R3_AV|EI_NxKrXd3Rxa6d>RmJ}o1`VW<+~Fd^@vUBem$af12h18UU*(-<&o<8L6r z0W{X}6N}hjfjhsK6P?df_v=?WdbaiM-38y<>Er@;I>@t4cQ&?a2GRh@!%JQP5CdD5 zS1SjnqMc5N0Q8AJL{tjPIX~KEl5V#uTF|`nF1up;Cyf!INO;%L&oz*9Ug1cor$aMX zNezNv;8=3vg4#JUWJczCNv*pwCA03}LJ6*q(=b#i;rxF48B)@Tbb(YkLM7^Bnh|G~k!-n~3#~naF2=ze zP}VbdmQM(ID(N$$08U0iZ0*l-shq8}y2H4vp&0i!5d@UpU(D(H*-#tDB1MvuDZV18 zqbI|d?^OTF^n~SzxtxM_0eWMK1fV4hnfe!-90>Fl5&EfMeWO9!`?#hKjNPF;epr|= zqh#>e<-g+l@f2glwXjq)I*W=qI$hjnhzPrqN~h$}sJ=gJcH$t;ArYU_Y2nW4Na7b_ z*v%M7)aN8JwUzY?;5T?}+c&7~ELGqeLf&N(@vR89l*n`qc6Vzr9v1D0Q&FZ9RMP>U%eY4L%dmz#5&S7!G_{4Z^4NBtfEfW0%!A2s!& z$H|XsRj~R!yH8X(1QiqMY*~^Jhd~n(147yejFv5W%WJ5Y|;>|-l2uf8^7qo 
zzp}!Y4hNWWS?fm(mxHHR;yfT8N3h+ZyEf3wyiiLJyfr$Oh5T7XrtaDqx*gLG`kEkH zX@FT&9Vj+@oO=>!ko4~XNBgD*LzG~MIRwaubA&B0=(StNmdM}bC@_WQd=655wgk*V z)m9(W{U&n7ERBe^`1+0nkX^~}8ejTQ|MeS2`Un@45aX2oP4#<$n!*W)Iz#pAD2)@W zc6* zO*%|DOntDFhZcO{4^Kj+PE(75r|N98AFEk7-3nHB;cEC`-D06sY!TIwSUOHj8N*CY zXIKbT#=JUf-cMTslF}2&&&tO`Gd4L-&WHHa8{yaYq_5k!r5DR??l6$>igK2-Ax7_- z&1K9eoq(NK0{k)JNG*9_)mXy5px|;0!O;jIZ5ELib`hxxZpJrQ0!HKCSF~e;JF`rR zT`1SKagD9o(69YOi?)Ajpx8D__Tx`2a;;Ne3-3iIX&7c;;T@fCZ&raQr7Dy{eN28E zdY~Rw%#Es45I|^+Cju`Z2mlvM(SWX~BiTypHW+MKMn{xJ6#Nq!&4SxA5tM(d(l`%^ z=?)rvlwIX6Tj#9XtGe_)q{vec`GQdlfpB`66PFlj^eg3$sWxwzD@kI^lJy_Gc1j2Y z4tZQ`%V`g5VCJhbxf1wh)3BW*wEQ@ctej@EiZdJyc>si`-Ft17Nv4II1Rme1_u3ET zWQMj?L^$tF#w!UX`}*vf(K~6>JRUEj>~goVBb5e1iqi9@_KJCHNLNDdrloY$+wgN* z1@|KdX!~Z$UV3@e)4C~iO2=%qgSxa5s`9=guecV5ang zvN%GdLh_`GVsQpn_u6|FB_=HgjD9c}9?nP*JIZ?p0TKe_HcEmSEd}{C`Fc)R3m(17 zK}!KqkEg6*i)bAq%~DdQ^yX!4|e16oSd6 z87rKbR|QS*kZ2|S%fZg8SC@1!mR5#;myLI!5-iY2JDo_F zvxhu77G|?=hDyiAT7B9cg&)Rv-8Sl*1!m7Ihr$0?f|SAY&6qYl3`mV7IrwR5_>>yk z5Z3Wij{-$B!P%M!tvDo9ey*v1MFfdj@Ny2(D5m!&F>=eza+DJ%esF!+Kk|0hj0JYL z5tGDKAtNOS+|DRKYoNOX#AQlp)>hz$QB;(E*R-Jup*+o+5hOjNB=gt1Z15z?ldeJX z3mzPesSCP2G-y^{hQH+tPUJq`Xv=&*m>wz^ZqU%qv|T?YOArYP3T9{tlg2zTrdjQw zloOE6jG^SlmB0}{29BcE@f7|70vC$rar+k9%>BEq+8f+J{}3n5hcw@C9^2Ca39jTU zbd(<3=NmZl&;1Z zM~#+U2$0quiZ*$;o9_uA_`_Zmq%TG>@qDHZS#x@@Mdv*j;_^xi4dcO{os2^+7y?Ws zm@E15aP2#!p~?0yP!l_Fm2D_#zGUTEtGCgTO@abHXShF z&V2A_w>KRa&(R2h3sy=JiDr`Iq9~A!2h(~lZ|tpTn8W7dFK%uJHWk9X@5^II3$*!BXpU5tJ(5E>TeR#V0N{&^+yh2)bY>VP?qS zDv`;lBSfcV51V`kBPPs`O&rzi&o_`$B92G|$d1I0(V}cvbea8;VOhdIjui0WEfF_% z7q@86&PSmQK+OJPb--%OET4|IY%rfj06Phtm9xbIkVoO_qyZ^r=8Bj1{qQda5XoY2 z(6AU~+I@16`9|V?on`*r%j)RWw)((oJJYUIGAOkGqVj1n&1$s~n_bxeHg$OP8tuae z89t~1JN7xNlwg3+_}!vk6dU;p3{CTyoV*2cf@B;aXvumzJq5)tksh`&JMK`0arX}F&zL5qRLvIW~Ds)n`*QCA31;uhAy)*+gD9y@6QrTaYkGi&m;dKEoU zJZHT)ou9sNZW9oSJEdDH2v@zjOsmF;D+BZ^+dOmsCOyny8_G?q8LT1|X4NIKJ{bh!bBrzM98jQN4g zO`*#SXv+nxot6Msd-^8#A1e z>}K>Y0@tv?r|IG^A^rgn7fv=X@&MCWi(q-tdnP7SR>Nt|MBM3}ANIOr)V)#{65kof zR3Lhy_kd0^6t5M-SKEBeFgL_Shr6ES1N=k|CJ>X)>0$CBoRw|h`MozcQ3bW@Y_HlYx=DZ3u%do-D3^EoPrmxnSQT4P8|G=tWAfh(^l z5fQ13gN_wHl8`w{o(w8t#Rm-#U;QUVKvA>73x2~=Nk48JM%&;KCunRq1KLQBK+Q%1cIUNA4)4fxVe7SVR;|L&?|D7)0NC4^FaQh6V0g zBKGuKSd@L&OAennAiit-8SkU&`^x*{-rgSg**Z5RvnmRfLhjwubZ^XI-TT;vmnvj3 zikM<=68<8F-W+{Sf;!rv(0LbO1n!ltCkT(mkfcUN%sMB&@H=?OHp zji?V6)MEZQBxJ;$JZS(#f<@o!TWTkXx}wPd>fTB7#vxqLl8tU2X@1W8{rlVcpH+-D zhBW~L)zjFt6N4d^HwCtBG)U&!%JFy4YEqJ5QCd1k$JlM7OduemVLr1>9jNCfKPo7n zNOAv$v7NTMjvMcuv?HO(}VBXitn7gF?)H z0Gf{L%AhPZ57^6Pi-fQ@s&L39WT2JYse~!o{QL?S);MEFNc7Da$u4h7!Eh8}q00~) zFQr@cQMnVuohrqap7zpqT>E)GP0XZ(LPZ9rH75g?e5QkvR?T_Z;!H)iB|rp|?;c-P z1S+swMGKnh+H{J;?#DX%9~ryGv%h$1F&0XcpZ``b6Gbf&-`gBdVj(er;z_Zphqy{S zc1J6%Z*$*E;5#*dqFA4EW^4k)`4O*ldL;o%9L&lR(B7rn0jribFtu2rY0`@VqXOSc zGsDGyiJ4eDOl`GP=3f0oHLCy3aCT8IKMPL6WdB+7^3k@(AT@&i_b^X5(Gi>(oej{5 zAz9Rs9Hpw63xK@&D;MBLQZXQRY98P>(J~#h5pB-XfLamC%k3S8OG06WD4cWj@#`FX zNh+Kr#Ru=zbFJf_ELGpnQi7eCvMs_subRRO<0MIU8;+*~IP+p-aJN7BPe)`gf)K#}nN2dVEI3IPjzQk!mlHJUa@Zkz^X=xWMI2`&E4z>Z zKoa`l#Ur_TH)d@0?_V3|i@QSyr2f~--lodczr?A*J;{<8GI^zM1dC)O&%8l#qv90( zdc>)X+QNi(ig~lLCY%-o`et-|ktzUl^e(7va~3Gb?Lm~C1_d}^+lPxhs#?rPj$Uq- zMq`T+Y3P5rJi-S?>a)tHNqzC2fFzABOJ zc8i2*HI{vF)_*%ypu{D`CqB%u_t@z*k!oe;?Z|&S@kZ@HBb5ReyMu}VX)A5o{uzBJEQ=5ZideExu2E_iRq2}p0Fvh%}v}W%$h~h5&aZ<7)AXPxp<(`2>tZ& z3NV^WeN&Rw%*yEWjL&AORL@5VqpWS@2rj0+>?8@Nzu|!4Bn=my<=-ALxB2HWr!9p|H0+^ahzeK}5k1)|n{9=?BI5Jy4-?>}%@E=E-NT1@ zOu<<>cT#}JQqiw^THGhax`YU6CuuPgEv}Q=ai8tDyoNSYB5GsM}nywi}cgvIOT2!7@i1Rej<%Z2lB_<1Pnp_;0!U> 
zB*|_wkF81amo~;Y(~q2@iL`|(vW}2q2!e_iHOgbXt%EOc6=17oz#C)#4-an!+<(;vpweAG@N88 z1yRKruPvyb3$1>HRah=uOv2ln7-iTDkJ8vLI>7Gt%hiEO^=pjI1kLDRLt}cOiG+DM zTT)-5$d7xJ-M{I4YSUwrI#N(?Q-x%>Gf?Xdg$|FDG~`GZ!#aFjucR8bFS5y2ure#V zPb%XB;;M=N3OCYih^%yCwyK_GI}q#Fk{%c-^C@#6NqB#D_;!W1s}*V9kR+j3Yy9@rD*dm#<-d6U zzyC5;VQ5r8#+(z`%`&A{j6tj3n|sD9!tkDrbYwdm7}`xu*5oVKizq1~hE` z2t!+5{|G}0&a^q@p8(q@DmABRROsAcLE9L^Lisf|oY-_U_6^=fx*PXG4P!}(8-e!h zw{t$El5Sa)ojK}_Dk)JS(aMl3OE})(Q0itbMXHmmP_6Fr{T?jY?`2>}rXAJKOCPl` zw-uw6aOiT|#hRnea}V|M25Uxak!nB^P(dOIa*Iqb`dlu073!yxw4`&&nej6$^ucLJ z#Yv3S75oGlNNA+NB_uOZ8y%7r4k0@_R^QC1|XAoLd7Y`Gn5x%ZdH!a`mi@tQ@E zZkv;mC{d!!PdQSWjEIUM9KyDpVW`IWB;PPaB&N8^2sMUayBRQ@^U|$9P3v|)Uo>QV zmQ*sFOvo{ub`&xYVZIMVv13eQh#0ukB5Z|RbN+vP6DoU4@Jt3tm$+j#>9xR~#8ji= z(*vARp*xrOza7EK_8Yt2YT}#Gjg0y}-FG;ck#uue7sGv4!5-DTJBW5Tl&Bb;u|i$7 z)6#lrfU&=>wEnrv3FRzg9Q(G!x++Ta2`+w^O;HE6qwI!nVxl)J#2zmYZ~&lyVQkSA zlNBSW1s_A(yICl>z#{TRpu&u;q9#yz2P*=&r?u)`fMZ{Japah0(5vDqcb>j8i9m{1 z)_5UtIJTJy2dOemOhc#Z*LGF0lQK<$Q`UyR;rY!Zu~Olv;=s_EK(jrc4*cSeQjVOS z)&OcgPA=}eQ!h8*0=`yR1-WbqmqyR}_b^e@75P;>T{hU)A`viWA>M7 z>4+t{+tA{x_tqt=;@7E_;n&>$8C@`pu7fQ8#%gG0WD!kZw-2dA7#9UW=Xd) zo^WJIn9e0K?s7YYTCvhFv5B0pBZI%8WsQ% zW({Rd7Gd1#cx5%sIx4Ruv<71_`$b3t?2n!^fZtgB;9S}`hbvp-1^W4&X zoBRHF9_a}DYHZEVpc_qtfb$f7bnN)l9-ZC>IaSt=b@nhV-C$a()3cE zl4L@gw2S?{L2x754)kAAvdkWX640ve!S3Mo5!f==p4^T#v%rr+x1#z`?=mRH(N`FT z>Lpmhb6QODDkVX{DkfETVa~%M2Zc(}>}-M!DYu*fmGo+-Rw#VZU_f&wY$#F|&fr5p zDQdq!@r6H2)thI4)Hyj6Y=txTu(zjyD>BPGGIWq#bG-i&m2eogfTGy)vJ9vf25GRB z%!d=xosh@!$vMOYw~>&J1qG{Jl;5dx4xpn6(ap`#(faSkbS#X`>+sB#u4d6q^{bv4 z(*QeX0?Fm5-j?*1e&U6c$$Fw7WCaXV;nDm`B{2Ga9xnY-t`3Un{No52x@?-UpVT{eB{Xr$}y=>qBkxjSx-O!j6h9= z^%?T{-HOAbA^DpM!*T*pgKmiBHI7~?YzhY`xhL2Q_+VKFQy3qEPU4SFCIl8Wcc;!% zGGx!7x;8X@VMaSxZW)z3$+XV$a#X%})JWGy*f6MEZ6kAXj z$;Bhn(F|vt`slsuzr9wCf70l-$TQ>0Dks=-i#lXV?^+WUjvJLp7dpdp-AtLEvz=+N z$1Fh$3^(WZ9u^)lYV%l@Cp7A8$1&xTzha})MF^t%zD!jwA{qK;X<&U-#4MMAQn!TA zqirZ%l(3zMO8Ty11Nual#*8S54$O_XwMwP)DKRdyBgD_#JRle|98rpx5eX@sx+>wy zbkg4*CsY?VK~~~~`x1RS5Q6;?t_t1ciW$~m{&@hFq7@?n)>Xk2f7={Hzx~0r!|!+u z13x=Zes{bZ&*Dvs#FlbQlN2uu^w41DnuEr6bU0h`#gI=A2V%nucC7)dU#F{+{c`T% zs7MWYicM2o70I(3Syq8ho)>{JguYbPmS@%-gbBw5b#c2wI|XT7(%4A1NroJl{xIM* z&gF;zRHo=DRFx&f-xko*EMr;)k&VVpp%tv7ynH(5am9~QX~f|a0!rMBewWspz1e%R z4>`J9;}^PxU)6!<(dFmvU!t5FM^6T}pZ2jrX%N*5DL)Z)s^2u3;bM2b*a8awXlpPh zHEmM)MVp)NNpX_RX3j zY86_}Tt@Paa)%=UL5ypYlg|g$5A`tUdH&Xl2RS=mMVhJzq_}LKPmtXw3*w=(EJRX) zi3h83>I>(nf4D51>RbsU`=7G=t&GWdM5}n{&10heqEekmpN&niwb$lkxu`oWsbG#A z7J$XFGY}e#$udY22<`eg;L}Lm=$|1>L}Un{I`No!FUk6%1@PwHYEug*OVsvt<5sj= zUfLH;m#ytF_PG{pJOA+DFxEp4o%f0p+|b|<^XdRrK<|?a@IymkmY-H3XLOHggePkK zO7!H-vJA!4C(0qQ0j z_;)3&Dv}(cL4Fp%^Hxhq4Pc7L{Y_mlyZ5lk?O9HNUPR^u=>MwKBFw{5MGg0-jW-ST z6JbLI6A0h`Bu1?c7fepKR93Lq!Qh}q%3-2}O{9;HOwUY2uB8FPuLqaP+x%r6o0KrK zBUF}jRzSS#k^J(dd0ZaaOmeX=Lwf&{w}c5%Ctg3t6hLR(JRdee&@|a}l4W^l8Hf58 zmp@SgPMV=!p;I=H{$DjGbKXswmJHfApg)Z;)?kSC2d{_i?cWt6V&r4I*$wr|H#4`MbkkIZatsjg=T@z$zT&wJsiv z#vr8Uaj`h7nNZ)SnUlM7a%$+@mSUD?m0{|hy7iLp0E&KcCmE30F&O)^E!aD)Y|#sd1CzY*(hwjK5_nnE&$%2qaG#gyqT=NH<+~ z9(sVV_TsbRBYaY2!N)+342y$m;*A00CxDEoW?X&?bfni2qoSG@jY)q?7nyz!a?>Wa z)rP|PEN5jdTZ^R3*SiV?~CWPd07{iQng^ZWBvZ0za8yW9IFd52Do z(O1RP%7$|Vt_^fGDGg+FPo+-IX`F5eVTyc}B6)>xmR0JJ-%bU-iEztL%QxDWxC1l} zpoPbV)T5YaSp>mYWt2sEph*BsEp)_BjcsvAF~dL4yok8Y{oz~6$iDZ{o~q(@#<5+l z80oU(wuzh6j5nnQn2vJ}kwVNuNZP?&$5dkoQ<@FwM#(SQyK+r~6xM=|27YQjKY~aw zB5@chbJ*W}$mB<`M4l&~M1stU42jQs>RgO3$y=e`%jv^r;v{zs*gt7?KDI|&As~Th zJj=MBXeiS}*ivM`r^n0z!B0<>YUW@IUHY34IK4F9TeV@E z{;yAW>HCEnZ8v6#pTu6*=YcXK0Uk|??TKat2PcaF4N8VMp2EcbY*vaG4k*?HBt>;; 
z44P90_{C(~+EgLlj6A0_^KeT*m7xWQ()iAY9fJ41K;f$D6Wh9gVc`_h9h4xuB$!jRv?1x8qqTD1enKBG2i7)1t5VV!Rto0{ z+%P{>k-gDhkSdub2vIkm>Yn6x2vmuOwpi;rE; z7G1YXQ4vB72=*URC3AJM_62*zydByd*iXfQN;i&%mpDxHc?MR}&f=kG=l?M1hGA%jl1<#$k? zPBW*5y#U}$e;dg?g*kn&b2Y5Vezvr9vP{b)g-xP&Vm4Bm{wfULjqzLjj%G~O=-W|N zd)hRF<7z#7bUIjo?{{cMgkB6Dj=M&`x+-=}nAL~NZTi~659i!m$r6mN&{VPMtsZQ) z=SCy`QGCRYn+Cmy1@dXwuGT00bCi1X6ovu*X(cN_0JRJ6Wg|viEU$I+t$RkZQGbw6WjJo>`d(B zp7*==oj{Ecu-=kiJ8HzU_T{nV88*D=WpUlRf6j9S zF+V%Sef+qC;6G3c4M;FlxzKBk}~_&DM`^C%_4Oz z(uBL!Oe<}l+~YC>HuF9+ z<=___NDMA*Xc%Jz`5Op5t|{UyP2;4gE~EvjQ*jMBUH|}M#880JbA~~M^a%fs!IgrJ z)9uv-H6Zab4K!3TXQ;`DMJfXt>0KzKU=vuN1Arm4&B6&n5VpSAcQ3bM2*0P76xFKEeOB@NMMtYljQ4Gdx@88 zXREa`S9dG5%l5opB4uKg8@dggtO<&8zlqxsbM2I;d*Z}WsiT@Q8`64PK|q-oB05E4 zxO^{91I=v+YB+GfDuO8lEJ{_1WtIUetx!i&x2D!g07WJkq{wfz0SL1^KtMzg6yg9p zc!o81wKX9GbV(qrqm3?2}bgG3Bn{T%?Cn1YQax0X~k<-{W$aw8{PNDdDU z6f0<=sDO=}B90ucc@}p1H_=)FKm|WWh!~vUS&&Jj4I+WekOGhv2!#M6ssB7!Wu76G zl7_L0nZ<|!py(^XOo@d`R47ss!G?g+fPgc~f2p&gqyTIo9O5j2#`!s^pd!w?QV)f) zmkhc{s1PVfRA?Y3h5Xoi2+DIB)s0DiIfq)Z~y&99MyJClj4G1y8 zfrJYINfL~_sj3sf>Y<;|SPL3JIv@lND{O4$E7jtVpHKAy2*U;_rD2d(sx1w%CIAD0 zXyl?n4T#c>B}~x)P>5m?LM@Py)FeSDNyMm7fZy+|t1|AAZp|nnfIwVjVSvyo3@&lV z@~d8IYAGrJz|0C*PLUUa1ZOdwfRq75WE^5k1OdP@g#g4!#N>d9)ObjsNNZh0FYU^Sh(vpIGG`h&)gnqzSgkgY@mIQVaHmlGPMiPA@Dx{E}Z|^8*Loc!F zcFvyHH(96@FhFW)0VE2T8rMvfDOD9Y8zi6^+>lvK2GR(TIUqr806PXdvhspS1~#L< zjX&QM02YuzA_a~=iBj>WQFjj{Fjg>t2pnrt1hE)*aEw`II`>DV<~VXQw!3=;#70Ez)ZLJWn7(@;=J z`j_P(VFCTv?>^}WW|+*Rl<=_F(7`efHW9)UiOsAf0Ak`0pwf1#I2JHV+mvl%Js}bBDiUeQz2%^!2l*NQID^2?3&(a@c z+bMY5^;w<~br#AZuy8eX^A(SsHCYspQa~d}fJP=(0$3$uQ3$;fb1H*$H7G`ID%8T% zr%xoJjsQ>JWF`VDj7=I!J8K*O7rfj=TE4GlU9@;)n)gqO3=&962mm+ZAcq792Ogh? zLI40_|Bk~218`~kVMJjdfe~g>%m7vdz(55gAi?z8Z^Q%^a%?a{a2x9V*ODvjP)v}T4ji-+%bNiFL?>)bi!5q>;qAFD;@w?60r6re&`UO5FF|h z3L=RiFa)SRf?V~5Ld+8KN9G^^5KyAh^4IH;3?L-NDjboRKwrpOABI~?ZCwyTLQ(|p zOk8OAGnY~ky$C!M4gfh035ZBd0K&?GFU>f>scf6Pd$|a=d_ev__YPHqre!9^R*TXY z2S5N!i@+g`T2<- zXfhxwiBJ+iJTNxud$bdhVdWGMC52-QS_}fch9}WXOePl)wyC;$$wnfV9^i2wp6f>6Vhh*K$W;iV-+q-o#)WB{C2T%GXZ@9+C%Af)AlN#_oBjW<1&~q;Xh@(269xcdSOQ3eu!Z1+ zaKMA;e7V3^i(z4p=;TphabOdsSn(&%8tSH+`O1Y@2LNM;O4Q%eQh~wPVMgDP00Cgo z0bm(m0e}iXNLO#B76n7n6bxFb$zIjhoiIw23qAODsjTBbZT&N|Hk*3`h%2`_ZzcrADb`R23dK3HLV%Wam?k-q$tszD z5okjaLbwZ_J6__>jtEwH#WgQetD#s($brIuqqe>vnFy_bR77(WWfs&R)==@Y5+yZ{;f!7XnxwM|u>@uz%91HI#4wo{xHJqgG01-O19*xlsuWljDRW{# z$XEce3JnJs2DZjZmwJ^MFlIdHCUs(C=EZl07!nZz1p>r4us|SKg1Cq&IHVl2MADEk zT%f8u%te!0#dvH8>(M)B;u`#}AUOq;w%#jgYB$MKzINYpH$_ zfa#c2L_a)*Fk;<>kqq+yq!cO`tIoJA@$+k(-se3-u~eq?n^YA9!?L_U)gVSd!TG2-paOACi7tExhB~U# z#?{A|B9ituHJFl2$e7YM?BA$~KrmTU2xe$LWlBv){*R_CYM~>9O!ZzR>4pl%$AG&@ zG7?amH2NeGQEVaX0Xl#%kQf#$D1(HRm{nOe#88A8410#DyypHr^~s231W|Q`c#L#P z2vUtYm}Mkm|7Vz`BTgg`mX@AG*N~X9rbGl1IG|R>jUE8AO2t>l)zGQ`E2)~KXhKNp zP;>0zG;P95^>+h30Co&ye#~G6r6_9C9?&sZg(20$C}<0pd@giwT7{NT2!e=z8~XLB zg4}fL)OnvNZ&;ELAUL8ZtWZQCB54MQWA4$^xbgu2U1T)cX4KTmz%wj+%?71P5+_l< zz?%oVIWREPPvX1^wVz3W^k^}3bRu=DYc)RDfYbmelNb_8&>l}ifSHyd^FWC#KL2UDY7R53)nieNi zFD+Oyi7c3!vD6Q5CYkNsw{+=sP;6Bj6P zWslgh(f{s7e=wTAiIl%Qip4kqg05Ga__5}dEo~859yeApbSk5Lq!y@SPLFomOm3~B z*iJRe>_>jH(0a*OqWopa4EEnWwRl|Dn7%HnIxmcu*_9j`m)w?wE0nI)p5SvNQPXFH z+b}23L*X8qjz*^$!A0sr3;Ss7L@vqNq#5m(9yC$f4)!r5X@asM%5)JUXrLiILEx$= z*+h?^WzRc@5&icDyYC<}Uja^cZ^on%(ajkkpA8njpb_odE;3k=Lb+Yfd2KGu7D6DS zyAk2Z92`5mf;@oQmEf)CSMlBQ+oqT69p#LssrdF6Rr?3ZyK~*NuE(p%C3ni{p&}rTfyH6^y}+AL9pjAaW3b7tu14=e~WB;t;r51KNGE*@tA9r z{|~T%sh9TGmd<|(r^V&~dpm_MXxWp~<3NNkBFokxbBTPqD-8r|=iTC`zEnI_vFm%g zbZml-RT%MYn$F(fq6%6sqV_(!6v;a%S+(Cu47=1YEjV-UaQ{8ox8o;uP@HIuKYJdi 
z#Y(3?y)w12iQ70Al`LapMTj=CC7&<|b?dd8`aikP)f~*W370;i>aGi@tD<&bI6F7h z8@oNCDbuL+T3nOu-S4vLbLXXPRBF*&eS(Up&p<@X+;~s+$#W6!V1Iz>Mv3YB!NXVa zMyR(dtgMF7gKVoyWbPk}(KrMgwp+EVf#vCHJUV%X1VOsfee-N-5{^>cX#5Zrs{4A6 z43lBLXQ+L$7FVTS!l?60-?veu zbnzea|52Am?2(De|C=J7pbv^Fsz3&z^1mzMzM3EiHy4AZL65{83(eVQgR1}~K|V-| zpbsA`I_IU1s?yGV18i#s7G25yPadG#H`_0fX#SJ+*-j@nO}PEGm@?j^^QU?B9f4~c zNL9;X7W|-XPIQCT^#j+PAwYf$`+(3qumJBSC#8;8@K&-gL`ysuxgn z1!40U-c1+=>h|->oc2uD>&w@8^h6w0q&|`L#f>InsV1z9H*fdl>5v@%v$_j;T>o9^ z67*Sw!C`vQe!hBc$2Nq!O6YDnligNZ@_fRP;ssc6-2Qoc2DN#$2m3mX-Z23h5H~a*h%1(ZthTD>iS0K z;eg&Wr}C14z=#M^Ld7@h7aH{J(Y^5_8(-}ei|Unf9Y(>Da+_GSa})akK2WnD&f3@q zbWm3qhhMI{I(zU7i!JJDIu6-IAawfHX>Qh8nuv3ww*aO=8nt#=kJx^cu)XVX*=!b8 zT+J%D`46HS#d^do^Yb8U+xZ1zl%JEQt*ZwIHJf3zT$9M%ps}0%r}c_ya)1HhoUyH zZ(S??dt=?OpcX7&8l< zc54!i4f#bD=9hCmQ)XIUqUe)iO<2cWX>Ug$ytdk3ncH8D9UC-fnLxMMVP6tg#sbfChj~w9r#s}UJ3XfV>^%rc z>Q#3H**|UX26cee^f&i?_rHImHcm#(%51r>m_(-SUc}-0NM8G3)f*hetLZlzS|R3s zfHp#P^1Awtl49Q1PhQxjd9H`QtPtcr@}6O(Yo0sp<7m!gC7R{cK}r6~S=l+0OndI% zTxO7N~TKKs_$xOc6(KK*NlEW7iI0|F*Od|Jj{ z$8t6ke}>a$Jn?#Tbg$^)`O_iDF;rfnEw~wK+FGoc%HH)UX4>!W8e#GR#o&If%^hcg zB=b@5K;<`+<#FJV$kw=*-#e9Iby~*gFMU>|itT~YwynGm5?4j4_^L+@0@q`u&F4_} z(YI%2PS^ca{Qo;D5VMN3`Tp*aG{LsXcz7|gty%fcWIe&^?cM}#y2n1b(tnKH?o`i7 zGr2ULgf$o9^&Oq~1{76vfy~xwXtsC%Lne%)WUaPCc*D+ys4}{nVBsCXkIA2PdZ9PL z%jnGt7*ZD1JM8Pp=EK zADyp;(BNQLtN&+SZ{67)d-FTy0`c@9&6Jd&r8>T|d7q7Kdz+VXHay5nM^86Viu^mp z{vuA+WeNUlJA=Xw5^AO`3)9L0K0XlP(77qa#a-J>kj#6)x|swQtb+d3)2>mCz5+^z z7Y~W7QM7x#;XjVY$Bnop#V!kpKWS_{kQC0~k6 zmse@cU57`2DYCGI>=uC1l>S*(#N=hHN$s(AdWv4t)$y&G-ak1_>vjXbk`ATnD*n8v zM2jWQQOMmDR{SUoFL-fmu z@kixvp?au&=gqzTh7$40D~`sUB;eDf%9M7i&K%I|Q=&$)L$N2>`kis1W6k70WyM1# zS2mAY;O1O8&&3ynDf3QfGg{}>^{K6JYB`rPW=`S0R3g#qpe>k2LWp=XOiuQsGbE$2 z(T2+>g<4u$?iL33Qr9z2h=(axyFoDJTGslDxml@`?5TA!y^Xs=RWck;K1K_-jt8eU z)vge>O=_Z+By-m)!1N8FOK@1)!oTgGS%GcdD~B(f*XDklmQKyv+Fb6>J5FJbrL)pJ zjAonMcs_Bg2r`dxxSD9W9|qxgTgZ0ZhWmf{*B?Fflw0zF{mvDP)A(%6nYkFLiOBBj z8j{@A3vbKWSc~Q~naA3?h@&)1!y($T-af5W*dW#0zpVT|&fZGiKU8+uKifO}&_+)48=k)vV0XFujW-ecD>P zW<7*bM-4uxKrOw!o0wR?c%E8C#v)vTWwiUH`Kb#@Fa=J$WwZ%<|GuW@NMyZrnLRf) zQlWY~H~Kwx*0zzHx1&hJbtaQSTI7wKz8V;BQkgStp8Wo~V)B0B(Yb8hvXgjjGtJqW zGPu~>S(x@q{?K=H)=BrIc(}-RZbDv#&%1yECsBS8MP)28EJR>Ne3_S^)L7b=hBTsL z%lrrR_YYTUrw2SP`~GrDM-So7tNG6jXIHR5VMGO`g|&&d+53nW*FD z)^;|WPW{($Gww^l-}I$RZA;l`qRabcnu9KVHwo%yomDg^6&GxyU}g4sDAprY=OHC^ z)i%O&@2KtdA56>0J{t17MYu}3Lb+Sl!?54{swR8ZuU<5Zhl#8_etDVXzp;}V`LG)X z9h;pJx9SxdWsPLzaib<$-_AESawwfnq>5;KDUwao!$dHZSwu$8%1U4qXmkJIjV#mlo}h3YpO1 zml5J)+wWFzDpt>ft33F2{Z=1_4FtIy8AhY^bnup5W}biTR?|)KK$dQX6z@&_IZ=h- z9odZohot&o<3Pr_l@{G8QLX$Tm1t$lT;``j9fuPc`_pE4OG>ViI-`=BN z|0O9<8hV1K8`#ltl*3>fL0vux4|TlG;{K`|Tt1&UO?fY2H&LoV4w)(=-Ad(r-A}2X z`Xm|pYpgXrTDx}qq{qkSS16m}cT|Sy>GkVeGwtp%+X;lE@xLS8?WOClAA%o;%iXDpB~qb0-d#*^qA4Zx|)^?KS2{B;J116#QegRF7TgMsfba_I^_rz9|00rfRDiS zgQv>)AMa~(m8RGG>t_h4b7NC`E1@4o|~ zddN$BANPy(pS4Vox3&^=leeFW7mDvEP4kwvR`!%NO;c2Ze}M_)p+TyZz-lG*Llqa7 zyI};Zbx@Xfza`c7+qXQL-UDl^M;LJWNht8Zx{+M6K{BN9eMzRT```1INg3-ME>y@H zJ9&81rmVPHa42|@5+osHgM(R@+V#@{e0&!tspsc-_vS6ssl;MMv4|vUqaP8&uP$JBi_WVq04wRp)!jp-OE7pu@vm*V!pE_7_Ia;>7Gf!R( zn~nj4{KL1;8KPf&Z)L{2i`Lc4x3_`@YB)AY^J3n5i~j_>^*7Hh7ZH#Ne=?_g8VuDM zr}Lv{t}Qa?eqgQa*ez-;7`*MJsjg3DI$+v1DP$&sBTJ?Tq~X5`zcq7I)+qXM!$dB2B&VE?=zK%s^V zpVKu-G)70NU7mS;Qz5y4ijUZ+jb4=m2JVfl*sC3Pa~|5O({ZE|Ngx6H*meU2s=wr zDaS$d7bCz$IYT;g?eyVP?gY+sl(e?CJ}UZ@xl7Jq?N5luZp8C9s~EU989BUp8u&LZ z*1-)fq#DLoPq&*i+)XI&A|Kb8QiWiOX3g5QbtMN1-7yhdR`3tU5KYX4feX(c5RNuk zHdZjcug+(MKV=Ze5i7lJ3KI5uK3?<-zElXZAMt60-)6nmn3;NhugA#q^*%}5{eJQE 
z=BB_%Xx_?=48)P}k(Leby}m#Eh}9{7gI0NKvqbWoYFJH2%q9=bP|suttM;wu6BW556mzNJOt?3|sI{n7TeD+4pLU@Af0QW5+JKYW0Y|4{&%P|5Exm zK3>;tYLCKRSGKwYdghBf0mLEis)v&2F0*}!c2(ZLzg`p0B3Zo)FCM&sx%!pqxex}XYUkXabq(^>pjfUy6<%8&L$=ZqE0RBShn!FAv*!Wb}Jt`NRuq@=_! zqDo1*bT?|{`*_z?S7)a;Ri?}#%=L9Y=d1sRZ&1{u;*BVlD$R0)4_6TY=p=mMFGZ1H zB~h&(&1t%Ssgu_HyifP4&sBrJ__cmIM*SeIwg4BjQ-CR*vG4UKRIyC zzm6I!*2|x@v=Z9W&V?bL;TE<7tz$5hKmHH1J5{qO)8D_Rx{JG+?oFxekzLj0;cOYA zC->X5hCV{4m~xGB8P)Engqenk$v(&39ra0D711wT?W4MxPNd<@R8Y%X#_TMlmysxH z&4cd>4R8-{J)wOw6ni&oqpJL+jG<65FVCJG0h?yF;qk)p`WAdFoFmkZIDxDr^#8o&a<})+5GJgj zeA|m=5`jJU5I8{du{p!O)|#Y%TA~qfIM@$6($&Rlaz-s!xRPet)&w z_ci5BC7L)H^fbeG5bJrRh6Jwaaq+}qdgE%ua^;|FKX6qdz{tAmHL7#=2kK4y-c77H!yni*~1r;I% z1rgX$wy}|xmL|nV7(Kkq!k8{ZG^V)6<&|+o3dp6|2{T z6P@9X=l-|BID#V>yax%2@E1*b&EGh|^GdXsII#nk>~ZpPMnAObxl)`OJ4($Q9S3SW zzyS-Vzmy_Ig-?FxiMIbD1%=l4naE{QMWcdkN=cE59m9dcEpj1Zn3XSMq&d!22 z5}DA+3H=>GO^3pYvGbL8uvWGm8qnx-I(yUwl%+noBnHtzmAX|ToW`E$+<*T0OOh&O0j1hW0jS-7K4#@?;r<=H zC9D2w=KjebL(X;fH6+VdqFDHH=Mv|w-0^BHQIn6c+sEqlR?2R)wf4>ZP4s7ct>}(G z(NR-fdc%E1XPx}lkB&=s7d#EhA_ zU^>NHlSD=mixJAYsJVN|S98_AQtU1LBSZeyJbt(eqJTbysMid#Yqx(#+p?t$u0;;!W8h__l&xn$iMK7XXF=U}?D5vaUs~e!zcdAs z;~sBDGDFPy#>g=?^5e`d(`>^L2-vQ89ue#3I<&ha6xPo5@glo@hA%aqgZXToGPV1h zuX1jb_t=-U!jqY*vdx>PIUIF-3x(6?C*rs(1x7&aTi%6yC(NnaYo>Q2*x$_(vT43Y zSsx8E``K$h4a#0bS_O;rtYG<2%!3jGonN)H4@J@1EtP@ZDT0j^yk5r~b@A&DL^ghI zlT#Npfz{FIWwyzq^#>`o~*{ddh0s`H$Ng|ad z<(yn?+usB)c+uc+Em$#oxCv=CB2`FLF1-61y+8(o!3RiUEBFP*Qqy0G-@tFKhU zG&dIBx5vlFrT4KO%ep6c3|=PIKY2U`ZNpapVzq>(88|I=$Zvyxed?6fv@{%nY|6(2 zH_$Em7)N1yE}Yf!c~V}Va)qA7Z*znDWW~jz8m-=<3nvGE9({TJB*1HAHa0h}p56wL zimMeuVX#xRW7SFu#Om@T$fYOv4ONCxYwbEcodjnp2MV~vn`A;=j-V*@NyMN7p- zevVkTut1=p&~5&CxIGM#L4Y@x#jF2ZX*STBKKEAK?)QP&Hjn(T0Ozboo!i1#n2mF* z_`QhpRTxKJVmF2fMX^LYA!%TG;6O;BPK7C5X7^{(8uUt!r;brYWH0d-2eW6 zEr6?s`B3|+IRPHQbZB(VWU3V9wE5l85M;ejtpXKeB;dDmrs50~f?QF51z}#6?YS*_ zr0FX2e1FEgr75<#?L`)3_6D{@{c5=iO}|m9sdO4eh*BmK-F=ne-0FkSzG=yC4EX73 zd6aSri$ThoDvnJ_#ol_^3f?w1#P}GW+rQznr3TQAYnf~oYP6{$g^16-J^VeVYOg1w z@gpkDfkL1&b2QsHSrFq%s4)@d4cFE*(IO?ZwY4EjMT(buT;4r>o2@P`QhpXS)SuJD z9WMB0+VyM(^>>Igp4eTiY+fe1S|88k&I29(;eH-De9BZWo_TzD4IEGQZ!Obl1Y3>M zI}>L2%%Aan-Xard7grwx432Pw#FTS#b6K+P6U8(SCUtCUZG8JR76_J_!CfOjC2R$5 zE$tBW9q7g!s#qF!zN5K>OB->Dd@DCy{WAwUiUNLqrgJ=X$X%1Hm>c*rx)xDCi!n`5 zPb2z)rb{)jKWk)9*;>{UX`0331qZUe!gTSv7uM%l%v2#T+rs`lKeu~D>v-=_;Xfov zcr#Z!d$axHGt^SwsXOvrzxXrucSp}6?x`{dC^%oMMBPC$t|nB8q+hAh2$ph399(k$ z;sad`pQt^PVfJmJB0+vweETLaEloWiWsMOl#K8T)nR`_5xml0Fnu*h53Un@#&l;VT ztaxBX4CbLyg#rOS;e@v~o?DkNsnrjDuDr>^C}j3C8@ninJeM_sV#SpXN>=CEEF+fD zNcX~rUpI1aPn{bX{R|6pHSD_!m*^p44klEwTmTENg6rgisit4`P=W zOta{W)Jh0BBXdpbeZOM)W}bYSd8Ich=3C?D5MKs5YK(GUAOQbcRgyH%NdPCdU~+XN z#4oBcJ}B95T9Ls0R7wOUp)dO5g>s~WNu`}a(n6sj9$f`ZdAa#SSPSpEG*>~~nU>{E zy|5% zBhrUl&tEQsh^Gj%E0FDTA70$-auw1LG?#CJb;HnZvRgVo@`N}m*eshGsdRA$?9k7$cqx4RR6+hv_yP4FJ8^I#r7nXrN4I4h^X9}vYtxcnQU+ZZ?hrCQ}% zz@MJQaPIbsE*bFl5j{alPyNK*rI$PKXL*e?#~&}J!{-#s?_vJ^@6-icU}f7Fc!Yy; zMchW+l>F7~W0cOs-~?SRNKnlJP7=#^hbG4&$KHevc9Zz+XL$BbtK?n?FPEokUtP$a z?x_~CtAx@=58vD_XLJ{?C#im2L)o04%U<$yD4EZ%G|a;?bjzH7uW1dBn~myCDlJb_ ztqi&GZ0peT!#~nyow_NJZG{ONw(0xBYJhaJ4@FZv)yu!Ey{|A?sUCdi?5-*0QYzOw zo}a(bY4iAuaM_)A9c|;zWq>1i>55}AMTGBsNwlS&Hlm0mRCKQ z@Va=Ir`gA%HoW}yWdSaa`#DfnMcfWnby3wEp?I~L$)LBp_%I@qS#@#W`|%<(XR_aY@$U@ODK=cP zRD}f*4Kb8epscd8v2dPY^`DzoEywU^ z%U8|GeHIoLCgA>?oBcC)^vUP%%XO{`ufV{5;V03|qf|VUIWhc*NXlCzc@71oCYh8i{A_jU5 zBPjw0etegWXko$=lcbxs&LP2u80kDUXJ*ve!ItlRNXfooiQUhpNF%{QmZq%R(^LSwubAlc`h88l4anS`w*K8 z6%|+K6->GzY!mYtcWv&kWlU~#$Upg>OL%IIguFJ2>@0K#yXMR~ZZcgPQ=g$#>^&Ir zvlCr%E@&{9t4?mZ# 
z(Oo#hYqoyMS6A5`IUnr{C~Tb~}=wAdCVzd&2tdoO0~T!t>OL z+V8)XioI;j#XH_^e~ix;y~~@qZL};%buUExN$9`p;AXekOUqdJnrl_qsP$rm0M7(U zv0%{hyd1t`rMBY6YNg<3gbIbq>d132V?jhpxymM&E798k;1T1;6XybF1zuh!cO9Ho zZL7CmKBvC7wam5JY$E$PF6*m&JngRCuJF!E%A)Mjy*Tde`mOXpg zI7U)Za&$xjBWDjr@LAZvAYZ07D;pt9r0}%Ts#N6k=?!$!gi)*nFF(JYh6Yts5P8&J zCCyS5c{#Zd5t29)dCJ9cG%A_Wy}IqLZ_p-{YV_%oLB#F+d5+(6q{|y)&>};It*z{> zv-0weUYr@zdHp}%RTcT|=FZ6y#}0-<#%-H6ZCwHbCJ$*-J!2(QsnJ__>{v79$Wgs| zK2H+KL`4ThMT0?B&c5x`wG;GPU)QQtXXE3mS+PWo3|Tf8hg~Od*Gik@yGDjAnva+_ zlqy{~WMwkxP@_F;>R4Z2?|FL^c>hP%?B_2j31h$r0qEAR-@Np@o90?PU1~HLCFJpP zbu~RWOgB)kUPcTCK=OKA+1l73go|RE#F*8p58fU2_ZJ@LdDj&d_FK0w({FBURIQ#C z&0~*f)~}8lH}(e#H;;G!w?^Gw_nQCap;45!KeCC_Gql9tlGA&4WVkotKQ;TC5gWC_ zb;{-z!3PV5!yo(8`NHd~KU+ro=*E zwYhWO!&Yypqg$K5o(4xDaOAdfW%)Ym?@S|rK=e9gPZ!y97w>=+KI7s0n3)89Bw0M3 zL7vv?-nn;u8v`qY?5hUx^K?4W)@6UC(4o8VysG)&&TW0c->2#jPwQH+&yy#0=8K5P z?zNoc%YbjV?`9;_`Izlm9x08|CI-43KT4#0z*NoAj2oGO%J*2%`F?J_*#hU~_pg)L zoP9gJ(y!N0#lC-npD!e<$NFE7P`Q!KJ?SiD+o>uIb^RA4gwu?pOLL{IoX4RyhtH)V zgMMRO4|`HrJT+Nx<}-Q%9kq293u*uSZPak5{xEczU`96bV@^y{=JOI+4M_?-KHo3> zGKW$1eC)1UMVX~S&22Ky-?XVifFcV`S1|9gdw1XLhsf_qz`qd^l_`XS1b>w7J#^u3 zG?kcj=^DN&LNxDt!F(RDSbfc^$L{nWHE*lSi>-KDn?~OIhe^Aedu{3PE_t}iMVVB_*ZXW5(H za2}I~-5)qG_0}Tz5q=zhWo~HwXTinLo*LcJmvQ$6sxDqO(o@9oxJ57FTvc;q`q-it z3!+d>Yq4n*o0HlCZ0p()hh%dfdv3_!y9VKb#5sRY$;HWJ+PBrD`rh-O4r7CIJ*@`J zU{4KAR42$`ImAXtXDaC~a&0TwzYG`@JmY`e$4(27hSB5L3>Y3UPWcS;?V1)S)Q^3p4XwjK>L?QQDAr4qTu`ah)w;$Z0~KF-tb^N;bgZ@WlJep z{gNETJFlI4xsFGx+ti_8g)Dk7XAOt5OJ@E`xySU!ADPOb>$ed$ugw-iY`O4ToRx|= zV#QrkcFydoVNTKd7v=2rkx+$P{*{|jhtGezZA5%0vZc4s9V^vS)8Y)OaVL2h{F{4y ze6|)i?Op26U7%`qzh51t{Rm}@SKD1rn`p&eV1h3XE3)Z~cvwpJ^o0!3>w({uh4B!oXh_?)}=fsxp&8IbMZz+*6uQf9HVL7l(e#TwX>C7=zFil zY+hi$*3nri$PZsES9FODxzhx-cnDf%=j3GVX2Fd$TfiB+`Kvi*=?pp*cv53$xy_!< zDz=Dvdgc`m7th>>4IE(^H`{;Z+V$k|YWr>BTs2o+AlT zioB;6pXqPk9whj8Eg4phbQv{>vM;Z%kKcnKAc7D6T&dAMz2D8I%TWB(v`vZFJq`wn zD6{KKK@=C;UDh7Q<#TV$->gKooq4rMP$p;XWZ~pH{Fm77Y<`EcX49;ewzQs}KZc)h zGmTHk!p%HkTO}~&`Ex$9U)b-oyj~|8Y4cDbCbsbP63kJ|b`{Q39^A}0L)cI@GsCR4 zX6&z&-HNY{St#SYiemS^w65JcZD$i7f%TWh|LjmMd2`<(i?+|{$BRvTm^sy#&#vf6 zbDZG&I86val>=-0I_K)5F9+xCJ=pRe52yO4VMTYF_r*Yd@5k=%ne^TTHxpC5`u({qLM1*$JX^zRqd%fQEJCaq5F((bOJVtW2NTpYm zLXy+7!a|WgLy(9a3@|SAH?_<4Z zcZ=nI0dsdb(Y#*Xb#g0*-sEo~6@Gc0sm`dFy_b3T>P}zw*O`OAd;8a!`f0%Rv)Xp= zMpu`d2TNxw{JuVW-dgv6`(%_6n8EaMTG-2h_sBL~+WJIXE35uhjU)J%6S!H)E}y@T z@3)rqFT2(Sf;X)_y9#WT@N5o~QnIlX*oQa{wIyt(T-kdb%6!QE6XTIXl!stNgWe2- zcdx&KndrW1K8p39l-jD^N8*1plP{8YOq%`OIJRjCJYXzAxRKk7r-IMF=F6?%7^yj{ zJxs2jsZ@2>rC#;hd4G<%3I%ECGM4O^3GmoZD{Xj=8n+^KNn!DoAI6sHQ#|StG}}>T zz7>{M=mK^QCOK9oJuUXj>~LKczPjl9B(~l5>)zm+gVInb#!I+-^vMHkT8?K0^ zwk{^p(+0TEaadjgvBoWd5o?wMx__r-H(Tv^T{twnxK=FLL(8d9IVxT>`C5?N;qdV;D8KL9 zaSCy%Qo2y`eQ8u=e}ZCk;NN=T`Wm5jfr@wATwfPy$eVp9&x#_g?O!jOqdwP}?VdXp z&P$FXmWot=yjktTi^$7)aaDY}I6)S?t=Qk{3p~z?R=6*YwlK!6X4u1d%yQkV<%NFp zK>wy)D*?*4FK?KwaQ%C^q>ya3ni15Xm!Axtt&U#B?S{=%)qpQ9n5Okik&J^{H*iiH zwYabBCTx=YjDE>&^JC}WE8VTLeSxy*YrJ@UCnMU{=xWo89nRs?K>E7jw%VhT?>tE- z5=5MpxD)J=+k1&fu|Iz}>CB87{f+~d-U|k8fSdoUqwDS0XH1*se-+d8BbnvsM~$t3 zy5Riep}@|m@6y~(Y3e){Jxjg)#bvD zfQ}mT5`Mj1B|#1h&tIz3{`D;QaV5R90{r&RGdfsa-d<;}PU`jCPMKWTh^MA}Gyjkr zR;<}=d`8-la~a~Vd6wAF2%ULa*-^|L+ZFGfXYycJMT@-%Ds1SXcJnwnmjSsGST5KD zq}3YeAr}xt=Onc6@A+R#m(GI(>NHH9vy=P#`<*7{EtAYBa-D42u@cP>s*iRAVMKjD zmn*6Xyjm2kY^B&MIJBRql7}q}VjOx}OEu|9IAErBUltZ;#e#2%eBY)XeHmVw&C>Qg zWlQ!gv#;UpIoxipU)?{EyiQZm6l}P@96B)5J>te;mc;)GDHktQvu%H}X~drKUBSCR9jKE zH4Fs`#Y>Um?(Po7TijiWySo=CP~4$FaVG?d1lLmBiv)Lf3ohSzo_p{3#+#o>#!1G` z&N_3?IoDi1_U9c0R*x?SY)6jlx?&xF&xR_WWdY-PivBnX;`2)WT-2jj?l=C~F+-5H 
zXy+EzSY-HIuYZu6NaQSK!oSyT4VCBhYyYPzuKF=O20C(8t}Ssv-kj^;-Hc#YIvGue zF1MrHmze24xTAPdp5*p5&JC4llobw&Ik>&t?w)MU4#^1?2q%w1$Y1^jh0Fw*`m?!*#Jh*Q&Acb5uBxv-v!Ez+MbFeMJN%UXF4lhxh@_WqK zSsF>Tbhx&o!)@MWmjWfQ`UE6nf%HaYfAD(ddf!9X_5yU{6!o<9yLY1duw-0#MPKTIBy>r`orHN)wvCD?StOGE76yZ01Rl?B{@wFWfWct5$IH9+ zA8MX!nvRZ+rpEmahPxUAPe7d)ET}&d*)y{V;OueY3s z^6`DxKdHEIcBdIP+!3JhcwbX1dYN!mVX5V=WN^;t?0o5_;(V5^^B~?aaDS=?HF-U? zK0TFZ<<8YTucI}--&m38a0D7vMi0=5R2=w@uEbp2J0Lp&JMpVG@j)&~tbQ>jOFm|2 zNo%LRl?w!&4X>1EJLF3#3+15kX)K(|-TBjg9=P?za8lZHGUw&~x~g9>Zgy#mi@d_Z zOY`7(ZpQo+m|w?fgL)!-kQ-Nyau~s~A9zFFZL_-vp@3fwcG{XRT*U2)%3A>?d=t?9 zwLhVSdriI^Y;B)1R1EvwaqbWstm@HXcn{Tpc#e;(#*W$5QvoB-QaEcvRp*Sd$7PKS z;I^<7GZQ@a*`B1X$b(={3zQwpJZU87x%~F8gFnYCCP5Hi{G%nK`j^@o2c5vzlH1Ok zzZzvKd7X8RU1C*pOXL2cxjkyuTTzK88OvuFFlJux%IUi5U*peu_K$Z%)!*wz7aSTd zZ=|;^TW&X4i>Qm@Hvs9fVn9fo93vx{lVN-L6GD#rPfTu6 zK~ANw%JR6DNF(-NjQ1evt498;dd`kaBNOQsUs4%4e@Kv&4hdd+jKjzM;#P%Y%K!61 zNtOdCffdn4_tMXuttY%y_VgxQuKd#nehf5ucCq9reqX6bYXV0`JR<5pmj9a@k41bu z%g4x%Z~4y>pIll1Z)LR`EA)L=gu~hSU#j;VCemaM1uOf^d3S>~_!Ui=4DHWc{WU6w z1F26YUl2rT&XBr%usd^fUV=A7o0r5a6&#aL!~gE6YLh80-FmTKA%V%5a_LnFmN4E* z#Th#$T3PDG6#g7cP7p7BKHb4RsUE<+hT}MyfYlgd-R3f-KN{U-#1g5Q{4AHgAK@#q zHVrD(*hA-u#1JqX!gt`Zrc>A6FqrDwp7y_4HRm%F8SJ6io=TMu%#ultQFj;^M-&@0 zjRCcgj7i$)?bTT<(|pMq=vVxlXgb8{&hHH&3tpcv5#DH8YT|s~on+5u%7cA|)F;HD z?&f$-eBlmuf*x(SRsXv%3oCqV%N#=85ZoI-Wqce-5X12|@BL+nP4Ldf^3wVk_ ztI-LAzTf-SSz27emk5 zQ3u)4KnQ2-NAjI0o=dv;GDmgby>&_hPi9aDD=vS(Ta7aj?KsUMn?~?g>tvrT3dMpl+_xv6^<9`k%43u3IQ%IE&&06 zV=)yRy?R>Uw&p;eKJpY9xV?peQsvuvPj8w{Kgz0UhA=v4FVz;w85k^~gmPq$iRG!3 zXk%ev0lpteN^wq3S3p~MJHj?PaTTgsRt{!;-Q6t(sGkGi582~T8=DZf<~l$s{^;4E zjP>J_SOz@`Ro+C7SRh^Y7(lPF7OQ%zvz0X4U3MNZU`AN0@Uh#SR$j9i}S(Mt^W32q(HPshyfifg+~lW-CDj zjEgF4;w-`pbQnH|GCcxPOLk`Mq#p{;YRfI>nzz`iM|HZ94iEZb`}z6@H{*+0_h2kk z{c8osQ!ABgqK>U#WxKqSkARnhr%x3A3?KA1Vr z=WLld?Me1a-@BnMsaDL-eDwF_>LD4G=Xh-nv5oiv+g8;JaS!Yq(VS8dl?k6jXOjqF zg7Cg!`W!4#Rq`1ge~Q0!;SHLb`;AKBU*&2yvM^iA?8pnXu}-Jh9%^VxozQvC*pP`L zu^#G6RBy`I%<{G5YiWmtyeNbQMg9Uub}Z3J)8x^8c^WH3M}|+q0%;=s(yrJx!W(QX zUz?<~pxwYNW1f-58wX{QsInmW!6n!XscGtK_e%b1k%dr8i+s&yFN{Q^~q3Xcj+5%rab24I8Z_JErz zGmci9dtUX2C9D@sV@4w9au>yLQg1+m(6btEGJP=lN&k~D(%XbLLl!~p%h3-Awk-tS z^PUTpZ^IS9b^6T16We%YKWQ{+v*HMS>1?XAx6hp!;+rfWf)(Uf&pSGU^u@p~Z~Et7 zKz^0c3VHq+dHPIO|M3&{e$T7r-B2FVRPNMQ+Py={pCg$wiSnQ@>g;K7bICW#!f!JVu9Ac#2?sKii)En0A4Fy9~3>Chu)zK=_%I*gV8+KLo}as6mVb5>p(ZC^|NGy&I{Y3;lmNA$UhzVc9N^5qH3HJv7DM5Z77~MLee{kQ6(c`mG@d^vV(BDR*d%g;!@`AC@54B zy!kBig!DRvLDSXu_Vn>`g1pJkgr=!~co-;+fPUNaDUs%j8n#3xOUhWeCR0|aUOFi` zdDCJQ$v_kpHu~E^)E0BIP}7gOdk_I-?gss$PA`~#PGz2WFeM2lw8D=mQ@ujdt4%ab zer$ZKRGl$gvS9iM8_|}B2 zztH(2mTt+3?k+%*=GLOgRs*il8s70>a)i9d&JlFb_PzTwOizOoC4K2G94S(-X*R|d z9OYb)aDq0m%Am4JqSZkD@~y>d=U`wUaHwCLH9g&u=|d^^(sre&=)UYmrT=8+1-B+v zp=V{irkll?M4^UoC8fwkt2RP;bH8>B^#XJ_Ls#c0KWoPL@xU$BP|q+IBymi*p@f#v)cwVp5?h0RtPi)POfr-fT}+5FG)>q+8W0G#4#mAWzL%gSZZ5w z5qjyT$vI3}RbQVFXoLt_ME3hx>wM?j5QGtDD9>^lm+%iWRIYC4R7Ce8-#S!}s?sBr z@TXgmXvGbG+6!@V9>^mhb3WjA_C4&9j9uOR{-JB!^c<9Q`7TPDDyf}T`ICj{V%7G% zq0u{hPEa-cx#eZJdU_vh6-+|QpvcIKW@}TOjEv4i_Xf}HG@!9Ut^Y4-4BuB(>3?9J zC87im8-7vUIn>k~J9yS1&DlD<w_@drDd@%9&x2qw0@PU^wiDo_B8#g%DE)I;9(lsL*C3Ol&ZfL8%ia$nmb`y zZ6RaKsL>xQ;ut8UZ2v8oZ%!=|suiRn{P6m+i*Aom#V1Q)TM9?@jf^z(0bR>jr+AiE zs-ijx28QsMaPpw1s+LuSM!du0^-OZU3&7n3Nshrc%!^KM^iTS*S;vF zWAc28u?%PPaQzPQufsZc|M2*kba6mLW9ps&ELWM1tUC^^-UtkLjNQ8MT1Y#Vifj}n z=iP3P>c!OY7uKjd`-~lS8=Px#lpWSv=-`4+;w!<0AG1 zmkC_3X|e=?lBt0rag^Ap{231J^WP7$DYl-L;Z`5Nl>(yZc55odIZ`^{W#0JonJ3|# zq(+rN8fmzjE3$Wd4{hr6-_4yS$E9?IKkde67d|d!DXb~j^wUJ>6>(M2QwL|~CcBI8 
zlHWXAle<&!k{N;a+2jhPCPxbk?mY0UU5iKo{qc}4SH_5~-pyus3|Sz3bRjo4x9NS4%;3!lOyoXoU7m=8a}RX&|}6|Q(Y~|R6BAFszJWt>&P~LSRJN+8M})&AvwLc(s>!1HZcu*od~|* z<8P+*hb`S|U#~rGKcNMm*OLdmJg}~TZg>4*28}w-d(QH!&hz&r7$K|fj|Bu@9?Uz! zCXk$s6KZfRcu6u4zQrN!+^#$i^`-q$Abp3OXo~VFe0oRBS23N3$iN%YnT*1YoT576 z80;`$ELJdSu!K2mq7OTQY+Wmego{&ag*vQTcN%#C1? z&jZ0CkS`O>a*=1WdBdkdGup06QNHT?pTNkQvSD|mGGBHfQuzoXfAaV8*!W6BaS_+l z;|(k&SI%xQBDUpm@LvVx7J0CvCcinTc4F)J%mlWbw^5BZ?~^IXpcHGdVvzp08Pe8IcU~>Kcd9Te z+TBT_@ZE)-_C?5jyxciH-;@IEe+sna5v7l$ih5jz%+?hDHDhH{x=Bo%CUu{BCI|B3 zb-8%WO0us&ZSh}T-cAbdo)%i+p82fpxQbzG%vJ#)=L|=S=IjL&^RTN zSsbHU^%OUaf~4O2fC9Ze(KT(4%Fj$HWtyyk54!#K_S<%R(gxUs5!xWi`QJPQ;*;n* z+Lms)BLEb9Kguwyk6z*{>!7BLh07xkqG!BH5L=P)mSL-9sv_mGU{%0a{k(gXnY0wn zL`Wu_iv2w%>LBo>bG>Rm+j7IJvRWX!YGn=wJR2E2I$$u?BZMT8Xqc;kAXE-NsGDdq zlEo6?=FEhPiTb(TUTgQ+$WhjiTl>@!-gPL6qdDIuKfxXcTdj90lyZ0ng6gPRN?Vm` z7x%84P8yMua|1gS5-kZeVsG~RVrDRrJ)tefVr3qM3i~8Lbp9lVaDCQf@Paz@S9rdQ z5oRtAAdPL-6MD^?KP%TBc|^DzskR zA4;wc>1Gh|27jH(ru?DgwjvT{+!=VlM06W=*m7AGU&`m-wtGoAQ?7@T6E~<_z8L>c zRBvvo(1G>I#npPuaOY^$)u30}-O8$$#Xiy`V#f2wH%%|gX|+1f*S-*Hg$hjffi3XF z%HYYCFJ;qACDEleD9H#EAETsE!0DzyvKOQw`|Q#X;VC|%tK-r}o6)6IK##oINP9V2Nbr%83K_D;BB&TpLdg>3 zu%$7o)haaOWTlGczJAoM&{Q9yf!gr+XMafsz8;T~{R~mMCC1w@ZIFzrX4sZ|+VIu| z2TKuc15+9*oHS?_z{H3Wc4^U-&{-k~L-dZDVD^~Z43Px3DlP`%8#lpgWu(&#WrAxq zl^-nN@$(>Wd-|;u09@DxjhYtc za2E83+EKd*jMP#*q8e@u%+`j#HDb$TniZr$x8^kwHR?*o@FKrKN3)yA-`(ts=^J_E z(?Vlfr8*bcwN%ttRMZ%@s`u9#%U~aK;@EG{OVa`NGjJe{)+2pJ^DARTIg2;plRhNI9fbM_5-PkG0;A)+`UM|hbs9$J z0}c3Mx1)?pEB6RXH#48t4I@~LS||B-4~~wO9^7JG^dx7mXrvYlm+*+|OUI+!r_vf# zHp9<|?vkjKron|xxYhF!e*FCWv8Ws_fhBt{ZBIe3a>6|XdkSOT%^x{Fd|@5d_P!O0z?E(fnL z9ugryf2(@V4|Cqw5h`!pc8w;8nUie_{nJB8(cVv!F(U=iU(Hr<{20Eo&bOrM8DF@x zj+RaGXvyoHe2*(<3YgHd5jBD9T~sQK1gT}(5oQ`s_qnNJrdlVxaT0Xi))GDa1-Wmb zTa%IdomMCkA__H3sZs- zE%Bt;0Z$4)XYD$!RJk&c;FT}>RVKWxo9f9qkzRV7HI{}%^xC>uyIM}SaHC^rOIyCQ zC}k^_Hhe+)$Hb)XS>YjFfc;sUxlMJ>W~M}2B4{hC&h}AOVBL;;Cs4nYNlIEZEm(^8 z_hYRDT{98dJ+oS`Pfj6NpnH$G&)02_a`!Kmbpn~FUuMg{BJ7Gy)7MfH%w4Jzb4fyX zrgtzaro}utj0pJ>w>`GmaPpyUrn7(g6<^=E7T$B)lO@nEGw7bFk52&p-~o5Rf6!>B z?L)!R$_BXop=;zC&f9ITa{gykP%bn@oL}WtL5CuvC(*x zk&Nz<-AfsH;LS?x8GG^rC+&aE*yDh|P&qPXt2r+a0UuGK3<|BBRb@yWvocg-D^brJ zvS5r(@BOV_0zg)`mgNJFtTC&s(kg9W@IsVNVM!l!4VqABBbLX$eMK^`P&KD11Zi)l z39~RVGIDGAMf0V8VR@Yh4i2CS@+Rb_(MNt)XfmY?;l?20O4&va@3{4-a^#HvDAZ+2 z!$q|OI9%O1GfrXBc-f*JSCaq4QMMYgNtU4@Hll`yE^Pr{gdsZ9W6`QUrVv1}w`XfL zfI-m?10@uuuT(~jNiOEVmx+z|9#JC1w4AB+p9aWQz)2^vFM2$=^+VOVl@fJJs3HL{z3d zz}ibq&Uw8X`Q?59c(*vyef{enlB)9gzwm*agGwQb$K&saE)P;DSWl7r0<}FhFKddv zbhH31bU{m(_l(CYlau9qP}vX^)ZMOB&8MVo;!ieeDpe`N?BlBE!=v*Yvj4N1K^wN(lWDuSjT*tU%TbI_wdFr z%L^=KUm4pUaD4T3xx=QZ_pQS&^H~q;_(&EzW(|oDwcg8H4Vp*Nsv@;BJNm1)U`IpR zmado@+}a>2gf;qWsa%U6Jf~b%vgrVbvx|AfS_f0G#q-iCRf9iRsjA!zKYt#k{0x^| zDR#;>3;S#}p8H#0e(HQ^UDI@6Ja-J1@z#rJ9Rb@0@wb!|v(QFuSI2aFNBw<}==9l| zvd&a_e}0G~$45U?MyK7bE77Ivz7DC#Hd39h!ufPnd;!gS^^Y1qOS(V0WHvbqTE%8I zYX9Xo5G74tqGynF1tGjBC;u-^(U;@Qm(1k(K13n$y-Cki&50Ap&tNtO1S=HagiJLI z*c=eBV=w-W$ak=%RTJGp&qU0lhKYjOs{R$ z)w=bdt>1DWin;q?QPvVYhZFdljp@*g3bBqlnm>sP9yQ(Lu(>LCn)X67D1dvJ2@O&_ znvJJqQf%`F&ro3{O+o)Em0jnc$JbFU%E4|5t>yLK2UXiEhV&Hs=}L?Ifp&q)oGVmV z9dV$Hc}!`afR$8#Y7;S$(TJ$9Z}S@^v*7&BV=gnu2e0qc-fEIfTg9% zqh2aS@`y(I0G}zt%nM0qieaz`tg7AY=(X6bN%xCFk~axews{slnEuQ2hcj>W*YDfU zUk>x`N`(B#!Kd!SJB1@sKRY};KBSJAqC_6mb4obNm#BNkVOX(^9;kkZF!j8(iuy~H zh?;aX^mCF^TE-R?5f>Jhyi&Rz`_G*z84XIGxY?Mn9~~WXb#YUg@3w>cxQ~Md%M~Oy&(7MF`xyl`?{@OUPaBAOfj7ad+(_fGrhleu${C zm*ZPm$vs=ndvN5B3(F~n(h|q?y(ib^9s_lO4^|4)tmA0Bgj+6M46j-9xM>xirHBv0ME_>(YiJ~nTi*>v84xg;0SvN`pCYAP{X-py>IT`C+v2g3#u>42Oj!0(~jiiScq@15dqQr^wbGpMWDpMr_7r5aVTO+ 
zXw?m0wd;f9Wz6B=Ttp_^NkY5h-s)6qevIJ!h(4sL4%Y7ZEkc5651?;Za~>jiY+&q7 zL=v5M%7O!($M@+Sz+#R7k1d<1%1{hFa&P&iU()&CeR}`>Ka|?)zjqbk$5Y#=VzYlFo z4|^JG4KVj9n~2IDdH?0D1M=@1@TrUbtf`mp1-|ZE=OoHRT`(-#p6L@KY zgog7wTJG-&#L3y5;h99Ie&3DZ;i*OORWPW_osf3Z1zf0*LN5Yd!mkXsqtVBick|aF z>VJC9gdR^89~R?{9a^7Kz9B&FTUk1=pyHi69alkS=VaU)_4b~7aYm^{Q1w_E`;)4P zOC*ukH4&Z3IkF2yL(f0#%vnlnK31o4prY5UvP#TvUrDfv>h4Zm_sD#K%;Sfey`(Jg zdD62^cD>5-L9Mbec8v}+53^D3?LYBBObnfm<(yaQ%Q&|rvvN_(?WPirKc}ZR^p^^L zTl+`z8j=YAMn!qJj+^{xIj`wbJoU4y$O|a4HBVVrCSU${v6I>U$uvuvdB&yb$=E~M zTaxNOap^%EFf4vuD5i+b%Ic7t3aBQy7QP?Zl(jdCryc6Kp`f<@wqdbA?5sagZ}DnY zsc7$sGFi+yoHZ&g^3~V1>o6J8;pvVj7XX*+20u87vtGL^t`^nFq#F2V8X{Yn;>&s0CWqg()_fOKR zK&)}R+(zmL71$A)HnF`K&WeB*kKsYtCH_g)3NgWgK7G9ydFSIj*k1)RRyDn<4Hq&Q z6)hB7${oMToJu|))RTOsL-7@IT%5^#yufh-mUJ#B zJ`ZCrX*VzNNx%5z-OyvojQEAZ%G%2i%we|On#0R`69v2374|8d##IzI!vv5qNfOe| zBP8E3@X%U5osK6OXEC1VJJoBuWT$(&g-tH5 zihodHQ=%o#yA+diE?_@ye(pYI`K$&DL)HdsA6>1vUE~l+wW3H&9?ELjFHjIYVS9U?hEfqTr{72~ zb?;Hxn2?5qqfj_tGkr;@C)E)npn&}sRACez@=*ghnsD(aZ)&|Kd1&yQ<+$uDyH9Qd|)DUyB zvShJ!fdP6P4qTbXe8e;vTl%aK1yRz5VC^spAK0)K9HMVL5T_9(t&ZEL%VWip=@SoR zFv=DU<@5h$Ax1$&lHkZz1Q-N>YJi|hBt3>; zxw8?Vkw07rC{1s;d@fLhqnNbesRYzD{3G&@KQl7{4x*z&v_cbA<~>TNwvG-$2prIH zguk{tj|it$Hf_}IQeenNpZo<^0%$&0;QN%eNVkjd*`aAN0{TbToQp)8aL9f$YHU5<(405A`_0J zLyEj@`uBjHHrBrpa&!G)xAH-#4vs4_lSql0s%Dcb&Io~m*B*UO=V^R?zn%HhGUV!b~P{a=RNj}jB&C!z8J zt3j7Db#sjAZMiYLyn#m~Zm#1iIOtV;oeO&JhpY((74ho%I?@S$g`DEs%Xk@dTRlPfKIyO*$iy516|*tqTK>>%Pf zUnAIn)Nm`|hw$}ZBf(}nsw_z3TM7>G`&YcL+~R)E{y%Q7uI`juVAHj>BbyHHm2+pO z3kys28&Tz_jh|9d(Vn94U%*)t@JUGJ%C_>oTPp|+v` z4J&}}Gz!V@?lHg0LKL{4PnacTJ%L~;kBhCtK`kY}`XT?L;f@u;2!Sc}^@r6i+XZ0- zX&Kv(-U^+d=uFNFw|xlIxHv|fdyWlE^-qnQ#QR? z*4A6iR)I&yCuAao$whTH@{)Dl&Z|;WSWQ8N^V+6$n)F{b;}r_l36fVieT$r1HMY_Y z+ymj&a9sz>_up~(0(}O=ha8rBIwu?H7F>Eyh$mpdKhx#8RvUbOmbfYMNQW%lx3`=J zSD)r#VNQ(&UGyPobCw&B`@wbCUxFal_-=Bd)}! z%jeP^zhud7D6loB2dr~XbDLHES7ToK+@i}dq^TITFa^K!4?Hs2TKL9mX)|LXzW#}{ zB^=+EbB5<%W0`f-p#x99l9~)x?x1q60P@Cd~+9z;)XX5 zS;zAhJ)Qe0amIPsN3DBuDVa?;AD?Y z32m12Z>wR+SiI*qF^ac}j(5EOToVJq*LM_Pbmm2*DKIv;i*R}nP>m89n2Ll5~%cVNm|xi&&x{(Ps5AnBj&*q7wI%! zrCUp@rd}Chu|l77{Y@#Y(8<|tL}Q7V+%+vy7VBu%5qb(U>y6Y(ChH2bKhxn@A}R8+ z@{Tiy&^vh!t_l8o#{93oEb~@^13ioaWn^P#XUB1=5dfzVrijKAC{$!I7wz3#u@oqL z*@2h4#bZ_)OsQ{+-ba{9f=Ee-4v)?NSD63p>5c$qC`Cl+d)z4cO#(J!AY|k5mR#4! 
zNJg!}kS#-@#7AVDBX;6w_Wk$Ouj@X>Wzc$zxeb@Oiig8RJ8Nq>0TNqV+o>bie~1Y7 zeUH|jf}YjYRb8I+F)Kjeku7@Xfb{jFSF2&I4++%9CfcpV5G8u?1T%hXo5QeL6E)ln zq9501W?aLMjb5Bw0ldtV{_lWCYFZ%P+0pSmB4EyFii@qPV#!dTZ5xZoXV5A`4c~lu ze&j%p8Tf!-A6{r1I@w}a9Tyi@T3WiAYphZ~aNtqLvb4I|*xWqmN`l@CINy4{+L)Uc z)zqxnK@h`$G=EpaBL4k%p)LX>EWN9T-?~zYFG$b4|6yQ8ZCkHW6Z9GgcbY#Ayb3$= zpcH;H5-TGWY%Mf)K0D8^ZP5QxL%(_s^SocUNKZes*+w?5xt*CYvODs=;Rhk~bl+!9 zL#J~0W>G?QlWPz&+@8rF1{5;b?Y}R6%6{2x(aZd*&FmfR5>Q$=vUdfvsvTeLb)B1F zv9qveUg1G z+l8G41$MNw1TLT?-4DOdvvTl%ef8NwMp;D6+^+Tc#dLkzIKr3mHd7+ZbJCk~=C;Nx zJm4hJgl}4^2dtd6y9mm1C`Ft}MhKk0SmK3Fu+t$Lapl(hb(bNnB0a#_`S{jbTLp;2 zYX8GAdUW;p1DFIxZ2_5?t<1r5vk=*}ca5xV1`mmzSM7q>Utih?*VY*R4^p5o8MMBy z1k(7*BI`AH*+$-I7OEfBBib$cv$M|gB-xXZWs3z@VZT(Zv}wr<(+^<fs1Mya}5Q3;x2$EV0>7zmqdsj&R`L{*c4 zQ$lJ1UPvh3uAx3AE62D)i9m2oW2gmZWOs$YZTu^Sn)PU9W%qWGr=Hdq?aCw8%m8PJ z0fP2&)kMg!{vph4snxXIsZ)es>`1&EP2v-k($DjVPmX0;?l+{?%Z@5TrnwPH)gVrL zKJ%W1;t#Y86c%dMiy?lNN_G0XS-i992>H^cDCbR_VAeK zHr*BN{e2~M(Y-`9Lf9+8wP-nYvl2E_U^KmeENAVTLYLUK=!t;eE}rS|J~3Ad3QA`Pea^k4&G23*1z9f#ucW28+KCQ zzdVWE(H~WK{4(iKw>deU?;;wrs$akR9I3ql{a7Yti>U_Uk&*g*>LFBP*!BD9{mv}E zl6(mj8K&MihbbjhQ`6<* zE*q{)$!y0k(VPt8iGA2x*Ey(_e>BT6kY-KWS~WR>Zf(n24f4Uzn||;a(lY{?UOCg80^Q9f*^k&{<3cbn7K-S%<>q- z;d)if9nrR?Ds)6GCR&O&8EnRjwdTkY*r5~x zB$%O-Q|KQsBPlbh^ug-5B~vzyZwzo{XmQ@@Vj_oz=!1a+P2EBj0}3F1LG9qNf6fag z$(P_05JW^q)-S}WSG1p;JC2NuaAl%Lm^LkewJUlzT*NY_k6<7WhysowZTR%`6o@z7 za9lF0ogWwzCVS(?+s&yNW1n+28Y;S3{Kn;88qJlxN$R$GXE@Oc=yuA_E@ zCA&$B2tPkE+^68+*OHNpI9X~LHHiMome&Zy!fn^`wyf0!Y|Pq&d-gCtir+{Q zU0Peh#Q~E4Rf;nG>{_st4=+X{l49^XUs}kd?C*qLSh%mWCLD*z$&|&1qA4WGjg2>n zl_KNF36Gz9`N-#O6QtvS@91y1F>KLpp0LGI0Z;q0YzGr0Lfxtq-ZTGmSx3vL8-Cn+lnfruL(<$ti-)U9yR(aHM8mffl$Bg*`-)H)?=WMXsux4Wuml=s!l6Fb%R%% zqaHmkN5f1|%a%)LrMR(gBxk)rj#!@nxc~|=@g=81O7C_(_ua~^^jVXW_!QiHmU#O3 zO!%PBHm%p}Na(|38Atr4P8KaG@7ooB-mjo54}(FwF3eB~xfSEpU#m_p1&xEIAJ77e z-%}dIX7O$*mj;4(?;UbBnUH|*=bVrI`cvJsYnNMm(snlC1TJd3}m z$aG!S%|AwRSxqQ9P#X)Ru#O9SeE3wSg!QkkKD+a)ZF6L~^V=(x6&v4X@)gU$9Nnj! zL8{%O$2rH_+^hnssqkT&0dAM)DB`Qj#in#J@@(E{6_WiKPn^63#LG_C1V1HJFbo z@4rh0*%>xiM4aN6nuzP37OX~kdge! 
zRvHNoc3kh@-p&j3I0Lcg>U<5C^1lh(tLa>UeWrI<>*v#z5o%TAu=>XlzjX`AX39J- z`Bf!RF$TAB6l5iH9xmBKeb{ajS5`-^Z(;%^Of(DHpIUnD+wln6tkGG*6stDJabex% zfbOQ#`NU*dRo+&e7$a=~#;XJAgq_}lIEt**BHLCqeTo_iv@TUKwZDw`xf?&wT z1)d&uzc-}x{ZyLsGT3{otfZ93dL)i`Y5!LdI=G(k!i74Vtg?EQB*}TdRg#i{-~?*# z(gq1R@9_XU@XNtOz%DoW{jLX9;@w?RB0f}E1q)$ioQc-3OQ6hszE5w7lfcV3XK z!LLy<6T{x|!`>yV=lscif}NEeFGdDPZyn_Z<}m>zGe18caD8p2463QY(NRSP+;M`- zpKrE&Ut&z2HlLPBz9ffBQs#`?%w(OOvPJf~UeDJ??eRh!BW@@~Lt~?r`+`xM7Y3FyVD4pzfHXARJsb3yO?~AEc$m&};voW3#y&<^ zaxLKJ)#!ss1>F!MHh}jx-RbKa7$AgnkB&C1IO$Yqk~KX(z>X+TnhmR+1Gb1W*o?A% z{`^QzT(1vSE*m*gF?y>o{kQA;3YJ zh%exE9NiAf{BsDO2X*tLYcr2j_Zo_LW60pgCBlL*jC|wo-=kCdyNx|yqLQtiyyKZU zPxk>q4d3yCiA)=9jote|)0wBu5Jr_$x_%zl5{XIf3$wqPr8zy-UvbZWR`e6%>2PoU z?u7KDl;AUgq|@nlzDxLWcXae#!P?o?@v_s=zq-RO+GM%ywkSB}mfZi|tyXvG{?lt_ z+5rN@E^g37PX)cyYeraDIEKASC-TCqps-KLZDx5VsV!s^b_8|T!s2YLDj|K_{`H_3>@SR7_m#zy+t@~|6mfbprC!f_*6ElmvH!B2OW1qkR-6^C+*WEbH_>hy9tWBh z{bBj{Zx~|^qXrfuY511-6aLlo++%g8?z@eIAlrYiqN6-#!;kVGF*|ZEo;K{$xwDgu z=Q&`KY8_ba>oxQTE$-a%4Em-(tt3MwKzpI|0UtK$Cw%Fg02FIpUuxrC0qlxtSgRBFb`$;Lk1%Idr>~L+ zYl|GK3i@Aln)bRQz3;Bn{5uXKWS(!HZ?79IRH}ewBQg>%S3Tm*?~UBpwU1smc8?UM zUh?SF`@3uNV)*N4QA=vxb3g4iGUlcK+qO4DmPiJ{k>bR{S0C7a|N6%S*c%&f3<>WF zNaU%HG^X(CG5iJFE5Aw6f!(@)tFiMdziPAkQqIz^p8aRVFLT}ORfcxO@1>cWR&02{ z=$-1V$E(nppvw{h@h8_2Nek@~F9?L>X%UJqR5O*+CTi+Zx)XLtQo3}Pq)|rQ8Z>bh z6K*0hT}{7irqw{$`GX_DuuV|#B{R?+4OPIysO0U(NF1@X>Bch=&^I&JlQrspzkY(p`|bR zKX`(BWw4e>&ON8Ypj$?p-w9vJ?sf#3Kku|y?0^|3i+#;fh`)HWjBXTx_0RbgtNlYB(4JhT2gG$(Q`QsBKJX zFg@~xkZNjDvp*QhN{gy8F@BWg4f1|y*{fqvT>rtgl7CGMnQLr>j*b{94ycP2YcjC7 zS-xk%N|7f-JXoeNsz4lVo6hvu9FG0RBj|HRp5t>T_5bYge+vsO?d>>mvOwT1FOd=r zPOUBrP^aX+0##n~vpVK~D1r-VDxxShc6K0AK2uSfC4CMG1)!{6#V;{Wp&a?gfKD48 z{Tk@r?%v!SbjHq8&f1y=2WT3B=!4rpOGhvu@7tsXY7bfL>+1{eLuy+Q~({~?kza)@3&$h^54z0o`37VJ*bv$85t4j8=v>Lx?q+YT}BTn)>SMD)^x+7WV`IZW z6qiVIVC!^xc!-0z@n5X?|8DMTmB8n3I}5#$n1OcH5xO2XdyKhznWDtcFud7-u$NcqBPV9^MC^{Z=fmhhdRu*2 zO+(=6^hliSmh{1UpIJPD8q2ztf{l%_<7;xpQDCUZL{@e%Fxk$~-^1M&HqQwMVNh|| z(NvtQ@}_)?M{6LF7ACXnAD{o++F$$ls9rgQU)nx_I%F4QetKdT5f$rF1y$r=l3%Z z&V8zx&MFdqEt7iQVGj;}eog?5pi49M_S6}kfVNHe-}m-a>8e2CW0I#T6s@k1k-12i zA0Nc&Tp~e#IqqikyZSz$hBM2ykI#x-w;a)>+z(k}R!*OHd{BVspUlYc{$^08hUFH7 z*bI^^zEC=<`zmP2~`-(upZUf4@B+!mBSRq{HEdEDl~Aw5jI%;K}+e7%xuW5sVv1*dK>ChByv0Z>t$M0Y zRPpE9U|K|?OX|wX zEcVE&g~__QRy4WD&~rG1dQtCiQlHsxe(tyccG!H>ufST7~JKpb9D#(1Y|p_5XF zMwzqk@4)T7vehNt<_`5UBO?Qzp!TChh`snzn@*RFfcI$={J`JQkqI!%0%=!sP&3dd z)_(Vrg15U`c^Z?!HtukZYZfdz3qMIasu-3_{E4X z@8htt_33JsnJ%0U?$v6p{*?ig?DIub&9EkTw`pM1>Z(N9K+2>^&``wl<>p&cW2q_Z zILO;Sc;dKS;RNkwOo=4$**Y^Im>=&}7!Gbt_g@AnTZ_>8U5nZyL=W%(>+Cv%nrhqh z0YL;5l&%;$1Q8NJdJPa>iZm%AAfhzsRX{+RhzOAqq$9oeCWIoO)JT;Uq>F;oP=a(4 zzVp7jJ3I63?Ecu9^XoY?&&*xUeO=FU?(zs`QO`~+3$4FC+1cBfFS<% z)3aFC>2yC#gjL&#-h%U%iBx^cgD5u)Oq*`N4C10U$#I*QU3hTfbNI{pH;ZMub@{N; z*2Aqfhcb*0C-_);$U1c--+Q@e7PzKjTQImw40b>N@hpwg$e#sb%X=8H67a!Linwsv zJ)P%ejh}Oz!p%<*-nkh<719B!bFYCdD4xq-9I&^Hi7vg2fv0f|K`u%YPK!US6{ z>h_9}VIlYS^nRLkn!370rO)sL6~|+_K#kz7h=MG*nX!-YHW_9+A`X92;WHc$)7Vl(MujP%QDp)*>&;A_B=5PN z?{Ksh1^Do7imu5o_cw@41$B-^?nbKg9#O;@)&^)E$e&^MhjC@%TLJB-N1yi=j(;f> zjo6nv5oe|6z4q)>^Xi*^ebKWlwwEE?#(+hQ)4!wNtWUF_6FX?(q(5D$1frg{&K%f2 zF2->C+UKc8G%}SX{yG3yN z#}9X||7OKBmS56UL5tq+s~@Z;SnaoXF7YloFvl6 z_5;yj+tVLj?J}NA*o|`slM2V)x#&dZ#6j=wOeuJC>jY%xWZUmA%agA)mgmHn`T4f~ z`4+0j)me1D8xj&p+(n&>!pBLrcCH`4Z<}1%zc?N`I%=@-*0-zFG;{yp&!0xQHb*b1 zs-V%Ad$_?1ZmSP^{dM8*Dl5Vj+$@Xzgv+<@h(E{BFW@LU@6xAk$@vjJ;tWKyccoy5 z=QHr&_^*e)hN6mOz|ouj<+KkecM_8vKc+1&7bGS!qq=iP?1d#H^78UXh|eo~ezRKy zf*58-0Zd@ND_7UJnv#;TYVVennbrWG-kyiR;ZaHB%`ZPcA(Gsso$IE~f-Iv8dYmP}AzKx?&+5%|1i)G)H%CWDXXnlB 
zZLpwdess)h11A#pz zD|o;%ey<)jxqqS5bIc>)$Li{I(nJX2x0jE7L8v zw#rPB+rFRSX`&Add`;|ahH?~SE4#@X0f6?0CQ{kxtS^QXpt_=^ol%Ff=4YMMf>r8N zmS$}lZ%eunSM3oiij4(N`wT<_HP5$H=BTP#abw;~gkBOf%fEl6d|Ju(!NP_!U50Pc zP&q|b)9jxWPugnu<)#oh_C`Jme1CoWu82PoMJp$J<)Th^V)%O4T(F&>kR%(-sJV=)ROqZzAwzD{>vYb- z(Xa+nlxKiXf9}0@I(->tq90u4Rqy@9cj7lV3%kI$>`QJ%caLe6Z*Ym-!va@`V3X&s zvB0z#y*F~{!h`%<@uZ(-8EXnf)6MzR^CPBmAA+nsEXp3c4w9H#qWp&errAlzI_3gX*KFWY^*t!KapD*o52~A;yVLcWF6P(K$_D zE9(weCM$I_uFu?^k$WgBuh?D?*qbvXncUsJY7o2a;uN?SmRb2lacr)?Lc)p4#cTq% z9(OzY!359d#;DQiKB_dg|y|LQ5o|t#}ft5NXQ&(5*Ym(`EvCA zm}39{@gNQg03nZ-sKKNYRzGq8!1DVk+KoTr2AYJap6%U_0BCsH=+JS{VoD&q4{f9j zAs0+YkwrRZX7d0Eu$GcnvK0EVi?Yb^Gio7nE_$0?l|ni)LdZ_|2YEgHYd~C>ECrUG zJlQLLzkv~djBCAJt3~nTeJie3H8qjncm*aEE%J+;kXT2%_ z6ueCr61#ruCytws8gc7qVN|2{+iyg=4vOsOQ>{;^8AC=R+n*FvJX1>nxTv0z>woy( zFX?Kctdhy44QMe1EZaG~;(JPtwJyh80e~eQaEukeZFQ0LD2H$@i6`j>d6DbC3_4k1>(YT z6?HuFm`m<&V~H$k>r~oq^S9&8;)O}`&dd_$aZs_R3Y*D+ng9*s9#l*{Cy$P&)q7hjs2#C{L3U1C`R_F01Z)Fu+H%V$B#Pzl z1ias>ZBxeSmGHztJ!y5s;`}-ev3s>5U;A0ZH$g!W>z*#|a;$`DYIyY& zTQ0&M4F!I%#}h(xJ1OFPw9qQukw z&P9d7KxMqAX3)=QgF&j?CcE|8CC!@dt4HXguznc_v{Lk}^jbDL--3atG1{bM7V>?L z`3Agk0nNxz-{sNSb@D#7Mwxpn3B3?0_vzYk1{O~#$uh5qE+%v-1kDyB-ykmXK}81&JSZApyYJ9wxTWE3C6mJF;3}Y9A z)O{||6Hv3a>lf0eRg%c_An6f6=w(#%ZVl}HZI^p4P6ZtLHYolbl$cB^@2z7fQgN9M znh){gf#%N+^zxrYK8nxPcK%`@xYA=pT_Q)G%GzR6ujOCIdRX|PZ&}kCBhjjU&rP%| zOB^>bq_aeAb1CeKe%$wq@bFA=g-Is&(FD{nV-pX4P~;a`h=zw;RSjl%FE939ScZt^ zil!j3f?JPE6)UCBBV+tBXest`|Mq)7FOTSXTXh&dDG@JT?^QR`UM_AI zDNugN`KsTj=c4FH!m_UBXv~V7r|z`et|oosuD*TlL{&GuNyJY5uUm2HuUWm5ysjRn zX1ia6O1>4&{2XD=*PIyF0dwH~SII&&0)|nal}BB zHbv+aSYHA?OErTW^X~`szc7}f|_#acgEZ9 zi$(X1An|o`wP{^h&kN#5qgn2L;{K$w+01&YJdZzfhw+_9@&Us!ZzxZspW077mCa+l z8y(24_|=K8GTc`4*^k<^!SAU;6+HKnc=eW~(Ro~8fKG?Vke$$gI077( z_FUTi@1U1{?~KoMImX~`IKWbbMo1@c2pE+skpZ9WP%rQBSt`-L1B0dM`y@(e_NAYk zwZ?*A1hvr#66d?Bm$TiNa?Hu_Td!JMERIvG?g%Md06(Wf0DGdL^6Gj($juss(8fRp z2ftTQ)cQ%AkLzj@=~S6QYTvfoh2v%(bPFf9gw|t1XIZ7RZI0~Jb7he^zUa_b@q*f2 z6@SGqQ5*kU2SONZg9sGfm*VsoCb$Q(02+-3J1cT5J-N3AL0*}XVkyGeqJNwl5u_oe zta4T`l+B<0sOej=VFb{Z_|oTfzS)~~K>x!d4F>jNRiOKtX@o{UBlaGg75)fS>_27Q)nx5B(-`OM5A@ zBXKpY2nO~ZP|E~>e3k5NWlTPRhSDVLOz2vG0P`BuS)8y#RhX-04!0f?kLaX~HyMDC z2RBFDKohTkn}^Hacki-X9H#^T^xx*7=GzxvO=6-WAfKZBT?|=2Au9M&r zP)9tV($NTgE>!?9_)MfLHaZWs0m)1WTNa4=o@nUKm5ZQb-{RF{lC=VLuSh`V%)IDB zQ6b6xSriuotu2CO3`aea1%Ne`OhJ40dnRguSst`N2p?!?9(nUuY-ET+tkxT$g6y5+ z+;DyZA?D>q9xQ+)1ZXg)a};aR(EMVg6k3;!J&P9tJ%z%NnTfpTA9nTbZ|?EGS%5@O XQB^)qb6*kU`T(G*ri-jpdGhLC05!wf diff --git a/docs/resources/model_b.png b/docs/resources/model_b.png deleted file mode 100644 index 21af06bd93bc4a63fed0b6a210249e5c6068fc72..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 178518 zcmYIv1yCGMu=n8yBya=>?(XjH?(QT6hlAkmPJjTxEx3gv1a}J-+}&LdJ6t~g@71fX zYPWW_x3{NvtEaoC=Qq*ns&enq2+;rlz zy!`ui`9xMm4pfk0*LMIVue7$av_@hHkduBRs8A}ghB77`PNU`+kwALpt1}K!s=}8l zg7r3U=xEkD*=>M*Uibi^c?aL;NCo z*))x6L+K6NDr4-5W0VFVoVv-bm&`_fHyM_g0#}@2!UV&vV_TNzJ1cbCaRMJYz9pIy z;OztNnmIdYEkfNCBE-u3BFlSS*t0IYU^sy#X8YLuThwZ2BYP z*u$88VidgRrX{1M%Xd<;;6Pb7BUD7h&NpTR^v|wINpu~ ze<+0(_yzdc9a$VZ=w|{=p0bi|JyK1>0?!Df$?ke@^a3*81AdoS)-OIpI@;m_Opf8y zTc0CG&Dj7ke>&VT{J)a6_?c?{-D0+8s$5wyR)Wj3XJ9*kKICBpN}2hMYj&(ccRzVu zCG#h_aY&i+fvtQ>r2h_%Z8_h++sli*BSU#{C;f=^MBNd^_CrQ`xI~#;Iknzs5IYJf zLo!1AE-&*Av-hQFu{kw*=?-fmRUf{Xf>1nw6hP02MULwGg=qrnkKQkba6cM5nUS9J zI3Duiuh*pfGWd}?Fy2B=nzQMH#@I>blOa4eIY34n;Ygh500FxTv7rmA34pnQ053rw zh4|H+V)QNupX{03O;c>-}e`=NyS414;TOAl3 z(7O?~0NzVA{?xGf@PO-sLdA|+(rx$~%K@p@T#X+XABNXGbk507#v0PtmEw%x2RaK= 
z>rLCh(nk{uF2xE0VF?z*vSWShk(EsJqDIA{sQmt+2;NqF={so=S#ykhV(Fj}4zi34 zhr03)^nYZy;adf%`MUYMg%uho>N#biGdeT$c4#mx6bNm48hbCjufYKIh3oE?c~WrvR^P=gbkF}B*CBvadeqx=-n8N zHM~lq^tiRJwT*wJSL^3{Nq?2UDF0wOBp+S%SHrMayUn%+V?tU}FFt9izKM)t_yi2uD5R|(RDrfqSZlTBzH18z)A zj$)l+u43H3ueb~(`9zZl2Zz_351$^~AB1#RbkuY@j%a6jXPGLhXZOmy%PwXWXO?DV zXV%KOX8dQSXBx{r%h#vHZPRVT?04)=CiyEb+%O#Dhl$U!lLR97SLVWaM!BiD^=!_j zi6^q>rsgiE|4mg)2^PY%vmmGWITh_vk6|}O7&-$gazAUO_KU_WBG7u16UWNFTyvDT z@wh*`)3m@^LR+|e`h7Hf!hKGCZm&@vn4hViNgmoCw2%dnDUgwntB@PtUqmqX;`EwC zJV#W=eT)12jThgYK!j753xj=(56shHFUk9r&%kQR++`HU;vc69*Q%q>;yd12J|&Jy z4kFITQIZj)z1Go4mWhw5A0=avV~LbMOebgbI#|XVPO*ftptH0x=NS;__Zd0X9yLDL zI6CEkS8ejms9E)46tJpF6ud z=3hcX3quFPm$ z!~MzPQ8Cf0Q3LTmVQYXUJA+!xheA@3swvwkhbf`b8PW;`K83i2*acaIW&>UWKL+G2 zsy^%Wo5oB>VgGd>u-Go#&e|%)dthE^b{6gy&F9~Ipxl=6j%8(@)6ed>yZg{blT9P{ zVKSN{nk5=4ke`h_c0CnTT(4Bxsm0hot$o~!<1;~jrT>_6W^SiyC126<3Es-*y5zDunKv(9stnUz`d@XL z1{-#pOd9<(`rTmeR=of3L1&_(xY9@OxSG{aq@&+yG`K#j-qt>lzK)fd&99l&%0w+u zcQUh}1{QE^6pI@p)5pi#Hk~)$!FzfCV)+-U|oA5in&fogZ#1W;wun7Cecuin7Ws2rB{+Lvh(5tR- zpty9~cNlt0HZ7v-YP{>Uh}siffSsqhmzOP?;(l~^xHE0huH*yFjuvsPVrz3ccbHhb z+V>mJIlMg)^In5J)u4~z#hZRHIceE(E_dw;__>YpNG6C(f-DqC9NEGqWO7E3%jdTh z^O!QN*Iu35`qBAxYVav>^H^ivX-&R~qJ60K{y_JkYQNDHJicP!is#?BlRMQO^O$){ z6u|vlV{$|;B6=hucy8VuDrB)zLnmhWdL7VqKym-6FKkTuifUPIfYzUmP0aMp>q%iQ zIis{sjOo?*ab#!Fv(xK|lO0!x=f(K?X}UVgtgLh4b?VF#>b7wGtVd97Wt!He5$L+X za_sopn(m3s)eNnHGKr-J9NgnyU!0YGQ_BlLxDC5{JaL4xe@iCGE#x&-0019a03av~ z0C;#K1-&Ji?(6`-kvRY$m<|BoIcK+M2){i*b(S}91pr8U|2yH@($bdR-Uld1OK5v# zpR9U*Hy-jPzk?3@LE5Szc21LV5h%!9_)h#g6H(a|GKs>PODQ-MRYmPajw`GE3}-la zhB-KRcxbCfI4wrDmU=h}(FIsCE~#Mt-mmyx%Qk@1)|OU>G|%xbd(Rr_lYhHFB^&_4 zdvwHt)}wYkgOo+6Kg_a28AmdRS|RD}aHAT`Vg29sRJ&Y+`hPTP1wQ5Ge%IE(n}oId z`SgH=xCN;FFD)oll}yx-qA&rCScY5rGE?-$_zwIC~rw`Zg%|3jxf z6giJcCLEL?`d`WO+>N`L)%hqlH2r7W1^4@ zkm4~2+^0f!1!g6S@Y*%%vs_)Rhv-^nbpW!&!disAd~Ua)s3~8b{!{c%@O;WJCXEH* zp{Qph)xh$p9mVAARs|E7S%FCMCShIT*65?1#k$kJ4ODbyH^KN(rn>3by(85vt#w<+ z0|ou}eXY9QyXZCVbCH@K-}2u6#@gvK@i_H>>Z#sBNjPRB| zDU(3>!xFRW)%l*sqRcaQ&$fSOUU0mD9~G0R59e0@I{MEy5Bq-p@xG1GxT^5<0rTa> zudnk-2l6q_geAB2W&Hd>Y>lX*!_&!iU_jUGth6J|-od%A6jJ*S3^C>X4QR}ImrwoH zA(z^18Ivh8#6P2L%w)}#plM6zlf^*j`ikYAhgmX&gQZ6Z@^QpuTk~P8S}*I%CLtMg z*Vp9hwtX1TGy2#eX1>Yua)~R{SZJYrsX8s~Q!o1Y%R(K+%r4J`mYA7<_cu4Aq4+l6 z$z9t7Px!=2lb~wXG$vCE*1#Lug0eh9#}!w)MxvNJ$PIz%cDX$Zn^mi6)^nFa6@z}YB!|3VRD8^ZcJyZPE=DO zJF6yoo({Z?m#z6r;|`GcKs3Q4@dS;q~nPvk_J>7z%NiO!rZ z?bRz#1Nw}Rq&2>*!a(De#+8+!Q&BOpwF&;?;Yd}d5Tend<2qaD+qhC_5y;y3&_uD@ zIWHErylLmSy>yGLm^E8Mq8cC~GX3Y)FsbO)>vr^Y-W7VQqTJ$IxS#z#BVyx$?daAU ztzz3p6vCvc?w#i6(^$>`ZW1yLGIAq}3-`e!5PS0)GM+=Wo2w2tt0#~aL-uDZ$ z@xN^yytLY)eM$Km07Sb1Wau!`tUoX%&uCxnd|$I>znE2{JjtulGZ#l9PU@$nTAB5d z`Bji-gWL(E-Nh`y$TkL=G4vd9Cerr8*t(6>%MRAk>eCU$N= z?scDg(c=tDzEe4IxvyN0=N9NVC6rtHp1c_`g0yt;GN8t{bP5yIUQubCEV;|2UYV`f zu*>V6ZRmVSIN-(IxLM4WT3Zvz++2MdoO(_6OYLFC5PjovH8KHFeyW+-)2*dDYTm8k*&8dJ6K~m)VKPoa3q3 zC`C@xJ3(!qY-ra&@*;ECi*K{fLxYY*uqsR;FLjAsW%19D9M-m`P_4V&9C!qd@m+L) z{oldVR!?%?pS@HLy-zuhsMrw13>ZLn&T?21x`Z4oEta&tj*I{2KZ{!bT8&AXloFB9 zQBbiQ?m3jaSrRR++M9#RIg8!gfqr$R19}@KQrluDd(O&F#Vv&y&( zan`Dp3hFO}wepM|ET81Y*IGG7(ul1+5<1; zn^2SG6104S&c+4HoHNdMHT zzpNd#!C3SD3mWi!Fkt(eIwc^8TqehrmEHw_PFzHfgF}vQ9Y(otjAaZ$lqQb?(jz)k zAj*ZiAjZ6d!_sgL21e22%Sm?&SSV}pBvau6qO#Ed+5+tT1e7!}Q@?9Qf1njRqL2j3 z^V3LEq@t&z^#DE@NKOMO!a%Hu`^tbR)gdf^co0C^>ye2DFT_<9ND-9nDt>T65BRQ$ zC>6XWv#Y2MV*8&jX#N9e-Jzh{jK|z3?KO^OAFE=FR zHVPr{JE@>rX3vHbX{$UdeuMlqc4Dq`+OEwj!7np4vA?Nat`E6NR||!UVcKe~jkoV` zbyM~AmWE|xk9UnnU~Yc|YwPMP2G*K;4|&Y$I)7GGS62#}TtCQt?qSwW+Jtz#n`4@_ zT0N`k)A>zQV&E{_(jcQB^XKdv)6TeMaC&^>scl2%`?Nkvn$2s{gIZT>>yw%Dr}WGq 
zMQ*}T%Wxn7S`!B9ecx_5|7&CM3n!}MCL`N@OCzh*abLu8Mtvly`@XApBdJM+-H~$J ztlg)iW4Kmfi^yIq4o!^3i>*bss-??!H<`@yU_mUiyoMb|jN-(4Acf^DW>u0{vASYq zN6Dc?Ca`g-Dm|@yxI=c{kNac&M>iSV`jR`v7`yMn=Zm zrq_kIlU^7PVH}Kb-Nlec^z2cL?Czk7_didJLG$B-BE7n5ka{?pDnls%kspZ~Ko6vV z#{zyR3Ze(XqcPHhq!ohzxIaK{>4hOppMgj}oN?$8CEx+@NT!5(fT)}xX^k4zNE3#pqQxLbEf3uMp`KE}N{kzZ4BthY7IYxZDlfmk`&PCDd;q|u{kfC` z0wkj(>GzERpb&aQ6bV5J95{&}P4Ta`S{3kU063tNoJLf(EA@^h5XNSvTT@E|0HA1)t@V$jcQsVEyg=I@?%#?uo0;t&dNtPf5c;#hAXGqOD41%&Zvxd+oU~ z9UJl=zm-W{Bj<OdF?Eyyk$}Y$5Dujx^>-N6SaS=InR7NxCCCA1rV?Ij`Hi z$GBI;jBkC~V5K+>XIFNTIrz~fkRZ~MZf_4aS2VMGnR~yl*N>db6VGDL z%a)R$C&mMb7Oe&M$3qx;SB?b-h~z=WY{?Y031<8yRWl(W&kwKMlyxxbOWEYzpN{EvwS`yevBa3w#>yY=)~6 ze@h51uPr(gOnHPTxs3{W=}R`y1O=Tb`FG_({LnCts3^u=a(uL+Fs82Wu9S%I>IdJt zis12q@#HwtiXaI95MGi-JPJ^X?HZ-}5DD7*t;zwUr1<**gHx8l{k!Yfdu`C<7CfBE z#}CPqNlNts?%%m+=|8P}5_HB5qvfVBL_?Ae0Rp~pQ3xKE1`-e{B4GpMzpFC}8c<22 zAfd!ZAx3Wj@QC3g081f&4{`t~bpb#}2iQ##0YC&{A>skZ&ozHbH;V_82XSFY(uaAN z#<{K4gBmPz?_<|}%$W3()+|GXE(bqaT$+dNz+~M9!i%p&j67$U?lB#n2QR9>EdK6n znX!}cGs=o?$(lFTU81+=DU%8IxNjN{>Qkn{awY1XtWzY>{9UO*UTHY_aQ;*7K;3aC zLD$aD(r;eB%J8(IO-HFJ)qVI+0jo^aRp7|}%A@E4bIqrLIbABTrbA+T;NU)nRqP@Eg}j?n;Z=h zfG7#l4w2?w!GTl%36Mmj$PQ|{u1W_<1|MNa^q2jt!%3Mz|^V8uPUq@pwaRQw@ZPCN*Y01-I3&<{qCv zQ_^&-^IhuK`cM*Mbb7Q6obuO*cyJ!(KF%FU_LuREikR{L?)c4rsa>XR|4p~SE|V9g zUEJ1lGFYHu*9l#`hm9vS`?*?vJ-v5SgUq(YrKyG$9=Ws?(}hx0jCAtCqUGqUU!d8N zspF-oK6w|90Xt^GM?A*KDpj1M6N>nM9BN2XU{P!!(|Mf7>qaw8R^g*ZbC=IebTC+- zy8e}AT*@rWRP+a`rBq~WW{C&1{rn-n`z-uReT%6#ZxJ?`mh!EBQN4Bh4Kju`HcT60 zI99Q3H8f4UH063ZKqsdbUb4M7nV6{c`fTJ@x91w-K0LVCKmw&rvh-}VUi z0S8Ayh81X$%^3ev+D1NVLmPPT&G7><900!pK3F{@%o!xfe}E?@0tQ7t<=h7(x*oXApaMW)K*kA58wB-WAbr;-40z5li6A?8dspnY$i%P< zh>w@1X-E@f-D*OFr$Y<}WT8`pIrF>ZYo?GFh<77d8QoySN zmEMvaaYaPC&nL zZ`P<4Z8FTKT@olBB}05<({2s)?D`!IaS1HR?ni+ zR5CI7-k#;;=FYjZ;GhY_n^fs46(?B;1$44y4Kt#d7@E$x=P8Oip4^ zgcX!AF)>L=Nm(s_8`!RXI+wdQ4zRVgUGMPU-Zs+J(;Lqb8o-y5{L@TKshTHboq1}* z-SX$p5j!tCJ3dCZ37C(vhQw&T-lbWYK?j#7d346EUY~i7)41KgJ;RMQQCYSp@$3w| z%1+y;BHOd+xZIGCkboB}BkkO}N@V1pM7_!=E%_Z3<8&)4QF@Gy1p?+HvRUc_58V_@ zj);$iS=Z*gziTSuhHJ`b;K=B3tITto%xcTzqv>pz1CG=>;KRzsZua3?A04bJL(8pAG$bK>L;fn&~xzW|B+Lk5P zevy!EXk>C_NmhL21zp{Zw8}HBuGFbEN95m(+~>JMH4y`F9cyNJjal^gK=Zhia5??_ zsfU#<_s?>v?~gAWYTV6^P=wQWt?ZhZ0(SI{T`u)sgUxYD^;iS`*jHkch+IE-=G}K4 zD0->=o)Hx^ag%A}+2cIqrQ%K+UVMF6+<&=q>Ri$Xi(HLx<#srYeiSwEgVAU|+og^7 zopgMi%B<~p$ANvan3%5Qeg!i!1j&JSTUs8=ULf;mfjyJJ_SBa9My$VR1Mm{oKp4vzAns|UsmyR zSl`Z=b=$q`GpY)FOL}prx^x)bK67qh&^d0PIU_;hM2ozI`1N0RBY8 z4M_vL9=qwlk)wTqZvp_2;G&U;#htqe;Ns0~Vm9Ez#6v>RS@|jIuy=rCBxAU%SeXc6 z?3CheXUkN^&WJrKAjU=wjU|lCcn~oz68%ya3C6czNdWHi)SWwpIuPgW9{xKjNF3m1 zL=^=n1E`~*0>`{7EvY1Q$br8>5tqK&_2LIkEW>PZ9(Wr{$-S1Z9LLofW+&MVHB{#9g)@%b?1*lN1&5V*P6MscQdz zRUcFdZXY|%Th#DaC>M9-Hp4xp+8l~K3b#^<3$Jf02qXLD?P6`nMESAMu~ed_0AZL( zgJ)5`(V{1icS8AvWVOY~dU3uk^(c-^Td5OF5Fugz{x-g0%VrhQA!U}A;Iuv&!P8fq zry`c+F#E?pfx5Ne>Z*42w7X?(Zo7ALpsl&e{#3hZHT2cts4^cG9jujQ)I^GPYG?mSVdVosC3IHsFw&m)?@6gc2eI#z{Z z8#zd@%Lz*z^&Kv9m^r`>3^(&ppMz2V`2X$^Xe9C&+I6W!$1|%Aa5D1WpY7tC`K4AEB--yNZD(J-jvY(HM&n<$aVmAbta+1s5WX8}tql?_IbQ zqB>&CMDyy%p)wC32f~!a0Vf_Mi3kTb;sJmQ1|koNWBmY%BSw$|!pX%2Btn8R-y#dL zG(bIHBe-D$_;Wex8$Pcy20c(O+6Lr?lLc35)65TVV=Tcs@nsMW2?-EE4@d{`I#Wvn zC#~KA>BHdVWYif!Bx3@6ZsE=#UMcZNRJ5>kVgtHFB=}dF#JQox3nhe(|K?q|VXhcm z4Eb%-8T3JDaPT;9N92&u<(oGu+z&tUI8>=#CUd%;V!>|ZTo=B$a7^%L^H64TzpYwA zM_Y?t5C{U%m-JvpY|>W*2Yojn#~UUTlYQQ(jUbBZ8wzilXUJ4h7gBy;*Fgd ziC#|n8uFmQQ=Hq&B=i^tAYiO+G0A->psml43 zHVl(v1Ka;K7~fWsp1m9Xx{fj8mmpnOBQl<)W$o$y*uoU9SQT5kmv%cIul)~~_i)XB zvX&nViIr_J;v-%aT4z_ORcELVfLdOZm$DzmH$In#qJ#bGmSfKGSGk0-&dE#+3~V38 zYFbv?Y* 
zGyiIB+YJ$QFdK=F1GhKnU&XU*gtL@brbJvAbSAR3)djx3u(jKLP(dA$GpzdPnl}8E zaKgu~@A7X`rG7d2^zg0~{mf}Lvu5KQbt=couV42TMt>X!8jg(GitQA5!axC?GHnn| zi{j2e>emMuiV)SF>WY}Xu@pNj8p*OFn>IpwT~_tI+sE61%3V$0R?qg<*2l-gN#j-3 zqIgFFqE_-1mSN+Z`LJ#Dsjdp|wbkh?(@`fFt)a+pxMgF4SCY)1BOQx|MAg!6qy~l+ zg*;HULF)MUGA+;Y_le$F&6>8^IPJ1}-H~I3QqFvHbq>vfRV^0V2=Kxz&#U=j&WV9S zO1wzd~gE`mmgcgUJD?e?C7jaxdF;h>ex2oMo||0CW&qXPk{A}wWas{7uo zp4lv|ESb+ieub`Myh`b8BlgzAv+(G5T`&Lm{+vR6hJ&SI@sy9dCD_tqCpc>0G#7tiHeILb(?QEx5qH*D ztGm&39V9pE+(pgFtX$tAeHZfAxhqxS#5bB)H6X8v8y|JOl~?6KlR&*Q^-rjUHSxn~>Qo{4s>LX-(4t^8H0YbCT()O zw~xcLWaqGR*1ADU)>U!y(7SK9strPiA$-tWO)ey3_=adiY@T0A_8mi%e)^07Tbb+0-r7I-V8snO^&L z487_*>#Nq{t_yAqQsek=+gX3#h?D5o(##;Y&YzuELY=Png`GEx2+*h2{NCsNKrayi zoEtfvB<=Ze$Zfzz@L8m%_P83%=+T&hZl1oS&_tf{@N-Q6d9<0T>dWWx`IiK-!$OV9 zVp}Sa>FIS-JIjNKv5?mt8?VR9zVO`KgRT6+88T5%RF2E-6}k5 zA3559mlGwq<)eh7HPqT!2#;El)nDN>2UVg~%o*XbmmMK3n0xIKqu7PRR?jBc&9$n- zq5toO4;baq*M}*aJ6-n=-rM%fHyML2mP60T26vm1dQj5=Y`eSWRhp;L6uwRl65~ZGglGm``zRPB8H>)cs*Y7Qshv)>v(9?{CV3#O5#z#-)qis& zV`{P|{Nk-NA~IK(5OD8njHyQ&-Q$rr#C$tHH$~IQC~I8GC8A2tu_n~$m0!L<3AAbu z@ivF%^95`>ve23qSgVvW=F9Kb{C1v*?jfv ziVfS9^viuWle3{ZoNA+J zKbk6>)iwJYeb8fxnZwR$vU{ih)DvBI8yVX_U1Rrii))p1R1LY!#Mc+Mo%6aUs5SP_ zji8F$W%3ub&o?dw@BR-9;0K#|>KO2aM@hodIz2f53H?rb55+dQ z)dlE2-2p4k8^LU;KwjkA5;3rSuufar21*E zZ9EhqC8zfO<4S0 zI}>CXR+xX#083jz7*j82>cI|95EUF4G-6%1fVG*b9~VV>MN0ZJNm;jxP-7Z)f6CD8 zIe6jEi6-DxjdJj-(S96k8?CG2+Z@PVxKa_QAKx0q~t z=nvmpD-(@38E`Kq(lF~ODJMm6z7D8%c(_6Bc2f435}@l~J$ZLCPrr}ZzbqXn=xwqe z>N0!7Cq;cKa1}ta3CS&++p=2RCl8TJX_f}ra+4tvp^44zxUk)8TTLk2@R#K$OR>dCW0v~1@1z*7j#;=i`FM6`w z3?!Kb{DPttHZ}Z)L*xJaH%+juje#RyxzMnBk<{FsH&XhmEt739#m9Onj%P%BE zhp&^<;HewKT)y#kw>z=tZje)(&x!i&$b2X1aTfpmWzFlMnCMggCsR^_&)60_<5nZV zT~SP?qDB*Fy`y~%YEaIe{Fn?vkNQ%(Rz0Fnk7D|y89R&%&IqNWtiYRDN2$=J4V*Nl z_kFq%@Jw`Lj|{A1qp&k@LSF;1@P&`O^R{^4)&R;jQ*(z&dmQ=%J`m9(-axlio99=z z^Uwfeb&=sXeSWOh%Q+I$a&CY-xFE`}YJ+;Kf5|S-H-g5{An#b5xDii1@WuV;t4ug z=HWlfO&i*g%&!j*g0$=(Vt%nOr(IdP)rA#)q;g~Bk#ncRLHmKH^j4D_)*EAE9+ND$ zUB2R248CBxn_UneU%#?XV$yvXrE9;@>tb4t9!SwhISp8=xRJ;Ba; z!pV644SR^IF-lx3kg69}EDu=1JElxiwRsu!IBCi}@6TKR-$U;|RKFFo8^v10Qlkc1 z{8OToOzhF&B8-%(STIhOO6(0OaafztPHlS(OV-t3XJ437^yp!KpO{PNuV(5-b`bEj zeD_mdYc$J$?KMK*MP%DFRac88`@ff5=A1>gE)cVy&cA!;QEZ({$Ip2u_stCwaM|-C zaFFFhbIpx~l{Isg_~GaGa7{sylKv(-Q6p3Poy>qvu@xeQ3tC=s!f5{ZWVW35+&w9Z zyWMhRZG%kX2~PXjYIyU%H$_B!Dd%tnT%PDBS1Z)CaCw%_w@LGS*P{DA2TL~zEs<1* zIxhQhLo#P~;!SpSU0w8EhtjNtGkL{2+1o5f1bOREvB@M0K2+c^-t&5LX8bZ8X*6;> zw(ftwsxQP4bXDKKHCJWywOm2u!3A7DYu6f$^6uR)0}~Syow_LJq}5f?&cL5SSaC1` z(~F@5Xe&&Xyj=|3@`iJXe3};v?DLRfiODR~Ko+FY)`=U7V#%zDqdPm z)XanKxq~i5fZ5X%b7tPtM;mM0`i z^!#*oqip}rd@^4lO|&fR>z;luc6^=eedmL+hc2QOSGedX37Xv#_Cpfge!N45?v&c; zV!&Q8p2ho68L5Ah;n&A>4Ny|2U##}eSd-8k0neJfp8oFgl-}AVb3QTQJ={!712=hC ze@?Gl{A`SS9J27EW8 zvVc?#ts(p=X!)30JH6caeoYpS!DI6C5JbnO`{>vEpuB(Rpi_lsU{b?z;9l8-R^p#i_2kDjwH)6;$Ly7!f} z*mJm)QRnvK+21S#GY)uUX3^HWVAf1}Id zZkM{#Masy*fen@H>QP7G`g4Dm{P|2i)ew~L=6RJ)c)!NqFV*idBcncCwsw)L&f-{; zCCkwNi#dB5n91lV_VtR)r;QYf@QRR|0sF+IlWkZgKp}OCK^IABBK9&S)-M9*x04vM zI5$>Y`hLQI3j|}C)pHc{B^=;wczA=P$YeseKry4RREy74{U8hE=uF@j9sfD!&-$Y4yoOsGK~m%9R9HTW zH~)<^ln23J-Y}TOokz~t3V3AN%Nd_j{Sc^$c1rwf?)bqkpyubrc-^x?;A_&}QezIz zKIS{;;{{W_z%gDAQsqznV-r-fy3l~eWyjJvJ=}pjoZKpk(UtDoCX3jL4;L77p-V(M zEM+Mo?`!i1EZ`mss+hM6#ilz&Z={~>LyN7i1(WG0EwZSu<3R zY`k+SUTuy^LCk>Wo^V_n#_Evfigd*LY;9jI*GeGCPW*oFl)-v&{j_nKJEx`T3j6W$ zzRkx+1nOAEYW8|Z3+;Q{=zZnroK-5{`j?IfJov$6(+tA-60i9r(iPp2^7 za&hV38tQNh21bN{Fm-(AERB#bM&8{Vg1`KDU5L0{M!ZN<1{#x_#*F}hH_cw1ZS(U3 zGoExO#x$3LoNYDa&zy0@zXJi2c5ZaoW{+&0F5WB-&cNcd%;ztXAN;|QAAEx9+kq$3|+teI(WL>WNBROS@;yq?ph?K5B)z>f9ZMS&C 
zqb;9w(6HP-$``4S*8jLdk-u!J_dC0rGh5vE=4frALX?c2$-u&{6yd64``mGGl(iq1 z<6^bu8{I|)4d@yymf2zkPx9h_#KlPu4gsqDRbTGt`1{2o51)yKt;5^rGDGA&^jn!+ z!OjDcSl)c;=*oxGUKkAW$l@(9vfYYBuCq47i_t&`$Gw*KG@?s$&|cbsGkCS zF^&^>?3+R^Xzy~L|M6AKlAs?`n(4*D!h^xY^-QCGA*aD8$?Ff3#1&)|u?D+vW{$;0 zWp+)36=%!~W+EbOmXC&NCqEJwb&oj5W{lzJYC=$6^PHf*J6$!`0`1o1mzalZ&e>JX zx+o&8)}>@hwDH*j94)8O1Pxnf38`rbX#*myX1rJTZI{Oo7{rQsg)P1Rp8$=Z4QtKp zp~G#jkOhtGH%X5d;V!x}H`k`PCMBda_?Rs(5^a)v1ih{~M0$FhjWpdc33Yr*I3omU z*|1(>IVxjNg(e)ag7te_B9IUqc5~e?ZUk|1au6as8VU#rZ@~aDlM6&n@A3~-h5}Jg z5=21{1c15#Bp5&7X~?yIu}iQ3stZCSdBU;SO)#_&Jt_7Fkv!=c<;C6RR*==p02^=kCW8R4Bnf6VKfASK0IZ!S)Ub<){tMANka^+ee6C}%JV(yVT?Jh2+ zxmgiEmkfZJS2Z?zZgv~Kesy@S8x9Jd`aL;Tn$Y>w_2RgW_C@83ucuh5X}RxfQPJIu`tRt|$=2Kf(BpwiD$N`rY8}&9Up!EG%UQCU7k(s6cz&ED&8g zh3K}6-nxiZDdXKj{TXvqeT`T4XX9^tLb}NX+~BTBdw;1{H9w`3+9IcD(;T6jr5iF5 zHV*_`SeQNPy{;nb=Dp#|K}P}SBV4a(B>g8qWIJ$w`mP;XNIb?j$?-|ysIy%^+8aqq z$vxVfP!#pwJPTz*_9&#p_w6NC(;rX<;9Z+>V%_3gZD#S#x zwqL-IqfGsG;gXAb2=}I^y&5g-o(G54jRCLJYz^U-jB5s?BwgA~N4Jm1_9GLnFX|J2 zPITiIFwm=gxOFOe%!7^#+i-ESgN@>v)xKU*~ zROJl5prVc)@@ZPJ<(pNNs<^^Uy(`0Lv8Rq=71J2r?EHLV^M0()Q~5dz^qQ(b<4YHg zDq?AfEf6tk$^bbNj#2}cd_fW$pk64CS_ls$4_B%@1JDCPT+xv-AI47v z*`#rr8W(r*J8v#vY-iiG0J@vR;m|Kui8BmYCC48<(?L0nKLyAU*}LJ?7v80_ zi(@G}_rs}!c7Qou!1d@XX=_`1H-5bA)^cRP$RRVaI1(NrDm;)i3@{0%P9DMwd`)Ak z4u5aD5w5XP+Wnp86kJrWLM(5-4@pDr;2MjQ_-= zb#8v%q9t*7S8oi0;RpUprOhc-P;)?NEt*%|$oG>-mf7y^S$CA-0U@Ac%yBv3yc-2C zZQLo)&1u!vb*!c3L#B`uB7BBnRV^UdahWuMZsKj%$I6d0{=UzX(Aled$u7fKX-Y^c z6As$IXPCLC+QW5bZ#u-6U^~^4=7WNTre{PY)36mij-e9XBX_-Fm2TzP$WkWL^7hsJ z6KjG_`41+`gaIkk8s=J^@|`)x$;21QR-EbHSiDrpeDg}p(fzARdwbLBdGLJHDhWPX zogqu(vI%5fx^U5E=3N3-{J_<{pJ<+%1Chg=QigKM2T3~~Ja^_{46izK_Ju({G*{QQ zWr@Gil#@2xkWM>l1s}4qihrndh&nR)KYl{cL!=R8Wdc@@7BQt-lOQ%Wu)C9vap6yY zuNO1WU&8BkYwwdqmTo=W1R{ybwflUSpLCl{gcdvTY0<4<3mH1Y+S*43Ut^~0dp3S; z-$vJ#pPU;(I{hSm=@1q>EolwgR_<}C*b96=6WTmBbu7!@P;RmnTOjZIse|3lvwdPd zZH?~syV%u|!%k229oMt-72j=9JpKd&%Z7itJ~+C z!piikHGeYc$(_ms+iRXfZe7@7Pc*1MN@Go*a8?b6b|Lw&AAS?3jmRWU^b&j z2p|EX5ERTtKn0L+K<HLPHu^e1P! zd!vK?gi+TX9*T9Jat0I0ndXwEuEn+vMV+uYub*FdSdDvL!A0_5__?8h*cSo=<71Q? 
zrI!~CeKDi7*Psz{1sA$a=ieC@KepODvRVqXTnCJ?Pmk5RQtoKb#cr zr_W~_*?em}C_YP@+J*fifvVfw_u}U+Ou{M_SwbJ)JNS6hCxYg${NArccJkZwGEN9?5r+3ZAXFuQbh_Ac-b##IqQvo`j(O^tothYrDkKgW_{ymu|z5fEEn9 z{agBm9<(0_r-|(p-5!=-%8rzLpZny%3m@tBExNp2y>_ee%o7hCs!Z|G@%=-}2Oo$h zAKx*$H-d2&c*E>VhqpR?Te~0j&_2@RSvx-8J)>=G^+}R+i!9qB1c3;K++tP}mMAH~@cGs#s%iX;tu5WL&b~M_Nm#k=A(a&Z_ zs&sACs(P9>`YZLAS>N7nK304xdu-sXzUCx%B5OXJVr4sBsj9feNiG46jCxhuN zJ>^|uK0Fslj6ft(k7ZNJd9&Z;B*?buXzyE^I-=07F6~cd_P;^XL#qdyi;Y@sJ(st( zxqY6J>m$LacRlPbHkdlMN892XTYTmG10PKe|I)+-VpXF4T9l>-*!p&no7^lj5)(j$ zfJ&JHK#?efstQplQ58^$(5TQAt}9%pwock^v^>P|DRw}${VEUYj!(C}n(bC?k7oIF z)2p>zO54B&6%Ax25hn6Bb6SqwvMtfFglU86bpBB$h|SSv!gN^M;%y7tAWVR^fHt6s zcHQPJ&YPSy7;kb*Fbiyl2&RD02-6Aos*X>!0$Mwyw!^9uc3A<<^s6m5HVv@citSOD zM|C`^Z77yOnX1{U+9srhU^4Cq^SxkspgxG{SisC2=Q&gs^7QOVVqpG%|H~Iik4t4!Aulw5H0Ss_?4T@29Je`#=HB~ zkkzhdC;jJHK3sQtw)MDA{H-tbd|dnUU)w)y)&^c&uC=;MBc6UJ{N^iXo_CKVQwC`- z7HqGUOBK9ozgXjacoJ-KDDvOZTCOT6}_*Sx0<+LPL|qi^wOdm%0cVxdzPr>lA1 z(rr?6`hrO#;nhlRs58a1Rtw85z1bl2ltF12DqryearA7j+P=A`Px3_eywbz2&XtbNn05q}HnAkBJ zEUFiZv3NYK+irKre<8K8)hFFnvh(h~z&UV^0)_23rfncnf=olILGINH&TKWuTQ5ym z$$fj1har=hi%t-dq?~ixw$}>xxMyorZ&2pZBPRZ%xLH(eRjHLYH#0|4jT@AvEBh-sOsCd%bf zv@h=U`Qm;3w`5b-b;ohEKp?xZ7LCU<+l9X2iF|9r=VR4s#WJN?tA)d1pU=nGj?e2g z4TE#;Q4PzsV_x6r=%{_dHX?=+)vMKtqTtrnW;h-OfIz?xj_LRNve_)xWnp_e5Q~*c z<**xSw*;a=M4@2FvaJ4i{CZ|Cxog;U+rfZ;r&O}-bRv`xENG$9MhpalWuQPXQ0BZ= ztL%yNT)23#*EiEPq1kBKwqsdlS0oY-_Xr^cYuL7psJ7efMx)_%omQ*WXf(F-+s$TQ z(`gwV!!*r!JYG=h7PF=JDM_Ia8DoqwBbouBMza{(~cd07w<8Dqd$M@Y^PWv2_-c@P5T{_s^LWSj$lbR|P%jH8smIp;sR z-tE&tZ5IImwQcz^ls?Ul9Zp8LJM^4RN_)v(~rXlyFLUrR@A!JnK}#uC@BZFF^Y zIrUPr5cD35ZhMTKGqJQczB$ke=qyszj=$7Se)R6VRu7(@$7Z$JKlP^mvqz4^|M(>A zDTgBE?$fWnb~qBcKH)2|ws$BR_6BOz{H1O*Mux+#rzy(O?%3Mu+UQJ}iluzrjHIL4 zYt`;Zxe|@--MuUK;;L0Qw+|Hb89jTc4!S8*_h&>ow5y`Vca)K2ZKu(dzFRe0rD@OR zR$=g$MIyh}?);!!L%Z>}(THekLM>A%5()Bdbk%gJ~8yvnn z{6lg_3-z;rAVMG_5JI?))zzCl6ZhTI+L_pM?|0|VH`aRsx9m)y z?s{6|1=k;be=6PP7cPb)R&!X4Si@{5Xw~gy@A2WSgTf7!z~uU1cRJv`xpi^>+s96yIeqtPbOH}G^cDw zD`V-Yo0pm+I}ndSBw@52pt!-u5#qyqAl@_aJ&KQqGBBfGkAQTRVY`@PCt{9(~ z5Ld4?o6Yt0nh#ZD!owJ=l*<+AQIwQvnr+q!B>k4m_xJU0ZDl!EY_rkb-5rm|i{xx? 
z(BX*`Qm!-5wQ{UY^CQBrn-U!37z64myZ-l@X1O5t7o&pr04qsjo;&P{O*rMz=~J56Xw6QvhYKo982N6wpucm72i01kY;%&3j#`Gs8gNvW4w+5rvuARhVnpZXg3Be zuhAi0@uqO~HeobpW8U|S2E+NiGm*KfkccaW|B&Jw6L5lndVJJsRnWuU#(QdG|P9NApsS)!(UI zT^|Az<08%gghrGXN1X#SX#v~k9kYTWIyA$@V^S@z zo+S4F^(=YO22}8}?`N6wHv6Y({_RuR@(KZhXnBXpjORKin|u@ZW41@gR*05VmPhvf z3Dyl&ph$@zBt@Q^O2l})sJ*essmNwIC4$AFa6pm#1bR@`M9k_|N%+)0$4MtbY|w?A zyYiWTi!D84hs>~SXy}7Ab4D2l!6|3-dW4ka_TX&*z9 z?6@sJ+j4?iR0l^&7lOU*QN4YsPR7M3AC;D$E>Xlro^{2>6K!~%qsWt-BK_PyM++al zJA3DiVX3?050&bwE7KM5@bFqFv?~i&&)=Rav<9^uZh6^5U6423UgyB_QmBWaqAIFu zK#nZurSm!EzfRt1ZC}w9s$;0)-Dn@~8km!=Rb+acQ#Z2DrY%<$K+S^MLjn~@-VI^f zXuOL97d~9^-pPAxToZ@aDxxcLO81UxU+Eg8<*_AIAvw}`ABUPO9X_c=mp|#_lO$KL znLE#mkU8281d%}|I80$hBBp1ha6(H#$k}p4+@8k!{|hb~*;iin%4}{}Nm!XuFrd9@ zx=_Gxnf7BeomUkET<8}FcGr&cuiqO`Jzk|pH`X%LV6@($TV)>=n83AC7ADscrZr6x zIbNch2E-ALS7=oof{pg@!7S5wUv4-NV$x|S1p*VdHyej?(olVq$0%aFC_MC;JKg7p zh8?b;iSdW{;^IZsl(#F{sy0GVLDBmZolY23#c+T|jJl=UL35nzU9CX&QaC4-?*dLe z{Wz#5X~!R$72oxc8GQb!x#xT2udtt=PHnuX<_EH}6pC_R_tTyGR{D{EREG=vd4S>3 zaM=UKO|Vly9pqhI9I8qh;tQD38S*r{i{3lEoJ26bY{6Qjq@A^IEewmvn}oZoNy;i* zQl+v87ubF%on0nLJ(0N#H&n*Obh>wnUD(&DoX=W z4NybzZTc|xLVr%DmQw%uQ_T~?0V~M=l8)xKIUJ6Bne6`wn<*NKXA9r5r@Egl_%(Sr zKhfnhcO)7{aVq0)aX4-Pu;&j6!e%YW%8>Gk^gt3gCwVlbz^$tiV_=pt;GwAJcKWzz zfrl8qfDqLLD}nI^adnjJZkZ1QEMx%%!7NR_ z-KpQZ%Qz~CMvK+SQa?{n*UoQC_}`Vm!H%Vda)4q0B|C_af$)s%w1gv~(NR>H3kblG zqaO8?)|WYCLP4a@iCIG81Wu6aWo|l>?b$`3wF5^ud-9RRA^f~Ii8goiAPvVH)|?R+|32=Yfazor6BsUdIZ7(0^1T5_nhKH@1_r~0hH?ZM2LyJ8 zS4q$i9NRnp9*d2H)t%oHsqQ05cS%)JXZ#1(&H^>%&cLzlEQ`hEKt_pl)}4@F9ONKzxg05%B*OAZPr%F@<@fsT(2EM5qh;TCcm zMf4^?M*Jz=ASO{_@XAH=%Th=^f2kkzfhI039opXm1z3R70>uTle#^|T5bs`L#jB>P zyWs{cn@Fq7D8$PEQYqH)u*Av<59mmm@wnFupcEcY#=+-667PldKsbhY{n%x`kVWSq?`>u#~=K7yQH$& z4}=>FTI{33ak8)$LIw5_?McX~-orOUP>}L|Yk;hL1z*weaBv<_B?1{+uW04y0B@3& zRmJ7)IVb|O(97Mun}Sx80>Pm5+3Q2|?eZ88HVLlZx~Z`M7)&hG9>eA?AhSh++^n*y z2o3OhA=jVeDQujg6lRNl;H`z`6x5v)(Y13=MEd(mz+KCO2TKW zkTw!IufqmSA-gy8g;=TrD8TagK_ReJQXa2dv2KATzghgHnt(J6T+K%92@I6sNyx@p zfo9Q4%YKK1j{Wz6gi>wykR()3%v$4go3=plcY~|Hb1;`7L*C8}n>0BnE$YJ3h0jm`qt{FN4+?SR8H(*-dMZRuI%hkoZ+rN&pdK)Y}67$o|uyA{$rq;)1J+Aw2Yk-0S`mV~or>wvrZ^oiSm419M zk*^7nC0haA8~-~#`Md9yUayGN3$1p#eRy4d*6?vNO1*d2mETpYrw3->d|UJwE1OS! 
zzmQ*h@Rj%;u)(BQM{N3NwT6hd;1T;CbkKbMpf)W!1=6&$7UGh$9}6XC@4W7JUmN&% z!rb3(uhd-b*5N!Ss3li@vbxBmX=>aK;2=IY!L&9+mmG_Kvv!lA8N3}qO>$72%^F}18e$^SlhuQXI z@5+uZ*ayxsS0c3o0rSd+i6Mc+)1466Iato~i><0n=nal5^V*fB-5o{B8- zhF7Ro=N1TQwJf5kNCCq4e@t~op~JPU2XBobma2Gtyv+XaJ@&w487AW&fA1P8Bn4lj zU{`V2G6iBtWeH?`EKEwM*$+kye8evH2J>AX#5H~dH+|XQu-@L)cy8^xwk0d;jaBr* zq#zL+hY)%}<|>OE#)$ODcK!NEhix* zOVL(8jgY|k&BpI1<>SL=v#dpEOLsgQ?`Fg$04ZC;)Nk78A^8H-qRB9ybtec_T8he=11w(g*QV&`=%eZ1Hm^IW z9rbcNdF*Ku-MmfXyv^HGaS6G+VYI+2k;TbPPza$Q-x}K^I%E4CTL67z9tl!Bye9WU zm9qt93BhR94iO+QlSWRjYABlOWLugQ_(LP<`i%DDbrPEdE?n@7EdJ=AOO;jxMV)Xm z+Xd-t%6}&mxWO4t^JLd_g`M z2SW9AS60nc2|o5ea!(EOtZ@JQ<1a5`_SYuCwi9nZtWAcH3ujL>`(y4dGg?+UUrWKF*{(HGybLOJ-p{e<$5W9~K0xB$l-MW#w!{koc>v~|IgNS(N3Tpw z+4Hf{FOd3ZQStEl#`X3iiqm zLkxHxiOq(Pa{N$IPyrw<13%mg3u^WCZhdtz#AdirxYFfh69y2DbS&7VOU4eg0GXQwc|*r4(~HDY{vcsK#HHL zEN}%>Xkf_j_k$JtOdMDiGRH&#GuRR+lSR}^R%#l#MjsB?5QgO^cZf;&M9~8YP`>4$ z^4!Uo)RoZ>z~mZ%&)s}jz8u|G&_I-Q1IWl;gIF%nmMvgzd|hrGhlB-q6CM8K+5JgQ z`al{^tYGCs&`7|BfqDL~Iwz;+{3FFvck2_6H0t^!Q^oVQvShGgycpk8g(hsdi zWDr#f3aX8g2@0uFqETW}FDdhJm=cu0JIUWC@LsiF0$p^z3>o_+8oQ2Iv1@<90#nh% zdY+U;KfZsMz3P|{1EO(poQmZgb=x7*sS3&o-M)dq*-ty z7I*)LK>qp5>4SOYcY-rcg5NA(;?%(hFCZ^IkNJm;l1d)$<$u)u_2+7?JPU~mS9P`5 z#g*Z1OTJy)bFq#_EX~RM^7dU&HeXk>6J#m zU06QM-dv~~kkZ-Mln@%+q<$@ZtkXJ|B9Du6&-}rXvirLCEe52A8S3BZuaQsauije$ zki|)&OzI(K3RIRG4>99Ta;AnZK>kWdY3+RN?O?a4h$JID3*asUo*=3fAX^gga#@$+ z1hnV#nzV|Er5!owcV@68>2{`YF&_!H-~TsxW#~^E(Qx_dw3^)lOUng6fmaStykvAG zOSEE8^7z@RL<%yA14f{E9awXqUm_&_DaGz?el-v;4*}-TLoXJzyiUm|h`-bCSxEaC z`1(%$!)185!z~-vmFim%uP{hunH#2VmEW%gjUa4`*N9A}cZb-4fucCKyZaz4gq@9d zOHg1)#bC6e{Jm42qk@nzRkNr6syNqUi$bci006R5;8(xM2t#X~<{{|r2WeNbTmOfXY1HeQb$D(gvt{#L34ZS;HW=M9$2`@7t z+dC`e6(jzcA^acK#g@%RR$t5Nlst9 zt;5n4?6QANEslZ;2(S##C&2Q%{=$c&GRRi>g|S3e+N=AS&zz*VpVF3I_Htd3aL{PO zlgLAmMRJXt&>k=ZD3G{rl^)`-brxIzAOH|c^5G2@!~?_P%YqBN*4|Dv@5RKouph=< zOi4i;?a|{8L{Wd=epnVS|6#_3h8=o!n#9jVOUJsa_!PbDuZ~{w1+AFX%7*d%; za$T)OB7%<==yj4@Mc3;$eOh6@Cmq7+*suHr{APY*TnhPBHbsh(;;Ar8Uc%#mgnMqu zKQkn;`XTN?SOi8L(53tH5LV6yT8GRMxEkapLwwf?fMZJ(Fyey_j)BVMX0N z(w=@p>yr666oCDA-K$zgE=mP@^rqf)6}MWQ4M_X)ho+8`NI+Tsh5{ds5K8}F55T>`bJ%R>E2n+||I;*Uhad_YC8N#DpUlDW>h(o}mb@$rf@uca?THdy2xQ)O9!7Fvg< z5+XVnH)Yr<1dI`fVGDS*fl*Wa2gR7)Ckyh0*pyo*Er2&jjWXAUq{kp^tO&V%ykam6 ztCQS3$p&A{O7mQ?swh0N3W6~&Y~D}R$?jLoxE1hI{kv4EZd*Fg$2HY*%fr zWE_L7EE?W*HO`y7UvaCe!V`}q+2#LR(N-gu6Y&r?-;e9;4!!9k;{_q}i#Xu?AtO~J zn26tb_h*|h4yKfm^3TFY)W>fGj__h1{4UNILzOtEZX`0TzzD}}2j&jEJGA=vR8n`M z7wRq8E~VSP2%hMdd$HT^!JfF{w5x(vn`EL4zc?1iKX&NNAIS)^%F2KQ@e|=iV8e`P zqr6ZMP1FMb5sijo87hwC+FUt{!>7owItU9F;R)MMPY9ifIof$Nj{EGF@YSvt&e7jU zGD3XJ`k}Wh2E&HrhD1h&x}KlV*COoQy)YRt`%7%{9|zxDLAnmV(A-*>*YR+RE1M`xRU|1I5VAEe_>-Ebp625V?9!k*Eq?!9# z7`^n1f`(urW%?2dhO-H=c@OUe#)Wux=&?bIUU+ORd!}Hca}rw7Z(7k!e&1xv*<;y9ba;Z$6R(tZOvTzW6NRg3+9U zUd+u=2r(0x>+~ZR$}c;zT_uTgtMXS;01^_-H#eyISH7{gtE=nbFSEQkGY+aQK{c?= z#EyCEHP75V9hDaNHHc#mwPmZ)p+K&=@iSxfwFLllg!62LGfn@H@E77G&q-j(Em-7C zUY`vhh%gHDW-UmM;Q+ihuPv|+zEE@}f|Nx{;{awWyB&D_!w`V@{MR$;M)bqwTDbuU zBL%%lFCiLz!Vd^|ltV%`2!Q*X!Z+Qs1yC4Eb@>KP)+CG(M1dmup~6$CqKy(>kypGI zuhJ)h&6aWY@=g(4-TDx|$TttxG#;1u*EClA;Baf{e>aXdCk}WPt~1jkK8rz0q69F4 zylb4gkFDV&@)3(ra|b-t`=tPKg}^<1Ip=Zz=<80b&!6UYaD9M3*r`vRZdduevOR#~ zPv(#|Te7LiUADf?4gSuoI)07M(wEDLxlPbhx$1v0$XY+)N<8-REyUE5P%tQN4TpbJRtTehttV=lhVXGy8UG4bKIV6zW0{3;^ zxcJ8+_q$2)Jf<0&0~z?6yRZmI7KdmHh~PJo6dP~A=B;7)?TlQ~$k_=)-wIy6`|Xue z8lg0Vi1yBhUQ=>-Su&0r7XDY3aE}|=tuXiIveEzYhYMCs64q8aF3X5nH}0}2`atH1 zB(-{c!xH$1)}N2X{67F2LFB$12aM=Mo*gtB0vDj_Afq;61CVIg5C9{b8Pel((y#~0 z^kWYi=M2a^mzs;C2ofl$PfqZ&gO018z^HPVs7go3yB|C2%o(Z#0D>VQN<;z%zq9iQXf`3s96;7-Krg83G~^NMuMrNK&!v 
z|8AM(Rt{RaT5 zk;o-SqWD9_6BSGomBPY608*N#CW(lH!H5(B1j$*Vzy$H^1W7=IiAaaIm;K-alH^t= z&%(muV;S+drV*Di|Bbw}usGt36kM-}5tLxtc7FW2{D=g~b#d`jD%2eCWn^UZbQ2|m%#RV#sGu#Z`(r5wU4W?o03ZNK zL_t*5xw*LzA&2)sQr@`z-GPCi(`e93=Qw+NdY!h614EixpA`;=F<07>b#J|HtBN|E z&e$HQ#wh0YD4H6Rz2TtlZYfA|CwU7RZ4A4m&Eask{a)6|ld@f1(E%t)H8>MSPqN(^ zsU}v@=k#VPiZbZywL9~ux391`BT!b~w>L%lGxbnpc8*J}2-W^QsO!eiy!>>JZ0{cw zx-z!frk&-wd-|DQ%*FkZTZ737hZXN>G#*a`|v5-WA*j*=PpZWe{ZMnHdPHv zl7<~@)n_D4n?8Bf)6ZtHx?69(byJ=D;lDgFd+vnPf}}lTA{iMOwU2jwwfy9o`=8aq z(Q~rlrI#N3e@`Sm{Lg1^yXVWJuUr5nuBA(t?wt9Ii{*Apqr9q$G}zi!ed90Byy<~o zrOsWPmo=v7&bR&H(8faTyz|cA@Vk|j<0_-SZpzKi+psoPnzu0Z)xNcB*CzjJDR5u# z_mAIj!}S}R*rivkY-(!y^Xl7NE?3){XK%jxw5^+e{ga>k_?iu(d4Ka?FEc)PbNzka z9A8mU(UTU;DazZrZQG*73oo55e{%V?sU4^0CKZPKVFQdEIm)b=GuFTT?9wxqu6+7O zN9sZ!Jy?xAxbwPeKlGWO{NbU)-1FmZwXituXD-LpmxtCC4m6%eExmM#q2eY zzoVfwUEALi?Dg$=bMD-^JzA@7i0YnjZf>rxrq$y~8`wJ-)Ajxf^2B>X@2vkI8bMFg zzj=I+y<-#xd7$A{UOAJ`XzEEB*g0e*uj@&^xMZu(%RC+%8|n^Xbf9f!n!R{X+5AGQ zCndRUZp=X)zV=ihM7rN@@W!T>T5L*cjP;&-w(``Yub(>a+`qo?T9T6ItKBnp^q4wZ zmcgBaW7R-lfAQ9+YgC$l-HR!yPO-dFiuAYtx+APYYz$K(Vp4s$vEhI{E5i`6p20ni z9VN+`=^eoXS%t;!{arh@?aWS@k=5We1~){F{_LBV9oQSm+SC~dN20|U4kz!sgx2=a#>kcKx0$nLB&xAsRikuu?jbbE_vb{34h!sVEd{`~Eff$p7A> z(rL9_;ZYM7?5f=pJ`1yp`^a_DU0r{j+IIe*&snP^%Pnca%7%zC`TzSrYsOAJ?c`C5yWgnGo|sjT(J*Ih zalb3Pf2gN9Nu77*0)5i*$_Mtkw(4h$nz?Z+lYKK9t}8t6%x^up>Aur0zwz(ee{=Vv zEB5WUWYif8^}_rA@#;x;-1N_D&tCf5)aKmEJbmaj!{PTVov9WS{b}yTo}TIyuXph| zbH0yz^OsKCiyd zmAZ4&Z=SpDYT5C(%F#Z7u(sA;j>jjY5%*;ci+CfAnnWc8)t3Ywk`Ah0Zntw zI(y*v3gn7tNk|HT+nBR5qa!867|ZU1K#FRh8V-lcM!A~% z29$Q0@67Jn-)Wm{gx~CEoQ1|`C-c-~jD(^CdI8JUZM(YnR!tb+(x5S$%atTUA%B6( z)7smn4%%E!XUNerMl0LZ63EQVhdG}p) z_ES&&rt0E{uDIitfI>}82l`gKwBEqV7i&Ep&*X&z85tSt{>&6bxpsBynWumMfuF5O za=DgW7F_+>zx*GB8|wT;+4GBvidMh(+-<-6)$iYbPd@R?b1&a_`Mu{ZUVQrQpLZsE zGSct3A{+@nJXKk|_}c3qc>0A+by|MW)|x$Kqbqv)2MkG>nR%WE+Zq}ge$ji~-FM$r z*s`>%tE-Pi7A~B>^$$N7Jzl-`u0=azeSdgsjG`zV_y2y)>uWB*;TLs#+9wo^iAEqa zsl;x#pPcjPH?IE1vOj$g^5zr=vs{{e>_p?i6+e6SZ}*48t$)1dH&dMFuU)$~V`Ba{ z{^y3vzy9>Zxie3`pl0{(-F;ifWDM$AEl>Xa?+^S->)u?mYwLQo{y;3mYzt0X=JL}+ zD;`?@KQqhA%Xj>wJvTS^FaP-Nk-p$Zo)cF>eVimZjIOfu(}{p3p$@~udDrJZ_}NzUedZgo#}{rUX`FQ@LDdh*<$)T3z*trrXoatHKs zUzU=RcR=Ng)z#K$#kua%EW;KZvNfmn7wqfUH}m9iGfH~@^-ScntI{^_+#d25rKg0e zo5v%f?Au*BJ6Y^hqtWP?)2DBIf2$f&Z8lrk^whz?eyAU(Y1*DFUuK))Pu~m7|8v*G z=e?}4H`Khp`JBb`J8Ip*+U;o>j+U&LEjLB>=m)Cnmd~lIU)uu=8(R+)obCw>cmgSI zyTi^jUujA5;L9P(+ER4V1b=3R!>!LL&wlFuKPr1qrbT)Bh6v|8>JH_PF6Gq$MN!vv zt(%~heCyVVSJ$kr+dortf{-D`S#P@2IV9AXLyR$4B{NmmwtsFYDJdB&9dM;)IiR?9 zYxQU?eP3qp)GoEHr6XLP=SfX9=UzkZ}I`!U0pfBm)8wA5!FYCLV) zl0a>@C)v~E7NM~3sXNbk@x>Q5?V`m?7T@`+ul?q?_w8(KedXni&NUk6ygp~2=4oA! z+4bAs{qB_SK0EQ8CHwl?x4pY@ZrasVDY@u5??*qn?Cw7;tghY@*#66>pE~#3Z*~Ry zi|18c^{cz?{_T@IQG-#CBC;$HOrHxLebhkT0Nfr_d}Dd+J4RaM9S z%2`)e*YdZdF=NI=lK9SDTW3P!pC12_o>TYT6~9=sZ^5*A^JcZT?cKXqc*ZPUd_k;d z=y&%$)K{D``_xnK{Cel3k3LpgabZc`>4D9?ufMi>%=l=lo_IwkEw>9${cr}DxJFN||s_3%x1lzZP;{rVrz`r}V7x#lVg&V1^*4dZ9c zd-Tsw2OGP;{N=Aa`NmH#|JJwKej6Pa=-=M>$0L2gk31s~q;`yQ=XKW}UJ`vsOIiBq z1mW|OP{+YfCJWF1xxe3V6NhmajYeGUsY6rj74D>(ox8Hr#?&=$aaEOPX5^Zzf*Oy^vc}6z3suZ`!mwhlai8R=}A4(pIYDvI9o)Wt!P})fN#I* zTO$YN<)6~u9~=T>?CcqZE1!?r?e>avebCmBKUltP+qSU_W^}Z7r?OO^&j)GV%v{x| ztLX0T=^Jlz)J5D?zDZNdDqq^&R61=>+ko9>>ujpkBI8+GV_8LRTaHk&J)r|WTa3c3 zyOqxLyn_6eu8y(G^P1|yp32d!ZEZGJ7-3-iz_bSc-)5Y>XsdTXQA26QK(^v+ekH4> zz&jJCH#aw@jZPkj29p|PS7&Fz!lIN;=Ec9~loal1m{vS0x4yADJtHGfkY*csK52hjTU$?K#k46? 
z~(?tS26MZRWB|yn};H>o%Re{0x2E z#se(}wm;MLr@QanwD#*)Tye#9{}`MzYstE+eihSWSy#{6zUQTay2jP3SG>7hzw@Un z4PS077Ng6@W&WcjB{fA%t8uwpfz>s$W-QuRnmhm0`Caedb;0ZfQ&Z=hwfM@|r5AhL zdHY^|_x5XVoIEq_%B5e*>#R^UbxqltM;?ECk(z(YEkAi{YxdHmOTTvI%cDn+{?Ur< zyX)2Risdi7_(ILozgWI}c?}Cb_~1kLT>MA3+wBjxopIJ_Z*6$1aBTUOB6n%=INyuA zrcIkVW}0(%Ao$9YPrkBY{^r-;5>2yK-m}pW9yM>?DOdhIHW&@w`LmZCN=57Adn>D| zrksS5DEA7_1PlfbEeF_Z1SlWfAz8nr=9=u3-8!^JYV_BSAO^Bi;wgL zKl1R~ZoBPBpTm*il(Uxo$2%PQ>x(%nizCj@e)h9N|2l$ikca-harn>IUN~!{4*Ny& z;~VbFA2X8`a@R#pZ>C?b znG%|ZHvMsx-jly>*Jelg*p{x|yo$2X?M8Q{l0GFTpm+A?D4y-H<_){AyS8l8kZ1H) zwDD(`u7ACjc~aXO8q3nNTmG;vW6-(Q-#ByL#JcJxn|-QR$#RznKj*#e8Lsr?wDLEu z`r*l|2b#$1_DEYses*9{Y>;%8eoEU=`lMOvmS^8gQVN|)O2^L4Jv-_r&7Kiz4|%e) z-1)g1nl@7g?q0vi@t&_HwXb+`VNf_@>Cs4Pu01FFz>b$2jG&NEHFj)&Yn!91|H6yr zAL+|}%&>RgBc8}!9`1Lj(Ut>gQeYCf+iC{vsV7JM*-1_pgN~{h7;yD88>P1VY~2@1 z8C6hc*kZ-0W%la+nsHv@nGwuVwY}=@irJ-VZF)*WN}#4?O4;aF`|WD+?8djA zK4)Cf>#tQ$oj3(M-Z^K~JnrKU6PZObaJ&K_NwCR2yr z)Aw!J50mW!Vt;$smsB|GU)5Vnt8#k!L#`BDbWX$e*4L!>EH^J2=Ugt# zN$G;U9iBGDKJiMuUTv=LpB-aa#r~9*&27$Dp4(AcoT5GV(o4nJ%j$ORKlMwtUf%~7 zUvSBhzTiimkKU0t`TXAw|N4SY$il+nsHL)aoO`Inr}wq|YG2XY=`NL*)bJrgFPKr$ z?T)lNZLz+0mre7Zyrg|b%0RHZCsHPJ>tcaGc&H(ay=rFpX?~Y&mt$yOrlCefkYYoM z76>b8BGcZP5}1@(ogB*AR61_jM5QklI}k|UmeS<&=GxM^KUh3haeI7PciyJwn<6P% zZc)LCzdNcs7pC_4Lxm@MCS~|St$7i9Ku9HPRNCBAdoK1z=BIgOZ(({`;2*np1e(=> z!C+^1U#e4SmssyDbLXc{uPb%i+Ftb4u55*@^gbh0s5t|jo%Tgz@=xDAIxBCW-5nfA zN^uOTHm~xcr};sD%IGv_vQfLE^rgoGB`qm(YM-w002xE=dXYEHH5eG|Xl}HNf{Vrs zP3(agQP1SUf>TEK)%o-u)}yuUt8JJVN){h#w*cr>B+nI&-=QUTdxWTl2|Z8Z0@h<*PSey>8EjJASBaP=v{*L^L2YN}p&%Xwe6 zFWTLl^w%G}zy77=g>(A5oh`An=lp%Ua>uTF;F8TxUh;nBwa=ILoc)bEo_xDW`+o7z z<<8*R&8HH&{Rf(_bocL^RBl~+n=}Wq&M!qCn?*Xwc^It-Z@p*Ai3_@e|oc+V``!6iCwKXt@-|(Kc1vr^L}cm@LMyB&tKXU z2_EUIeN=G5d7(bT{wB*Db3$QV?!z#7(KU9Nu4SZ)LP$zV3Uv1?yS%O6hyXzT{z|7m z))tD%6lb`g)w$kYRGG1F-#-1d7HzpLmOEKhoXLB7Q@uOA=gxIG(|fDiSS}Ckk7Q+L z1yc5>r{{)Kc~4JIve$*ZuHIlw3iR_)7oIbHQn{&Vo;(eB2mO?BJc9`~t= z9!~4&?k);#-qV^t%0IcawZ(C^&Nqk4mK4Ndu`xO2>sP*4dhz_)+FDWTb*7i3skYv} zjrl1kx3YK@##Zr+jEurFqI;^F^IFG8(tYj0-i(55 z#m~F@TUAv}TJ6Vg6n2IqrKO{DYld1~`$X@!Kx;SDyl*>W)}AemlO|4#Q83q=y+x~^ zFu8Pa=f;JT3)XIM7Ec%(RC;_d*jMvTj>DtXJ8XTy(4|$$DH>M{CT+pJL9f@VCr8=N z=!{vD2fTy9AdQQOCtkAVo<1p$G+of%-oAZzm9MkBqPJl6s#W$1`-0OKy#Ci0%F^po@Ff!({SFIVqczkdDs-*!E=@`*ry;f>$^^{OAQ+_R-E>Q^f(DeKc-+FEJH!r{D=@qLp(?-=t!-}F@_T_V%Hg5NC zt`&x0lg7DMlv}uof%;xguOdLKFKE2_>Z=+a z+87K551iw2I2={lgkAqwySQYqy1F{&>PuSN+V*ex*YbI%zyCDZ?e-4_A3M?){K)gs zRjA{bPy0Wj^&hH*#W1a{twn`}{-L3bn>G>A)~#DxT3Uo*WM^e@&Rbeq(o$0mVZ>sw z)YMd)-5!ZX=g*%%ckbNG%uG$wGSV|HzT}d@!NIPsuBytal$4Z=j0_>g`0?X8=f0sK z&UqjZ*uQ^&c6RopiIbKrSrUuI+FDx)=!z?@$jQk$>#Va%N{VxGayq-Z>~@=!a{m1J z6DCf`$jB%wD=#W4T)uqy+O=z)PUotBt%`)hO-)U@9veG$EayBN3fXOTDMeaZn#=8u zMx%j1z@}+#muvKxF^K5#cnS&&6jfTU%R4 zM~8XBlO|0{PfvH)?RqT6Ij^ay85kUNyWRQu`RVByg$0G{)~)O7?F$5g$;rt_`kEh0 zeDJ{svu4k#uCAUwefle}yrOBg`uh5YhWh@2{yB5!I-SmNIJ|V}(p|fDwYIj_*Y8`p zbm?=?J%@}Z!Nf za?9$~t5>aB^}X+X&*5-25uCDI+zx;Jn>8MLDy|lf(jWPDt+i$=3 z-g`CgZ|v{uU-!;Cob#nimkJ?XfBkj8-+$-Ne}2zB_dNROqdRx*EG{m7>UY1NHf`E@ z=bg9g+%Hw{sb05!-Rjp@&z?Q&=G$+dGG$8L-ntz-cb<9XnXkO^%H~a*uDtTfH{bl- z$#YJ=?6S*OtXQ#b{rbQDOvNWoJF}%rmvMwZf2j`FUw+Xs7UVeE+#lsIj{Px>#?cKZA<#xNBE&*cU!Ucc+^CLxtg+uy-adC6sc?t$X!?YoO|a(BPGs$|~GXj@h^ z8s#|ynVVZrziZCw)vJAvGz?6cIsfF@Ti$)=vacmQ-;|P*G}O`19Ic-qbX)2|Jt;L< z<#9d#jj9jS?`z5!Up3(G%`Gj~5raC0`rAio*qfaljYhSoFXNr8?wlsW zFv@wwuAvR(^S-oW=Z^26H*MXP_m$d|T|4&DqA7XNzM*&OeBNlz1Yg*ulv)<(((Q!1+}x4J?zX3Y5e6OWF`%y;z4 zLGNH_N%n$;3pV}eWtYpxccKzH-UwP%h8y~~<&po&27q?$}(N|e$-jXFtzVoArH{bH3 
z=F&M;RaH*m&CJ$DEgp5pop%IZ>iOUQy>;8Z=NsEvUVS@!$;FpMo_xeN^ttZ>ro5eSy2PyKj4!+v^)JgsX6@Au_Uhgg3h;{kimM zk(|z{Nw&^nZ@J+g8p`UR=z;!huMz1C=bWBdm@AB}y{!iZGA2#z3uo~(je@@X%#6Yo zwMh(>+Dki>!P@-6u{*|O*9^67>+2j9v3HcDo!(wpZb-vsWaK8#)7+hdIs1D+-{PFT zaObwx(xSY-#or=|OBf+>2+pv`6Qyg;_Q(~+5UsWEt;*Zrc% z+c*^aD&fokcva~n;US^cX0&uy1WLT6N)T-k;ZVBMhKlM+-ae3zNkLg;&)FQ<)9r}L zBzLpc+gRG8*}B@2ihTKz0QC0A0}~>nbKBEwIHdYg;&aph03ZNKL_t(LM;Swt2g5$k zhA6j}H+;9Eb8cm-HaO_FDan53(_HU4lj>T#F5VyQ?Dg(dG|!MN*dOk$2uu^+!A=_R zCk55~2ISuA+%NMG%v zf)g>+;6oDX<6eP(S6V0FYjpx*RiO^U>_tDuojuVUw=Ff;InbB5l@o-}Kk zlBKmpLJos^`?_a;d&=OmY_N4_Z*Onf>>0TM{9xC%?6N$VnrvUAmVI@ox_bBg+%uP4 z&~g2(ga~Q1d&R`c)U=IfoO#BB8-$Lm?DYd39UVF6&9e_i8n@KvWM$Xxdw1N#1+l4a zum8ZTi8GB<2vjHk|@)c9V2m)hJZ4uOU*##oxIv!g@2>D4dIsMzit z!f+@YO7G{MmZAOQQyos*;t3^hclWi)+SIhv;=tr^z~_&I!{Ko9tMAqny7Tfz0mHGg ztC(Hg{lvrPo_p@&Ppo1WiOivMa@;8`&3m78X4E`HXoC%X1-~Glnm(BWS!v}+1P0@Mt=56lXJAP7e?_1w{XWgF`zW!>-tg~NQ)s&lC z5by=;O!>!UnK#{ZQ;$^6`}PkW-}LU}%4IcIY%44(^k(tCzTW8}=k{&e@|P@b3wis+ z2h*pYRR6~MTG&o37h6*-;x+iG@q7OST%JZbI{e(SBb zmarQ~CufJ7zy194_n*0FYx1srjkq3yMNJm=w$-+WPIc{`?R*@Tm_N;=uUB02@m@F(R zl2Qf&fsCBYXy0H_VPSQ3HSf+U$jSml;7VslTUAw6L^Znn0SBgabadq9M`|Up8IOcF>XJrW?Iy*X)-auwnR&jB0RR90%-DhwV*SatK z4%5>;)01V}2}7n%xq z+1|b$wMc1qFhP?^Do_;^6tuUu!FG;HrIM=U10kLZ^}~=<9hr0va_?LTCI9qzRl*1fM8o&8yDxr#Kbf;HH{iM3Lnp{ z*c-5ujMwW09n|QAM7PJovZT#sjnTwW6y+fU5{ZPC2?@e4l}hz7hM>cuQmI-8J;7k` zL1+F)0>dx_f?lsTIy#!mMIaD#xm+9s2?m2a9u^D|92k~LrCzU>$HP2sPnZRQfdCKz ze10FoMG%CGVVKwM^m@2_K2HE(2!ez{1WA%Oj!GaLTrQ;ibRaxFA|IW{y;d4 z3IsSQ5}D0reUuW%aj(~o<2VSCEX#_dVuoP|0zic#x6dtPBaui%C=#+>lqMh;hG?2b zQ7#O_6h(m`2!epq;S>l2B9RET!4yR!Ty7{7LSZqE;}8Vd?RKqJi{m&GX2M|(!_Xp; zC_o2%J|B+rc^D7jAQVl3Ai&{pXoj)bYzkCP0sw}hFbsnrs8lKmf_Ttr|1SLL+R6V( z|NLg(kRku1+QZNO@7II=F8@XLmVUPwsgyQd^*QQTI$FsUh%}0XVCk%+{$@*UcSBIZ zZ}<8*2g8WR+8Kh`)NB$1dq)fM+JpMEI4VxkA|QkkRdl+(r}ch1qRmzt|3=4-`Qoze4?_1g{G@b^VIl%KlN|@%y zRi8fMHaFE(oSE){@}{8ze-GK+VIPyzKG^L=U{EUNRoQ@kqm@xPBHF9o>e?5+!U0Nv zLo%~X*b(d#Ah&&1QL;`U)54IW{rcoYuA^pUYPpZ*)=-5Q9RPzvH7UU ztb_>?pk?3iZy%3VFU{*OjtJ(ojeRYq;`Hg_$ne}c2tL;7d3s^TqCz+I(WR@SGiR2z z1VXoO2vDGO{`=pqKYQc8V&TkByq+-$P+{Vf!j1xcS@dx!clUt5@=RgAX_&ZSqMkl; zHFEs47P@^bp}*&eF)7#He_`RObVIUsus%cnxyu^ph>RsuF%l zO1CULxcbQtxB#^0j{JDlm3I5_dxIG+k$3CGSt%DD8C4lW@qKP$aB$v%V_IrKQdcp# zb@SWrzBe>99{`3*s3Aj!{PVKpiTw>Vl0cYM1kjiWzXR>p*n5IVy){70@+QI=AstQV zLwy;1*C@3sz0aj@2{^27nM?SXB2eUysqi}6tO%4%8r9-XTzMbo*f^j$GV(Z)#Q+|@ zp~=nV03=)shb<9#L>b0e5NGfz1@|}*zFTvyka8s}y&+Ca2ftk&bgOw#s-6J>h1y{1 z#ce&fn9{flb^URyOF_{Tms8P0kP$pSE?{ywhX>N+VAsuS%+VxN#aQl__Er_zqPU(X zQ_Dth>q|BE9;1OYlWlGpD;W_D@@)L*J2p`_Ev9f8&E=UrVN6DHaE{Bah)QtRQPGjq zs}X;>*@p9yd2U|tdS{|rY>)SOQ%Msuz?j(hF{}l-H4+-8ih2|ULlMd&&Cg;y&@c;A zPqFIr7KF`ncXSwn$uFaJm>h4%eT)?d3efG zyoaR6m|xD;tlhY2UHsWAFYq4SY+=&ZNSzi}`K@^~^Tv(UB)&3Zx3BB#z_<-3bi7tq z^ZrYmH8bpUhL_#^Dlx%dx@7G2=F6Yt8?xJOWydm2k?NZXwD0kewueXE{LsHXXD2zb zfE|;2wBk}`qDW}{qQH6l*w_y;PeC4@V1N{3umPJ1^HWJr<@&O_~n!5 z0(D29S9mIRJ`#E&;pzup#9wb-KYpg*=>9KnP0YPql(uy9+3i&Uo>9LZk_{)teS{gECaXOhS67BRJbA~Ik_=>D<;(t&hvO}HjytW zK+ujvL?o2gow-Ner18guaKs(kt2YX}ZP$1Sa*fc`XXB7i+?g%6p+b7G}2n&v}|+!=XIlSMkS2=#bi zm(#N{fyYvy%b7JNs;aqugN-vxm_ z12|FdZ)}OvCEmSzH^wCx8J%^rvOGR6+P%5FuP|kpDbv&C;U(qcXWH9FdT~4~Z1LwX zdVG-PyHW3$2o}eE(3$^{{Iaa>?YDO&&tGkbi7CvyNtmoL%O4H}5U-Q2x#Dnli%->5 zS+SciAI_TlGJ@zPCPcn%Fz!5nw<4X={a-P1D+WLBZ~&=O$dtrwEN_} zI`e_Gixw_?^4(=?PTtzWn@HvjF9sSry=9d%e_mHnQSsYPm2WM1{>4WZgAlas*x_~W zzWVHwS%8y&a95cvcG9eIAAyMn@+OVt#wT$&oNEui4TM6L$38DwxI&wzuW364I>dQd z`Na1`_n9s_=Er#}rmkDOW$l`^-`qTO;K(OI)8*G+f9cb&e){F`)*p|X0fn>7dm>{- zhI`=U-CrFeH4SjQ|AW`I+^IYR2x`{7{Ne?nDwyM2k*a)Q!`E6mW6g+(cbe9ZnYb!C 
zIyw;^d}P8z2H-xjYW;y~)9B3ECw|>_=faVD4Uy#JNl)3Qo-e7SQ9{xZi; z9=oyLG-!H$e!}Swd+J+mri_GGo~vM-;Mm#voj?9q^hxf64*bCzdc^tnhU%vwL;g?1 zVzI;FjEOM>0s*mD93lv>#~Ta=8ylMlA_RgU%z^*bEJD*1!!TBh1wjy*OlGs$91e$2 zDD?Wg4fXYUosJ|!5Cq|TJj{mUVq>GCq5uF;DwM{70fu3?D9W;|SS%hfVuaV{&CJZC zX_{rj3Wc1{=i@l;^Z6uV2?&BTP2)JO)9DFL^fbcPwmX0s0t4$9?ni9{lm%9Ki_ zPNz2w4$=&RqFgSDQWT}pXw%cv#f^Xp^pJabNXg+!|0ym70QTOgc244YLGusFGq!fBIr(J^;FdHLR_uMW#|Df3X*LcMKvtRaGH0*YtLRPHH% zy+E8^@j+?n?3V{U>8*FWlXY1gN1uCdktRK}(G6UBbk=G;T)1(@KH0#yyiDkaJ%^?) zn>zow18;tEXY)hjifue?Kbh)`j-GmI+zY+?x^IwLZx%Ex8;L`wDVnm{re)nOj5@n) zO1>dcw&A_&eq=&xYy18BTCE^0?3Sfq-@Nrijj=f}Z^?{1$HS{;?b`9e!N8yl(qws2 z9op!s`M!MVbGu*vu7Y1LjnoN_($>@$7tdX@bk*z6q|cmbb(s2_o4Wl)S^00Q&yJcW zbc`$`$!$0AIUDXs;-;M!ljKp&6)OHT0$`4i7)rxIdCYvAA$!uQ6Sx^lyH39iD~ z<82LW#pVyLzV}T*!h~)Dd(dhBE(~4p@=qD6sD=yy0HC3*AzhW+O9X&ZPS;oUC`-kP zG=-Vbg>lOGR!*!&D^`OPG8h)(3P$$Tc3(?(p&JZb1AFpt(Udp?XGX-3l9WegyRP<# zq6W;IP{d41c@lFepZP0>3{b%;qjb2AHww#oMB-QX_cHJaV~uvQW)SHbxMerBSOSS! zB1HnvV1nG%p86UupUgY+aBDhX1f5|G8lM|m5)?)x72Pi;Ejq@ z+2UXYJPM4`GMP*$6ik>f;lTa_BpC{kq{U*!aU1{vL#&~sxHvO2 zQ=`#byLJs12!191al-a=Ba%g0L)Wx7#fiOC%D3VYsQh ze4xKSH#avqIoa#=;y5lAiwS}#FE8g|*zMb0B_$;w0CseA=H=&AR8*+d8ii7+)9K>k z;v#G$5b&EV7KULiUcAUK3=t%RLSa{Dr(7wb~GJ%C6q5zDxItQKgod&>m( zF<9R}JS)1VI@GO-7?jewwq~!5B~TjjG0_q&rV(&me83;(kd)EhN1&l-sR4#LR8j=& zj0qH&eW_%~LSinwTLN2C#0bOVa6!I}k7bCsc|s>?$th?wl8$&~=FP}JN_uIhsEadA z(9S24+#GjZuR1TSJc(8hvfdcH6LcFe$|dF*`Wcc&sCeXtgA+=Zuzm_LhI_d@Un&Pd z5Fbed($wr-FNX~iGBJPwYgn8@F|LL49%HVw*VNo|CYCQ=GX2^cZ+-FFdy5)T1VK_p{5bcS zzj((Jwics)!!vW!E;lDha?*@(+uyu$^_%X6@W5Y>51#n-`){_aToCYNN~7kg%PYzq z5=(J(*mSPiFlXU&udjS>^ZKiG?~E^cG#~ zjh9FvZpfgIjLCfTFKe%#oH=67sssI(=g%DP`rx|ONhQN=z|wb_BYV9AsC}uWr?=<% zFHov$UQS%{i1Eirgqb?6w14L8nKPc8R=>?*w`b13@cf2_Cr=(8H;(zs&%&5jU%UO{ zxsh>srm7w_pzhixQ`{C@(0sJ@{9A{oHRoIwRaDew)$sgrFtBlIKJYjTpzc7~xX>mO$1umlP?6(Pwd4J-sDe94L3@P+fs zo4MBL2c7os!k=q8`QJ}?`VVWSzb~PNR-yhphOYGa2T}^270CeiFrC2-1%d(P@W!4C z!v&9`1D49E8m-fYc>8q{siaG0hEWLc*mBizXFsgWo~MZDLmsbpyezp=95Q#aNil|o ztJjE5kuXy)N)+4BK1qL}&rvT^0%T0EyDgHQuCn$GB-*mNa*SFB>NN(!L7p$0qv&mm zRqJ@5szxV3t~$KUJz&5Sh)FMkO5-(I z%nA_WArdLi1-ha=^4^}_#1-O*owoSAe4fB`#G^|g-SKjr)?mKSLk3BH0WKVrSS*&* zd_4n^D%|e(`6^MR-Xyb!{6e7+COm+Irwp@9w}q)dnJ|(56u+jsEfN`_AmuI2uZoI_ z8jca>0)-M0iAmIVD==@>v9j=r)_T98fenZH-G!MjgkYTx4npI87J^_4=?Ka7sdAaa zIM~zCF>G?tsSj2^=-B@-=I_c&NlA9siR-Fe1v3_%JA3ZT#`WVSrTnuRP4M_2r`)%I_Zhj{W?DPaa#DwSWH#KK0V*F-s=A{_>?umr{mXFPu5c zv(JVgII%RSzzI{_E3^f1fpO+&I(Wp62GZ{K8QM1*0w=0WMs)uw-Sy z=ihuq%A&QpLJue0PS{l{h3v*v1VKb8g}2L^=I}CGT3h*(qHNBdoM9DhI4 zwfoO@&wAvM%Dvk!>^iVv$zL^|L)fr@@Mfs zV1Jxt+2o`osZ{E8xx``#!!QC|(AwIXoRSh37as@&!fd#^yT@j;K@em%o82y#OePD3 zLJEaafD2S=bxd?jFc^SgPAC-8>2yM&FcOKFOy<76KDk_uq9{$#f9tXm2n2&hBM~BG zQklo&fgy;dX^h9ialF31!DuwfCIzn>&YjEC{~{9urf%Vov^qgWyl3Pm!hG(?0r91a&jG-@?TlHqWeAPAXECK8K0 z9uJ0LVKy8Jkr;}0c6I_0!0-3lY&HZz+%9*RWpz4TcXu~U(>k42AP_(>>~^_)KA&7J zm&s%Zm&@n#9d2kS15TGa$h(!MO5eOG?xm+w8hB+LGRO0vhK?q{P zj6@=N(3$^{Y$^UYwJG73Q|A_~n;oe=jF$iU^vcDrKlA?6i%+25*(oElX_05dBa&x+ znm^(Bg;~o=F7`4T-aO^KA$@MvcfVbk1S^dzpPj6f>VMvJLafX+$HLpHj~FH~(^tpN znhR0H?VQpPlOCG?+Olsq&fQYe;<;06d3XJ`4U0cIcu|)y{M#R1-QRu1_R_L9by$j= zSKQNJ^wsu_?uDkiAi1pLlaCij?Q*u$bGZHMI^bwYv8b#4v`l23FfzNeVAi$Mb)%+C z$V?yQBnyrncaxr$BZpSf5Q&PAMX67{IA_&!Pvv~OYmOpDGORGWVASxZpMC%`LCwB$ z$FG|%oqp%t>7T!u_DX`QcLeOWcr#%^(w1|dnIy(x`260pQQS*aYm@W0@87fYwas@A z-n@UVq5eYSn@gS;ICfpZZH# za|_rW4`>QlOm51XNMR1jgP>tTE?006lm~$PLF?VS)#`#YgI-#;_k3~jczjf5llcND zP4>FjTtLG^&+&DkjEu~4o5fW%cR0}+l`;wmtmf z8?E&=etcZ2uc=R_6BBZT;sJn?BWQK=_44ZKssw$MetOovrHdYP?0*<3aZo}+f`3$Y za!T@rPd`7pYfp?SW^D1a#HFuaxNvdBh{6-6e)Netemwi_qUk?Od2+Q1{_Xi$OP-25 
zu2!hG*EM%^bfn6@o>sg`I&v=@IP8vVTWW$ei7FrYs#jbK>N_ z{Pndmxy*L`V0;==Pu72V@U>qr3ikZ6anF9e001BWNklhgt`}ho>;o<`~{DPA{ITbuKwIPYQ$unF8=!;evs*| zu35Xr$&cBwb0?P!JTvSw2 zQ6p?2ol{?b^ziogXzl!gru3Q{B*>1OQ&93Tu-QOJ!lK*TxVG-U=2R#7yZ z&1(tPF&-?|r6CYmo0j1OJxMIf#*6!^Ff2-J=Zo$Ok{kgly2w{V*V9%{zm}p$d(W#> zs+!1jj+sqt=uU`4t_h;&i7Pv-$w>x59qokL&{!IeSV%O2ak&g1itiu@Tt`ZRs0@#b zkZ3#HrFX^HHFsk3ec4U!L1KizQ=(&1Qd(e+U)l_N6-tyQyiP?w4|n^yQWp%ULIy_N z%fM1+2MZ*+BVwG4j_-ycrN}C~&m|GDC+zKx3J>zsz4S;myYqA0JN#P49pB%9q=|dGg~gM@x4U ziHTU%Ft*8gsk1(8lUQ(a;p4Lov<@6RXiDmyfBNdqOU?RK@4TVr7Iakgmd>A2-f(1M z+_1K@*ZLrioL86%V^WYi-Rj-Aul~36l;XupjRPIYNruFtq%*%BvD=9kUVQ1X_3wYb zbNhn1a~^#-WB<;dzIpe&0LkOfHP5eI`O+hgvn;D5<4*lhLkdpJUb8l7&e+<<`q%(@ z^|wuH*BmHWx$xw;S_$3S+S(t;@O3B04m2m}hwr@b%bkitJGXqf^T0(- zno{=HFTNV+PuX`%WfY`muf1~a?*6+w<~}sZ*rYNua(HcI@$4@iblSfQLnre7Uqh=+Lx%ig8b(D$++LrV=TbLb zG?Js$QLOlCk-1@EfEZ>k(dD1+zn}eR2{PW|t@k(@Y|fjesB7Zbps?}06tRvgPD&h+ zL9!jXNJ}on5#0=p?7vKn7fc{vZlhn$*OL+W?&uMStg0V!Q-+k`RVEVY=G<;Ffdv6sd;y#oWWtV{1wwt+o@ zkxH^AfM!UWJcIW;2d4sddzFJpNDt=c-#9`dR##T6IXR0x)!tcMQSUP=+j>!Pu?%IB zveOr+XoHqgs2Uj34F?)RqqQX}rw*H#;^6G)^uH<9K zh|H7j%mRLHV5F|o(49+@Qh8v%H~5_;J%GcWuC`8VTZh$c=HlwSX!5sSsmgYIygT-%$4pP`*GCSyKfZu_sl^??k#+#{Ikb) zr}o6}dN_Bi*xa*!&*az<%|^zMsadgX*SEi2k^ItMTCbQ~w65{c4}$CXjlUeZwYl7O z%6eqw7O>3t*47on=IK7$`Q5;!6}GbVixw>!J?2sR8e%!=*2V$-(*D8xgq&C26q5#B zFxE_iNm026$NkFtN#MSvUR|_BdxZ%8P`~9f{@#vp_m@cNPsYqC+{LqO2zYzV#>)Ql z%{jxyDqhu#KTQ!VQ%v9cYW#+aPyB}EK6NR3{;qe|O#1ERrEM?$<*Q=-Bl%BtnO&0Q z*I%eiI`Q<=FQ0vA*c+-{yR)dkuhj#sq9py|H%jfYroDq-Pgy@^Ue|K}+xW@d>n|OA z>6?e=zEQNmX`H@jT7_rRkE0u`z(rz&ewu5-$lm1|7VBy3K6CiyhM&(p{h-tSUHB9A zLJjT84SBHs2VC&tcDrL^4FViz!(o@p#o=&*1i|5Ol9Q7W1Q{3@;BYv7eZ4Hpf*_EP zkjUY1A^@ONsUi`!xv9x&wYIjlDin%vm<=-w2!a@fIh{_7#}iA$6h$c%%FfQtzujWS z<#K6?;&3=TjHgm6#Uha*CPuAR-zhH_i$y-44}>6#*(?%?+S=P}Hk-%eK`|7=Fg6l# z*c}YRxZNHYh5-OjC=`60Us5uL!{G#i!EiWC5FwA(E0@dVGTFVFn%dersZ^>~t0|Ji zFw8h;v{)?7&CMQ{oBcI3T^Z7!dFeN3Wr>BRa$gIpP5Cn|I zfpD1dc)Yc>br{BTxm*f`LMRj}luEfm5g#9)5TBsdsHHNg*X#M)C2J_kWm#4xlVKRf z;c)u;`aB+wOeW*N9EDtgqUZw+{14`Gb-Fm_jh~MGQZ#YNAmCSI#IM-Q;i zv%VMmlz}zR{WvL@b?5uR_nv!e>@azukDo~fu9|W~!+kVHL1Jfw>Z6D9n1kI{u4k=hJ)7-#@c>MBZEic7L?pFKlFlJ>A#mtl2Y2o*d}hW#->qB1w)XJZ!89XtqV_~# z=+#&A+uobnt>~Piy~aqd^ndW_n+JDpbnaQ+{q2e)_ev?rvBjwe(80%c9oqiGuOEn1 zXHJxjiGoiY*gB%iVDsdLkGv}npZ%;GXy<2eA3JnLz3hnxo%Zj-pQwcT5B=|oYN)gt zGUPuMPB*oM2|1+Hq$Tlma^BVYj~1z)T0Y9tRV8%?j4?QKHz4lDEB5r;yrvdkn@=2s zB7&k}?%pnr(J2{#gB)X075E!8s@u%)-JS-{L=7=qpmfL;tTPk@2*_~#0)xhCoW{VN z6V0S|0xT8E`5s4qgSDlfaooUS<0+NS-EU=46M#hZ42DDDb^|Z^+FA8rXECHQ`)}Fc z`{(9n2%HK{YPw8ff&5K2&NLnBqMKsxv$G;)T_d?>Tc`p78qwHSL~%1R$ZN{f)`CD! 
zunDS)&EXSao0rkaMQO4d{-tBa=@Ayok+Tv({I-H`2S-2Dq!tJR ztiaEjmy4?vfs811vPNWkNUb%egIUsG0${%1Z7bV%%~92+b{bRs9h^&bLQ6kujBoAm z_{?E|b@!RsHnTTOMVy{cL&q3W#2h>~3XudFJMVXN)}H4*=*<5}9(xb!f!-SYdibvS zcXd-OuN|Cz;jzd1KG`&G>MT^K9ey8mZLR2!VPlsYBq{G6{wc)!<@C1&1(Acl3F~hM zRw3fov+KS)F%g}xHQLWmrWwr0ZS5k_se|n?Rc`NOcb>&6t z2>&-tOzQsSjy&=0?}s83e2Qu6Ijs-| zFe{0Ve`&$QO>fMaI@kBb_a%=_DG>V2UsbAWp_1lfxiudtg1`0YY7sHf_3No^XFILZ zDYH}Fme1a&%iHD%PdWbPC?KVJ$G7ikmp)7S#whJ0lw*=Jr-=;HgeikZjxL+tJUn67 z%9px6IH&R@oeGVvjCz4n8CTA2O#bMHZHaRXY{{iH2a`AbGExvzJ@=vMAMZGRKBiRq z{AVMx&XB={T}c&lKFay-GtSL3`OWE1=DpKlnLN34=PyyknN76y{T2{>Wd7;h{Yl~l z=B=Gqr=EGxY5y+#=`MKrd;ekaG}H(+c8zzFRf#|SI-^i;Q7%kJb ziV_E+A(cjR8`&Ps2@g}~JL!O&4%ou{H03}n(N9McifSqW?vdok;ssQLNgb6YFxFvi zDJUZl5hFg(BOugpJi-eA+Ez(_44v8#5XqVQymM}@uCOj6c-L!*Q}j#435w1Xxt4nm zPl`LFm6f(I{mm>Y&59K|oiOC~gi`v5J1$Ko6%#9{@1>Ko9IE38pBF^+WLhuMmLTI~ ziMsE&F|oti6D{Y)+M^r-ch5+jUm?>BHY&ME0tbYe$!G=cQrG}(v`gYZk|ECxM{*s4 z>ZA|=c(`1DLNp8n0Wb(-%BXIX)xs3T$gfMeUPy)0kkOSEWNwdjrDZpZNQO8FIK|RA z_mogA6M3v#d~>YC?r86@P$RQ(8-OD1xDIcL$YSo3s%Wpz@2?-{43dMEh*c$%if;zN zjG;%ne?c%9uv%^Ni;C{vyLac#ogaVvF(oB=?AWob>Jpm1of(k}gaVh-l@K4_c-d87 zUmpzsFTU`?tXZ>`y|8iN?#=BNsuIDdsOV@?%X{UOSGIqCV0iBEef#!)@x_NH4{Hqu z!^x9d&Yu4ehbuO2{Ql70t0Jj{!ynwUf8U@PnLm5QDSxH0wae>w7C*A$#^tMLPM@AS zW$K%6zPWn!U#xch?%lh`B&N^|y>sWzFAu#rVz#*KM)%xdQnSglZQHhzl9HV}chEH5 z)zvw9%9ObAV@z!DLIc3#@#f8&_t?TEhmUT1boRnmb2iVMKD|oRHD>jsV4x%r2&`QB z&T9{^-1y~}bLY+-*w_5z}r5{c`rkjY2FXFDtXDv1#MRjceAd%~Y&NPEHO=xfxmMPQtcs-TFOy zd+VATV`F2te}Da%O)ob$H?Lj0_P_`GA9UKk3xAeXsG*W*$dLc<)TeCO;sn8fDs<|) zw5-A4&i(AQ{x@IVi2zYfgPsl5-$U zFl$zI=VxB3M$>r`b*NHCo=0r~%FJieg$EuP+gV;9HpI3XyF!3JIU1XsLdN{27;K$@ z3eo=AdSUEO5V_e*%+cMWQmAxk(!^Y$t&qJ;!nuTIiaCgKRt*KGl1! zffH?Zt>Ere$XF}WvI9}2oZr6h?WmGcP0oY^4GJ5N3}R#`7>L(cV|kn+{arWUkJs=rfpqNxU6Kt6i*<6YH`I90lZWpr&r?d1Og`=yiLu3rAzrF{v*^f#+N+3@N}`$LNq zuYLU4%5_S+6O0B&JX$MGKEcqz!+fJ_y#a;Z-eGH!{XuMXD1IsbkvUGu{h^T#G z{9ifCR+oIW`;+}U;#65#;^dAK!VQ3mbI2MbU%jE(f2tklgC?rWzqt>7p3PF&| z?G6S5d_G^PR4SE9li9>D3_%csCKJOj7>1clCYq*$!JyOSvfJ%$j|T)nf*^tfK~a=S zr2--m7>1n=2Mlut2M3)lmr|vIK+qou2!%q6#ll6o;c%FyDWOP6)3n)aVi;PfRLW&C zKA-RPdMSz`Nh&opjfe3>Vi6DlL?V&L?FIl)sZx?8DG&%f9*@K6)Tq@Wkw~Z4p%_L4 z39r}txA{_Xg@TPl0)YUJ$MgGr5C{PPfTCEC2x1tGpOS^Vv-Hz!!!098*; ztXwwml)PDTe)?t}MTB42@N(Fb2e64Dkg|m@zxG@xTK;WZqZT~DR=gvWm9IMR;iQOb zPeyuz3*m7}Kkgu=fBM!MllAhp=Ik?3Ydl#m9BWJvjY^1Hz&mqeZmsR9y*vdsrE^%p%X1?>nwxFz8CG@?pSQ_{&pP(;=$iKLA6vFA4@um7`_?bP!2Q!Nciwwx;JdNt zJmH!9JG#Gk{KCC~XltZU2amjRpp8Pu-1Eh!x`4Ru3Dc)We)@IA``1srS$O>DqQ-U8 ziuED)`e_H(JaU4WftUQaX5u%K_l5S?eLMBmrinclDZInb_G3lPfMnltD02F_QpKw~ zHU$PRUn!JIX}N;t8J-hz;i?C{n7_7v()WjjT%UcQgPO8!HgBAcI_Eoos+woA zyXzVyDS4m?jelaaXOfcZ=XrWlR+g^dc7>yuzIe4YhMqQXMHdKUE3qP@kzTNTxmDk6 z?d>>mMA~$J1ffL4VvztQd}NnC8+D|*OupVIU6fMEF3ZOgBmt!)G#I$=kOq7@v@y`P z=&`VMA`=H=9TnATTu2P-RLWs}p|mbgNsni!qNM7^tB%f_Q39vOIat)BPQJ=5P9prB z7qu2Kb*(SOg{nCML>P*oUMM@l&rR`sOo{htz%JaqUlzFA=C5u^vva+YfI%-c=oRqX zcw(BNHL<4!sbU5y+*P%FOT+9e4XSUf7MmV)=6@ts6w!^$#}f%2j-Q{XzNv;Dp|Y-i z_+h(LeF%0(JAc^mRa9hrVow71#>exnyz$M8yV@^wlx7KvsKIqw=Qi_)I%I`irF058wPLu=TdNS2TCV1n};m4aqR(zE>ao z)Ia>%^ewOZ?={UiT`~D=#gvo|cip5Xrd|qk+2GNdaT{hdDtWU$?p*$@{p0GWu^-hE z*IyiabWGd5nvSs(a<&HE8}V(yOEWKOEw4FkZ%!<2csh5^_0vCKiJAAq5y$%6(A-2T zZ1*?T_3nPEZp->xlOvXQrc6BBUtj$3JHVPL*E76tomcN_{%+>Z_O0DtzB^15KJ-=f zo+Y2XLw1zSObBE7x{q!ze8c$c=%((zx8BS3KXh<`nKaD3Jis*uRqYP!soE1$k9-_@ z6tu0Nfe*alt)bcXL+}TC_YPc2O-qeBhPmEtul=@aTJB5&jP3%J&3J@2d(4AQ`*&gJ z5ogGd|4aU${ZIc^00T2QQT@WZG1jRL!f%(i!~LzItgJ4&nT;e~7muK1)hO4g9!vy- zBr=u@i@13+i542r24h?<7p#qGEV-}rH>c=RYGX*)#02qJJ0(T@*%3I;Hb%@Q#&N&~3E~_C 
z3?f{Rlwe|U7Du&lP7dCyplPTt+RvBS=>~%W*ZYFakvN>K=MqfBMH0~s(uf=5fFdrb zO;jbg9epyM5Hd-#S(%Fm0i-n&5}Ru#BxZYbmA+wZ7@nD+=LMKRfZu|H8L9mHJ;rPq zm&?c6y{t5@)*TEPFiwOQAt}5J(4%@(Cw3{CB8Iq5hKZTf)?Jf{QDFray*3KM`mpBg zKo>70^AQLAik6ZUX>NC=`Z5AubmX3WaW$3z9$(2q6e37zl{@YKo#j5DbMv3NaQU z2t2|n6biT7&Edc}j>9m_;UGSr7X&dbm+K2U*f0wMKqL~OX<8@{GBnNS<0uz}NC?LT zK*S{w2qGAVwpmd<3WA{D@0ZEsEXx8w1Oh>;%_htd0RR9*A|aCGAzUt(>##etdVMGq zLJ)3*W&OT@iieSr2nd2f0uTy?!C(+$7!tbGcj)0vLwD#25%cBt_CR4MPwaB12>-5{dBnJc_n+xmICMgo*VGN7FJ4QKkez7jHD-<( z(`?$Ybnf^AA2T`HSz((N69Q4wJj3YTT1#eA4;`~Ky~!bLqq>7gFe1$sHLE)uEm86E zOTG1Cxk%KU=V%x5nXu@|FmO)L&~BtCY5;VPvLsl{EE%s0V}XNJ*OV%86fcfS0%Mpx zGbd+%*^X0fa5L1Fo}Pvt(1_w9ci(je0|*GC_DG1f$}%&?xVeo%6`%2CU%S(!;U}X> z>;O(mApZM$d2NQgf$o6yT&L5Nd_s!9&GC9%mXo}5v{~<~RLTZ0iBENEqjDhxi;oY>qLMj3dUCO} z`fTd1!maW~qynVa8Y@Z4cAsrGwOFlF=Nqz;{@c_4AI$eed+M6;qt>oHc;V|cBTpn` zC1#~IN(0_;jCALtdnP~p%Dk`4{F_|_8GIo%-I1QN>N&;obyI&h)8DzZs-Y$-J#N(Q zLyJEB^waudEP^1Jqqm;;@uk9?&9k09v~=UT=iWT^$}79_3oopHHCJzRNG~SR7aPWw zWHr@UYDBk5k`niGD2g1ma%5r}vj51pMNyALAzY=QC*b$@#w{PduzBA;@adE($)nxl z#?A2x&lYAx^R8Q*NBdS!U-iVYC$g-M-umv0M|)?};m?jtJCu)zIxcPBf8vwpf3tOT zbTIHWG00s}8V0;G!S^oeN6Vgnb3z;ADW`~VgtO|(@(oWuwX1Xf@x2F91MS9J<~!4F zoo;ZJ?Z5Vyr5oE4)E|8*x_9BA?1;Cu&AVmY@CFG0+-#IerR@#(3{nmhup)`h#&0)I zTDZW|UwrS*!2Cq@t?hs5shGmD=U$9VBS{uGJ^ zx>$i;rwjOLE|&|ykVGPJyWI|lJpyQr#sNMb=W-DY!vd^_hw&&bGSEMeAdkOs^;$}5 zhFl>xHrcos7lvWG-3EeyR4Tna&>s_`D5rzVMVS!K*)! z=Uv4c{`&UbZ`#gWc<)k?#64~}!R~qQDi4#zh~tbVl9L1`6zNKGOZNPHc+uK*?Z)1_ zO{YN+%xlhhXCDZNElU<&I(qczd$U%2@za(mkByZWu!+-N@VT7%*`-%**F|{1jOA0t zuG>7k@X^?NK`ljjLor+DK3hC<*z51jGFseXz7v#�s^wHM<892s!f`#X4H*R0}c zhtEB}a^)j4WtpMW8QV7=#~t*C&wRaS&mM7`toC~IuIC1|o~(x!E|@rNiVBF^^jUdg z;*5gPS;Y%=nG3X2grCK0!TBSHrC3%@E?uFS`^4j~UR<)|gM+^we|W2A@a{mbU(?$; z26YOyZTscv50}64%%NqQmtQbl?x(&??lBA%9-~Y9ZB5 z(MU(=#J021^Co`yw_b%t001BWNkl8Bi>)%IVc=d)PMYhKyiMp{cxTr3||_lF#>%{1&jjbfxSx&Xbnr(hcx^UuwX+gcz}|LVN3fG97DEt#{n#&O;(Let zdzH)#CeaFoBivr846A35v3_%-R7Qz0rOhkz@nDV8YPIzV+#-P*jCLku7)}Toshj!# zGynf-JUjyhQ@!DEzuc#Z>SR?tIKZG3k!p&`c105kzEeV6RktYQS_TMPS+CDG$LsZ) z#U_`=$P#`9Pw(_=A`!M%>g^Od3qqLyLv@EWPM)|&ml5=Y;(!XdQsDI{#9^(12}Ym@ zZN~#>crd=D8NUK(3L@@qg&^8&GW8_hf*@Ka)DcV@0GzMm-nFVxmGyBvfHNsz1nPwV zf`qSkC!tA6T84;m5fH6b8oRpCFfT@t(=I>ah#ocQio?NK<$@F0T2v4wc@Z$ci+~bd zEo&K}l?>c}7UNEdG4F$*RtsYHYbgr_4Y0H?_K+jCS`CfoSRyUr*f=~|)7^Pfm}?Kl zH5fP^ZR0%lbgb`0d~`N0Nxa30z7^A_3N^%F4n8y(Eo2gG(11BCiMi@goa=X9FuB7@ zTY418D-(4zCpeXeUW+9BvT+DtTOXDwNW+l zsOmlm$0c-#xlxo0H){eqBb(fSCbXlm5wI|}6%gDJrG_hQ`D|PVm*nEzK*n9J#zFos z_U=1MifZ2*ex->3JHf<4eabD9@jvDyPNm{eOnai z2_s~Vszg{v^#ClVB_N9E!&#U&HpGW=G-8fMEcE(%Y5h!x6t?+ zNhCcpHNMu*1{hwGi=}pTj_PzM34_Q*FTBg)l-sQ#0eXri)oBVhX&J_Tw#6W1>cVgp zDYWxsLEOSM&>d2aK`9hFxELnT84L=xkWiUmlGH{LZ85$e-G|8dO%e3AR0-D#fBE6?iG^OG^Rv%p&3Y;I%Z(p>dF)u>wvDVABNFCi z$wnA;1gDpFf<1&z|5ainW%;==h_nW7;+JE?y6wGqz1%ImtB%@$Tm z{8(Pe)z^l{mxGL+$qR&JUj5F83s2nb4o{k~b@LA_Pgv_Gr`%s3c{k&0>v3S|pk1~a zal}!Wchr_`pi~__94m<)IYjWH)BaN!s6q`G@GtTwe?I@&I&WAXUu%n6i|6%bJ0mSlhm+&*Na|wXn`)cQ{!K@XSt&Ev zsuWRTl6nbX-_B3s-z)FeW8DzjQw_Txko_wwu}XLw!6h7pLMt zmzJMkw;S1mhUaxw+&;rAkB*8-k;rV|t;T1Ulj9S|donA6-en7js;X)gG1`W9xHfyX&wI8jP8n{{H7fjh z9T8Owr=9F~ql1&Wu4<959`J=6$5`p7rr#sah zE68^Y6z&U^oy6a9Iq3irMUThr@wT z5EMlLlJt1o9*+mZuwWnvLokYBOeWLmaL7@><8ombhGG~DlO#d<1AZ0&p)e&F2yobJ zm&=792uYF<1d#w>wpgfCIY|-#2nPLrDwPUBFi8*)4Eg+ik|4caFO|wcQGl>m_i7Fl3=IZ@fj|I)ArJ&{9H%1`iyeSr z7y=-F(9dKrAP921+yqX*FcJs`Xbif`?Ft5iOeWJG@N>8v2nH|=!$8!J2S^fuAs7U~ zU@%CMfZy*Y832P$f6;0GDg0kCslk8&1AeFf?~0Vq=W{xp1c4Vnd7@NlD4ipFgwP?T;TnrqO5}9UZ;By#NSAMn;8%ga81^=kuu;6@nm- z+e4?*U2ZoBf*=GjnatYS8mrZcq9}qO7={IL++;NR{Ql0)P6~y>VzDrc3Xo)PZ?Dhm 
z^ZWe(0MMv3p+G1Wi`{Ow$LoP%7{_sk(?O%r-5!tM=gY{*2nK`0h7H4U+~f9ib#_u| zG`HK`(b1vr*Rxq{on9xG%LPJ#*Xs@V10Ij3rKQE~@o2SLhr`a}@#5p+IyyU16zx;> zDHKW;i$xG5jYhLrEHTm1CX?yb&6_rx4T4~g$0OhiYHMqmEEbAl5{cyRE8jUB4nChx zr_;F{uF+^To6UBILn@JayW8^djJ@5p_S1PMZhYaoYmL_^G@dCd-*?<*t)K9X*|+!7@tq!K)S;0_7M3MyM=z^d z{8pDVUY!uDNV|JJvt=p{8jDj%aw2N3MUZW=)iD_mtPb zw)&0JVkk|MUJ^BGKAgS=vG;qK6jsDjP4>*#))cyB{v_AZmu92fi?7Wt_(+iO-t*^8 zUoKpc($wV(T@0ayKv6b zN|P~OMf3Xz6Noe8kglFS3}G_WSZ-S6w;#{>1G>P!r?BX-mtKA;K0EuJt=rw^)U=HB zFHc8wbhNuwW7n)%t<0+2uwl)PCAQI{Mvq+cV?sg#R~>cu`zvp(U9)o44uxn&dVcOB z%?}SAJOIqpp@U|2`RNc5l49ia_4WDE`EGyrniYYl@Q5+_pG}=I1=9D`*4Bz*!oU9d zYq9y>XS;T7+P3R%UEiTLnqKQr->VukY`EQSfAXZ_m6A2_5of|x@t+=>) z&eZqM6Fd|84||O{e|eN@Z|1xUSs3pUOf4=4IJv%-K z1Ra%*jvF^_p8Pa%aN*!yYSZxXL-&8bf8N5GWX1Q-o;AiznKWX=@Zrh8)oa%tJ+4{# z)|MH08((zbFW!G$h59#GQhvaI{}JH-oImKW|36#ufBfzL@e{Ge4{)4)4F5CX;FES) ztCT`@TP0eQ-VwyYTI1Z!Fy~TOW-TKoLSyy{2n2KZ>F$OgSI00i0XwHfV)kPmpNq8P zpoirU87x+FF9mYj%>Cs8ng@d*7vLfo3_IIT(E~gOo6U5DvP=%W)oIp;ak~h6Xqzy2`lRE)Jp-nxzFxp_ye9GfaqvkM-Y^S zm8XF5WEa2F?CE(78%W&j_3K%A6+Fxrrg}wE?lJBbBfk zLR_j?{C}(~|6kxz`8SoiqNA!NyD(zPs5~VP+wsoHXLodaHtw1|Z05Kr%emt0DO1_k zZ*B>R`FPI(SLTpUC(YdzHErSG*B4rz-TP_X>nUXBtuJm(&l$dX;bdWJwOn=2p+4l& z9(nNmBW^OWZT6nZpIfI-du8g>>Fd6F-z!55_ROuHl+0hccFRY*CQkTj{jwrOHzL#p zmb^as!L=uHSenIByDLvy9}@dsJ-2URQ6YEHp{5nt>wygg8J`SZtZmWJ0`!sNS8MU8 z#0C5L1)Gl?+<##6*i_?}k-b|Uf4d|lSei9w%+Zn)C$Ap5b@jykJ-ZGsn^JW6yB~iz zKXy=DN7cP)0(s-LyZg^i2SZyP6unw?WuKw5 z@_fn<``lv+=ZrZsdC=Fnqa>qWj%g}|E30I(Ip4o>M4P}>f?5}Eyr_k3b=-G=B}h)n zP?-o?{l$h44{liWQtqyGo1%OZuYA_>{nqc!9UE994*&+LPy+`1C%^v=eu?Z({T=Fp| z6TNm%Cn2CRQJ=x%V3QKi7O)7z*;W_T#kM3SEA;h`4LySJsgWHW9dcW;M`_1eA*0dA ziQ?ExjFCwkk^}&QPY~-bXPEII&TNPPEI5rcA(*e)(VFQVV~7YsR5yigi+dJQpW>p^ z>1LxphSg}Y$Y>Nr&`JgcWFSey;c%@tbcq`N9k4$&J;UYk!l2bVNY>li%RFN6Mso$> zUapYW)j=85NxH+U6uqSC>WE>~=*|I8xG1#)!#eS#JW$=iANiS1}k2??|aslwhzl`u$!r;EQ-pm==jh zk`zB7cs9t#(MhAi9+mcn+X;SDj2iU0lInA~1%`?zLx&FiuKaOQ&ZsMYTlAvC|I3)M zF?ZV3sRyfqD5PxObH(rTpLwwEl^L&4G?{be%sKG<^`(DXdh4sl`w#5jb?=cL9#$A7&q0Y!^gz4x!mSTWpi_L_sy4AuU>7k<}Y9O$-*}`gr_BT87ib;YGzdYk$v+QEn38n z$p7vBZLUn5UYH{reqi*NF)P!K{&IZVhre>IHfw%LRB3UELYl2^>w0(dkiFlo+P?1% z4u_M{l)HGrd~B`!^y%v_XU$;+z%YLGteLZ3S+ec;)kDAD;D7q*r}2UE`z1%bq;vld zJ3m@F6DNp`&sFD7S3LRT(!?>Zg%wPxt*QBVZAWkK$ZPcttXZSv^3bkL-{lvim51Jz zNo9C(eA)9x>coQV?CfnLtzUexw~@Zu=VJDseyR)$yYG55c}6_V|M1pt^}k&{eC6zY zYpLVu)2DuB|GRtNUo`pS7ajPEH!v1zz<>e&1hGgIDwjJP4!_UO<#PEv-o%L$TUuIh z99Jk5#l;UJqapzS2nGW*8jT+wJk$YQZ_b8|2ZBM5>*pzNsuUY|E9G0|$XQE4=v&)3u2YciT#E|)|iNlHqBA-KA#ib|!j*=(gk34);8 z<7O}!9NMtk`Yieq3R_oBAL+~J8T3T9MTA&`rGBxCx*Yecx7L&$tPnJ_dVVH`Ku4^?H?8+ zKDOuh#fOB282s*vkz>>0_1kZjG_VAd2EF|5j7Wk0*xNJXR6I0^Ph$px=;?7gbnwHs`|=IxTeiGDYuTaN^AFerqm61dN1^)0-tL+BGFg z-RQb{)31wXzT|SHJ`knfP-Y)E>Yp_Cz?fw(-Dqu8b6du&D;WOS%nL0IgNN@Zy~3Y8 z`RfHQeM^>(mbym2`NO*-Hs-ZIZ66tubM2V<3MWlHa^K>6?p%@k z>h@3HOiWP#kNa+Zd@O$SYD)ar2j|a>%8qp*x}l>JHsEHQ9?*sx)zEs?h3 zK2{zVN1*uhl*dgqixRp@OP_a27R0y8w73=vL6=by zgTrb~4?RgNmBWu(O=5}EV(S-(L|798L0~K2gjQOqBuPb3A(yWQArW9}EGZ3Vr~1eH z&>$uNg!=Q1?yN9IN>pu4eV!!FLKr#Mg>;vF_u6Z3%15LwTefTnOdCGrZ&Q9ex@XUB?GKxV z=D*r6=e@c2`)ejmOG|6X@~pStew!rTy<7Uk?lHc)c_BEZr@Z3Hlhnl{M=TNufYU$! 
zfX@DA%HnkvFRt!0#9wL$LGbkb5v$j&Z7UxfnG`?c*Y4u7XAe&==ko=+8!IlK`f|b0 z%J}&Be*T>a6DAaGd3O8x*ok9a=dJniT*K3`OXC|F8bN@OmNLXM`LLhx-L3p!>bx=2 zKS{+DqF>Lqzw^#Jx$ZpxK?3$A<8rp5>kpQfl@T3R-`nzT*?roT%a`|lyL8)Ub0;j5 zO7F<1qDcqTBL&{ikKNp%|&VtlDO`>XN7JZ^0QYT~szohRO7Z%foO{IsA&!Zkrh) zYo6Z~*6HF2Jf1VNM>c(-YF*Qdxp1KiqlO@VLZ+ zS!2sCXyR&P6|IbxVmzQ$uy}Sv@2RHsIg(IrpA3#v(-KXB445Ny%NPjIM&+0xx4^?@ zL3TP736-i1WnDa$5Gj3WIHF5MJvJHHs;N&0dWON0Tp~MF80_^6eL{H)9n#b2kS-~T zkwllk=1v-$(rf5pNoWy{COHtv$6~y#8t+#QW|+BPNoasZ0MT5xr8j8o3J+%*WdIFl zT2&5bhtXIA`P#K$wI=KV-PPJ?6#9U87NgJDLufs2vixcTEs#h_C0=0qe>wSc&bJ50 z-8~JS__BG}n$Ny_e&^m*Z%Xv*ilIN5dAUMF`EpcfjJqM7s~`2*0Z0D0d2jXZ+K0Wh zXEaYLtl7Ta7>$V$PqF)H!IYorZsNZzrcm%_DaThUU~h;`}Jtvp@J!A zCf(aL_SA~FHEXin?i)OdeCx_{9Sk>|p*VXy_wfF=cbndbp7VMuAE@~5_9VSRak88B zRry}#$gV}7s~5-LOD76PjzLJ^#QRyRCjc?>SM$?1#iqLro;!KF%A_Bx+dV%Ksy+Vp z%9Rs_4gLBpORn(Br7vEJrFeck5wm59$@+Oo=euX0U7!|3zBhk%PIeZzs{MFeKA4@c z_WqrqA^hgPTS24opabP(+Rt@)XFAFwR?Ry#rKxT6xSw{;rsuUu z?!Q+0A&iGH5wiZX`L*p=d9B{>7QZhG7`!hdtAtN{ZC{ImW%#ogaiY5Om)45L=;(Pp(s-6lVE6o66mmUfmd zKtmJERx6v&vw2D;CZthmw4E19Z0;I9m&UEnwlo+C-`Fc8H2}VRj zHfnm6vM6?02MKt^Zw&3}YiE`LCbQWyD=Y{EVw=tWfV(|~;;>UVbwQ<43FKUsLCN=S z;{v(N%P^#C1Gq4w!=w#OS34bsyc7*V5G55OB=Kq;DX%K)jpHVw*R`&3(NPr!6s1;) zAOLlU#8SJ>6|C?P@jOatISj+tpoG!WSQj2yeI8i(NE;}8laBsYtfw?w-YOZsv@1rVqssXgy!wFm$bQGw{6k?*7F6`!eX2 z$^Tx(%fZ3WwKUhXWMSa@7T(h z{?4P{o!)Py4(gqjJ^w@BowYWRvVO~wY)3^y4YS3*>s>@rmoWt9>+Hrwd-AFXeGQuD zaxUBAzHcs%X1RWk11NW9Vyddexij5wog;}`X&sge!v&Yj`p_vcS7-0G7)Z8#4sY3Z zQKSiI#wdYNSPTaWXmX!-Jz|o;T<78Gcw-d#`3_}k&)8JfN8{tTB-fA$?r<{w_G2Zg*6iXxe*C69acnIsE1S&a z$xTZbH)dvnFCaqil$|oDkBW*CLbkTD3L-)d%>$TafB*g{Qk>aXNKT8Jo}UNU(Adsx zf|OTKU&RW5DRv-H_%3JWM9PS%0$jGBqh5AJ=!2 z;eW)m z-hZ91k5wP8_q{=3NKsKx80dw?#Z@}(DZTozPj9tGOYq-#jrT{B(P4|)+r6%L2IO*6 z2)HzXwkst%5d(u={|mxgvCP`*ergz^4~yA_cJ)%bLH~=-Yge1&#||LRK}AJVfYsE| z*_fYy8i^s$(9qC~h8&o_?0TI%yhXrcx4u8INu-!UfD8o6M-DbN6DWC6ev zm35XSli5|GSc)AQO~7M&I4MUVlkua-Fm_BThu5RdeP-u$sVj}gjXumw!o`@)yNfjjWHshRu6+kr18*Dc1Ap*uJ`Ql^!?=)}u#mgIARHpKt zOoWvsx5c@X((ta#SIf`c(MnnUsX$M8;9@=_xXjv&N1iGEro|!L z6M?plceQdJEi4R7=X{bL_JZf7@^1Jx8nrxwSnnD3#Swh*weS!{6Mn^w=0==$RgFc| ztT>gGEjc)tkanKyng)!_iBQl@^$X0K1Ho{D2r}7k#TOH~d^Y^t#DZEl^cWDrK9s|H zH~iJicUOtHoYUQgq4{kB_?+^BG*Xjf##yI z&kdAwfS+7Ln#OTI0r;EpRDYM6SCg~c{9E8~~ z2P6oIPd=;(;1}zg-M;;qiXm@r=lKmwp}@KMbaLy>c5$zNh{Lva1Nk1*voIW?BqBay@UupndiakEmw zL&*2hQTH+13pLc=-*0ywRq}L|DR%>`4k&7}X^dQxun|>Ig9s=r zm}ehnuN4)2fC?~GwZzsgM&-&}xs1nnCz(d=fKGbRoYhk#A$E2_QLWkb>|kfdP!^oo z%V6>TzTTI={j+q~t?S5q_pnhE%k0GEnlmMv?{zH#je%1`fNf3GTl#18+a%P>3fJ9a39!_5Mx~e` zOhhNts=Lw=N3ms&r**rIh46kED@|g~Rd{u1dmX3qC&pjgy+FR=dJC0y;-eb9x!aiS z=G!{5=R`^xGc8_Y@I!(%S3DVSx;h=X!2gAD?qkpNTaXa`SQZwn{(yV+*O1grNehHy z>#;jp275X67D^eifm{y6yOhzwU$pMh`V_IAg*NVtdIs=9hvYzbPcuC{o*xHR`a&s) zIqiRl9M%iKyOSazK$ghCUA(Ch%yvUxl_ap(;*9J6RhTl@F;Xi`yH`BtxbFx%N!dEh zv4D;z_G->$`SUQ_x=ZsEak7qb#{6Gu^T-P{(E45E?eQO-HsGU*h^eY3tQhn~2v#JF z!It)&pIh{kNO9pKOh_b@7f5pjkF~ZNR;}N5viK!!Qu!xf#I@&|CCkwyqaql-rW?xY z#RcKW&Pu=gE^TR+PLV1SQ%5hlDmYeZw~(L4B7SZS)w~DsuvXgoPR2xHZm*Ky@=FM$ z+XfGR^NKQAO&i=P9zZLOPVMA-woA$gn*+{}!jx*Zo7`=8Ybb=e%Uv!A`VVVJmF}VW zN8q30=at->t&!3`M_m?j#L#Vv_V-7(3xuE18Y;TS?RW2agj?@Nn#HNn^(Ed*Iio6* zu`V1&8IyDzo?D(qyTB%P+}&h-;Q4hXW&%w>z1Z3o(fM#A1iX&QjquK3n z7kRhPgR9#dE=M7lpTR z81rA%-M;*_P+Fw2_rJVjH7bu$z8RjbKl`cg8uEYU}xt&Do4iR&!zW&Q3817 zJV^Mb?n%E2;diA1VsVTMht|6zMq9arh^fP&dnULdcM~n6PEirUtTjDdwCMo=igp~= zPQGOB63Zr?f?Txiw#Hf{gQZxPjGPFPV8*6b9>#cKu3s=liulClhy87WZXR(GW5@CC6#`d6D_i4_f|s>q zRw-x{=H}+=Ot@ZaSXz?==dcpJfZ73ka-?(H!FLl=--eRbB2nTrQ&-%c<%_qs4jQNv zA6+++nN!F1M2e1`->#Ntab-dJSaUusl|C5s{DWsU7E!<8jl4>o@il8CydM=BNp&ph 
zxqg#wcHwJ$i{RcM*Mc84q3P(;%aScA4}mg~F-C2oO!-qZoW@I=b>;Ok&j`{9N0d-s@CGdD~cFF|fUubDbZL(;(E`nppfdH>+Rq1k1AX^F=UG3@?9=kj(_ zy_JdYwVZ{+5AVkGvZK|B0cp+kqFG61H78KzQYytKN}h6tQ&T~Q*_sYs5xxM;$ZoyG zp~STf7>`ijbpm6RhhR}aJ6Jq;Le_RUoYGfYfLYY-R#{b5 z@t`Cy?I7q{61b8*U0Jiq+}t2Zf#QGOOB^2+Mi<;RTiM6hFYb4ft6bx^T0R6iA(JPR zZI1irb+i4igg;720KP3rH|NlSz3gQgc!K}u6Gr-1O#0i7ChpQ{R|KJdhMz~3L#Kfr z1({a7k#?uI1&Q2^xqt`LecxeFPJFVvPh$Sq1w%46_X8a{;)jyoNX_HeZOon9y)#`K z$KFdN%i%#co8w-`Vc?Sqz>CzK(^FP%cmz9IfFMj$xu96Gh4WNVj0t8koEOotx|tsb zOq5L7XMh*SQ4v3(yh8i&Bnt6u0Pp`qyUbqAZnoT$3t@bqWu$-coMOB_L5D7`X6XpV zaabgY>tU3Y<8tdb7yUAHxu1$jQ`Fw=nd@Km%uKvxI;vwb<1Cdf5U3O%;4~qTgYuXV zP;GlxPAe;4j^kqw^l3>NA4wF5jEFfty;ekNDEgV4lcQ^8otG5?ou!Aaj6)IYR^DKc zN_E0R?dPgJLRWcOtMFTS6FKw{Np}7uQxHxCZ0e4UBZx^VbEsLGynBC!?^WfJ19qGV zvvHJ}U3g|iX;8kYXx5`*x#hyOeRM;zz{A5D#IceYNf0#jbmCh97(Yji^dy<1@9YO5 zy;NB1i&QL{c(U?D1qV7IN3V#!GTo&K@bGuuVRg<4f#XUur&p*=5AfiQD-TNt)A&-3 z{ox?p;S>neK5{AlQ49XQ3kKtu32LG9C%ZxbNx7g6zi27OP*aq!jskczgbZ2I%DAhY7|-D%^YeWc^T7 zQQ5zJ)nFXgXT(SJCxZSKD~`GmppQKCrRS1DGO+&(BlUu1t-L!kG!RtUv*$FePbZTE z`qM2N(1FCzH;-IXZ~TPfgV*gF&fEYVN1a!<=koCOiC(39>5vrY4#C$gD`yeC`F2Ya zb7W5RAwvow3lRy5AAJZ4_Fx?a0S6EW@E{O)?FtCw4+1^$0STc_T|yA3R}hp(q*^aJ zxEt_0NEiri!b%mi5g`7GgarEn9%|v-wstP24?uB%*x0TzWRPrJ@P?@wV;Y$OIZU8f z!HPtWZSzbiD=fslXD4TjQYi=~4jR5aOBOf|L2Zx;fj|IhGO=~mw_J$$ADSfzGW1uF zWS)FrFHxeH(~CC)=b1p*=L_b^l)Vojqka$JUibSqTO#DR>*pJJ+vMt4;Q=COlE5G0 zX5w+qt3qMDf|1{kgJln0+xfm#PnZ_4UU2D z{~Ef6gos;oqR*ZmFFjJw%)R`UPIhe)R3P#d@gsqmMBAjQdVs6hg)= zh8B<0l5PT7eB+1Z-l@zE^Gydqxwcs>Z5{`OH|Z=$>@WVS349*c6z{%%Nj2A_`(Qyr z1q-VvSdGfVaAOmca+23NeyBd%+D=a9D|Lq@@jN^28_Hpuw`)cDMRSmTnCt97*Zj=g zO4pQqxTDKfZV@Usv*goe^#z87U$=#P2W-yk@;qBK>v`w!wpNdcUr&g}V;{DY)o_a%Hzi|TR0Dq#$nCc0 z|7bMP#UkZGwrJ{`*m?8RI2%Pc8n7Cg%+76hzn-ECMcHEb=W{#3QA9_7c)2apOlgjMeXZ!dp8WY*V7(&m0b*;e^(4pIHvISdR+Du$XN zU)1`)0Vye|?qB`-d0qaA#Bxf{+UWJF+e*=Xq%@|#pfAmgOP)shlDwjrv;Nv~k`MYFtJ)cme~6HZaHYYA3soxn#9kF-zOhp*9VE-zkcr#G6EhEy-Im5Ki?#M|m7vuQ-^2ZZgCvhM6yyql$rePYO1lDKR6sRnFRsnW0+3@NixC-T!x;W!q- ztDYDKofIxl%iYMWsn5^NjJLF}5e8b(@@PH&`5R&EtPWa5D_I|dE$tMiQ#IxO*u38YLl~xXxc}~0W^?!%rlBW5|GsNM3ALTuz7%M`c?t+2xS6Y6 zb5PI_t+t6o5ltJKa4$F1kfaMpaiz~{;Jm};lX)p|8gEIz-wM-v-ve4RMCXzVbS zOM;B3oQ|vb?)hnvp6i2JU8>zvRHfp*ICk2+J`e)vPUU05qeX-zfX|5%t?eU`MO$;G z11*XVGLn{!W7f&*&GAT6N(z5-;Nml+BLTc4A=!>`E?)>;;_>OC>y~g>c9ticPqPIX zaZZLYG|*WqiGo+K#qgBZ#O`Yx!3^b^4Pxq|aTf!(`6#Q)h4D5^6Pk;|I}kQq5d)as zYH}odttk)GMCg+%#7oJu&mYVyiE~539>g5HvT-R6PJRZsQzuLm_WTwIb46=(;IR@v zb(te-jxRAem43G2W0H4kkX<)5D|*oUVXeuyTFGcxL@t*!lQTZZg~{!i?{O@$+n$%q z9s`s3;E&JOKb=TXH5QVM+Gm4@t}vNUicC4bD;5HwR(hkKRY8|EM_{MGovYE7f3zuF z3onvP{6!2cc7t3wyu-jdZ%n;3&XX!wJgn8oMM$@@6JnyoAcRU~@k}hXG*M55SpTkl zk7zZLebyUH$?5~GN38N)GI8=0lYB6J_2+U6B*9OWYfVyE@0!QRU_}%$mSFzb!nJh! 
zGn@9Kiw(cWRI#aO2rvJfo2LnV z32I2TX-h?4z3z#&*DMhB8xss^h)_^22|9(!Ut53Em+%&=;8%P5CA_(ScZm`61%#_V z?EcIZ&Ua*fAGP9>_-Y&Bi%4ZIBUuh9_Cu}42C=F}*~-@R4nlGI#kHji?5dcbQ4C9C z1B5th>li5acO2WHGT~z!+d6*;!Bbw6ktt=fJaNq!zi$PaXqBi{!B_Dg zfP?)SWkyopJ6uC|R=hrpQWZ-(8_Q34`6MvsKCf-7*bGdFeI!;=KE@xSb@Trp7TVwZ zcNe59BHk%3lugH!rao?3Bjg&s@H<}t0zQGvW=gq6`po4BHj!nz*f*8#-`>LXfcK9$ zjnb2VGvaT}JZ7A$%hhPT2AwjM5fO%@XDmtKUjurwmW~A$j+9OW$D^AT?jQ}1-pr-^ z`~*tqgyBou#sBVt*K$oLc~AP(mRjEZnK6B&LyAACWo+fOmESq5eiXef1hV%zbSQUs z&FZYgFf+djVM`RYJBC2Sl(dku0`$49s;-?EtyIn?er60(eb%tqhDGlAUo11@A(RHZ z4UqbWtm=WNJ9Wm-c}avoNKa3m+^On!2xM6(s&|sGHbSt>y6;^l+YC$d^A{614?&yO z6|_;m!vD=C&hsrUn`HwDdumN?B8YAL&L1STngr0P1D&?)8PZq5*f3Mh&C=h|zI)OI z)AAmvICy4g!a2@n1uLl-eicHVBwO|`99SnthV~xFm>D6BA=G}!H~Jh4|3^POHs-=h z>+~`%9E=71LZ2j?&T7MCoOwbTNCj7R;M-*~BNRRL-S`ff2|mwXru*eS51mjffYO#E zeEJU~m5ckv850cI#{F?@pWK5lzwy-}AfPYnVkz#(f2-e2RxP)wF}f_Vv(1N~yzJ=o zc{(9js?xb7Xmxzw#8UEG>wcL)Fx%Yhba)T5qvudv;wNNbl4}{&P*Mu=#U9zW_rzAU zBtdPL20Hr`D}9?DW=;n{Dz=(|r;X=s`Fc0TO9nZWg{}T!7zyjfbJRkD_A`1+sQc5} zrc$1APX>xSTEm8WtLMPJ1&vhh{cU3j@Zvrecodi^Edq0Xv!Fk1W!!iz-cPCBI6F)QEd~Txq%`^F&b_4ozX?5;%?#k^JEbqCC z_B!{iC<=X^??#mP+V|hjZ`F#K7rX8kg=7}9L{DFw*~=G36|`{R#c&oh)YaXcS2o+) zR#Be85^?`GB*DIFZ)|Li8ZZQI@)|XTkf~{CNG0m3tE)>&DA?$d2Mk9F>qaA`f@Y@Y z=15T^fWxrV&z~W}d29QD2I}QAPu~2oL)nRmK|~VX-n{Vr(&Y8+?U}v$N6u{*7q;*P zfGfOKJs~@L_a5L9u%(Z4S2ne^X;v?Zi;G7ApdN|8yVv3C`RE|HIkGfrwBe?mRnzDg_LJi4!M(ckIwE3MV266tm0t)v9q-pHW>$DQH&+ z8T_-Wt6I1E@YWpy0wOy*TT)VzIh~ePdPJWQ;OoZ@IkjplQ#V$0tRLIM27e__)KF0Y zQ2mmUFyG*XiGX`41=xW@K$PcNDVv#)A(cl2)UXE(zmt&x03!NSz`82fqZUe~Ax)k- zG%-9J9vwa4kQJC;T1o|gjsWov&Nx=~vmOk%kZ$$V;jNQ?4Czs@AohQ7BSBK;T-w0QB91djGtklcK+7;H$jFe}})W|vaqoy~G==e}&=(8Qg?l@BPTJ*?^r!4p+ zi+J)p9JveKU41^y>=ne`8;(kmN2bl(L%hNEvdi=4I}gg=Hp9Qc%SZUplVLH~*xKIt zi*<@lD8B35A-kxnTSA7^dHmeGQWKn;mNzkwwYe3kZs(v?!$nOVY)6qUO|_7t%$C&^ z_O;Sjdv&+Ss06cj!~;uk?U)^IEMI-!K+F-pV4(V?7(0@*+fu5Vy{`4O6^(MiK9yP! zZXm4VI=-=}l_*5hfM*LQawH%1JC)P=SSSnB)IZY>+5@o=dv~PZh@5Hkh$6Mq$Xjtq z*OwUQuvGnd-nXiZ>uLR(aM6vbx79{zGn9W!xN{yoytFk~vbTer zeec3L6l7#+v_3oVZ2tZ@kUx?4^y)YjHTzH_(WOyf7sN~%=EBZOoWM8YogiI zylKVctK8?Jp{T~xjZ~oFk=u;gb^IB1>07_>dAsnWG2ro3u749KIa(UT&iDHdDw5u2 z5|z(~H(NP07dQWADi>$b^k1c$8!;1C1#bb~5BmY0SAl0&-JSOf8VHAXgo#7nv6Whv zr^&MI$yl}PSnhIWtUWr?g-q-?@+|RkZI5`~<#hHO#VUS&_v>9gy>L5Ony6NOm*EPY z{CPXV9I7{W+sAW4jwu}Y^A_&?nh%|=oy)%FPfa#vD++mp_l&J;i@&{1)nVf)9Gkk2 zZ_{TqNOR4XgUN&EZ8W!Lh$ERAoOTM6wM$`dxFs;qmW?{Qs#mF4A;CIa53L*-dYTHg z#;dYM^~P%!l{HUi^ZHA2MGYni&t^#vsUg%eF*%#((U0rp)FcmHi6y}_#WVuT15{Po z*3f+}+_kXZ$f>f}9f=tYLs$J?=G|!)t}BMLSKrTACLiHEF%by6^q;2xx62U3_%9~g zor(qbxbr8(Fl?DTW&SXHr(N0*nY9=B#|sP3>%L%6Ty@lfIjWD)v}C^|@q)Nwp=~gt z_Eh+3ehsH!3i}u&wU%_Tf1RvZeARN^8v6nEU6vK?+InIwO5 zaF*K_=%z(V2C=WwZ`Z8(wt?TZ%XijvR=KG*=BS`DL@2jqlgF00Z;wb2r`XKMN*imJ2K8-}cLrJ05DBH^ zZ5mnDL7W8JvD^12O-oN*?qQTosi_E9gkTL7N-aCH2MHn~R#8H4F$5rl%6U#~iKw+> z8g?X#&SuNRf?%jV@Rg4(i4ju85LN1x6Okc8F$cKGR}5fiAs?DCgiC>6AOab zIof2xwi$n&dTQ(O^ zgGI~2RueH^BSlwD5IX+T_0+2)tl^K@qn%b27nbL0rxs_+J{sEedn9zadLs{tLSxH#;x+GXb$AeEp%zF&Rg(ij}cK#X+4owUp44o|x zwu}zoQz6%|A2)3@QcN()-7KswUx`^B`+c@rDa1-!H(&Y? 
z$6kYDGadho`}D6?Q6Hw@$Gray)BDak0?*->efF>`S+-$89QTD%HR5|jsOP+M9Cg+4 zPsEt!M1O~`T~hMPtyxiR zZLQM!)HknWcHa=_&}|MRT6*Tm(1A4RVkFKFmrC5lPR;iIzb|AB)x{eWBoo1bxD;pb z1wVQ44$R1_WWDUWiFLQ48(7&V^l2k#iQbznvSLa5`_!b%!3LaE`()B|}_0%frH3zqX}sTm_wu*j5bo z=D=LzEOldhjbCl;TX%6f9ay4aa5_9Mmu4mdU9ZS`XSs|9ep{){&iv5#*NgMb&i2Y* zA^qEhg}bLszU%YP{8ST;(kcYVpSlopn87l&FC`lGB3Aivs8!yuWbK#wUOM zeBI8{46zNE5=gPEx9Yar%d)W;K?nbWSXp9UoR{5=r;-_XIk2#>1k5$sPuu&!U&0_D zLLy0^KtidOU~Yc*E-PbdEEMCm-~M~AH^Kh*c2D~B-x`Yek)N5FDM&PZilAC9nkOG8 zVeIGUS0s#rgEM04B!~V;v_v-7{rV)jI zSE5ub1#$q*O-=cfWpl?6U%z~l`vW{mOCdo1Vg(MI+8i9sZEf7hfluCUB=Le|$bXc} zdd>e(R)cDP!vU1eVrlmE`FS#eB$9AoXXlQeGX~gmu3b3s!N9-(xL7GGAt51e)FwN^!e;1bFmNd zn?+&a5opcY%lYzUH#K+kEEbo#BLgq+f_HkmbI0%e1(xSkGURr$amfQiMoFY^wFU84 zQ7gwUhIi|t<U1KJbme{^AObF65DOQJponpOkZ*ROJoGu(Qax!;v2CWXb@ zlUoX!&XohNtJC4>s7tg;Z?uTblxIc{@2F$m=nBo^%_afen>G8J_wBVsDDpzsM!cs- zHdSvbn>*CqVAJ8Hq-* zZpNeJ6#hysYAzbRx$gLz=LB!^dq*xLn3>?4=mf)h=IP8gwFb9f#m&WiQXyP|e_k|L zZm9ua;_c7f5fi4sfZUiFC)9u)4++#W93I)T58%-;!9Q2!&V{vUG3Sh#T0vN6fh*RS zCyrmB4VzbfiJ?IT^#&5<2-d8fGr}9ys;7>L7R(Ui-z|;f1@-<}uw>5!mUtlL*uUKa zfO~s;^cm?>$87737_pJTC5`L;TV$58K}M_@z@=O4K>ZrXU!ty0l|R#t5nEaeIOhTY z10g`|NdyfrerJiP{`)SFsF3Y+7#$WC|KPn6YB$D4^jhMf*s>TG_WA^kBJ4zUX!iI?b;rjkbkR`~L5S*`mLK`XlVKVv% zdxNs3c>IlQ<)tpBRO8%rCDT^rccI@ihFs?pKFBEGm%Tw(CD_W$qN5E*dQ4LL{Wp;d z9VQq;%^{J-R|k0YiJ*N|KJv*wG5EE2YLMJEg*Aowk}Pez5SFu}nO~OM^#5KrSr!W# zsL(6U7qh*9?Xc!esvOCdNjY%HDw#1^z6xg)#?z-_EKW0OVb`^iH?t!`m-=j=|0#q2 z!tFALx!cBd0jLR&MejJ+~Xes5}>NPi(5-o0#jh2u8~FH9%P z&eph`ZssF7#H~S_o&~502T$s@zm7`5NBU2$IPMNP0*d!{?teboZCcEsdtAQ@jr%mW zK8P)lM1IyB?{5l?&%#ECzyHdspeS6>i8kBa9C(HHT>ipb#{MscfC-h7tkraSL+;>E z{&kJuE8EVZba8WSM1*P|>XkPK_c{TD>0H}-7;k<*QEM#UFJsd+$jg$rO5ln9M7Uvd zI5P7v8H(TD`Z0R4A2p}>%g#BT4*nnpZE2W3q}XKT;5(zpMXiD>|QR#mf2oT`m~unZYJL@y-9YQIs3Sq zsgi!;9Nj8O>Au$6i}rK6Yhd)hswakO#b@M(I~CMjwd6h}q-cEP+;pC%@*6%q|2(U^ zgMi5{79&Y1QfsML>u$mB$Mg0qZg09S>7#M6PHQ$doHd*3OwlDbnXGV}q4PU4fAa>b z^%^R;VKG`I|>C$4V__3jQAUhx=vZnx&+#XzPD+8-s~BCg3|fbtq`9g?`~J zKR3`xnASgycW(2SsFsduPCh3T{15R*8ogq zG4U5_a8Va3kaF$@0LxhD+WkkWA z94tw>uy^?jWR-&9GEn1Wd-)^{_sQPi}%E#?7Bkui2D+scW3G zxLOeBszLGz1meHnqtKq^^E+Sg@%ti6lZL5ytE6m6{z*ax;OrJ5#zRMd75f%Mev>Av zfzJb^2+qgd^!K053%+gor#WijO74gD zlOz3^O)`AW1NIC)MVB5(xf>|DWU)<)Wh`C?6&-K)Ib1*WZLuxCVbNnYdFqd;(WU1m z-I7~<YW%8_{KmPt(E2Hc|Z4oMMjI2 z&CXN0douEhBq5g9q^Ts1emCM*mI6ubc-m3U*62ykswZQX3QhcwH64>dFAK#(KJ!%oXwHoXux?M#2K1Gen<&6e^J+})& zpIlxJZQE1&?-1c*M#)G>golR%&P?LoUI0gA%-pIy3;=uri4enXeePd>_XsjaSTm;s zyJCBJdu#2ilCYBQZa;l}BKZD?b#6 zKQnq?&%?23juL9kOwWTq`dJI@mEv@o?+sC^rXju3<%?bb>(ieQazrQac z!b}ox-nat(H9&$4_uIFJn;XDa#oxt`8x44-TC}SU4ora&ZU448tEOK!CnFINHT9}F z>;2@N}(>tr@uea=rCS#Oq#Rr}fK0?R>f~r)Y&MCgkDRpI@iyI_QP7 z*+PoYob+bDfB8Vt)a9@6)~PvAKL#-wP5d3Pl>%c`FFvEjpdb6@_o8{Qv-lFf?_O;5 z5_m>)zT1h_L2H|P#B$-rZ=Rj=%}LTUR;%;yIeQB2YvUYw(X6B`p`1iZ^Q9I|jNyRk zL}n}HC}|H?qJa9;&#BG`*2L7mRU?`XWYm6#^)#g7{B*qTpF0bS0*f-4r^PPVnU{o5 zmh7Y)$LY52gHRnhdM0e$1YAxvB)m7q>kX%S-}O4M1_{*G+adU*ryTJ4IO$0(*6aHY z;T1k_qHC#X-z76q9qq9uv*$zvvNQBNdF`6cM$h+YOy8X!$17xF`ZDlkEU8ZGS6BY^z7@7DLtRud0RBd@e0d z@%H?}s2yII8zmj-F0V}DTdf)2yd8^HxhBxYZ(Rd|XQUzZyDg?G(0L^^IK%NU8hk@~ zMp6%HyYYx;$B4zaAsx^k+6X0Qv+AK&lZtSqsXjkf!Jn>pj2!S}6z)7XH)4Y}d#>N; z_tt|%!yxxx=U?8&!>M-0<`nuRFm`I>L&{eYQYIr}Jbdx9Hh} zqA$2WsL2y$ym#%g!q%KPlCjb_f<|VL`-=VVYF;k6r#E;h#~F$BVq8Is&P#C_?W%IpeK20yAMWvH#@!9M$-`LSPg+}@itw%_)p9qnd#=tg z&@H;(6Q=FMHSU!}(05dzSRyv7jun=Jt)~zjG*mxYgj&~4?`AFWdy$(9IH2#)JRzCx zre@tYdsku<5LR% zxqIu_XQMJYem0tb2o`}aWpyXtD2SNJMMB*mwwZUCKrVfMQQO1;mSP_*x0$~;Gx<@i z@KRM>-aclY;Iys-VJ5KWHS1t(|FF}}N56W)vt1Ih+nVhR^LaHCI}AZkwHcCbMcRm~`p=HJpgPK6BwP 
zF7%GxO>^l8+S|6~?-p)}1x=b9WFwHn zeLf|q;-=B>o5l!`K59>Sb5Y4E!31^$;UYr>MM8Cs?~eZUYd0&^I8XrXO=aVl)2&`O zWI)=+MHBCV{E8E*qZ})Rc=A@9{Fl2<*we%0?N3TFQx9%fXk!cMe!Eup9~ZxYteR3T zAr~&MNTgVxz<%F5*)SJ38bP!gs;Swkb(S1C^-oW3bV&GsEgsv>mc~zurbdiygqdQD z;9f)<-jBZ~70(2dZKzGT?IOirarYz2K`WEweR6Q-U$0Vc)rfr*dg`F=hQ|1XN#lBH z-B^!NW$uCc>*j13)qQO)%Im%4G?ZR0U4`QF2k z%~uQ;ORBs?kE-tckYQOkXFsr$VWd_nLZ1@uU9unoAtD4Nlqd} zUUy9cX5y36)A08{)Gl_@S&5!XrqW~jXF&7Mt*$&UDqBYaYUZgXqV~J4vAobS^ zrk#nRP*A{xZfauDJ|`cY?i5OENt%MTFZnoQ;3dDw=#NO0GA~fH<}xRfD%^MAKI`5d z8Q%|i<3Bz}++ZW;M}FR{$;d!QQG;+;)n*4>{waUMDNa}HO zWNkFFdpKp}#(ZuY5K!7zO@gMVk!ub1d46O^lNj;gVT&M%_&{j!lr&Ye zfwOWgQ~7LSg`3KVq>}pO?>tq~hm3)U5mfPYwfCR4bQkyFs~19Q?FwsM4jWKdBm5Ut zOW&dZ*3c=HcYz4}@0?nJx^}?Hb~Vrg2&VghxkVvGj+B@fTSUuzuG5ck^qc)_8n08@ zP!JKViz4W+1jeR6TCySMM{@k`oU{db*`OLrQHKrI9caB!^r%J?x4(HU^lm8{M1=?b z?e)gXRPK_XM!elNTkt#o<9^;bY(Jw)6I!2dxYFwH1y7og>I)M7xJ@{94-=m;4DMCN z`ZPFf{Y-72Ke}|QpUSLBef$Q$YSU?EkI~NZTiPfyg#B8E>FUEnOp?90# z%nV<>hT zi`z+_g5Sf`rf-sHD*)blTi=tntuaHS*Kb{WvmeSFJ~F`@Y%mXoRTS$hlHJbEvq;KU zt-2Rnu~@Tznu$p|+q!If>EIuo9i6r-Toh0tP%Bfo;?6>UPaG$o#q>7JFL4#r`!9sk zr$d=iLkZ=shtz%NB4LM($ezcuT1c{7WUFQ4;-a2?w;rOi1C&t~04i1TYGC9FplGp> z_&uGAYX-N2&)+7#x0cGv{YOAxwO{u0q7chQ2p_4nO}W&c;+%z==J+;sWa{e>soL?> zR=~UaECvlXH1)G~V(>k{Ur;tGNq(DmvD)J|6I-u785G-Q$t8CHEjVQLGd6X`W1KkI@SLCh{)`hjjgghCLqwR1N+HS){` zJIO}hB#aM*&w$CmH@U#Unh-Ok@Cj00@y^~{VGwz1f++2JOx=nQN>}g2%95Z2=9#^P zIJp?wXcKTb=93VSkre_Vw+X?qGd~{)SJmu_bh|f3gZ1W z@7|X-5A(+=q%NBg`uQNiGGZ^{Vlw|bKK?`(M$jJ%_j@Aro8_QWBmodkil=<)_bQ~B zI>0;sa0v-*)Mu|rNn;KY8Olj|l$4M#@9*8iyjEtUJ^}bYZ_pC?FE?*y&k`BDiZ_?x zs9UQjkWgUe$^1u21~?T!m4>iyJay;w_VPtAk3Kn*_ zY)qYB+q`us2Ts-gqy!8)s9ohv=3tR7)jbQB4(ADD=M&|NV?XZWB(nkOoJ3N3 zJ^@`D`JYt*LN3u17C+|_b8W?*xpr=uGb0^`=8k;ppDQmLob%DEge$&oy(QS`%j!F? z_=9eNiKlG5`+J9koyK`54TC4a5MF>Ie8AjaP0CsP3IqN>H6-u=DEqJDEuQ=coe95d z?_Ll+W<%7+(oljL1$CTtN}SsbzSTcE1n#~)EW8`Z4FBn#QRp^6ahVgGhooiy6%n;3 zi@J6=;jqNh`%=uV(}ebsuh zr2O=h{?!|e@Oo;0^qgvTpDE%XJm4`tJz=o+h?(FtZ2se&$IE6nS^I9@^&m<~|)5V$`do_5v5F^!yGle`S99KI`OU1Qq1+m{xt` zNS>YcZ{8o)ABqe;lPmj%L{WMu6yp9A4$FCeTqSm)9Q5PK)OB+mWYnvE zJREBYIItn^GwhP6x!4nW)vW)gR15p>L!iS+{`RpL03-Md2zC z04x=vKXi5`!W^$p$}r0{!TC@vbFhA}G+E9-`A(jC%Da5lu}mFYGUK99xj#zNT`Tg- zPBspiU5iiri;2?+vJv=t>HbYL1=B@XPEJ}|Qy?bnnK?9ivC3)RxFI-+qoUi=w*MqV zLP6@oV-X@{n1D4=GIE@k?^f` zE+2?&aocRYj|&9hZJXxK-<#GpM1|vLeizZRc6&C;;89t^@0x!%cJuaTso@Ola9Tef zql-OM!ygsDFN6)8Vfe|xzMyloT>^5WXK(o0GKhi)=Vp>o4pAJj%tFxp(_m&UKB{AF z=SelQ+!imI&kZ}w)Z%MoV(V_0RQzG=x2FFfxOJ8HaH1M1ckY)Y3%zPCANcrTx$@-skr0P zXH~+Ih>X&@j$mSwNU0Mkx!Vk8w`fP8wURh7Dg0u6Bk+>yHzXW z#lA1~roOr|p{LK=Y3~U9Mz---wY`AvSH@9%wC48%bnPwKxnaC8(7fBE*NZp32u`MC z*sko!W5hu`Xc!&e{RJ#?OzrH*B2O3Pe0+Q$GLI2lsEPPazUY!QckpKT-*C#rfU-~) z*+~75+L72fBp~>C3J?lVUHGfldsvkMv1=N$@fX*$(Z^p*LCPaRc;PruFy@g=JOMe3 z)Gxy%RujUi@-7OHNDx@!OR)$yQvRh4?;$|GC5T!-V zX8b{5^8ePdz1KQA`&(%*FTNB{>6}BY4XN15oPZJ3%F5vKLBpwuOYoZ}lckkit}yLX zYz*P5s$^#vocC9+=47LBFfY=;?cQ{(kT&+lOIB+P^0~+FvnT3v0;SV)cO#2E^fFmJ zYc)7n(=qmzv7fSDEcjVJHVBPc0|u#i?n&l2lJZCb?%ka-F7({52l#O%6!k2$awo6Jy%XCHP_~|*dEDpQE zymc9wss$%PDqt5cn2B+{3;-uZPd`Ox%Wi@m3o1Jz4lCd1mbIjRAR_#u^N2mSAjW_@ zM(EbAO1?RXG^+Bx}*iN?g!!8DY(;S4$s4eAN3)9XrBU2XX7!1$pAoM_R1>J>u<1g=Y z1r`(hAJL9SWhg4Yu}+WS$k3XUF`vYpU(EU=Zy?}Qdh<)Akb!W_2R9WQ`$*iz0q8O# zhm#&qcnuU?(=%#R2;bd;)ffa1vGw|ZGtgaE3($y3hmRa~P8XDCDx$G$h5Zv~>OlCa z#MHL16TtJ1158AOIpUV_F}?P3odlUfwGd+JWC5!_Ae)cdQ4G{-!~X|Km-{MdC+$loA)jyw-HD)fPd<2E6c$~?A8==Y@WW*8O zU$m#;cCraE7}Gi(HZUFm-^r5g$`cO~NmR~SEY4?A+vvq#r>q<38HE;X_;d+-yxesvyxmE_6(6lxNH!qYko`Aig-+O)#*bDJkPCw;- zlA<_DAj0yyr=5$psOcz(BS;+AT9e;c2>H%OKu0K}(ab1Tf|b(vFTA+jj6#KK@sXO} 
z3_%>yhFoPcOV>c+UcBT25YkGqLA6BOw%AZA2*VgaWy3KC=Wj2pO8|dION=`87Q2}Z zdBcySe}J2+xg@vNsk0?{W9D4}_UN$I^GG{*3T&7b_7ss``GaPpwbG=J1>VFSq$;OINTO#uBm$=hz`?`_{iGS=WT zt|d?UpxV2@qHREVM~D=afb$U(wE7sVvDGk9uY1|o~y!oVt4qyTg zdJXJC{UUqE`h$yJ_k=~V(#LC8|MDkg6uRpq5&}_x;+^=9tq0&}G69UWpFY68u~W_h z-!aa;kum{i7Sjj3`S%~3NO?leer1T@)mL01`x0bXJOXrraUhpYFZ)@)kZ>~K%zz1I z8v#ADb9Q)Sx=57gErLGZELelzAc#5$VdcH5}SvTjzz*JS5=|+Y;xH@q3lE=^?>n z^QXSIO(MBC{6HJp6#yhL^{D|YKRhk0H0`rM(`ZLpbP+3uA&DhC$WNs)gN?!FoWad`9ejq z(Vqc)Y2ZAC>#D~dPP{t2A0D%=EP%MGVxbAJS1;F1$9{OZ6nbU<&jZ5a12_t&{Gs9g zmA^ie2Un=?dH^gN5iPv^_A>T7y%D@FmWmz8`~UR<$kqh-s>2d}ABP(n`RocM123G? zO$JoF+7BxftA9`Xa<}R=uvRH7Abu!** zs4)j^h}%kQ-Rj5JyI=6klSwQJpSltZvSmN~ehOzJh@LVm)z13~(wzcAbAP3WzfT&t zz8dZ{@iRwvBu;z*HHr~J`1P$~D^aPSfzy#xn_0Q5Z-QEZ+(X05iy+}d6oucVN#k$( z>8;Q6vM%+w8G6iJ1LiD@+R44pAR?WtZpNwCj2oR&xa*9SFRoNeVLS!DJ@T^Ez0puf zr881X6OZrad5RR?Pg0K3_j!7GTKr{dvEDu0=MLc7o9mKj2vmFK=_TD5Z&DTF{5)9T zy8>4OshP-9BkkoX0+;vNd6TLhcwxGPI?mU4aid(_PEFq&FAChGqV|BltZwf@ftny- z#d=m2R-Z(OVNjE+rOQl;2@ZU{lA0_0Shdn9^(1(=v%IvN zm4diy7^cdmw%hBSPg5wfI%Q>n>v8$WW{^p?c1OYYEbqa8T9g#wBa}q2Ws6PJPo1ZK zPLBPrtQ1{s=|sUr4xc|le1Pd9Na7D7%x`peD29WVt3n~10=C(51!u!?VPGfj6}--d zE|oOc0LWW$du~Tk$-@5cuHrJE^6plbmSptq2i5->kv%2>5fYB0wx^*s*^H2^tKaIn z*;DdgBHz}vi!EMt=yw@@FZZ3!a1d(>Vu6$Tt?p~|EJ2{g%Ngjf*<(cF=>m^w!MiUl zE{fqt3%&4^YBwmG&jWk0m@9uYZ(;bTZ2$k_mPX*+I0{C{{4vvt zKMHEer>IdtIw#wgSZun2Eo2L-t@rD4+zX9Oa$aV8cF`)`iI47JVBwaK}QU$pt6IS2ad2-OI%21ZGvHvk`Ym+b||nD=X}1 z2#pl39sLaPK;n8~c0aD04T@_6i2&{0o zgE9E}2eDKSUMd*eU#7Fk4bi!LZ@>vj{`KKq_Az(cy&0`-) zQ_!MXZBFrsY2XF=B0m3>(O07wzL&lH_P%*r)P2e&^z0yipY$1o=3dk^C4H=FkC z^$BZ`izh0Jad=p4VjslMaGT2sV%V9e3C^}wo6c52-Kezrs=O&r(^jF6HDh3 zZ6`U<`(>-ij%PGlGw-A~0j-+CCic@}9ce9EL36Hw0AD7qys(Ts$$c*M+vOEc=#|&A zHYvfu8|>%R0heZJy}Q8U7TaKg9!Kfjc>j|F|#p6d!%l zS)O8;Mc25PSrVw%8cVT~DDVh(T$;TDKNs|g7{$^fjSHSWU%xr2RUA?CGb4m?ChYwI zlb-KR)809pBhuw}Hx4!U7lK}dqg5%5;oWl3^7NnUk}dHQBszTYei9_k7&k+X!~3vb zG8JJ??&UirP7tzH;eM7VWsK}&z(^j{_8li_B~C;uDL2Rz>=093Q^RRd%tki8dk<7= zi&b#m#OEdSC-GAn0oAI2rAV7ric*$PwEhtYxP=*^KcFHbkH3{;26X@) zznCbY7x#@@H5@LY`*k=WTJNKeYi?^k+%7<79eDWv`^iNJoc2pG`*suEVX=OQ>u^61 zL8k0Nv9NlgPM+rKLq(Q^SxwZ31cS6YN7KLTAJ?rF03)Q#Q!oB0P@$4vf&ZhD|?Og6n#t?nPvZPhVrpy6Us{`=Ybr zF*&w?A=|)Cd@b??*(X^^!(;M3wCdR}lTHY=kZvyQD^sjsuV_)d6!Vv6Jo~aXp-WP5 zk;#$bv_JaTaY`-|4nv<0HBlQq#Sw#Z`Ui_8=lp%TK3O$(pwWU}J(N5zsDO?`@zYr$ zc#TILo+U9tyz*^JBI0ggGXMF^?{;bJB|-4zJbDU9@O{Qz*PpCM*KfQB^aQ+4O>@#M zw9W;fee!$KuLMDCioZUap)}^li#1n5ua`mtTc6S2QKGS&KQ)Z`M77?woQ4Jq-<<^G zvcHYhd;*exX&#D`eqmuSaPh3qP1R`d4$!$d&MhXtrdVe^uSX}dMnp$X)wMm_ zY#2i8JALdmvk&#JB>Y?dW*EbfH!;vp8_+<81Zzyyx#xM*I(joC3|iwQ2kO>Rzj7iQ z(L^>130#@XlzBu8_8jnISLvSw-TaQ*{hJQ1X-xt_J-^=#%e-@RxNTTz%tU}W(6#KTvbwJTIexBd?? 
zQevUsqMXh-k$3XqyHx8p_E-4G8o(;#1CV6JwA^A9e}m-~DhfCObKM&=?@xAiM=!RP zu%JqpuTJV8Op&kMBm>SmJA{hV2TkvN;v3K7o^uzam%9%2WRJ0IEX$oSFXF@ltH~Sb z*Q1-$ctcrOwRh99(CIa7levFGkEP=mkZaRHg3+oE%bKFz+>F^Ri2As9Z)wlH3k0`S8auG{2GQt&DN3u@I7J~WM@Q^pU<)cq<+dyEyHE7XKJ|NjoL*j*rAc>= zt>;Qq&`NuJKJ`<0`}Y(~t^Ub!gL5~A`&r~MeruQ;agIM5ZbLMuY#4_&U+2b*f`Ym$pF2xt z7==@GTDUHCKoI>wbQ%I6lAWo03$jY$>2{7NTuj&W7Vk(FS$|@LNP_o&`PvqnB^`iKO_tk+sfK}# z-(<#qqw!sAXa^mrATRTZ2NVw>U$v)_g$ChE^hx&n@o@ZdwJukqk%1gSdp38ugkF7+ ze?mGZ5X{plf3Bh3D4P2QLOwqxNzMy38pB)jTJuN^h|B%xQz=a1(WQc&gioD2C8C+Y z>eD4qmTK2u9%Hiv6y4=FN4ivMj1gUs9?0dh{w0%qoG)3)ovq$)DV5uqXZuK|ix@DVLsBzp$8eFLFj2l(6-aoqe-MSU*2kPUuntwt^lmV)_cphf!T1^PK zID#o6B;XLW1rPu-sevqkW)+xE{^3Up(n>x!(%)G}RHP}Jm@H(*^dkVEz+{`|#_YI7 zmbDVFZ*mbi2e!orNrfniP%OO4t>dk|{+h%=D-@I}RTTsyx~LN_`xqvQrjf?iUI7jD3U)c?Ie&NpFEw{295( zSU(=0_dwq5%irI@h|nxT^LiTNIT*Lo|^?z3hfp%LN9OkOfwpcCVnfMJmxBGo>S9@ptL$m<%Z z&6)N;(R3v@@t?yXZ=j;0Oz=Ou+37KhZ@oVlpQOW!r>>fw37SyP_BRzk9;m<_bJi>N z0leV5maJqo`*SJ3qY0z}i6L{XR0pb_TLK>FcQ5sai`?6^cB`}~Yk!W&dci$e}U z0k*-F!n7pENuq)@vNc4{-MDeC$*sgMbXzav9v>S!zp}bejLZZo2i4-EEioPmoFJ5Hfe zXkAYK#x|=pWklonL*!TmMhI^E)XQkbGMqElfkA9LJ&^V!arz3S>-wu!_eJeiSBB zJ_M1ngZ=rhgCYDJ(-_a^eW4Em2j*dDCpn%F1s_U(WUYj_z2hs-K9ARZviv*SQlC?g z)F-5biO$?;xiqma$&eblTdOy#5@9iz#+>%zo0biv!aX2e6JVldv?oVHALK5 zAS|HDywR(6i^ypZHELIR{h@BR1|}!cW}9fJC716T#cp&`F*KAsyisL4+9ACumKeNo zwu=j}c|j_nU0|_=f~e5z7hoMH$v{riuL3n89r{tIB3fVk&)5Ow_8#qs_CRns?oeA8 zjT!Z8@0BqqHP%7e`}q+GC_P@G85|4l7D+|||CWfav~6U!HCp&yT=%EP7tyDdudNIK z&HDl3y+${{qHF5@K+G`uwtRC2O8N4xkL^pT7`@+HANl{N12Hi21mGBbjVP&LDMN&^ z;VLJXdQi^%zNOJnuLs`)0{4rm4h8Dn+;eg;;mw-_gB=9i$&w@U&&>*vcnNBM(yV>K zm>Y!rncGZyDkWC$x^x#Pnj(!pS7wA)Sk!ONjTCTf&BdwS%wVAI9U0`nsdJQml>9177oRIY-QI6_llECv&nx%&xV<|n=zNP6g} z(3Jo7vJs43huxQ@jpu*TAJ=cxZBQ@&dMaWH>6LH!*}V2@3HUSF;ygmVrcS={lbRot zoLkN%Ggb~d{rtixA7^xbL0&H2xJi9WJ=()I*fTX5BSn9an|5hFSL|_ZjL?;v8BR&8 zv&FDl^Mk*!LQ~gHHd?ljP=Twe#{z7v$Hr{g%r-jhuOYEgV$f%@K)X+#tgET(AgTi^ z8p`#HQjayR*3??7%18X!#o~PLbBTMD^e)tG0#h zNN&vti|k0pe@VdtbCyu=-DItaFn>?^=8R#6BYdlQ)nd~4lKmg%0V z-@1n6snQ(~OK7Co7vO9e-wQdL7Sy!79Hi^h3kyT3<6vIdMDt(3`|Pr3Vzf=*LG7Cw z^(y^EkmaV36IN>^E50xb8t|Xa2rPOuWM$_a(tzTc=LgCVoyP=j$D2Ja%>f5~XEUq& z|1R(8wa2r>Ur{%X0IOBEL|8a;t46&dsL`2j#d%7uli>n@5J|o@!xz;a0}+Fi8zLjy zJBi!JW35L2Z?+YFYpr=`&7q;+7H?0QG*6Rl2@2Nj?Jnvp^BnsAIbYnix9d+1^KN?B z^)+!2UKOlVi-yx+pCeT7v#r{Tk}=M<64iCCFBY{bZ7@4zIQlNt9%JK^Nq|>J()qvkm}lw6*J#c|doD<=5)tRFD@IG5 zI`har)>M4VHU*+JR6|}l4Jy-dDdke4NcVjqaULp%mF)Af9hyiM)(j?Gx&=<=?nwoQ zaIvG^>ue!VE(!^piXFM&-_p*3cT<{g-&MyiXO zojXOR75s|(4pv<2c3xAzLr)u)^&r>{nx26Thsj~+e`SWf=mlKe47(bZ-E~Hs1i%gi z|2KEAJ0VGi1EL04zXER!^(QU2x{bRjR#YrWU+#DQGPl0pv?$+;-%`{Ln__Tl5R2(O{CCOpSE-Gv;NAY`>BCjHp(^?$0yNsFj(hPS0gHyiU&``I7 z*BcO(8gP@qe|XNZ?)7AAB`Qa&vDkZX{Xe|DbyQa0+BQlm9fG8!(v5_ql7h5IcS)yo z3eqXv-5mnbUD6$oba!`o=HuS)JAQkgamF}*eB)b#KLlZ|`OG=jJ@2^g>&hPwYJL02 z$|M>1nrT5Qp6VWvXq98F?`1?zn|DT`Sg>KrFMh7JpwF&kxxS^-+;8?hXmBju6r@;! 
zT(W3c>ai7Bjz&)n#vw4rohaX#tA`5SKe!N7lBhcN@M ztT{)5z9xBAo~V0yWq1W3A~Vc*6MXjX_?JncttfV%Dbuo_Gl~!DrrL)&+}+IrrTy_J zDmpJ`)g0IRlinT0^xK$1Jmx`Z0TAkU%)j73n*Y$XmrY<^@rm?DP?$0|rwG+O%!eW6 zH2Jw6ph43`rdpxvJ-ghY3WHE!1(6(K_50o}$F4~7P6JO^!$O=N+@`zo-+kp`f5O>L z)(tyUIJnj&mhbTJ@XK0m%_yonuJKm}law}4YyG0k(MqkXI(^DH%#qEELId0hWwxYb z-nf6pcF(W0uyJb^DY-80j$Kq-KC&KyiwhRfuBH0IHPCX_}5ss=?{Gc z+NJe&I>uQoV(b1YBxTz1S5xpmf9DJI;anxPZI47$VsT$cg*I1kSxmu&aVPNmI}j%D z#i@IlPnTe?&Hc7&l(87~E?fuj8TKJpIO$h_89dyoegkji58h1MQ|6w&P`DnkZWKtD zYSBAb!`!dBPGQjxs?;37uD|aasQpy% z719T){|*S~tsgse+1se`ui;b_hWhZ~21h#=`apg%W=00bfUSPBT(%^B5O<={+1lco ze5`Znhhw;vve&5=LdCBhWW|0kO$ew^T7!hFwpD@q@ea)tS2T5a`t0m%;UCMJ$<}#L zEgts(wGC+Tnn^Pu4upbI+7ohE&x(CJ_1M(Dwar*z;Rl+yh@`X;hvzi&ugFnnrNi4s zM69YqnjmcMPe~u#rQeoOqJNN+3!qy~m)&F`Y+*}R+yT|ZKJzeJiKa+Vh(Tq6#A|mc zp)aoe_*82xHzpk7dM2ULvOyJV5M%x~rb$m;vZs6d_Gwgy(XLLcp_!yVs)knV!=uL} zP%x%DHd|=XM$|cK=aE=iei+F&cBNOV?YWl3)VQ~@%CAtCU6hD{^S7x%pxT4+T$^l= z=Dth~<}1k8@~4O zS6j1;i`=&vD1oqt8!(1eGWK8+;JkCz1$|`Ko!9tly4Hg2DpYizaBv`@jwslublL8k zOsF2cpHRb?f*!N64m5j+$eR$Ishz(m&Gog&v$L~Pa&_riW@53{X!ux?%m%OEyBc-^c*=4NJ9 zObgHG0!ax;@g7^=tu2!~(9eWM$jPUBVsyb>;F^M?cUkC!#JLl>{r9=3^a~ZW60ghi z7g>c3wz~%h`A>{e&Df&^zh3y~k9Ro$9Fd`SBq5l2);LPnPD zan$_&@r2?u0$st-IfWAb*0lv4q&bn3f&Q$(_AaU>hYMA;}bCYFS*=R`}C_ zIe*SUu_2h5jTDffv;*{ zcQHAe*#%?lFT}@@)eIU5z^FN98yMs;KalN{O`c9tuEwrlnjJU{%D0FcI;*}pVYosB zo9s;*4`V@Fd7Anmt+_#R%mv2n-BXh+D=+r9Cgq-IVeMnK(IhJS6Lw$_zgrQz zUiu#_fQD+dN#wx^E43<&rScNPUpMG3O zPvhpqr{Z?w3yXt;s)z2X4x@R%6jLO*ZZR3dMSxH%h?8mAg6J55-}w8~p!=?sud!TD z<7=CS`d2#9HkDxH?T?6KMM_VRC=ZUY)g6TX=0p=?6y$l2k%9}>c!CWJIU z-tMj9OxJ~aP;b84w$|Ht*I?SwoM5nd%A~FE;Y3l#yshN6x;xqM;2b%m<%P%Qo*cf1 zIe+>nS3K)1dQ#W;GyL(wp-8E`(OXlCv7TRAK{AqPojL#Zu4!4>d@dOt*Ta?F9qkP< z*p)0br7OtKy^o&?Yuv+1w_G0lra_=d2eF%P(5u*329>-A|6^P|5>TU7HfDlH3e@W; z1St(XmT56(#b(o)ch2+Gs@O+*Px+~h19lZ59OEia@ggt?9a(oD-))-1Yh*eQmE?D$ zZsraXIC4?gf9Qr>A{y7#5l?OVm<1%qxZX8;)w$XXd_qHpj{D{#6|yd{Nl& zcGBeVsTj)-t#X=iRK>;Bw8)sxKCD`GHnxx)?jMC^N^~cF zPZ00j{ka&A6Y^XyNx@)1Hr-j)%!`Mq+6Ml;wFF~brl;|(FkR;xnKVC$Gx;_tED=}3 z`F^41J(hH*C~MD0=kmFYBDexLqo0sC%cc- zC&nEqum(Gt?l*^$yRB6z>p)UtyXkB=dw5v30YJ-lf*Hz2H=h-vtE$q}osUaJx7uQM zn3^+87!69l;$dN!$fvz^Ki9LYm!_Esj5yWD-s8A^^9jX#q`t@oRk+)y!bmf>0i!~Z zU>G#?RgA_#@#QG{&fPkIhYV|7Hw&S(BU(<7=b!-)ZK6i^v3euu#f{FRBKXSh18zn+ z?SNlIWp{(df@}85^WMZydycd17HR15TZsn@^D>e(pG6PSkQUgR5m4ha;_+4t{0`It zX`a3~Wo_p&v@due=ug<3UU(}qEnGpcg^yD zL;FmwJ3AHs#KdFzQ$=K`hK0ddoI*|u9TQsniaCxKYiY+k8}D;^gJ{IO$%G@aHA7%^W~Nn=Nx@Tw`B z)hP%Z;Y2H-m@GwxtG8Hq27Ob++T`*T~%wr~!YETq*OCyl0L7i(E-=5?y zH|h2Gkl2+q{`@1QX2i!pF?yzkfQ1LZT#@|q1v`UHHQg`mNogc=(<oe+{DQ|4xt!=a!orXjDLxe&&Ja4 zrXYt5J<=qe7W7yCn3t%sKp$%?=cTDuDy4pRTx4d$RZH!etY*sQV=uK-nVZ0Eq*wBQ zz+{)H5Uxz5_zVlC`J{&!s$oYHAr#mMiivtRNblMj^g5qVq%8a}ju@?KzFrOS>&^p2 z)qeqPgOr^Q<4P)8Mn#sO-*2&c2?=Mq`EtvaL=j`Q07G|-9xp=Co?p7<#1}QeM1(=C z+7pRc&jxLaGCQbJR?E6SDc)6XZM)!Di&tyu)hPqv^a6~Z2kdT23a(F$jUn#=kxRKm zyyL?sE!P)p2ZShS)ZtU6TbUD~4TJ>Sat*j2HqF}=IF$2aD@i@;N;Mj<-c9TM#p+i$?N1`{k>1~3xbb4}`4c&{L@8sg1ag-y z%Ck(^vLM!&=bv*t4-O^UNM%Z;B^#ZYfkt=7^Qgt2(GdUoQsw=07DC2cJ5VW$WYVr)$M z+g&8f-Opu@UdJ##B=$|;tjrvk@fNX-SbT!tDE_DC2pV zxh_3VytTT?fY1Pl^k821*O4Z7g77lgk!hMH^e|>Q3i+Wh)f^K`u(?5rxYpiDF+Q%P zT83==A}sMPlee2bz;#5v!`DqqOMpr9I}z%|9#KEZml^{W&Lr)ZM~EGR)vnB@q&$V$ z^p96}_xduu67SufIuu-?{*()DDNOl7ttFP20^f+UN6b{pG?^6;KNzxWWxi4+nuwOq zNZGdWY)8;Q%(J@c%-oj3^4D)XLoya&R0zvadU>vwV|eApUfAZ%lWPIagldHEj2cYo znT51Bckn@A!C^QT%$$pgixF8LJPNTKgI`SLo~K40cE-Jkv%v3@Dr+ioV9`|ENe??E z74R|>AlUmf88VGWBqykW6RqTVT zNHN1^l!4-;W}-n@9g@_CtuDbG^bmTO>AJSN^ zoCp{45bLmf$-8NTN|>MDjhk^xJ@lqkQ9izyZ-c*jRU~OtnogFUkdZ>idTYQGC-#1` 
zH4mcTSLG}h!%n^8;$!sA5mo5DkD_+h?Ig_sV)j?(0OTua9f@_Z8}dyH?w#LuRw9oPHfapa6}d{) z%meACsB@};UkV*Z&rggMS^JKZ{cV}7NYkJDNZknzDAa^~x%tdA8Dql?X$TaS5se*h zc#Xr4PhVXNSLf<{V4B69h7AdjHf2<=4rFeGP zA#W2tEP{?QmxUHnj1k&-g6A)*yYG2b)Bo`D^>J;ET*+doH@bRp7)^)=C)QH`*y;Dh ztF-d3ryblK*rPNkR&I(W$m_8^KX`(!;XeRf)oD0OGe@!9aRaLfGOwQXOuQ-qul#mO? z`;&IEPIXIj1(jVIy|<=)e4M7N{z)#{Ps2fH935Xt8fLKZ5&x$2G$`CQgn08MCx&l@ zU!Ag0h0})t1tz7!>)UN>4Gq|@>ncOv!DFZ z{Q3MUQ4sBJs5C-SB{_` zTb01CI2T1C(!U@p(mYqqY9Z#bE`8|bYmBvX{>Ap8*s)EFQ`t~nymXicxJ5PtkwTXS z#Lw$%bGL2NjCf7-h>Oo1t1@(U*vqLqXq|()`-_^kx0}9%ats`7dP;~MwF`o@UtlxC ziM5a?xlv$8?JI(UpQXo9`ci9|jB_wh1bU<5zYb%cbQwwOa@v`=zQ_;qzZ-)0Q^HR8 zImP4`dNix_DTyGDaa0^#cv7_@t!X0q`3HHzUc5p1=?p;2cj%TlU`3jgNHqUtql)lP^B(ZKg& zVw-S(!WJst^hMCc^=h+4A}PyIMh0Y^c6;c5)Msdt#ndg)eqvpXk&4o9cNUBv>2{Iw z^9vLC{ZmncZ6)rdZ%=bHPQ&Q5=yq{SXYRlgi=H9sYH%FUb*lP}F-3HSGHObq56r}H z8*?XBI({^-D@`^jVDb86#_d?*fr|pcTn)%b(n~1v_Jgu?pmiEPg%r(FsD;**kch{~mGUe#{zcxPeWS@PUDpJ|Ol)%W3^1W||d?Ip)4|Lwy(eyXEFVi2&4g?c&D z`pkBT8C^V#Rq%`)ThE^GdMgUP&Mf4M>9zkHYh4f`vaLzywfa$N?1PX%XI+?PhrG3^ zdX5OH%GmzPeva5sPl4ncgA~;G(GkkGl_oAbc0S4;@RC$Bf}_mJLm$-8bP4k4bu*cu zV`ZIdhl?b}3#wpvo)@mwB*KBRRFm+%(x!^ld9|-DbT$~r^tO9R(c|dw1C1X*eK{hp zV?jFjb0>Z5ApzNhLQjV#HJ0VZxX3>Z|MdRB_tSc%uGbtW%d2u(ZZUB~h*&xkRt^6| zM%Z!b{+86>?tU78+EmMML2?xNaWisDGxnO%Fq(5wXrn>x*-@q7)>c#O=r$8B()_}? z1#763t!X*%TRT~iL1Z0&>1Jr?EB^kyDJU$}_L78jn*dxD2jSH?U-P$B)p_m@X(qmK zLuY+ngKD^-&OLF<<02@?k?C<3s1=(Qgl*f5WX8ISo(h z&d=_SWx$99#BE6_f}+u|ESx4Tl20->On2Kd&u!41^Jr>XgA84AHVo_Qz{#b`T> zkEzg1dYGeJtHAW)=$h59+TcVtkh;cP%;hp7ko`Q}F0n8FPg}SsfeZ-;=&w<~kpjk8 z#`Gqc)tBP%b)gey#A?!}8b@JleG*yCqmj!u-5K^%k_m~m%re7fF%4c*GfF^Cz&EkV zUQG7V0d?m4PY-Mvw`97aY5_vB4g%8jJkS&!;4Y}+nk=434AEQ|Gd3~ec(-!%UcT-N zkBcEmD3zlK%|Pz2Xhtc{7N9D(rhcrbuAQQ)OF&GVJ@;cFmn4k+jq|ZCgpKA-@Og}k zX0m2$_T?A<%goCl@{4)ACmq=)Z|2I5-JA_W7sI5ZS$I<~1gqZ~yu})yV2ynF_&t*h z28o6=Ew63d0XN0zl}y{E;6%)FuESfx}5K1WT6=acJve$%Z`X4 zD(l|W^^-v5N47>bTWQi#t8cDuoN+j;<^vL#CVgW)Q;*7V#EW|` zE4v7UYa<(a<|yxN3ZnBas^r|tiTgDL$#&gb2aicPT8xini7v$Hp8Z<*qY*}YH(^Fe z8Zq(sa5oXoGj90sQA}1p`1>joZbSRuP-ae_?^Ux&Wg6Hd zs??0gFMW&BJI(K@B6v@Mk*i6`b6aI$TX>X8cOdXOXG-!N@fP~_5&w^?g1|6hgT;(R`1c!@Ky&5|`)e(}ZOc7C*uIg%@XI_)AM(^`2eQSgB$?u&j9 zZ_W^vgPU7}6rb8j+||vk zuttfP@#+2ySNEzF$l@5le~_lHtO&V>jSxhx3IVOl!0P~-EbQ){kuCy>-nFl-(_Sal zgWmKkem>rmxhdW^ZzS4vK!OJ1S?NKRWZ~UginlMn~+rqC*nM+*paYLdX5)ek%d$46$aCshNqv|T(mQsJB`1T@tj^~iG z2erMFP&o|q#|D*7tCGe2?6K+9^+=E_Y@bxEYb8qlX;g8Tnpo+^2B>0IbB3$?-zk2n zd%Gc6AaBORGM~CmrL z^bu!#B_hB4$^9cOg)4kYrFcPLdUvrRBU@Q`3dqa9zE zDpm4N{nJ-G7fl~>mU|1glqnIlWUG0CCOj%cFUe$k8JN`GT$D(rc;ZW>?DxK^lHt+n zk!_8BWwkF?jLeuvdo@P1E`(}g@cj#^i_}dT0iBO}twrMXV-KIe$4OONMz(Sy@sSlk>)?2JO0UbH>~Th zSh=p-$$0_6LLx@ReG_rMtZrgVfG>f?TqaT{L9_e5`{-&*CRq{bN4&agBzNJ7w*f7x zfeX9~nh`5tX*@04By$;3X09KGwnw5FFj)~8 zUJ{nl*9uB>n(DHdC~S_>ONG_)@OagVKa=hVF2WxekCiF4d8h|)_XO0 zu|nY6j&(0APv@5ycm{y&nOo^z+LPTn_uV?9ES@K*wvQrYxAN?oF-Id_k6%t6;K{CG z7C?C@OYjo~mG4#^*-j}3JUo2XBe(UU^uofz>MWqCWafg+&?MIscew{$6d6$cSpaNj zC~F_S1vJ$G;Q&n#DTh2b=5hCXDGv%9>W^iZSnYR!0CWaq$gq7j>=WZz<%4b6N>eEOu=yAr3!9j??8g2nNXG+F| zk~&bh>vBQ*;RRSd#-ME5fYOEVO2pX{AQOU4ed#z7><~t^{Xf4(h_>rR-5o6z{*Yk( zO_x~M4au?+7RDuDq`9RoTM`-~XJtn~rFj>vq!EAl=r)_=(#9rUZf5w`mY-_9Uy-8n5dXeZK1%&6U@x{iAD z^$B}_UURcn$~TyYA?~Fd(8Cd{?S3K#Z4Wf}8%I-=d2?`P-5vZqyc^LRess_Y<@`YQ zMig;8*IV%gV5$o5_8StAy*2K->y=Egh_4u4S^u(CJ<|KJ@Dp#UrxlfK?O!X=weu*l zdVcq*!?NF#ek!jh7nr^jSJ_m8*E~~#DSfCjQYSXexKFS0r#la_ov-VeFv{Xr3v*rd z2pImpGyzyCsh+nxtL@+Dv`shw>g107_6ewXIe!>e; z+#dmMF#B}%a8R0N**v8%!GhMy27Fa$0P~bAfC{=N0q79 z&=>L`>s0#rA8=s#FM@`IRldPXK%wi)Wi_9`k2%qF97*rJd=Ba%1H{aLuA>b_j|!og z_G!W=?N(0@g&h$>{Qw?ml};T7-@Z``>D^Iiu z>M#GZd 
z47|>(d>ncsynBo*WX~Zaf@O3K%a>%=KZYjHu8{Gb|E77T{WV_>uN(i z2%#msu%^lHEKTxdFx$N_Eh2%ausj?VW8pCUQU4D5{$rG(ACyomS(=W8SF*PNT~R$& zi*N=?J0U{6GEx`0?l;{-UqH^_}K(+{fCW^Q&dO z`2G7g144QC?Ra#HW2OgDkf*kED?kARsI>| zClb7Rz1i$&fzq1aGTvrHCb|beFQT;5e6_(5mP2xAY-nJl!*BeWu47{NQfJgAu1%C3 zLMH7XQumO&bT1x*T-|Ck*KW^#ii~csE})TCT*yzb*Zw5{9aegy{RwFkjVI`r4+`UV z-YmBK5rlI3_3GPxW7P&GvBPM7{V#qGu44?>I&fBe$Oe__~wI~25xy7gK$DJ8*X0XcVA{t-3uC~WG3}CCOCB>;&4^0xfJwVY9AQPS$OEt zdDyUdWJbHdRvn3XLLiW+Gl!p>euZfBRKZ`~{0|l&T^a9CZeil7tye#*AKRNZ32gcB z&QFRyPT;keXDXqz{s1KKcU^4tv|=cwC@BQ!aoU;bqz?UXJFuGp^amdZxd8S z+Mep+_wa;1%-+1XxVQ*vXx=Rrwj7PCoEr*q?QL3F2_VA@LCjCF3&zSY8Yovf zfuc0^4M-WE6n0pHJ#!wS${6X=09v|4(~g^&gL^Eqfd zY>=8Hqpr0(^znONO;HyrG^@HSOm?Q?b)mA_Vs{o^$v=OtyB|^6ocJC8foM^|IK}n} zp!FY)a@6|pRz2)Uq(`mDr6~m6a_MH+$ybV!A9HGYvDkw#xs;{45wrH2<^%630AybU zUXX-ptP>F;&{++lD)Q95EqyVc-jI*E`WT~El1cAZt9smahV+AGSKW*u+q6HX{5k$Fyo^jIBGuJ7Vs8@C)?pf88H7W0@ zKVLH2+Y1hU)XHFHTGbZI41Qi>A|x*)B!&$4j9dZE_xtx>X_D-ck5^8ghu4P9)t-Cp5B$2XeMa!TD3@YJ;eq+}Qal-nL)r#CL%4SXrt$IMzpX?q1QPnGM>ukRHS@XiO>#rZtsoi5n6-(c)EprmRS{cvtq;XZb;@_|7$fo zY4F-62!g7uZZFEcN=enTr-@vJ+8p3E*+kGa=i64ChJN%0*@#%<*4+pv*>+}8ye&ZU zC8a-}1B@;k8Q(BIGSwd6t?j*WUG!b`7WAQuatRlp zucze7k9bPFTzn6wbNBqYklQVvZ$G2cfLsa1m7Yv{#f)g;g!|=;XvW$w0tC8$l14b&u3izH2}ib{#%!3%Rqz|Rd|0Ow97o=! z!-ZRfs}&U$UGNKW)5Kj9swPwq=q9=sUwwk$Eyy%_pLvDC|3W;26+$jk$0JfI{SNb~ ziKUl~6&vkMtyE6C_`TB~U|f$(k5 zZ9ty!WdT9V=dfxL$7cj7c$uPif9{78BD{z6nD8jSG>loHf#=zfMm0(ABHZ1Z??(zw z6cWV@1aQNqBspjow!KvZW$;WE4ltaf_l{SM)ut2FN)*p9$>bY#FgKp<9m!goneh?4gK0k#SOTrIEi_2$}bMx6SwcqrkjnWr1krEZf zwQ#NrOU64xq{&+mW{cZi?ke}ZVEFm;xA!{c!u}fUv9;ITZ&@<=_i%9hCbquQS|+9- zd}HLNYG~}Kdu^S>VvHcPVWMiq2ni=h4b-i#R&JU$aU-p^z z;Vm>!d#t~|#Tab{V|&M}jXAbl#hTBR#k331p^Cu5KC9jpX6S+*JQo>XnGMG} zn5IvKGrLbYs#Mc>MZXYsoyHWH6I4@CQttY>BhtWmLuyU19f6>)s8Ji9JTZ=z)Z>A* zS9C9_n=~TFxrmvUjiNs>k3Adi0k=__&9`rCE}$=b$=OnC6ZtI5oN~TG+@MWa#FN$Y znN=QC*-ELAnA7|rXk$&{;MgPetyjw~CF*l*-ie8|aL7VIs^uXb50=0KbEfzW&sIuy zT(<}il~M%(LEq0i&`?mwyqxBFYw7|bWhGYZg%$^DB2Rdq*e9W5{uy~?xLExMyVLYq zT6hC1PH2BtogD&x+Tm*L-gMf<=tq_ZW2Xl7oHes(4mjqbVTh{{pb${QcE)6AzL%;% z#-X-{{MM=7^y>?SZme$nxcsqL{49Bc(YV0CKKKSEQ5D9`Lsj`R76t|epTb?Dd0z^X z5Gzls<_l$j%6q^54O8%Y0I92lL;OCA7r~!PklguX(EHx}Kv#1%=7J@h=@Pa_MhLZ5)qAZTbZ7AZLNTrOufxNNlN}zwo7?~$M@pq-*|9uz z-mMD6M7_^kPWC^daXTStckkMUKA&xUYoZ3wcdza2@}BMv&=UoHim`b)qO25D6naMzXx7YZfzvE zOumorAPV1{y8^Ly1xO8FdO>4|qOd)eFIZt%C5hbDX&-*L+{b~54_*7_{^xe4Re9jO z0SxIDojTUuS6p>}ic|?cv(?qvoiof{TrH;+aTdG?14g0I-lZ#Af^(Ul#+E1YBu)qw z;Gm{<0nzcN#K1P9hHx)HJhm|^f_ag=l;#;{C{u@HAKu9|aTaLD^mOP@G4lMUZPKFl}EUnOWtf7{@Evto+?S&P=2 zPWJ?VJngzfAzegH#q82X@_CqYwIAYPb0Oy~0RG5sI{u3A_2sBmD`n-65kbIK*1<5-;ft#`aH_rU)L_4cm=rO3i#c@YS@A8Qk+!tM!YVv zlVgGN9Ap&>!j%|b^r~(hxlJn8(WUcz$!kDv9A+8vR1&HrQDjM4bi{her0%q=_o9rc!m?Vp%~{L&*gJ#t zQpd%m-po66nbNHO3Y?ce?oXXa5%-*sdY=iMh+y!DrlQWaI$sAaQ>o+a+2LS%+1c6o z#5tYaFJ?4}s0mojziHhdBo$mC$e!=CCV*?*&chUP)7kTT*FUEEM<8c(7k#Et$Lpb{ zX#d+O^Yrl5q8-zo8lK}%v_msBg77Ao7EA&u>~^^OIQ{fJ^8k}!R6qOy#klk7{FdO%0rQBzpZ>mek%7!wy)go+(ZYLe?qloW|(@At7} z3lsmfbAgqWcM*kk-3q&t6hx!@))qm>r49WBcu6*6I#@m~WlNA;$#2f+q(LoBL#51! zt+ImWs^0fQSH_t8gBgh!ot42xwSo~qEnLL{cI(x-{~i((7=4Q~6LBGX9gVz@#8tR1 zUWiNPsJ;sk52Ks`{D30m({s45+R^+gd>#2Zz^(EGat%GIBV<+$O$#wXC0nHot|HnV z8|`PG!#fy0ZYk%4yV0KQvB+i4mz6tY-=oc(T>MOxNHO@IXS?j8w_@X}H}AX%s%bj! 
zHlLG-Z5LQ6u9AV;q+;$=%*r@$5L9{to3z!T-i*K*7I^0!_|FUC$7uphHTiw{q7d?5 z|5sh%7{PDUk9)!L2}G}9!3xv*st9+yt0t=p~u!gE?S?zCiNe`6ioia zGUy+_5g14r`2V_1l_)PPc+kq_R2g34g+uf$H$nIUw~&<}ujGT2=wy5aKNTekMqIwA zXglzI0|WI5zX5L-myKs+=82Jj-;A-n>Eq4-kASycPfChPn9XXRsH$t5KZZqil?US+}=X*Ee=Vxz|U@m1U%ub#u zSQxnMdI^Fj;?lndn2J&EcO^enR+_F0Bz%Jo`vPHL z!jVxfD3Dg24eD@W;|wh@4c!}6xng5?!~O3J(7u5_4cmdF@jdlFCL7$4ycRfBSb2ZCvVPAqsL^p;g%p0j4# zbx46*_QP?qx`_CVK%9&U|Cvm*w9a(Fw2fWyt_3!9Ehh613QYILPD@;*MAzHE z8vSd8f9IVtm~)>r5iC5l>H@O^zEzuGBcn{77i4zBC}M#-b3ls`8`}@%e-{Y%1@!X= zHiS?41qMolaZbdZd~V?Ns!qNcSZ8MGBz{AA>i_>S@VLLbqP+n-)E-@%GD`xwZ2!8Q z&o?l+*a|ah{r&xvsZS`g3Zhd}Np@bz`wE;nL_;A7vraS_Ezagl%lH~n9aD={eS&}-a5b=J{ z%V&67)swN%N788RGw*I6bsHa8z@qoxBjAJnjXnZ&HUIZ&j&99h;ya}IuRZqvYMYbZ7j( zYxDOXW3P65s%t#&8s=MbW;t?I1wD^FZ?28E8mX=ZLR%N~uD|``R=-lO^wFX?y2D7W4}GSE{KjEr#>t|bgcvwB+n2CZc8QglFn4Ywy#vPJazb)2=~NkF3A+}lUSEsV~9?x-^@<{ zvOQ$+q#dFEIe!jvqCT~6A0O0t_Kt2BCUqh#|jan{!sjlmS4Z3{y-k=hN!lFp+}T2*VO{E z$H~e03-IaXl$0=zOHxo$Mkgf^Imm>10n6W-l|`xc(@HBRT1XUk;@DkxXFRv*@f9%A zA0tU7xb$+Q9~NHGx!*XbJrA`t)=0`JW7e{h1zOn7*J6oR!)tc)BXWH>QYAvFJpZ{e z$6{P2<`TSzcJ;1s8dBr=QF% zEn|;b|7gdS0?|d4qZ$ZAy@r1}dj*2WvV6geeL+P_rXZWe*v&~(CNN}Uc?*ilMSGt# zd(9dc#S@D%@4Fh*)zvkPnEcP%zDr$)0@p#9VO2%&Cj7o!u>$n)q&a z&5HZ4+RwFt+@gP5k^ zlaf-Q9qvq8-4j311kneDBmkuxmG)U^*4yy_k}D`e7+i&GROEk(v%v({knjkV7y-Cw zyBh#~pSlI7Cv7$3%EG*S;%V1Y_UnPf*cO;NFl^23z7#OMib)>LNNmK2Dj6OPQ7M zEkK3#J+?F8z^ZV+*@!;{@g2IqpzSicd$J83X~N`#zdG4$5yZihTS z$t{hYo25iOVBw09ss*o?zp*>+t4{Sh_Jk5v*{+Eba+rJv)m=Vzhuu#zHMI03S;A5o z9~BjqsFLUB=dm`yff`s%Yht=568u{Cw~XWiVFeZCsnPZEiu+eF(Mfb#zPqcV^2eTU z7{rU7o6;A$0`&ejjRv#mM0n?2{VvUIJ%taD(oJT^|Mg z?AK{Mpb`GD9kRb~agoSu-RLvZC_0I6Xk=7)dJ6a*<7>=kicU{614yHQm6~&4as|+b z!ky7_owpY#pFV%iD`o^9T-GS+7pcF_bykazleea-DlI@rmF<`qg#;Kme-{THJJRxJ zL&JjB14lPsMG;Y4VtJlp@28~l0<9SV_y|^GRhIzuuP~EUARz$`hRSdM^0Bk9BqSu{ zF~?;EEKYi2=4F{@AS-*bu8HmirPWgj^?HB}gg;kw@gjGTr8EHE%c zE4a_F#W3Jd2+U6k^V2?-c7cWw8j$D@d__f7*mj`hv_J(Fdr=X7?6{$$AA^rM(A}X9p-J zim!~*J!;WjypewhGgoAEgIa=_?Xm0)kVa76Ux#jziyDYCD(C3@lyq#5s<`9wL*in27YvZP`K;Y z`E#pcYGQ)ae+y1;NvHk^z)~?VnD|;G2CVq(u1qJ2PcS;5Zsa!In{o|F+Dfgxxsx0}Co>N@#I;&&+A+>3PEt2-Dt>wDe$xk-~ z?Q2%1<3>>XQ82BdppZEobYcCwRt6~A(SUBpxUuZ_#it;!FW{D{0}_9SA@-CYISmUd zGmnIbY83?_QuO9CWlG#{!4#!Kw=|S>XAZPcHp~fMS=C7rv+4Wp9v%R`4CNyrE|%TE zKm<*|jdd(J+th%yxYPW(U=`1I)TB7o)!lF*gKxF~tw><-Zj;x{2R~<62daK?z%Z1b z;b4m;n=s0q*z9@zx$>IoEU&KSNaQq+gD}Va>g$u=5h4P(O$Loxi%2NJh`@dad>v4k z`UQFj3*hSarc0xB?pN`n5)uejnx#cOgsS86f6m*5+$ISY>i-WG0Dof(xHC*cLqm4k znE^%1u(c6;_9cIS#Dn7Rw=2Flh5)orpYYY5QuhRi0TnNco0~I~+Xm6DXI1i-mXv&& z$#3u(Fo+PK2EFCJD7R4L+bI}-k;4N}yNd%v%Id@XXVTBBJ%IkfI=F+%O!m#q%_-9>TBXw~Eh@}ePM(gYAg*>!{!7EC4-Q497Kr{coB9{qUfh8_2jTxwF)E8ZN0fmHL zK&&FyDv%Yuw9@9A-_uK^EZGEBp+&Dh9u7E{K!xPPl#dv?*1NFZ&;sU!g8@I${4q%H zd+btU$Q$6-Sd=#&@|K&Pr=$nc5{a3+1iKvTqV2uQSe{bRQCgT?5g-j;(IXiDx&v?p z&Cqk8CI#5Gr-o;nL)zBQfGb9)KJfogb(UdKh5y!8q@+=#LqL!ky1N^Z?vRF|yHQXY zq`SLI7`mjpbLeh_q4VAR&pFq74qxCC*M>d&*}qu#S}O1sr;WR$b&%B{Q z=o;hTTo9Qrcb2!&vNRjeof~uj=YSGfKq}Kx!?OEkL`fKVEol;~X{eRJ_k_wKOH>C3 zhh;15@0Y1M;B08a%llg6Bk&pN3ALb61fE6RWj7|Y@5->;kU z#pqdT{{r=Itp&ZJ(mn#n0w9|xv1(_a=+5^qkBW3KP{HEXTMT%u|H1?6lQasI!Y^ub zse1&7ACOqQgI3xYo%p&2nSk>skr^1vO!sA(&H$3ye5<~VK+C6xTV++%JgY9FGJ@#_ zblkE-Ks2FT{=r^FQhYkKTh=2K$q4@$e0Q;5f#mZ26+$m6<$qnu1P7}>DY8Wt{g-9J zh+vvUi6;IYSLRQBFSfgC`v}Wy1_0OQ2#k|U$HtE-fphH^S2G@{77jSZ_1X2z^u_`Z-mO`Z*+WS;RqLX zP0gZSI^ZG@C%$InNDwxi!iN_IpNL}r<36hD+Df%+Q-X>T`aa#7&#~f$NkjyoqNAgi zd?3PB`UG5}gvKy3KBaINW3nR9Jk4~W(sg9v1)vjg7Ttbb$yq{Bjp|&bk&5ONNC!?3 zrk4i3D6wl~LFRLMUN8T{zb?#|dSJ$1U^AReRYL;b0Fg_o=q!}5>ZQcp9MT0+G!J%A 
zUS6;(UZ?TElJicl{&TorK((_oUOG_OFsKiyecPmj7&lj(aFP{8spg|j>koxVm9M`dP#p*8xIJkNy{ncPv>^XLBY=i zWWpuZ-B;rRZ2k=|F8B|C6|%;Z9pJBArze~^h_69xyqlBO<3b1YIzZ{S0d$@Ib4-ZX z*q73lmc=@A@tr31P&IV*WgBeUC}!LU4`PC){&lBtj(~(W?uNfo1(w`hOqV?uml~aB z(8+si$tey%MveSEO-+1(1lUSd5^^4)4(-_9t$@IG1|l$(m3XP{o1=z5*&E6%jw3Un zoMBaj9N@t}Bu~?@3Z-H)L75Z_~E=ity2Y!HH z@21FP1Wt?gbN_80T9`FqNKv1?irLOFwdL7H&pF_>S4_m@oT~-AzmQ1X4HM_e3xSHq zWm%CKTPZGd`+OP-G!Yb3ghMI@of-z)pW)EZ=HDshN$7Zlc9hnymA13S&J_c1?r*l1 z;Kw5mkAr*u#{C_cnBz3F=M!{~HKE6?!RE1OVu8&a1jkdMW5*{SyrCNm5^lVH|N9lJ zLxL&O#cBeIa@Xn^XirQ6MS|es!k^YQ^`itdnSJ#!^12$&6L~G&SjtT)!uvl1bdS{X zu>+%ENQB)gJ#5R!V=brUqjgv?@B6_7$PeLi4d>_=DFMd{S`)9Vj7*58Cr~4N_|E_j zS;`}nhGF$eH!t{owcr6hO;C2UYfTbqwpW@QO?Cy?ueN;syq zTE-o-uY?QgJGLNxbK0J!ufP%cj!maNn5K2h`m~d3GP%ZVLU9l`FoduU30ujo z_fQ2WU-s};AjOn@(qFuP;Nb4R{Z(VCQ-DTfo9J(2I%-+hyQ?ns9q%+0mT#z7uE|v% z)sy)lboqw3&JcVkapZ;a>ACNnCG!*gEjDaHFSSMkXK-;l%yggxj<)zRk)~O*zC8Xz z817K09cqu&HHSetP9;-RjdrOu`)spOs#EW*Htj--asq=kIk;Y{$H$>r_Bm8S&SV>~ z3@K+wcmSE&!lq9cqTbfy=&|u<&&B>6gN-~aN8+CMG3|zg-;RBO? zn>q@;w@JyO^w_n8QO}(p=d6M5p|-hJY+s8ngvzxP#}`bT`CCJ_b5#4cX=sntmU($} z-^2Onbun*2PaP?2OU|JVmd;CNy>gxEl!@fZ|9_STIbunCIHS>Nj}oD7yl-lIbWHxR z98bqO=2`1xbBnq(=QVIkI7;$qJK7r53FEe~%bB&eE4iwv%SKoy#>4RX2h%Ay!PPhq z;ljmwWkPt;)ic=JwKgfA+00ujdo9WrAmTgxXDwy(n^ZjIrZpRE1BXqQV?mCpc$Wus_i8U=G*+l^nICDt1Hew zu%6CHC>xGB+Yv6C#BcahRCAKsYUf=c>@exTLZhD%&TV7@*yFtEQO!o;JW{PEMMfCw zA2q%jZ54awG9$G_yJu>Lo_ZO6wvlBt zD^#ZHzg;{R3TGqaOgc-BNp?EVT+=ZthhuhEbSSBMeL=j9ZE_ZYI|SfvYBCm2kYSj9aSblogp0PyVX?=~sE~ z7Q2&1YwlRwequOY9}OyzUMw31zuG97N%TT0jLPo(Hu5Ao_1v@*`iu3%&25Osm<0C0 zk6UHOyGe;!Z);*%$uwjdBZEOBoli-U!rR9&^1{m~vTT7_&GGlYHn3xXd}0`;OJ3gA zW}%7YUz23t<+mkjRr9lh^lS-(2j~3BaW&ewjDH5B6YC4Ru8!$hjq{x(fZjI3lp^-D zLI2BMrC@v=(^yo#LA=b$zyj;Ke{s#(ghY*rPGq=looZQ-KJ^sFg|v!fMVhC6fu`99 zY2OtS9x^>#qFaNTb;bdnQ^gAB#o4jg&#U?$Z`};2H%E*oFfwU&6(NnM?dNc(L8DMe zH{k(?-d+=gJH2lRX>r&_|Pvc&>qBv6jkNr_2P zk01_Hbg~nKdz9p!cXK{}FpsFx*|t(#{o&DNVAUkHg_2wH|4eQr3Yn2?BxqtE`K}l40~`o=_vl2_uY;CCQS!1lbf_24sr*_q zl4zB+sV$!OIp2Y2-IE`v20f9rCVEmN+V36-o?ZV+7M zDHl<`OO$!biDSUf6GQnq+{S>Hs$2kX7WB*L*p4CGHso@WmOxqkK;Co^p659&f_3vU z`P^pEz41t~@dpXRmVx7T4Zt_YJMOs;Bnv5`go^i);rGkJsnH*n0r6|PdH%l+{R zWe5n9Ybi;zo3KC`s~`{HTrEp9)$`1dPm7{YmdRkdG1hoCepvALe>bN(lyt`W4rE^X zh*@AI#j69(>y|DA1FRH*5bG3&;$Y_{)pYI^|I@FJ2&I4NY%IXi-E?U|~v2l-|ht%3rX%@bn zAgBFLtNrqw=Ic7nygHQW$Q>RH7^X)R$QR!A&kBxB&%znGO-`2prxldhWv z{mJN*7^9;wCDt?kRvSgm0YAHFYa#oVEo<#hs9d`Pw;9WnUa{Y->vP0IBaY1uk2&LI zv|IvB-1y#|$c08@Hk**Ls*PUHraLzTKwML&7a3TO`Uf17&)#`eGh}c`;d)2Y&t=9f z+@Amc;|2H$8|%ka5J3zZwF*i-PJe%O0i$2gwoC?&w1`K=nFpTwTzjU8Y{2v^&+6~w zk5U*6QNLG)61B~TCKoSHhlqycy1~%7MONC<9x~VBb88-`YQd;WAtpV#dtW}K;;vi{W?U{XQJ1J;|hsVhX5ng zG)j7YPFoq-BYmU)eW!!}etZDgpfBV)eUgC4oxI*D$fPv2whb+Nu*?fTlI7}=X=HUB zrqJTQ*4Gj^c`29plf5r*jl68KhrJE6H8+kaEE`#Gue*57t~v%-FFSZ@>~~sg#@)dL z9(h|BxC^$ZWg!-W*tQ8uu1W}#udiU|9eB9aSvs4Y@3%L=5x;D}rk(AagGEj!31@p|H}l1g2a5|e zYpwjZvC%@;*oL4OFT+R%ZFZ@XM><7D-!-FT=Sx%N`$>Mv=bLFwyIJwZqXQXzsr}=W zNa!Hnqb$$rRiH5$HGvLTA$n=5GUnSd{~y*!mh+|)50A|sUL>d2W6{1p82PPhbgxob zJv>I=)KIL<7b>{QEck`$y2y{G;=ljj^C$#`|DL;Hqq){T$;)7eH6Gi?Dp=`&VxlKy z;3tyCsA*#+^I2?`A&e01X3a@{$nS)Tv*XN#+2QD9u;X??>hhZ+SDo_tO*YRepR+ z8xgoZOk;rbL9_${LRc&3gVm#>jBB5D zOxPqx{}6c)@w-Wz?i{^l*Tz;%DT24a@2&wdt$Bd%&J++W#WXZa$JAHEUofp4Guy!g zd6b=cXAVx)H}BZn2B@_vA0%3to-;pBdsz(h$Nkg>2W`{GvgYNCQuBnkR>Hoem-n*E zNrW82X2XBHGcR-dCv+0!ZoD#xS|A4-pRgU`IAxQ37j9bR<2B1{$u_H^tIO~P2_w$; z(fgV_pvB5l$Ly>wULW@3m|Q{nv+Y0oqd*e2fnC=N&tYhXAZf$ebMZsM@(ow!P)hR9 zYbRfkQ?|SjC$EfqYOI$*5-?mzAL0^>tvx;J%^l9J;D%-6c3ra>^#*sQc}a=C|0<;J(S_z%d#Xf!}-mE^QUmnCafR3Af7-Z}#%@%tybOYzqQd18_y?#`^ 
zyA-tAPo&!RO9MS7JgQ92dKX2?jnF1G4GP%}Z3Wq$J$#bd8#lIX6pM$;rCt5qa#WDNY?Ku0XyZj%F^9qd$f_?;kIu zdyG$&jJB#-Bat4vqMIQl$*f@7i`%CBK;sL(3 z02>=y9&|#qHDbBhEr^P&qXj89&J_mB|W zfx$t?7MU;aEz8_|INitBuCCu8h9vpJS8S^wN$bkCnrNj!KObD~yBKL%S{o?(ZO+Vp ze0RF1Z*I6F9DUSzawyfbnL+|3W9`;dESa908_E(1$OCA+W0=|zZgHut0n>TOzNJ|) zZsqM)s8~w4Glj5*j>hgY_x@#4z0!&M*LQYXrKNrDMdrToy;rp!wqwSt@J6RAtyX*j zZx;`Iwh{#6dnYP{fCw-ua`Mi5!SSKZ7W|C|-CgvlY5vQT-PBB<{mZmHI-gN@XyRz? zz7qw}Zt*UUgc6nVT%p6P0BfFE(A?P4pcY%0SCEZW#t!1?5ciOI^QTgzUacKR>+cRv zpX-DYWVr_Kof(KS4b!?JPd~IJ7Vli{pK5x0ahWe33JfOtg935i%mjq@w0QKVXtYP4Bos^Cu^k}CXhIYRPVV234< zr45ojRANOrrWO&&E~_21V6ThCjL%xtPWd?M6PM+OIXOvED)%M9AI|PMm?MUWgorsur zeg#_xJEH{C{M_Kn1jK8=*BxuEW^mGLK~gJ(yYXdPE19tG$1!u_oZd67wI5%FC*H2t z{85uoEoE@3MTG6$0oukK4PyT}i05iM+Y?;64Jb0`uKFL5sk$2JJ`UEe*+o+drl*-R zJIp+Ci)Jea`^k%a?`XFSXLvil0KxLuK3*pbfoDMJxvz9FyN5mk?K)OK|{KFy1=d3tlTT=Wx6nw@K>1Hm1!N14BR<8>C9L0FS zGM4eG>CNRN1y|cFp(bw*LLh>$BE~Wa=_~s(=kIxi+Y5wo?$pZ;!g0DI`hA`oRMJw-a)c_HoJ%?xY= z_Pg4d~iYbA&z#1^G zwia9t%Y__W{zM^bnnd!~)rWI%K<8LVL{}fGCD9@NA$*R}*C!=cc(XXzK{vmqiNKa+ zC1?lttJ-s+CbEg#Y;el{t;_YW4f#>yPhfEW=ckh~u+g&P=8_Vhw!!58C!7~6{H?bF z(B*iCg|Nox*(#MG@8HMc;H>&Rw^{>B#3w)xP?B{B47;}hygucPvBMt-QxC&t)A^& z2%DRfm12@M8~=SQ0Lx`>5Nmf)3a8E*d}{T(fh^B}NmN@1SUA}&-y&_+GdZ|L!xXf| zV@1WQnaV_T5JiA)TKiY-4hVb=C_1z9XMwAy0+I}rUI)AwW=u}D2;meJ7e@mMFCL(b zrea}HQUF_8Dv2P%f5zhH25>Lnz1Kmya%n(A(Zm*I8&3K9_0O*;wW;PPk+@fP<8zk<=j?`;1+1J4 ziuuVk>FY9z?AmP|H)r~I`>I&jjo<#G@Sc790hMQuIqeyMj>bBe<9Iz4mJ3 zY$(M#`pfHI$et|1zu}SprXGd3-c=@!TSpj0yPr1WX_bYu=Up`t?47pSznLH`x8Ijh}e#WqzuX8HY z3^%U4Cf^COgEWo=SUy3r6Gv*bsixcw{=m#>*^b8qB$Z={O7d%JCP98zJJbtL%X2YW zZBn4Zb5r6?5}o>@$8C^Zgcn%vzQd{gWWG@+W^s{TbnhZ72&*)j^pSLYu(h#f)GnjS zR-rIIXN4Mlo8#b)dS$-gOIYCMM&s|ta)U}Pp!>Y%5rS5I3n29UYxMSbb2>B>S#$#T zf3yIhj1hhc;y6k^jdh$#$S<2-aU5#vVo89jJ%0o09L79hLGPRs{ok?Idv}7aboT)o#7UMUY%5&419+(w^OF$;y@-0sI?@P*H`w7oSl#atL%!e`jxdF}#Lm zlqU8{T~!)Mn6gxy>L)ddLCy-J4V+YW<2QG6JmE%+)lz z-S{O>u|^7i`&*!FVOW0aFWU3ZkAH~m4&0gxc6WAUb(4*)Z#j)gD%#nCLi4UQ?r9mn z<9@YyMB(CS(#7${qfJ}jd7IcSCM zLI)z?xHKAD?8811&q%j)00qe!X7nx0-6?oj|EWWw{HNDFj+6cH1+HR>=4$W$v3psF zU`{?>a@|4LUY*#+*w5~n<>pXV&|oWAC7kT9aX`CmDDe{bVn;Szoc!;L6baBGFa@oh z01z!mL)YP#=eVYh4n1J(5Ch2i27so9ceou;9@by)QR9FN#mNZ||T;+B43KwstCy9a@XFdB}KtiB9 z)?#def+v~B-zODI&MP~oTifEQJ{d8izVG!b&;2T~broIgeVoMin!5&-P$;i&Nxd8Q z=ZAb~i=M*iH8r@k))RjJ5^}*a0f_n*pR;jFLBLx=0u$asATc?)c$C8%^NZlEN&PLB zwCGpd*F^B1LxY3UJRv7hb^zu$=S|aRHkJW;5tbgJ;`sl`*yZ+A?4XYW+4x+JuSv(A zonLvhcu8cqw_F!NH?{4&{ZY!O)>@-~ySJ9nug$d?E{&ESL!H!cLt)XqSKHIOr14=J zT;Sz5*uPNb6hW z5Fl?e=H-J1W~?@1+Gq6mJOpl@ibb*se)-x*y~5@<^MM4&+K?Z=AA-7$+;c8*y zy_v8_zN|*t$v^xXpQwq<`ce41@GJc$D*UIBNsuwGlAc~UX~IDQxrJrGnaxCgVSQG4 zcMT3JWkOuL7GFIN#EDm7{7YoyC_iwoEet(1Ub(K=}qsXts!^(F3I%IypYYL#dNtHlGK*_=6_pnJux%ys& zdridi3^)390I|r|-w6=Cz-rSoQ6`jZ{}qBL^_E%-WaJ~}t4*m&o~Fl4Z;yND@Xf(l z=?Oc<>Q5R@!+I#>aeDV&OP0Ay?&pZnn-Q$A`Hyj_7LUQzZ2(_J_aDB@=N)O*COJ_v zL!_?}HKj;|6NYoL3 zskyOmh>Z(x`=|$UtiohSGF!yZW^01vej^wTVoX&bKJ0M)=Lq#9g>lcm-yPJ**=@6B z-2q?BC#+;R36V3~P24js^g&)7PE6&EfE0aySL^rQa5aLeeSBLnhYz{NDutY&T%?v4 zl(}u9$|X}hF%wKx!ggS=SAB95N2OpQ|NaBv{haMl^QW=uT&3R~$>x>{Rzgf^Lk(B? 
z()WVC$FKYD3!e_#KvNF`17{>-Z|$7_>z*$6-R6vinApMKwTg5$g5C|zy{PfyMQ)z< zaevTI+5v$6oHyWCP)6)D%u)hfgpCQU9yWdT{gNrz+I0bVkOWent$+V`{LVb<%wMN< zY%2bQ@6Fr0l*$REKs$DQkG`wO3N|&YrQ9JuXRX!2il>nN-)dn2?`tJ~HCYsOPeL8i zG$;cf8bxvw~ykYKYq@uTMkN2?D)mTG=Z@_OJJTx4k)+Fh&}hRYXBv6GQUTS z)$L^$rcgg%wXzLlrk^!>zvx(p7Ny91R9~#(UgX)@SAen^)BT!mZRMC0`#Va#x zIip?JC3z8A#5&2r;i2i#e6{axkI&hQSRHWqJesT%q$2~o;bZ~juNTm1_-p~zN$T;o zaM&)}mQ`8aN2}Fy#G!DU&zt5!>8*`058!e5Pqp{yFK)d!&gD4zF%V)>NtcucXuvCi z6oZHzgFTb8^XGt8XrB{ZM5pB+Gx}=-zfF8kBLd6Gg)YBZ{eRb0+P{zEj&{k7w=-<> zz?Gj`_~dG)?!x37%MOQBoUOpe-lPY0npc4Ya1f$4XXnb_p@Qqv%j%GcJFK>1Bs~EQ zSJo#8@K@F$e$|J#FBmlSeA9q400AtY!Z{3Py3*po3RHJ9b>8M8&=;Ob-}lyZU}OZH z+am1T9uOfq0$vV7p!%g8I=40;=ci=S=;Jc%dJzyt=ie+u(lTW(as_Du{34oE=V8IC~IVXP-2^KQzD2ipCpV1=pHg zp*b>n=xz11^Y&U7*miNRvY8Ek6=*P&#_+p3TA&2P(J~4pF50>zDo!E)TSg76LTlJhxl1GdJsm{M~I1z}aRis?xCWf472L8kwHs z64h^(Yt}obzl(ThN#nfPP{r%e^G4eS{ri4q^CnsG@aQ$CzsK@EdFO#Knu9Y@a>~Tz zCHAGGg#@3~G|i0c>=7Uoj(T~CoW#8Da`y(HELPLOIE+JK`eg)E+GKyC1<9_&bX5ZA+j&8nZ6Mm7I-f`V$~Vxz4+pJI$vb&h zPOf`=ii61VQ1fv5=ux)_^KU^W7Osli`vB1#pOEUTqsiM;P#EYo!RG z_Hh4FJ5pz%(Ke=8`~J#FBVd?#EXhbdvrVwauIGwOLXu=lkObt>TDOB9UlB*dqTSnF z@0?9^Z-Gx)UBJryA-?|wECg(efEQQc3gvfzE?6A|vQ{dzJ6{4mUI2{DpIsK-@EHiFph&4RKo=ep zdVP5^S7znuw+gx(Ids-x@+7IQ7244_93W+g*=h;ox|WSOC{HgxJHyNj@n+Z9>Mo|( zUEYH**0XcXH3_QRtEkyqSg(#|*hIEV5wErV*kbRyMtR0yNhr0)gnTC4A7eeH)L7}@ z0c>vdCaXrMna~kSE=^jjP)-Kr_Y*J?<;A8g_ios$50m3J2r>C9cbW zF`~)tXu2anTn0I(zD!aYu=={HLzjC;`URuEpkup;bdw$2H$_V&`{ThirGE?4IiRa` z-nWWsqzCQP3h)tQRtARS$Xq>rjQ13Z`=eg!l^C;EgQe%``2~}D$?twL^xnz%KB0+M z26nDr%C%C6pV^aIphg6vjToU&;H*oZ?KYFK0!KT8iw#u=3wkB1lM=v|)pF@hjn12P z00qdA0`%bP>h94=W`Evxzui z0;AZkq-z&+mqXPxX0qC)`r2e`WLrl6z=SKUgiO*8L;^ce6#I zh;>xYIQ6o(Wu{~)<+NIA0ix&27okUIp0)39 zOpX`p&H?A*egJM$?U*qm#(YRkOFK$gwXH8bvp);R>ccms_RkAIWSQTLZAFcz^50MY zO!EA+&$9|!^BiFF*?asLm4P9u#KSf%n10$1{Z-E*EF+#Cn&WHPl;xM;fFLBC_vzox zy@gil#R-E9jV{wjp)Li;7(=NX%p#DzhqSpWK$ir*wN0L{@Q_Auc=HPt!5Q`g6}jc< zcFb>3@cxMMC1ZVc+a5b1IeGu`&(4mSR*QRe2?(<4dw;8^r~z3Z(G)tziwttXuUrGb zpQu#ZCYhO>F2RGhc|>)$FYwJwgsfZk&dT+wUuEx@aCM~bsW4cTTSH5^%nh3S3FSw7m;zwk$NV$ko(ua)gpi$ zeL-$4XbEGN#YCb|y0GyQy3?4a_fU&U)J%!`X}oiQtfjY+$v)hutqAMm z(Twg)Oz3@UhuV$yd1(8te)Z4}vW_ZdXI83P3*#0QZp4kSdB`RQzR7v=Z3o}I1 zh4&l?vLKCtvxFUC5X>$8xNyXx|G=`Kof+Ijd*u&7p8w)Ck6oMYq}cI82i|(K+^gGl zq{~*1yc8EQ`l{ULFKrC#HPgY2eP5a_)tFIreP;dVvh@z!aVrb*A*4}sE^({8*4$t81@)P zG~}>`Ro*wLJL5<+O`#;-q1Dn60RS76-FY{}{7r)t#)q)P3i3Jt^i$)}J>ob%oeIBS zXShCRKs|lGBvzmW#tqZG&nj(}fJe}YRW`8?JX_h(G?0`GggRCn1NKpK32S>Ne>VTX z)rD8yo@u3@I=qXG*bYR8YUFK6Wx|Rd_MML(2RM9yMNr9y;vDV4RNTK_{8g2*4hG4s z0VX@1&H{7COS5fn8I?pKwJLZ3$G3I+@y_0G8;oXln z|JN32vwtENQM=dSG#e!nxu3xz3iF*!yMNOD3^fv?h0f9T0 z0ItH_@+NW#2%*s5Z3P1Tbp1A){Q#tjP$;&%1bYbR#ExAE;BC%CP2clphW)-?49sA6 zayuy6n-%KVX0C8%fS5&aHXwQI(nc#GN%B$w77s>;a|%e-fCJbm3h33|I|7h3k)PL} z)oL(yM-RV+Pnc;F>M|*lR*m?ho|YCZK#*2hv4o*U0Wkp*J{2!QyNms)ncY2o0ub*6 z$r;ccQM?eQ{J_%A%2!^@rUnT7lzTCO;CZ^)+y-_=P7#S&^&y0QZ04hJfUZWP9P_Z} z4k)6hz4wev0EbLRU`$hTnFZYhc*oLhxH@1CTzB%~u2s>nfimZglfI3|fdH7KIn>#V zUA$ovbBcmXPsqsO#?ZJ3QU~y1#eBgu7P;?IzE^5BRw>?h{FF-FBJuu>oeRz1l z*vB*xkQR!36Nwaw8i|1Y3sD=IyZb6mPUzvE*l+EdTx(H0xgRKf?h#5X)GrLl)?u4r zmj+2+C;I2wthyOFT_=@Ym}_Ev3(+;3kE~uDN@S^Du2y)YgiQ2%m$^b3yRQ|9ymo~! 
zRrt2!wyZxsJCz$jf}H6jnHBIx-XmeNJ?kgraF!y%_dwUS;sqH$9Pp&I`iM*<-N6e2 zl@Gm!7SStueQcXmUBFzut;8jo^95A`#>F+{pchYVs2`m= zu67k7)giBO)%ULE1=m&EU+bX54E3F3m(tH{MBKc>4Z8pqG}o#kC!X3r=7qu5xh5^u zs7!cacUv=juC%lo?6XnO1YyH$=Pw+nVDTx`MqNxAwooNKyp2^djyE}rX(<0c!gvR5&!}n-B1fh?T{%qR?Za%K2nYy)@?ZPao5D>hETcb=2xhY|9_LVf)T=%;iUOB$G>WZk@!#-a-=SRIq}2rQ znNHzB^G81-h>PLo_Amu+J_0u3>POuiqAQymfU!LemOV=mVuHS4rlR$$Vd_Ci(P2sn zEqzQapPDtx*qspVxkQwtp_0TC=0Y^t-Quyga?k$Cd~rx0+nGKN!hT&W69s}eQRG~{ z9tB64ckF&(B>vD&pWXIlU(-uly4Me)5s5Bm;S}oEU6ra=7bFZ+sfuVbbeE58u_kujxY zHb%?Hw8E-jb7}d!=FWnOW(IO@e;~TO&QUX7WmEZxiP#;O*AjDjo}7~#*Q-+MIptr& z9B-xAn=F6UWG4>U`GfS;6DgYo{!3GUy*gqNzm!R>XGl|-_8aA4gYK>sR4U|(C~>DvFGnW70k zL|J#ADN!@hAlmq{hIA{sCqJ!^ZP2~@UdWyn_9yd^!OWZ(UKM$Iu+a*u5Y8Wj`|sn| z+>ubz>Y5_JYo+N10Fg^t-1Gh%VoSwcO(fO zUZcBR>m{T{-sq^K;Qd3dxrSZYD@|1?25^Ji-`zzTYlIO*SXM%JR;^LCeB#bt^R!g3 z?nkQY<9Ie=k&Y;;7*j3%;OlWLmpVvHX>(8MF(NsbK@hdAV4!{##gw=n)XB8$~fvkAk(%eH)7I- zuy7!AYFR7SihFKV_lsxz&kirZty9u?upB`aF5wC~On+spXK#dz#nL&*>rUUinkCiCUcce|LwZw-}{1O&e^bYk{` zf-9?h+c!e#E&l#|`!^Ifb8Kg4hvl{-;Pp>~wA4@#D~s6wXaV?O%8MR4RnweohDM0n77GHJrnF1XtWd-dIx=uFez9O*v^Lh4L|N%z-(5Sih@fQ&2y5 z?HpvMG&&U*6a@C05{!_|svY2aMbF2Kj3`W(9UrxC-pK; zy^tB6rebS(1G-CzlyB|x()1^9w;S{`Y3B5naS`&FN)NX+6X&fuZS=W;<2_~U)L9hMxG6h)*b3}Gs3UlcD zmTH_4ckQFJiPZ;<8p9T)9oKM{#e6Rl=zH-#%qV6tukA$fUGJE^!HDWHCN#E4TC@q= zShnD5Ec5C^F>KpvW1csixIYhi8TUTFskRw;WkmLKQq(d1=j2?TyF9Bb3^o(S(mfxv4#{Y4Es=>zBk7}c_8Q;zS9r4SDIZw`mm_C z;ClJ11ddm;4~e>tAyrLC^Z#Bao?$u67+QDY|8k&K;@1h6sjAXw5A-tA3^sAOlJ)HZtkDbrK{^{u70JxJ|BYQKy7? zk)A)=D(U~UEG^U(g_CoCaqUA=c9uz$t1{%CFJ(Gnm4By9x?1fU8@g>%azY%YR>ts4 zb3?XeUbGuc3GXjTS+>LTmsGB{^62EU?`PB4bZfVwjQ`L+E5>5nhW_w9dNonm{u%s! z&QCzdg@y`?={ETuNEU{b!PZwpc)z^!Iph+bJt~i9k+1&B?Q8I19rryAa=pQ$kht3e zH)SU-jryO26^yRk4h~l79$pPhA( zE825z1DZf+)$zg~so;XO>0*^y@El{)UQXCM2p(U{^I`jwTz7cSC&cE#Zy=-}c$Uw= z0f0Q$e*0+0Le?yxdU^N+>*GKLrJ;UtYu(CXl7k&rI8*eArQ7KtVK zv{VMN>E=CL27-cMVKggmY9JQN0QGv;emPQs&~UZQhXRpnsd5vErGtFuMR{rD)bnEb zPJT|Ie+|qgZKbi)1jq7k8Uy#zQJG#h(gg^R0xv8eciBtv5ACd{d1?k^%QDFyX)Fkc zAUN$wa{>%Yn#T5$6ph+Efns&)CB}o3^YDlHzBmzT7@X!zVGRrQGCF2)IDI*J3Gcn2{pQSq z<(ElEo*QQL63q=Ym=&azaqw=ssF-Zx0UZ#NRfIlMavx(v%EZ&>Q^OXn#Y42>kY)I|0J5 zWZwd@tMdFYXbqGGpuM0ZTY+@^kTyLdSZZy!W>@5SH&eXLvTUCJFge7K7Tl$!rF-Vi z0sPWT5yJLcU`$`M-%m#%RxG40^dgzrBnliRiEy8RacOkMcC1zZYz6@@UqNYh&2^)8@s zrQYl@pMg?K*%XD^B<=%fNhr*-MnC)RhGo5TGz%Vn+yS@^YzIPv1_8Ce{ZDpV$tD@7 znAIpu#2Q6QfO%|4Q=6^K{4KxjoFVM#uk(_ahsi`5C85qa;YiRO{J+1gKtLWb$zW;Y z=uXb-)8uQ!yrzZl?YND%B0!cv0GBL>U&@h1fx1!YR8%W%u77P0-#;W7W+;rBiecFqaVu!X!OjZ8084T(x=hO`4nDJkQ) z0M+-$Q^A3Yu3s@0^7B7zHtyO_k0s{OWCjmy$)u5KyV^03ch#b^L4lYVx(2p@iZx_U z;F=p9vknMa!kSF@_p{z`_zNd6hEPiDU%3H_W()&*Hiks}`!y6ao3Y-e*S&Sa%9R*? 
z644+!=lBjqL@cF6(VDLixVu@~hP&&NF6kIJcFG>wTjTE0R#S%1)st@c(?P zLGAbpXH>0IJm5xeNF$!sLFSREKkzA#9O5@48hZMud*N@oR~%7`K%*%+zx6nDe>^$l zaost9VNJZ4z;{6lFLCxr&q@a&d%sC0VWiMcMwJjEd_h!(~E`T+rX0@5;2>XYNSNnnCK zz>U)Sg)UlOJJ=3h<@BfeT?^373>-4E{VV&A!@)>DnK3kwv^BOg9X0{2Xz>ss$z@@)Gx@a{XrSrf>5O@FCU z1YFB^)snUe=_^)USi(bLP0piOVEh{kA0G`f4Id9c@wWS%5i9xGAJ2fsJM*gZFc}gu zx~A~akBmkUOFN4mLY2_4!5xO)e=4HZu1_6fs#$cJmYmaDfq52nJgh5R)0}MFW{Ka6 z-;M%-=nCYV^x^#Pt3kKwKFBRFg0awCpL>T<(@FIqE*4x3vT9rK zqbsbhiDpK+cKsNeP$zFxI@b_?3vJIgE!L})^;qG)|EH~YfMQ*+Q9UsOT4&Pt7=M_jt<7;#?&d3>kS8y*LN z#(?{<-4tXS9>G8e)H4ot1GUPhwT@G2u2_Y|hY{n?o6vvDno&PiZj3U45TW}L4*hc)zZNkuU^SnZ4whfW+a zR$rw0N=HQ_u2@YrPUiYMK$85)&|koU9w2|h3{(kW-X{MJca4N0Li+iA9@dU7n{sET^zS z2uqh=Z*J^M12-iiPnC@ zGTpr@^sQH@!aQf^^{3-K?GkmYepp0XSi{DMMhzmlIvnHg;ElSfoc1e`4LY2z7EG0f zswQd7qXb}yDXk<(biM(=zgSHL4E))IR&u`Yhn&(ZRywYIXUP*(4KcWY=b|3koLle8rPEzKo3{VCWm;mEfmiL!9IIBeHlCVp%DTmBtBs^+m^Ks-j_f-NLEu=Is#tO70im zJ9))oc**>_^?5JPy3Z-R{1h;wKbQ7-*!aymOEq{4T69b+!5N2#xc1`r6qVWvoQ1Q& zH=UhQ8!g5cy0$PJBzq-~?~DZhrn8vU;P((qcDZLVCTj7VarM~I@NaoKBm6Nav7r1- zgD6Gk>x$5EE{2IvjpFsdX@=-DyvD7fC36+XRij4Oi%#T*VfB1uO*}Z7I)e!uKqS#| zlck9+r7(wX3%J07e(P1K0@{U~w%yG@(GJj1*`?Ed$Tx+#6l1c@A%6Z0t_BU7cKg8? zY#=lCW&#afVwxg;!djC_84YLth?snoQE^(xsh9sg^nuChxKKEgWoj~=sD6HEKWCG> zli^@%;JbN)AzeF41Q!<+wb~{)oiAPQ@i(1|TfON}zH(ZvccgprW}=vIad8DQKo*uw8N2_* z#cVkYt>H0O_W_L{lsyNkFK7CN!HCRs>Cc)Lh^0zRw#+9XAg0iyj=-?-i0C#F-s979 z5tV3lM}kjuIeCXW=%VM&H1r#H@rdeeD;4{_kMb+W`ZIzuZF@uyxZVzOeOV6?S@q7P zryils>KeH(1zl9MiXWnwNNL!uf4&V}n0vNTx(T+bY28`c%b9*S$&&Xa3b_IKqjT>H zUl7cY7sXb3UJ?2;XcH4D>e3gwh*7CpBkzg`{D^83Nxq3f6Cye#OR@XB6l1cjoKhY7lFRs6 zaCAJN)ncIckp{$Yh6%R;Wq2$ARx}0V<8{FFgZMke)y)l95SiyY<_qz~Ti2OJs;7T# zq2s3BlrzHZ4W?^=DU!?`6M1LQ{fh>r5LAu(Z#dj(@p+A#wcg>eC8F@;qOTpYe_J z@`{RR>6}hsz?BC>T2)h15||M++Rohfp~?1*SLWslHUaz?``E!s66k8_*w6+Ym(xSV zlI>Pxb<3eor7Gn)7}P2ucQp$-9#&U(K*l7s_q(!Fh8u&j0TG`7Eg8nXL?ihV)}*~;PeIb*#TUW zu4x<&WCdy2*~Etj2dLJ3wVy_yo}ZtmYiM|pm-1`Udj*w}_qWjxj*bdXZQ18I-+!86 z74m+3hQf_I)F;Bm25tP}MS|H3-|_Nl-#*y?n&}f#rr_~wbcIo^r2Xk-3oa+*43~IfsArH3hd0ns&IGJx7`(23b(KpLLQ;)5nTBA=Kg!v zNw!(MSi&=>k2|8`TA9Eo9Y4k$*~K|h99eF1DkQGLWY&P{rwn{zyy(%d%Q#2#k9m(k zs0;5&{Pfj11f$^w)yercjCF?cHIkeECNFrC-Xc2T?BFC{r`WV`cWtJDM_%{+PKNv` zLOx=C$55`vkFUI)O)n#M`FyjL@~~Io_B^vrC9$Va7I{AJqZYlDClEfo-+Y!iXfb`9 zvqU+=s93}aq9pNC$H#K#s!e3?{SK(C0=~$165`{JJ!oO-OU(Y|>JYeQ-}RV-a+}C& zo1dVL-$IJRP2y@LaihK}+GRMrm#imC$MpG`aY^MQ2gz17xhizG&hf4kWe%=!)f{%S z#R@Dv9O80GK26hDHA$NMNa9v>#FWFFIny8?o+K%;M|nwU#9Oj}b!dMAc4IB!?Zz+V z_+zb>(i-g;Js~`|Ky;~YWPPd{pFK7m@9;ZwO$)7P`AZgBPG%}e<(%8$^(U|@67nAe zdQQ#UJ;V~Q*)lKFX4H+BY9fn`=BjP$_3R|?DV4uO4o`eL3(Iuu zZl3dbQI;Y+AL-2*Oad)A!kB$Al_mDBR2Rs`?q8V7{+U!3xU_58TbqTiUWQaF7wM)T z_C^q)v&RC7!uJ*My%ET9KXkm7>SsYo3LoB=E(%8y3}ajnnm$>ZidhkLET0qrkmITH z#f!&@IyT@RX7$u1CiXtjGUf2m%*H_*%bV5y?2 zE2>2L{6qWV9s~Rhlq@nEv!+<)ntGI1gMR_aA5|8wDEI`Lfu2QMhvtpvYTGrYtQ^Yl z&c`QB57R&J{;7F#cqIbKFR5IS<*eY0_f8lgx@iqGI!^MCO-uPm;G1WONXVX5 z9RXFX$VNs+Ueqf@QJ*Mw4%qzn;|XipC@0oEelFxhPG}J9?VSxc%84u-W{9x+xB69! 
z0B=zV^e5{uWX~AgX*d|WxGrA}b8A{Q@GBuR2do7Tr1=<#JQrR$x12EAfDz^qpm1>B zISY$+mbmN%Z91t_WN}rZA1I`)`$NoKzlUn?Mki;M?9t!!+5hUbj&>W6gAlW^(zSj- z8{hZH;hS?nQ})1R=v12Ic@nI?KN>5Z^CH_7OX(0>xRn@dWHl1m+lyE~5-jH*ccb}^G|@!qLpWQER;bF#@R=WsNMjlvY7 zBfk;$!lU2wVdGhN>~ii}aXhvEi3QA;CH{<3vfBGtYbT-j`M%hb$;f10qn> zFMx&s?9u4&vBMe2C@ZkvKBEFTyJoFT*wgf1lY6-d377{PO_amQ)o&m(7JxAxqdM&i z`)qa@TS$$mPnBx-!65*~npH7Uy(k1Ju;X`>tA%kt$dW^7cG_G<2eEfM0~(xEe+iS5 z=!3@PTwE^autN;BwHJFU(dn{V(55tSsMLn9r)3B<6}ZXza#>SJ{^0}XtzyXbEEp5> z?feM*I6&6WK%l$d-!v@TCPG?(V31z4rH~Rya%h+ew8QOPL;i{P8izvp?_rp0f_<0& za5kGuGWIq3_ICk2SsB2l{*G3sO|9vN4q&&g+6EedMD0MJI8d$(1i1a&_u5N$*>5?j zkZgYPTXf`bBAj+|LFIW$zY8Dg2OPUF57}&h$SwjA6((4b>r)Rb=27Kq%L_o=LOHfI z*6Eh|6_}c#DHk+MeB9FXbANJ6?eW4Ulkdh4N}# zNJ`|N>j<{-VCLqe;+)qO$?gGu+eOMqswhkmbA|tTB{+SXEL<6VjiUe7hwr_fX-uC@|rux#9gI25evus?R}TXvKrZ zd`Y}R(4!Ic!fx?>g}A*fl5+<|>(7C9)n95KJ&JOUzRRCQoHrI{f1t>}H=FPBO1G*W zFH5Qp`v)`(i?r*gE-xJTE7)4PK+s*rbT9|F#B0VyB|6>&V7rYmJi7r_?u-QN^m3s} z9_iWd<5+xNRvi`~UJXXqCS5)PxsKd_hyg^x`*VMKOu9IkgtEiTdk=7e7_ruvx`7fj zg6qm)<*N>e&S2%&!^1qu!2CMKDv^ZT>j52h~3Q|APn;U=9 zq#&Ws?4lZtf(*0iZa#*uk6&QxnT7W^wG^lUz3t|Su)Dn-+zl9(KjnU|*s*$d>bk;!3E9!MbyGJv^bay) zY+x!EL^nbPS)&*5Xe+t-Dcir8ATE$&n`O@J<<|_^ z_5JxQ6h&=yUA#u58T@tW3taC@1)IIli%MeUFYktL$a6?dU1oiJq#@ zwQ-e!$3$)XQCNfpHA|93HamO$JV9GVC7$mp5@{8Pb|93`eik2(zptb57N159@noMm zY@ftFNP3d_FQGwgB!@62`lXJNSoSF1Viag@8N9P|+-UC4LOD20czBGy{UXhmx_^Xh z*lfS`>nDDxE(|w2M(;p8&pWilTn?{uFYY5Jouk z(#*6Lwb}a@F#H$_f*0W>4edt8_@P*aO!#zDh3$Hfu++&dj8?Dme4b84auF?&0>(uQ z;jM_5d390pIkZ81sc+W&+?;TsO!|mMvX_@v+~G};-d9soMzgzK7c7aTOE)(l;wwc}6Ae4gnVz_PLUauA^V%D6z(z2mVKAmml&UGP8m{IAPP4 zhI{gJEv-B&k=Az-E0AAhUub0Kwx^Sc99l%Hg;JW4lQ&rtN^2hXf*xgt$&YRSlvZXk zl(_Tg9P%$iL)m%y73kWvQ+>OZuSf^eHyOXX{n^wgINQ!^FI)78Z*(Jh3ye7mDYqFM z9{FkDP?;Qd$I5p22q~NPyw=6c`kaGYQ~Z`!?feVx*mcxAKlNyRe1E#uGZSW;-=zPSb;;`%4}! 
zZBkvpH+o({j0v)<`dwy)Gw@vx#(9FHdVZ=w7Mm|_g6_X__yMq$8{%Vr2U<}lsVcT01^wZBX3K}sMNJSdb(7I2)u z8X}8RoaPMW#SdB4VgLbEz=?-nu8IM?_W$`M^^mUh zpojs3Gw_0ufYG0cZi?;MSU>){byj2VTf=agtK7?>)?E6rswe6oZ^I;of|;yfOz6cY@!fhU#82gx zGo;CqMnU>HRi=PU1~^24OS=F(-Dfr8oW^(UeEh#wm9WZWy~VCBY_0-fn6;~}acfVj zqY(d)ChhcN(Nyj9yq{1p?HoKV)S$7zZHa)!3Y+Iwco8xxnpsR$Tm7+dM)*DsL(q?<)!*J~8S{W2tH+_y)Q5Yg+=t;Ug#MV-x)EHGg~Z!|cg4 zWLUj^$SbsoCG~zOt_!G)e!U!3xW-8eo7C&FXKh*`?e9(z z?(p5EuObwR409uG^(%LW2$v)!pf-P*whES;-1U%}{ET@|n<`xn)Xvl~XSs?-aw^kG**U6(1%QpsbHx zpHe*+NF2bZ&NCc?V`uMbNB6KB5EX0Gph;<>1iHKrJGON^Jh${_-u1@8-FT}08YY8w z{2Onwf1-1b@j;TxA%@w0ZqM_9O@pUb(uyl1m+$x>JH?E7XvxE`;WPWx$WdkC4w3|~ z4(CH6?G+d`CzQG1kHxk$WZt^4md}HyJb5GHJBNg3W1A3>COJ5QlFt5YQN*gD_F&{_ z{0xd5CZydyr2Y|-45O6PK zP5>kWjaFw`pizlaqN=l7n7B?e*di%JWq_sgc$cddzQYc+wKs;ija)ILUA=kgI071TFs$zEz-Fy8v|P3OK|Qc1U2 zAxXz)l==}_bQY%?)1ZcYgS!T6cCHI( zt|!Z~YnxtyzFl|gHqQ6~Lg{QHSSzj-p;DX9KbTa{?*C-3{vp_aBU0&d$kG*oeSwga zRt(r0^M-&dDg&LzXK!M$AL$ci30zDhXYcqNcBQV8=FakKI&SulYPkuiq8(lUxAvEg z%7W6`SP1MBaHh#`{bUEclJe{mXFo_MZpH4e8tZ4&M?NXwZtJcA!S%!_a8s0&ZU$p?gzNscC{fIRC4}%wH}W7|`Z?hdxk(XN zSlD7w&QAFfALVT|7Z)rfd@hlWHT`qGBkx|K80#Oo*;Hm;Sg zY72jwtNEcbzkqr&DU@~m(@1vAEAJN@)&1X>UN45C!$%<{ajT!wwv~0F56WIXX+W#< zeY@cF)rdz^&|oxC;o6!`N&SbMoviYG4IqwfoC_lhZ@-y_6Ow!jJ*xd z?PUfQnhGybQSxhvJ(Jjs3_{MMGS|$go{xTqgW&d7podG`!h13F%@Hsx%K=zomPp|0 zIiAWsqpYeej7BblE^rKJC(Kq`oE$H<#of>Qu?_&7PytX$yeYJP_V)G)EP8E*lA9jR z7CQphIj|_0)fK>O33IMY4pThbANXq5{3MUeoF8T1ZDl*BbY!edg28V`;!N(dl?{}7 zpdGWGl89`JC_Gs45Ba3I4^m*P_9`{3=|9-uk8{EIJ3pyHO8za&{ zqAdKbrhRj_Ic)@$s$ePtM-SRM`2;U?Z;{8_RW@jamy2UK2o ze|dCpa?0=5Wf}6Y#Io-E>*cA@?JAyCaj=63@%Mtfczj8+zr(4pWB#gSe1TGmepom# z=1N@yfH_GZ07vQAi^MJmMD}Ke0sSJl82$T;ogi>+K5Z|<7)7n|c$nMj(h}vsEDm#o z<$z?SFO=KHdrW{I*Y?e@9D5bYL{=}G%?z9ua8_DeSy`A>8gr;28@@oN3KL&E5lpHj z2r7F-+qU_HG&nwE+!)GoSI$dr>Y{L6XQs48@Ky5rd}0fJp8OV#OW2L(NB1G7{Ty2- z=p|%T(NVcAd*wf9*9nJ}a+;c=%8F!)u081Ikd2ZE!piwL(!+maub~{O;%F&M8nCn| zK$7sztlG|(_k~5_S6)0$BTB1R`X+&!%ICk6zsmZ_B6R&tHgcv%vYH#P( zjVhQ$-Nlp2JU*CiM%JLeYUynI+#StVnWr#-~n zkPQgH>}+fb(u+j2Ky1Db#dWtuYN2G?o@2sr=BK$m{)5rOLv#D$3hIg8`=JF5;#(U* zL7q9o`%6tAubErk*UxPW;ARER(a=~W(x?lFM&XAhiwXqTn45>`906N=W`pIDfKH_< zTt}jgd-0W9T;&cq@A)T4H3&A{>t;#dLkFN~>(fH3WZdKN0=}l+=gfed%uL+sr%+MG zX0wTgq7qErS~<+JUVE}UOlTBIPts!vr!WZuN%YSL{bSTjp2O@2DFW3Y4I5vY z3J?{4Rhe>C;C#J(C;u!^4#yEw)nS`cm}trt%7RvBR>sQWq!an%QmHv&_MUdO<*GbG z;<7Yyp7hHCTgd~Y&mdlUQlS0_h2VN3o0ZwTD7!LrjoUTEuXZn$2GIi8hO-6Rm6&I2 z{lBM}-kYQp-QHG$QxU9l!pY3nGCI;!RBW;ny=ChTHyzBnx5?}|56uR+nW-lj`k^uV zXAwYE`0ft}g~Jmujo3e1i!pgskEP@ah*K>R^p65trn7F)K@2%!mw=nBBy(W-6!pl>{tN~ zjuB^#Qv4#%MHboe8CPykF3&30sXC@yi;pLs(yB3H9`&B{d6(9Rmr4uR# z+}a?8iD`@0A9Q>Ek)Ng?xpll|7k35^k}Qf^_(n$XtBpj~dV8vyi2e!APus`6$Uy=Z zVjScEaMjoz|EXl^INdrle8O+X3a!`Qhmhskr~B~+QtyVYCVva$Y85Duo}{GY7|BQrmN|2no}}AzYVwls=2^OwwxOy z!L6$twB}w!n#>d_LA~1mz!)SxDeo;RHt(2Vx|=-L)*BUXoU3dxrfceR2QQt7U~Bc_ zm)s-3+LVua z`K>|hLJaXja8rCXB*3Q&&O|=pYusP#&g57#=`>jVznsY@9@X5&lkdybt$ft*p2IS& zmU*f`Bf`(?;!yolrLFBsH|`3eC$OC?>>oxDj|466PmlY=z77ap_su!ZQT?Em>7EmQ zwr{cQmxt6`OQ&4shf^5i^lHeCR<=fxoj&88xbEP%&%kc2gzwk*s7XnyD`(*p`~coA zWT6#$BUE&{0~8CyPAqyI{CdMFzb5?<(T}Q5YQKQ#fj(2=SF9Ek7u0!C`d6}bR`Ccm z?9r(f=u-zlD8-W4I4R7FI>u4`=KMZLdi`(Hm;36o+qz-vIrH!#0JMHZ8Uj7J=}Q?P zKulbw!HUQGcR&MaHwl77`$H9ni1Ex5aC`;qB?Tg2fvp->$)Y|bF5AlMXwzQ&8odG- zye5tEQr_K7xBpx&RCx`)2v$9h+}5go93Zl?)W`+>Nd{rj7(jEbN4qBka2p$AL<)4ESt97n zdMzO2I*9YMSud3emP}HGk%t(1%?#V=fqp%nn%V6#9B2R1m+}{0wv$GhTaZ}i1F;pt z4sRt|e-4*^BH;MGRfKxlb66aC({aJ*l)*;@`B;@Ll48MOK?aV%xfZhW0NC#`8rEWU ztk^Sbj($;uNOHZ+(2o26eibFvc0_(Abe`EHC6k>k&=zAaOQz)U@{w9@Xp_CB##Hpe 
z&{qjyyp9Rr044tz|FW7j#Q??UiYdwSfCA?b7Ap!|Uv9UjAgXM*(6Afmxm*A=F@>yh zN{|wtA)`EjAk`2z0{QFLXGq(n4Qxo-ui@=>p!-Gd$n%3adqWBLl<{>I_j5z!;NU=M3X`TG}5e?80YBdkzaQl@7#Dz-`6x>Y4C5+sKG#OF52qt(B!1M#KE%WkjJ}A z0UOhee*-B_w{oGV;Z5&ZFV)TbOACSP)9&W;eKi}vKc!cbqPt1xy^^Rm5adY>T%CB3 zkf|u7D6gocXs+l&9}#H>slU^tBUuCj`0sVzxXkUASbC(|TOth072<3QAvxL)OAS?h zz*aaFVTr@ZK26*Pl4B@9?=OAfYZUv3zzzFNQV67S3D^uW`6gR~1N&ZedSpgF(vs`2 zqdtiVE7HIxln>$RN&aTU=ge26t4#6nvw{K~OlF!kOzqdG3^5hf0KY%i-2zzHWaZ`4 p>Uwu6_tq6m_4LX|e$UqeDyaUP;E_xG7=%9WbTkb#DsJD8_&*f7SR()c From d6a5addc3d348e4b937a9b483e2b479b8a281148 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Jul 2023 00:54:09 +0800 Subject: [PATCH 460/475] Adjust docs --- docs/BestPractices.md | 12 ++++++------ docs/GettingStarted.md | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/BestPractices.md b/docs/BestPractices.md index 723c89c3c..82d0fb004 100644 --- a/docs/BestPractices.md +++ b/docs/BestPractices.md @@ -14,14 +14,14 @@ A dictionary is a .txt file, in which each line represents a mapping rule from o Syllable names and phoneme names can be customized, but with the following limitations/suggestions: -- `SP` (rest), `AP` (breath) and `` (padding) cannot be phoneme names because they are reserved. -- `-` and `+` cannot be phoneme names because they are defined as slur tags in most singing voice synthesis editors. +- `SP` (rest), `AP` (breath) and `` (padding) cannot be used because they are reserved. +- `-` and `+` cannot be used because they are defined as slur tags in most singing voice synthesis editors. - Special characters including but not limited to `@`, `#`, `&`, `|`, `/`, `<`, `>`, etc. should be avoided because they may be used as special tags in the future format changes. Using them now is okay, and all modifications will be notified in advance. -- ASCII characters are preferred for the most encoding compatibility, but all UTF-8 characters are acceptable. +- ASCII characters are preferred for the best encoding compatibility, but all UTF-8 characters are acceptable. ### Add a dictionary -Assume you have made a dictionary file named `my_dict.txt`. Edit your configuration file: +Assume that you have made a dictionary file named `my_dict.txt`. Edit your configuration file: ```yaml dictionary: my_dict.txt @@ -51,7 +51,7 @@ The dictionary used to binarize the dataset will be copied to the binary data di ### Preset dictionaries -There are currently some preset dictionaries for users to use directly: +There are currently some preset dictionaries for you to use directly: | dictionary | filename | description | |:------------------:|:----------------------:|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| @@ -66,7 +66,7 @@ You can submit or propose a new dictionary by raising a topic in [Discussions](h - All syllables and phonemes in the dictionary should have linguistic meanings. Style tags (vocal fry, falsetto, etc.) should not appear in the dictionary. 
- Its syllables should be standard spelling or phonetic transcriptions (like pinyin in mandarin Chinese and romaji in Japanese) for easy integration with G2P modules. - Its phonemes should cover all (or almost all) possible pronunciations in that language. -- Every syllable and every phoneme should have one, and only one certain pronunciation, in all or almost all situations in that language. Some slightly context-based pronunciation differences are allowed as the networks can learn. +- Every syllable and every phoneme should have one, and only one certain pronunciation, in all or almost all situations in that language. Some slight context-based pronunciation differences are allowed as the networks can learn. - Most native speakers/singers of that language should be able to easily cover all phonemes in the dictionary. This means the dictionary should not contain extremely rare or highly customized phonemes of some dialects or accents. - It should not bring too much difficulty and complexity to the data labeling workflow, and it should be easy to use for end users of voicebanks. diff --git a/docs/GettingStarted.md b/docs/GettingStarted.md index 479ad082d..59a7906d1 100644 --- a/docs/GettingStarted.md +++ b/docs/GettingStarted.md @@ -6,13 +6,13 @@ DiffSinger requires Python 3.8 or later. We strongly recommend you create a virtual environment via Conda or venv before installing dependencies. -Install PyTorch 1.13 or later following the [official instructions](https://pytorch.org/get-started/locally/) according to your OS and hardware. +1. Install PyTorch 1.13 or later following the [official instructions](https://pytorch.org/get-started/locally/) according to your OS and hardware. -Install other dependencies via the following command: +2. Install other dependencies via the following command: -```bash -pip install -r requirements.txt -``` + ```bash + pip install -r requirements.txt + ``` ### Pretrained models From dae6b828cf059bae9e05938eac37f054ef2685c9 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Thu, 13 Jul 2023 01:46:08 +0800 Subject: [PATCH 461/475] Update discord server link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index db261b2e0..be08033e7 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ This is a refactored and enhanced version of _DiffSinger: Singing Voice Synthesi - **Best practices & tutorials**: See [Best Practices](docs/BestPractices.md) - **Editing configurations**: See [Configuration Schemas](docs/ConfigurationSchemas.md) - **Deployment & production**: [OpenUTAU for DiffSinger](https://github.com/xunmengshe/OpenUtau), [DiffScope (under development)](https://github.com/SineStriker/qsynthesis-revenge) -- **Communication groups**: [QQ Group](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=fibG_dxuPW5maUJwe9_ya5-zFcIwaoOR&authKey=ZgLCG5EqQVUGCID1nfKei8tCnlQHAmD9koxebFXv5WfUchhLwWxb52o1pimNai5A&noverify=0&group_code=907879266) (907879266), [Discord server](https://discord.gg/k5Uk2RfUFs) +- **Communication groups**: [QQ Group](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=fibG_dxuPW5maUJwe9_ya5-zFcIwaoOR&authKey=ZgLCG5EqQVUGCID1nfKei8tCnlQHAmD9koxebFXv5WfUchhLwWxb52o1pimNai5A&noverify=0&group_code=907879266) (907879266), [Discord server](https://discord.gg/wwbu2JUMjj) ## Progress & Roadmap From cdcfb880a6440ac3d6e11851b8ca60bfd4a974d7 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 14 Jul 2023 17:14:40 +0800 Subject: [PATCH 462/475] Data distributed -> distributed data --- docs/BestPractices.md | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-)

diff --git a/docs/BestPractices.md b/docs/BestPractices.md
index 82d0fb004..c8284795e 100644
--- a/docs/BestPractices.md
+++ b/docs/BestPractices.md
@@ -113,7 +113,7 @@ For more precision options, please check out the official [documentation](https:/
 
 ### Training on multiple GPUs
 
-Using data distributed parallel (DDP) can divide training tasks to multiple GPUs and synchronize gradients and weights between them. DiffSinger have adapted the latest version of PyTorch Lightning for DDP functionalities.
+Using distributed data parallel (DDP) can divide training tasks across multiple GPUs and synchronize gradients and weights between them. DiffSinger has adapted the latest version of PyTorch Lightning for DDP functionalities.
 
 By default, the trainer will utilize all CUDA devices defined in the `CUDA_VISIBLE_DEVICES` environment variable (empty means using all available devices). If you want to specify which GPUs to use, edit your configuration file:
 
From 9fcfd2efb1822373b24053dcb5a27a4995ff3c7d Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 14 Jul 2023 22:49:26 +0800
Subject: [PATCH 463/475] Add phoneme coverage checks to variance binarizer

---
 basics/base_binarizer.py            | 50 +++++++++++++++++++++-
 preprocessing/acoustic_binarizer.py | 60 +----------------------------
 preprocessing/variance_binarizer.py |  4 --
 utils/binarizer_utils.py            |  2 +-
 utils/plot.py                       | 28 +++++++-------
 5 files changed, 66 insertions(+), 78 deletions(-)

diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py
index edde29704..e3e6ad872 100644
--- a/basics/base_binarizer.py
+++ b/basics/base_binarizer.py
@@ -14,6 +14,7 @@
 from utils.indexed_datasets import IndexedDatasetBuilder
 from utils.multiprocess_utils import chunked_multiprocess_run
 from utils.phoneme_utils import build_phoneme_list, locate_dictionary
+from utils.plot import distribution_to_figure
 from utils.text_encoder import TokenTextEncoder
 
 
@@ -167,7 +168,54 @@ def process(self):
         )
 
     def check_coverage(self):
-        raise NotImplementedError()
+        # Group by phonemes in the dictionary.
+        ph_required = set(build_phoneme_list())
+        phoneme_map = {}
+        for ph in ph_required:
+            phoneme_map[ph] = 0
+        ph_occurred = []
+
+        # Load and count those phones that appear in the actual data
+        for item_name in self.items:
+            ph_occurred += self.items[item_name]['ph_seq']
+            if len(ph_occurred) == 0:
+                raise BinarizationError(f'Empty tokens in {item_name}.')
+        for ph in ph_occurred:
+            if ph not in ph_required:
+                continue
+            phoneme_map[ph] += 1
+        ph_occurred = set(ph_occurred)
+
+        print('===== Phoneme Distribution Summary =====')
+        for i, key in enumerate(sorted(phoneme_map.keys())):
+            if i == len(ph_required) - 1:
+                end = '\n'
+            elif i % 10 == 9:
+                end = ',\n'
+            else:
+                end = ', '
+            print(f'\'{key}\': {phoneme_map[key]}', end=end)
+
+        # Draw graph.
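+        # (The counts gathered above are rendered below as a bar chart through
+        # utils.plot.distribution_to_figure and saved into the binary data
+        # directory, so sparsely covered phonemes are easy to spot.)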
+ x = sorted(phoneme_map.keys()) + values = [phoneme_map[k] for k in x] + plt = distribution_to_figure( + title='Phoneme Distribution Summary', + x_label='Phoneme', y_label='Number of occurrences', + items=x, values=values + ) + filename = self.binary_data_dir / 'phoneme_distribution.jpg' + plt.savefig(fname=filename, + bbox_inches='tight', + pad_inches=0.25) + print(f'| save summary to \'{filename}\'') + + # Check unrecognizable or missing phonemes + if ph_occurred != ph_required: + unrecognizable_phones = ph_occurred.difference(ph_required) + missing_phones = ph_required.difference(ph_occurred) + raise BinarizationError('transcriptions and dictionary mismatch.\n' + f' (+) {sorted(unrecognizable_phones)}\n' + f' (-) {sorted(missing_phones)}') def process_dataset(self, prefix, num_workers=0, apply_augmentation=False): args = [] diff --git a/preprocessing/acoustic_binarizer.py b/preprocessing/acoustic_binarizer.py index 74a9ecdd0..8eb02db0d 100644 --- a/preprocessing/acoustic_binarizer.py +++ b/preprocessing/acoustic_binarizer.py @@ -12,11 +12,10 @@ import random from copy import deepcopy -import matplotlib.pyplot as plt import numpy as np import torch -from basics.base_binarizer import BaseBinarizer, BinarizationError +from basics.base_binarizer import BaseBinarizer from modules.fastspeech.tts_modules import LengthRegulator from modules.vocoders.registry import VOCODERS from utils.binarizer_utils import ( @@ -27,7 +26,6 @@ get_breathiness_pyworld ) from utils.hparams import hparams -from utils.phoneme_utils import build_phoneme_list os.environ["OMP_NUM_THREADS"] = "1" ACOUSTIC_ITEM_ATTRIBUTES = [ @@ -78,60 +76,6 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id): ) self.items.update(meta_data_dict) - def check_coverage(self): - # Group by phonemes in the dictionary. - ph_required = set(build_phoneme_list()) - phoneme_map = {} - for ph in ph_required: - phoneme_map[ph] = 0 - ph_occurred = [] - # Load and count those phones that appear in the actual data - for item_name in self.items: - ph_occurred += self.items[item_name]['ph_seq'] - if len(ph_occurred) == 0: - raise BinarizationError(f'Empty tokens in {item_name}.') - for ph in ph_occurred: - if ph not in ph_required: - continue - phoneme_map[ph] += 1 - ph_occurred = set(ph_occurred) - - print('===== Phoneme Distribution Summary =====') - for i, key in enumerate(sorted(phoneme_map.keys())): - if i == len(ph_required) - 1: - end = '\n' - elif i % 10 == 9: - end = ',\n' - else: - end = ', ' - print(f'\'{key}\': {phoneme_map[key]}', end=end) - - # Draw graph. 
- plt.figure(figsize=(int(len(ph_required) * 0.8), 10)) - x = sorted(phoneme_map.keys()) - values = [phoneme_map[k] for k in x] - plt.bar(x=x, height=values) - plt.tick_params(labelsize=15) - plt.xlim(-1, len(ph_required)) - for a, b in zip(x, values): - plt.text(a, b, b, ha='center', va='bottom', fontsize=15) - plt.grid() - plt.title('Phoneme Distribution Summary', fontsize=30) - plt.xlabel('Phoneme', fontsize=20) - plt.ylabel('Number of occurrences', fontsize=20) - filename = self.binary_data_dir / 'phoneme_distribution.jpg' - plt.savefig(fname=filename, - bbox_inches='tight', - pad_inches=0.25) - print(f'| save summary to \'{filename}\'') - # Check unrecognizable or missing phonemes - if ph_occurred != ph_required: - unrecognizable_phones = ph_occurred.difference(ph_required) - missing_phones = ph_required.difference(ph_occurred) - raise BinarizationError('transcriptions and dictionary mismatch.\n' - f' (+) {sorted(unrecognizable_phones)}\n' - f' (-) {sorted(missing_phones)}') - @torch.no_grad() def process_item(self, item_name, meta_data, binarization_args): if hparams['vocoder'] in VOCODERS: @@ -179,7 +123,7 @@ def process_item(self, item_name, meta_data, binarization_args): processed_input['energy'] = energy.cpu().numpy() if self.need_breathiness: - # get ground truth energy + # get ground truth breathiness breathiness = get_breathiness_pyworld(wav, gt_f0 * ~uv, length, hparams).astype(np.float32) global breathiness_smooth diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 3c9f3d0e4..61ecccf11 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -82,10 +82,6 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id): self.items.update(meta_data_dict) - def check_coverage(self): - print('Coverage checks are temporarily skipped.') - pass - @torch.no_grad() def process_item(self, item_name, meta_data, binarization_args): seconds = sum(meta_data['ph_dur']) diff --git a/utils/binarizer_utils.py b/utils/binarizer_utils.py index 84187d076..786bbf330 100644 --- a/utils/binarizer_utils.py +++ b/utils/binarizer_utils.py @@ -1,9 +1,9 @@ import warnings import librosa -import torch import numpy as np import pyworld as pw +import torch warnings.filterwarnings("ignore") diff --git a/utils/plot.py b/utils/plot.py index 2f7e7f836..9a1971e76 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -15,20 +15,6 @@ def spec_to_figure(spec, vmin=None, vmax=None): return fig -def spec_f0_to_figure(spec, f0s, figsize=None): - max_y = spec.shape[1] - if isinstance(spec, torch.Tensor): - spec = spec.detach().cpu().numpy() - f0s = {k: f0.detach().cpu().numpy() for k, f0 in f0s.items()} - f0s = {k: f0 / 10 for k, f0 in f0s.items()} - fig = plt.figure(figsize=(12, 6) if figsize is None else figsize) - plt.pcolor(spec.T) - for i, (k, f0) in enumerate(f0s.items()): - plt.plot(f0.clip(0, max_y), label=k, c=LINE_COLORS[i], linewidth=1, alpha=0.8) - plt.legend() - return fig - - def dur_to_figure(dur_gt, dur_pred, txt): if isinstance(dur_gt, torch.Tensor): dur_gt = dur_gt.cpu().numpy() @@ -75,3 +61,17 @@ def curve_to_figure(curve_gt, curve_pred=None, curve_base=None, grid=None): plt.legend() plt.tight_layout() return fig + + +def distribution_to_figure(title, x_label, y_label, items: list, values: list, zoom=0.8): + fig = plt.figure(figsize=(int(len(items) * zoom), 10)) + plt.bar(x=items, height=values) + plt.tick_params(labelsize=15) + plt.xlim(-1, len(items)) + for a, b in zip(items, values): + plt.text(a, b, b, ha='center', 
va='bottom', fontsize=15)
+    plt.grid()
+    plt.title(title, fontsize=30)
+    plt.xlabel(x_label, fontsize=20)
+    plt.ylabel(y_label, fontsize=20)
+    return fig

From a7210f98abcbe45899eefb1a31e7e39a198473d1 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Fri, 14 Jul 2023 22:49:49 +0800
Subject: [PATCH 464/475] Remove dataset making pipeline from this repository

---
 preparation/.gitignore                 |    4 -
 preparation/acoustic_preparation.ipynb | 1652 ------------------------
 preparation/assets/2001000001.lab      |    1 -
 preparation/assets/2001000001.wav      |  Bin 359718 -> 0 bytes
 preparation/requirements.txt           |   10 -
 preparation/utils/distribution.py      |   14 -
 preparation/utils/slicer2.py           |  103 --
 7 files changed, 1784 deletions(-)
 delete mode 100644 preparation/.gitignore
 delete mode 100644 preparation/acoustic_preparation.ipynb
 delete mode 100644 preparation/assets/2001000001.lab
 delete mode 100644 preparation/assets/2001000001.wav
 delete mode 100644 preparation/requirements.txt
 delete mode 100644 preparation/utils/distribution.py
 delete mode 100644 preparation/utils/slicer2.py

diff --git a/preparation/.gitignore b/preparation/.gitignore
deleted file mode 100644
index 165dd93fb..000000000
--- a/preparation/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-assets/mfa-*/
-assets/*.zip
-segments/
-textgrids/
diff --git a/preparation/acoustic_preparation.ipynb b/preparation/acoustic_preparation.ipynb
deleted file mode 100644
index 986a667c9..000000000
--- a/preparation/acoustic_preparation.ipynb
+++ /dev/null
@@ -1,1652 +0,0 @@
-{
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "\n",
-    "# Prepare your own dataset for DiffSinger (MIDI-less version)\n",
-    "\n",
-    "## 1 Overview\n",
-    "\n",
-    "This Jupyter Notebook will guide you through preparing your own dataset for DiffSinger with 44.1 kHz sampling rate.\n",
-    "Please read and follow the guidance carefully, take action when there is a notice for manual action, and pay attention to blocks marked as optional steps.\n",
-    "\n",
-    "### 1.1 Introduction to this pipeline and MIDI-less version\n",
-    "\n",
-    "This pipeline does not support customized phoneme dictionaries. It uses the [opencpop strict pinyin dictionary](../dictionaries/opencpop-extension.txt) by default.\n",
-    "\n",
-    "The MIDI-less version is a simplified version of DiffSinger where MIDI layers, word layers and slur layers are removed from the data labels. The model uses the raw phoneme sequence with durations as input, and applies pitch embedding directly from the ground truth. Predictors for phoneme durations and pitch curve are also removed. Below are some limitations and advantages of the MIDI-less version:\n",
-    "\n",
-    "- The model will not predict phoneme durations and f0 sequence by itself. You must specify `ph_dur` and `f0_seq` at inference time.\n",
-    "- Performance of pitch control will be better than MIDI-A version, because MIDI keys are misleading information for the diffusion decoder when f0 sequence is already embedded.\n",
-    "- MIDIs and slurs do not need to be labeled, thus the labeling work is easier than for other versions.\n",
-    "- More varieties of data can be used as training materials, even including speech.\n",
-    "\n",
-    "### 1.2 Install dependencies\n",
-    "\n",
-    "Please run the following cell the first time you start this notebook.\n",
-    "\n",
-    "**Note**: You should ensure you are in a Conda environment with Python 3.8 or 3.9 before you install dependencies of this pipeline.\n"
-   ]
-  },
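> For illustration (an editor's sketch, not part of the original notebook): the first bullet above says a MIDI-less model consumes explicit phoneme durations and an f0 curve at inference time. The `ph_seq`/`ph_dur` names below mirror the `transcriptions.csv` columns built in Section 4.1; the exact `f0_seq` representation (here, a hypothetical frame-level pitch curve in Hz) is an assumption.

```python
# Hypothetical MIDI-less inference input; all values are made up for illustration.
inference_input = {
    'ph_seq': ['SP', 'w', 'o', 'SP'],        # phoneme names from the dictionary
    'ph_dur': [0.25, 0.08, 0.45, 0.15],      # one duration per phoneme, in seconds
    'f0_seq': [233.1, 233.8, 234.5, 235.0],  # assumed frame-level pitch curve in Hz
}
assert len(inference_input['ph_seq']) == len(inference_input['ph_dur'])
```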
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "%conda install -c conda-forge montreal-forced-aligner==2.0.6 --yes\n",
-    "%pip install -r requirements.txt\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### 1.3 Initializing environment\n",
-    "\n",
-    "Please run the following cell every time you start this notebook.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "import csv\n",
-    "import glob\n",
-    "import os\n",
-    "import shutil\n",
-    "import sys\n",
-    "\n",
-    "import librosa\n",
-    "import matplotlib.pyplot as plt\n",
-    "import numpy as np\n",
-    "import parselmouth as pm\n",
-    "import soundfile\n",
-    "import textgrid as tg\n",
-    "import tqdm\n",
-    "\n",
-    "\n",
-    "def length(src: str):\n",
-    "    if os.path.isfile(src) and src.endswith('.wav'):\n",
-    "        return librosa.get_duration(filename=src) / 3600.\n",
-    "    elif os.path.isdir(src):\n",
-    "        total = 0\n",
-    "        for ch in [os.path.join(src, c) for c in os.listdir(src)]:\n",
-    "            total += length(ch)\n",
-    "        return total\n",
-    "    return 0\n",
-    "\n",
-    "\n",
-    "print('Environment initialized successfully.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 2 Raw recordings and audio slicing\n",
-    "\n",
-    "### 2.1 Choose raw recordings\n",
-    "\n",
-    "Your recordings must meet the following conditions:\n",
-    "\n",
-    "1. They must be in one single folder. Files in sub-folders will be ignored.\n",
-    "2. They must be in WAV format.\n",
-    "3. They must have a sampling rate higher than 32 kHz.\n",
-    "4. They should be clean, unaccompanied voices with no significant noise or reverb.\n",
-    "5. They should contain only voices from one single human.\n",
-    "\n",
-    "NOTICE: Before you train a model, you must obtain permission from the copyright holder of the dataset and make sure the provider is fully aware that you will train a model from their data, that you will or will not distribute the synthesized voices and model weights, and of the potential risks of this kind of activity.\n",
-    "\n",
-    "> **Tips for building multiple datasets to train combined models**\n",
-    ">\n",
-    "> If you have multiple speakers/singers, or you have multiple styles/timbres from one person, please make one dataset for each speaker/singer/style/timbre separately. 
Then you are able to configure parameters and settings for preprocessing and training combined models from these datasets following instructions in **Section 5** of this pipeline.\n", - "\n", - "Optional step: The raw data must be sliced into parts of about 5-15 seconds. If you want to do this yourself, please skip to section 2.3. Otherwise, please edit paths in the following cell before you run it.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false, - "jupyter": { - "outputs_hidden": false - } - }, - "outputs": [], - "source": [ - "########################################\n", - "\n", - "# Configuration for data paths\n", - "raw_path = r'path/to/your/raw/recordings' # Path to your raw, unsliced recordings\n", - "\n", - "########################################\n", - "\n", - "assert os.path.exists(raw_path) and os.path.isdir(raw_path), 'The chosen path does not exist or is not a directory.'\n", - "print('Raw recording path:', raw_path)\n", - "print()\n", - "print('===== Recording List =====')\n", - "raw_filelist = glob.glob(f'{raw_path}/*.wav', recursive=True)\n", - "raw_length = length(raw_path)\n", - "if len(raw_filelist) > 5:\n", - " print('\\n'.join(raw_filelist[:5] + [f'... ({len(raw_filelist) - 5} more)']))\n", - "else:\n", - " print('\\n'.join(raw_filelist))\n", - "print()\n", - "print(f'Found {len(raw_filelist)} valid recordings with total length of {round(raw_length, 2)} hours.')\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2.2 Audio slicing\n", - "\n", - "We provide an audio slicer which automatically cuts recordings into short pieces.\n", - "\n", - "The audio slicer is based on silence detection and has several arguments that have to be specified. 
You should modify these arguments according to your data.\n", - "\n", - "For more details of each argument, see its [GitHub repository](https://github.com/openvpi/audio-slicer).\n", - "\n", - "Please edit paths and arguments in the following cell before you run it.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false, - "jupyter": { - "outputs_hidden": false - } - }, - "outputs": [], - "source": [ - "########################################\n", - "\n", - "# Configuration for data paths\n", - "sliced_path = r'path/to/your/sliced/recordings' # Path to hold the sliced segments of your recordings\n", - "\n", - "# Slicer arguments\n", - "db_threshold_ = -40.\n", - "min_length_ = 5000\n", - "min_interval_ = 300\n", - "hop_size_ = 10\n", - "max_sil_kept_ = 500\n", - "\n", - "########################################\n", - "\n", - "assert 'raw_path' in locals().keys(), 'Raw path of your recordings has not been specified.'\n", - "assert not os.path.exists(sliced_path) or os.path.isdir(sliced_path), 'The chosen path is not a directory.'\n", - "os.makedirs(sliced_path, exist_ok=True)\n", - "print('Sliced recording path:', sliced_path)\n", - "\n", - "from utils.slicer2 import Slicer\n", - "\n", - "for file in tqdm.tqdm(raw_filelist):\n", - " y, sr = librosa.load(file, sr=None, mono=True)\n", - " slicer = Slicer(\n", - " sr=sr,\n", - " threshold=db_threshold_,\n", - " min_length=min_length_,\n", - " min_interval=min_interval_,\n", - " hop_size=hop_size_,\n", - " max_sil_kept=max_sil_kept_\n", - " )\n", - " chunks = slicer.slice(y)\n", - " for i, chunk in enumerate(chunks):\n", - " soundfile.write(os.path.join(sliced_path, f'%s_slice_%04d.wav' % (os.path.basename(file).rsplit('.', maxsplit=1)[0], i)), chunk, sr)\n", - "\n", - "print()\n", - "print('===== Segment List =====')\n", - "sliced_filelist = glob.glob(f'{sliced_path}/*.wav', recursive=True)\n", - "sliced_length = length(sliced_path)\n", - "if len(sliced_filelist) > 5:\n", - " print('\\n'.join(sliced_filelist[:5] + [f'... ({len(sliced_filelist) - 5} more)']))\n", - "else:\n", - " print('\\n'.join(sliced_filelist))\n", - "print()\n", - "print(f'Sliced your recordings into {len(sliced_filelist)} segments with total length of {round(sliced_length, 2)} hours.')\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2.3 Validating recording segments\n", - "\n", - "In this section, we validate your recording segments.\n", - "\n", - "Optional step: If you skipped section 2.2, please specify the path to your sliced recordings in the following cell and run it. 
Otherwise, skip this cell.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# Configuration for data paths\n",
-    "sliced_path = r'path/to/your/sliced/recordings'  # Path to your sliced segments of recordings\n",
-    "\n",
-    "########################################\n",
-    "\n",
-    "assert os.path.exists(sliced_path) and os.path.isdir(sliced_path), 'The chosen path does not exist or is not a directory.'\n",
-    "\n",
-    "print('Sliced recording path:', sliced_path)\n",
-    "print()\n",
-    "print('===== Segment List =====')\n",
-    "sliced_filelist = glob.glob(f'{sliced_path}/*.wav', recursive=True)\n",
-    "sliced_length = length(sliced_path)\n",
-    "if len(sliced_filelist) > 5:\n",
-    "    print('\\n'.join(sliced_filelist[:5] + [f'... ({len(sliced_filelist) - 5} more)']))\n",
-    "else:\n",
-    "    print('\\n'.join(sliced_filelist))\n",
-    "print()\n",
-    "print(f'Found {len(sliced_filelist)} valid segments with total length of {round(sliced_length, 2)} hours.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Run the following cell to check if there are segments with an unexpected length (less than 2 seconds or more than 20 seconds).\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "reported = False\n",
-    "for file in tqdm.tqdm(sliced_filelist):\n",
-    "    wave_seconds = librosa.get_duration(filename=file)\n",
-    "    if wave_seconds < 2.:\n",
-    "        reported = True\n",
-    "        print(f'Too short! \\'{file}\\' has a length of {round(wave_seconds, 1)} seconds!')\n",
-    "    if wave_seconds > 20.:\n",
-    "        reported = True\n",
-    "        print(f'Too long! \\'{file}\\' has a length of {round(wave_seconds, 1)} seconds!')\n",
-    "if not reported:\n",
-    "    print('Congratulations! All segments have proper length.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Manual action: please consider removing segments too short and manually slicing segments too long, as reported above.\n",
-    "\n",
-    "Move on when this is done or there are no segments reported.\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 3 Label your segments\n",
-    "\n",
-    "### 3.1 Label syllable sequence\n",
-    "\n",
-    "All segments should have their transcriptions (or lyrics) annotated. Run the following cell to see the example segment (from the Opencpop dataset) and its corresponding annotation.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "from IPython.display import Audio\n",
-    "\n",
-    "# noinspection PyTypeChecker\n",
-    "display(Audio(filename='assets/2001000001.wav'))\n",
-    "with open('assets/2001000001.lab', 'r') as f:\n",
-    "    print(f.read())\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Manual action: now your task is to annotate transcriptions for each segment like the example shown above.\n",
-    "\n",
-    "Each segment should have one annotation file with the same filename as it and the `.lab` extension, placed in the same directory. In the annotation file, you should write all syllables sung or spoken in this segment. Syllables should be split by spaces, and only syllables that appear in the dictionary are allowed. In addition, all phonemes in the dictionary should be covered in the annotations.\n",
-    "\n",
-    "**Special notes**: `AP` and `SP` should not appear in the annotation.\n",
-    "\n",
-    "**News**: We developed [MinLabel](https://github.com/SineStriker/qsynthesis-revenge/tree/main/src/Test/MinLabel), a simple yet efficient tool to help finish this step. You can download the binary executable for Windows [here](https://huggingface.co/datasets/fox7005/tool/resolve/main/MinLabel%200.0.1.6.zip).\n",
-    "\n",
-    "Optional step: if you want us to help you create all empty `lab` files (instead of creating them yourself), please run the following cell.\n"
-   ]
-  },
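> For illustration (an editor's sketch, not part of the original notebook): the `.lab` format described above is just space-separated syllables, and the checking cell further below parses the dictionary as one tab-separated mapping per line. For a made-up segment `my_song_slice_0001.wav`, the files could look like:

```
my_song_slice_0001.lab:    wo men de ai

opencpop-extension.txt (excerpt, syllable<TAB>phonemes):
wo	w o
men	m en
de	d e
ai	ai
```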
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "for file in tqdm.tqdm(sliced_filelist):\n",
-    "    filename = os.path.basename(file)\n",
-    "    name_without_ext = filename.rsplit('.', maxsplit=1)[0]\n",
-    "    annotation = os.path.join(sliced_path, f'{name_without_ext}.lab')\n",
-    "    if not os.path.exists(annotation):\n",
-    "        with open(annotation, 'a'):\n",
-    "            ...\n",
-    "print('Creating missing lab files done.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Run the following cell to see if all segments are annotated and all annotations are valid. If there are failed checks, please fix them and run again.\n",
-    "\n",
-    "A summary of your phoneme coverage will be generated. If there are some phonemes that have extremely few occurrences (for example, less than 20), it is highly recommended to add more recordings to cover these phonemes.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "import utils.distribution as dist\n",
-    "\n",
-    "# Load dictionary\n",
-    "dict_path = '../dictionaries/opencpop-extension.txt'\n",
-    "with open(dict_path, 'r', encoding='utf8') as f:\n",
-    "    rules = [ln.strip().split('\\t') for ln in f.readlines()]\n",
-    "dictionary = {}\n",
-    "phoneme_set = set()\n",
-    "for r in rules:\n",
-    "    phonemes = r[1].split()\n",
-    "    dictionary[r[0]] = phonemes\n",
-    "    phoneme_set.update(phonemes)\n",
-    "\n",
-    "# Run checks\n",
-    "check_failed = False\n",
-    "covered = set()\n",
-    "phoneme_map = {}\n",
-    "for ph in sorted(phoneme_set):\n",
-    "    phoneme_map[ph] = 0\n",
-    "\n",
-    "segment_pairs = []\n",
-    "\n",
-    "for file in tqdm.tqdm(sliced_filelist):\n",
-    "    filename = os.path.basename(file)\n",
-    "    name_without_ext = filename.rsplit('.', maxsplit=1)[0]\n",
-    "    annotation = os.path.join(sliced_path, f'{name_without_ext}.lab')\n",
-    "    if not os.path.exists(annotation):\n",
-    "        print(f'No annotation found for \\'{filename}\\'!')\n",
-    "        check_failed = True\n",
-    "        continue\n",
-    "    with open(annotation, 'r', encoding='utf8') as f:\n",
-    "        syllables = f.read().strip().split()\n",
-    "    if not syllables:\n",
-    "        print(f'Annotation file \\'{annotation}\\' is empty!')\n",
-    "        check_failed = True\n",
-    "    else:\n",
-    "        oov = []\n",
-    "        for s in syllables:\n",
-    "            if s not in dictionary:\n",
-    "                oov.append(s)\n",
-    "            else:\n",
-    "                for ph in dictionary[s]:\n",
-    "                    phoneme_map[ph] += 1\n",
-    "                covered.update(dictionary[s])\n",
-    "        if oov:\n",
-    "            print(f'Syllable(s) {oov} not allowed in annotation file \\'{annotation}\\'')\n",
-    "            check_failed = True\n",
-    "\n",
-    "# Phoneme coverage\n",
-    "uncovered = phoneme_set - covered\n",
-    "if uncovered:\n",
-    "    print(f'The following phonemes are not covered!')\n",
-    "    print(sorted(uncovered))\n",
-    "    print('Please add more recordings to cover these phonemes.')\n",
-    "    check_failed = True\n",
-    "\n",
-    "if not check_failed:\n",
-    "    print('Congratulations! All annotations are well prepared.')\n",
-    "    print('Here is a summary of your phoneme coverage.')\n",
-    "\n",
-    "phoneme_list = sorted(phoneme_set)\n",
-    "phoneme_counts = [phoneme_map[ph] for ph in phoneme_list]\n",
-    "dist.draw_distribution(\n",
-    "    title='Phoneme Distribution Summary',\n",
-    "    x_label='Phoneme',\n",
-    "    y_label='Number of occurrences',\n",
-    "    items=phoneme_list,\n",
-    "    values=phoneme_counts\n",
-    ")\n",
-    "phoneme_summary = os.path.join(sliced_path, 'phoneme_distribution.jpg')\n",
-    "plt.savefig(fname=phoneme_summary,\n",
-    "            bbox_inches='tight',\n",
-    "            pad_inches=0.25)\n",
-    "plt.show()\n",
-    "print(f'Summary saved to \\'{phoneme_summary}\\'.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### 3.2 Forced alignment\n",
-    "\n",
-    "Given the transcriptions of each segment, we are able to align the phoneme sequence to its corresponding audio, thus obtaining position and duration information of each phoneme.\n",
-    "\n",
-    "We use [Montreal Forced Aligner](https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner) to do forced phoneme alignment.\n",
-    "\n",
-    "To run MFA alignment, please first run the following cell to resample all recordings to 16 kHz. The resampled recordings and copies of the phoneme labels will be saved at `preparation/segments/`. Also, the folder `preparation/textgrids/` will be created for temporarily storing aligned TextGrids.\n",
-    "\n",
-    "WARNING: This will overwrite all files in `preparation/segments/` and `preparation/textgrids/`.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "segments_dir = 'segments'\n",
-    "textgrids_dir = 'textgrids'\n",
-    "if os.path.exists(segments_dir):\n",
-    "    shutil.rmtree(segments_dir)\n",
-    "os.makedirs(segments_dir)\n",
-    "if os.path.exists(textgrids_dir):\n",
-    "    shutil.rmtree(textgrids_dir)\n",
-    "os.makedirs(textgrids_dir)\n",
-    "samplerate = 16000\n",
-    "for file in tqdm.tqdm(sliced_filelist):\n",
-    "    y, _ = librosa.load(file, sr=samplerate, mono=True)\n",
-    "    filename = os.path.basename(file)\n",
-    "    soundfile.write(os.path.join(segments_dir, filename), y, samplerate, subtype='PCM_16')\n",
-    "    name_without_ext = filename.rsplit('.', maxsplit=1)[0]\n",
-    "    annotation = os.path.join(sliced_path, f'{name_without_ext}.lab')\n",
-    "    shutil.copy(annotation, segments_dir)\n",
-    "print('Resampling and copying done.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Run the following cell to download the MFA pretrained model and perform forced alignment. You may edit the --beam argument of MFA according to your dataset: longer segments and worse quality require larger beams.\n",
-    "\n",
-    "This cell also checks if alignments for all segments are successfully generated. If the checks fail, there are probably severe errors in your labels, or your segments are too long, or you did not use a proper --beam value.\n"
-   ]
-  },
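> Editor's note: the cell below drives MFA through its Python entry point by filling in `sys.argv`. With the paths used in this notebook, the equivalent standalone command would be roughly `mfa align segments ../dictionaries/opencpop-extension.txt assets/mfa-opencpop-extension.zip textgrids --beam 100 --clean --overwrite`.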
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "import requests\n",
-    "\n",
-    "mfa_zip = 'assets/mfa-opencpop-extension.zip'\n",
-    "mfa_uri = 'https://huggingface.co/datasets/fox7005/tool/resolve/main/mfa-opencpop-extension.zip'\n",
-    "if not os.path.exists(mfa_zip):\n",
-    "    # Download\n",
-    "    print('Model not found, downloading...')\n",
-    "    with open(mfa_zip, 'wb') as f:\n",
-    "        f.write(requests.get(mfa_uri).content)\n",
-    "    print('Done.')\n",
-    "else:\n",
-    "    pass\n",
-    "\n",
-    "segments_dir = 'segments'\n",
-    "textgrids_dir = 'textgrids'\n",
-    "os.makedirs(textgrids_dir, exist_ok=True)\n",
-    "\n",
-    "import montreal_forced_aligner\n",
-    "\n",
-    "sys.argv = [\n",
-    "    'mfa',\n",
-    "    'align',\n",
-    "    segments_dir,\n",
-    "    dict_path,\n",
-    "    mfa_zip,\n",
-    "    textgrids_dir,\n",
-    "    '--beam',\n",
-    "    '100',  # Edit --beam here.\n",
-    "    '--clean',\n",
-    "    '--overwrite'\n",
-    "]\n",
-    "montreal_forced_aligner.command_line.mfa.main()\n",
-    "\n",
-    "print('Checking alignments...')\n",
-    "missing = []\n",
-    "for wavfile in tqdm.tqdm(sliced_filelist):\n",
-    "    name = os.path.basename(wavfile).rsplit('.', maxsplit=1)[0]\n",
-    "    tgfile = os.path.join(textgrids_dir, f'{name}.TextGrid')\n",
-    "    if not os.path.exists(tgfile):\n",
-    "        missing.append(tgfile)\n",
-    "if len(missing) > 0:\n",
-    "    print('These TextGrids are missing! There are possible severe errors in labels of those corresponding segments. '\n",
-    "          'If you do believe there are no errors, consider increasing the \\'--beam\\' argument for MFA.')\n",
-    "    for fn in missing:\n",
-    "        print(f'  - {fn}')\n",
-    "else:\n",
-    "    print('All alignments have been successfully generated. Please move on.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### 3.3 Optimize and finish the TextGrids\n",
-    "\n",
-    "In this section, we run some scripts to reduce errors for long utterances and detect `AP`s which have not been labeled before. The optimized TextGrids can be saved for future use if you specify a backup directory in the following cell.\n",
-    "\n",
-    "Edit the path and adjust arguments according to your needs in the following cell before you run it. Optimized results will be saved at `preparation/textgrids/revised/`.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# Configuration for voice arguments based on your dataset\n",
-    "f0_min = 40.  # Minimum value of pitch\n",
-    "f0_max = 1100.  # Maximum value of pitch\n",
-    "br_len = 0.1  # Minimum length of aspiration in seconds\n",
-    "br_db = -60.  # Threshold of RMS in dB for detecting aspiration\n",
-    "br_centroid = 2000. 
# Threshold of spectral centroid in Hz for detecting aspiration\n", - "\n", - "# Other arguments, do not edit unless you understand them\n", - "time_step = 0.005 # Time step for feature extraction\n", - "min_space = 0.04 # Minimum length of space in seconds\n", - "voicing_thresh_vowel = 0.45 # Threshold of voicing for fixing long utterances\n", - "voicing_thresh_breath = 0.6 # Threshold of voicing for detecting aspiration\n", - "br_win_sz = 0.05 # Size of sliding window in seconds for detecting aspiration\n", - "\n", - "########################################\n", - "\n", - "# import utils.tg_optimizer as optimizer\n", - "\n", - "textgrids_revised_dir = 'textgrids/revised'\n", - "os.makedirs(textgrids_revised_dir, exist_ok=True)\n", - "for wavfile in tqdm.tqdm(sliced_filelist):\n", - " name = os.path.basename(wavfile).rsplit('.', maxsplit=1)[0]\n", - " textgrid = tg.TextGrid()\n", - " textgrid.read(os.path.join(textgrids_dir, f'{name}.TextGrid'))\n", - " words = textgrid[0]\n", - " phones = textgrid[1]\n", - " sound = pm.Sound(wavfile)\n", - " f0_voicing_breath = sound.to_pitch_ac(\n", - " time_step=time_step,\n", - " voicing_threshold=voicing_thresh_breath,\n", - " pitch_floor=f0_min,\n", - " pitch_ceiling=f0_max,\n", - " ).selected_array['frequency']\n", - " f0_voicing_vowel = sound.to_pitch_ac(\n", - " time_step=time_step,\n", - " voicing_threshold=voicing_thresh_vowel,\n", - " pitch_floor=f0_min,\n", - " pitch_ceiling=f0_max,\n", - " ).selected_array['frequency']\n", - " y, sr = librosa.load(wavfile, sr=24000, mono=True)\n", - " hop_size = int(time_step * sr)\n", - " spectral_centroid = librosa.feature.spectral_centroid(y=y, sr=sr, n_fft=2048, hop_length=hop_size).squeeze(0)\n", - "\n", - " # Fix long utterances\n", - " i = j = 0\n", - " while i < len(words):\n", - " word = words[i]\n", - " phone = phones[j]\n", - " if word.mark is not None and word.mark != '':\n", - " i += 1\n", - " j += len(dictionary[word.mark])\n", - " continue\n", - " if i == 0:\n", - " i += 1\n", - " j += 1\n", - " continue\n", - " prev_word = words[i - 1]\n", - " prev_phone = phones[j - 1]\n", - " # Extend length of long utterances\n", - " while word.minTime < word.maxTime - time_step:\n", - " pos = min(f0_voicing_vowel.shape[0] - 1, int(word.minTime / time_step))\n", - " if f0_voicing_vowel[pos] < f0_min:\n", - " break\n", - " prev_word.maxTime += time_step\n", - " prev_phone.maxTime += time_step\n", - " word.minTime += time_step\n", - " phone.minTime += time_step\n", - " i += 1\n", - " j += 1\n", - "\n", - " # Detect aspiration\n", - " i = j = 0\n", - " while i < len(words):\n", - " word = words[i]\n", - " phone = phones[j]\n", - " if word.mark is not None and word.mark != '':\n", - " i += 1\n", - " j += len(dictionary[word.mark])\n", - " continue\n", - " if word.maxTime - word.minTime < br_len:\n", - " i += 1\n", - " j += 1\n", - " continue\n", - " ap_ranges = []\n", - " br_start = None\n", - " win_pos = word.minTime\n", - " while win_pos + br_win_sz <= word.maxTime:\n", - " all_noisy = (f0_voicing_breath[int(win_pos / time_step) : int((win_pos + br_win_sz) / time_step)] < f0_min).all()\n", - " rms_db = 20 * np.log10(np.clip(sound.get_rms(from_time=win_pos, to_time=win_pos + br_win_sz), a_min=1e-12, a_max=1))\n", - " # print(win_pos, win_pos + br_win_sz, all_noisy, rms_db)\n", - " if all_noisy and rms_db >= br_db:\n", - " if br_start is None:\n", - " br_start = win_pos\n", - " else:\n", - " if br_start is not None:\n", - " br_end = win_pos + br_win_sz - time_step\n", - " if br_end - br_start >= br_len:\n", - 
" centroid = spectral_centroid[int(br_start / time_step) : int(br_end / time_step)].mean()\n", - " if centroid >= br_centroid:\n", - " ap_ranges.append((br_start, br_end))\n", - " br_start = None\n", - " win_pos = br_end\n", - " win_pos += time_step\n", - " if br_start is not None:\n", - " br_end = win_pos + br_win_sz - time_step\n", - " if br_end - br_start >= br_len:\n", - " centroid = spectral_centroid[int(br_start / time_step) : int(br_end / time_step)].mean()\n", - " if centroid >= br_centroid:\n", - " ap_ranges.append((br_start, br_end))\n", - " # print(ap_ranges)\n", - " if len(ap_ranges) == 0:\n", - " i += 1\n", - " j += 1\n", - " continue\n", - " words.removeInterval(word)\n", - " phones.removeInterval(phone)\n", - " if word.minTime < ap_ranges[0][0]:\n", - " words.add(minTime=word.minTime, maxTime=ap_ranges[0][0], mark=None)\n", - " phones.add(minTime=phone.minTime, maxTime=ap_ranges[0][0], mark=None)\n", - " i += 1\n", - " j += 1\n", - " for k, ap in enumerate(ap_ranges):\n", - " if k > 0:\n", - " words.add(minTime=ap_ranges[k - 1][1], maxTime=ap[0], mark=None)\n", - " phones.add(minTime=ap_ranges[k - 1][1], maxTime=ap[0], mark=None)\n", - " i += 1\n", - " j += 1\n", - " words.add(minTime=ap[0], maxTime=min(word.maxTime, ap[1]), mark='AP')\n", - " phones.add(minTime=ap[0], maxTime=min(word.maxTime, ap[1]), mark='AP')\n", - " i += 1\n", - " j += 1\n", - " if ap_ranges[-1][1] < word.maxTime:\n", - " words.add(minTime=ap_ranges[-1][1], maxTime=word.maxTime, mark=None)\n", - " phones.add(minTime=ap_ranges[-1][1], maxTime=phone.maxTime, mark=None)\n", - " i += 1\n", - " j += 1\n", - "\n", - " # Remove short spaces\n", - " i = j = 0\n", - " while i < len(words):\n", - " word = words[i]\n", - " phone = phones[j]\n", - " if word.mark is not None and word.mark != '':\n", - " i += 1\n", - " j += (1 if word.mark == 'AP' else len(dictionary[word.mark]))\n", - " continue\n", - " if word.maxTime - word.minTime >= min_space:\n", - " word.mark = 'SP'\n", - " phone.mark = 'SP'\n", - " i += 1\n", - " j += 1\n", - " continue\n", - " if i == 0:\n", - " if len(words) >= 2:\n", - " words[i + 1].minTime = word.minTime\n", - " phones[j + 1].minTime = phone.minTime\n", - " words.removeInterval(word)\n", - " phones.removeInterval(phone)\n", - " else:\n", - " break\n", - " elif i == len(words) - 1:\n", - " if len(words) >= 2:\n", - " words[i - 1].maxTime = word.maxTime\n", - " phones[j - 1].maxTime = phone.maxTime\n", - " words.removeInterval(word)\n", - " phones.removeInterval(phone)\n", - " else:\n", - " break\n", - " else:\n", - " words[i - 1].maxTime = words[i + 1].minTime = (word.minTime + word.maxTime) / 2\n", - " phones[j - 1].maxTime = phones[j + 1].minTime = (phone.minTime + phone.maxTime) / 2\n", - " words.removeInterval(word)\n", - " phones.removeInterval(phone)\n", - " textgrid.write(os.path.join(textgrids_revised_dir, f'{name}.TextGrid'))\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`TextGrid` saved in `preparation/textgrids/revised` can be edited via [Praat](https://github.com/praat/praat). You may examine these files and fix label errors by yourself if you want a more accurate model with higher performance. However, this is not required since manual labeling takes much time.\n", - "\n", - "Run the following cell to see summary of word-level pitch coverage of your dataset. 
(Data may not be accurate due to octave errors in pitch extraction.)\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "import utils.distribution as dist\n",
-    "\n",
-    "\n",
-    "def key_to_name(midi_key):\n",
-    "    note_names = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']\n",
-    "    return note_names[midi_key % 12] + str(midi_key // 12 - 1)\n",
-    "\n",
-    "\n",
-    "pit_map = {}\n",
-    "if 'f0_min' not in locals():\n",
-    "    f0_min = 40.\n",
-    "if 'f0_max' not in locals():\n",
-    "    f0_max = 1100.\n",
-    "if 'voicing_thresh_vowel' not in locals():\n",
-    "    voicing_thresh_vowel = 0.45\n",
-    "for wavfile in tqdm.tqdm(sliced_filelist):\n",
-    "    name = os.path.basename(wavfile).rsplit('.', maxsplit=1)[0]\n",
-    "    textgrid = tg.TextGrid()\n",
-    "    textgrid.read(os.path.join(textgrids_revised_dir, f'{name}.TextGrid'))\n",
-    "    timestep = 0.01\n",
-    "    f0 = pm.Sound(wavfile).to_pitch_ac(\n",
-    "        time_step=timestep,\n",
-    "        voicing_threshold=voicing_thresh_vowel,\n",
-    "        pitch_floor=f0_min,\n",
-    "        pitch_ceiling=f0_max,\n",
-    "    ).selected_array['frequency']\n",
-    "    pitch = 12. * np.log2(f0 / 440.) + 69.\n",
-    "    for word in textgrid[0]:\n",
-    "        if word.mark in ['AP', 'SP']:\n",
-    "            continue\n",
-    "        if word.maxTime - word.minTime < timestep:\n",
-    "            continue\n",
-    "        word_pit = pitch[int(word.minTime / timestep) : int(word.maxTime / timestep)]\n",
-    "        word_pit = np.extract(word_pit >= 0, word_pit)\n",
-    "        if word_pit.shape[0] == 0:\n",
-    "            continue\n",
-    "        counts = np.bincount(word_pit.astype(np.int64))\n",
-    "        midi = counts.argmax()\n",
-    "        if midi in pit_map:\n",
-    "            pit_map[midi] += 1\n",
-    "        else:\n",
-    "            pit_map[midi] = 1\n",
-    "midi_keys = sorted(pit_map.keys())\n",
-    "midi_keys = list(range(midi_keys[0], midi_keys[-1] + 1))\n",
-    "dist.draw_distribution(\n",
-    "    title='Pitch Distribution Summary',\n",
-    "    x_label='Pitch',\n",
-    "    y_label='Number of occurrences',\n",
-    "    items=[key_to_name(k) for k in midi_keys],\n",
-    "    values=[pit_map.get(k, 0) for k in midi_keys]\n",
-    ")\n",
-    "pitch_summary = os.path.join(sliced_path, 'pitch_distribution.jpg')\n",
-    "plt.savefig(fname=pitch_summary,\n",
-    "            bbox_inches='tight',\n",
-    "            pad_inches=0.25)\n",
-    "plt.show()\n",
-    "print(f'Summary saved to \\'{pitch_summary}\\'.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 4 Building the final dataset\n",
-    "\n",
-    "Congratulations! If you have gone through all sections above with success, it means that you are now prepared for building your final dataset. There are only a few steps to go before you can run scripts to train your own model.\n",
-    "\n",
-    "### 4.1 Name and format your dataset\n",
-    "\n",
-    "Please provide a unique name for your dataset, usually the name of the singer/speaker (whether real or virtual). For example, `opencpop` will be a good name for the dataset. You can also add tags to represent dataset version, model capacity or improvements. For example, `v2` represents the version, `large` represents the capacity, and `fix_br` means you fixed breaths since you trained your last model.\n",
-    "\n",
-    "Please edit the following cell before you run it. 
Remember to use only letters, numbers and underscores (`_`).\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# Name and tags of your dataset\n",
-    "dataset_name = '???'  # Required\n",
-    "dataset_tags = ''  # Optional\n",
-    "\n",
-    "########################################\n",
-    "\n",
-    "import csv\n",
-    "import random\n",
-    "import re\n",
-    "\n",
-    "from textgrid import TextGrid\n",
-    "\n",
-    "assert dataset_name != '', 'Dataset name cannot be empty.'\n",
-    "assert re.search(r'[^0-9A-Za-z_]', dataset_name) is None, 'Dataset name contains invalid characters.'\n",
-    "full_name = dataset_name\n",
-    "if dataset_tags != '':\n",
-    "    assert re.search(r'[^0-9A-Za-z_]', dataset_tags) is None, 'Dataset tags contain invalid characters.'\n",
-    "    full_name += f'_{dataset_tags}'\n",
-    "assert not os.path.exists(f'../data/{full_name}'), f'The name \\'{full_name}\\' already exists in your \\'data\\' folder!'\n",
-    "\n",
-    "print('Dataset name:', dataset_name)\n",
-    "if dataset_tags != '':\n",
-    "    print('Tags:', dataset_tags)\n",
-    "\n",
-    "formatted_path = f'../data/{full_name}/raw/wavs'\n",
-    "os.makedirs(formatted_path)\n",
-    "transcriptions = []\n",
-    "samplerate = 44100\n",
-    "min_sil = int(0.1 * samplerate)\n",
-    "max_sil = int(0.5 * samplerate)\n",
-    "for wavfile in tqdm.tqdm(sliced_filelist):\n",
-    "    name = os.path.basename(wavfile).rsplit('.', maxsplit=1)[0]\n",
-    "    y, _ = librosa.load(wavfile, sr=samplerate, mono=True)\n",
-    "    tg = TextGrid()\n",
-    "    tg.read(os.path.join(textgrids_revised_dir, f'{name}.TextGrid'))\n",
-    "    ph_seq = [ph.mark for ph in tg[1]]\n",
-    "    ph_dur = [ph.maxTime - ph.minTime for ph in tg[1]]\n",
-    "    if random.random() < 0.5:\n",
-    "        len_sil = random.randrange(min_sil, max_sil)\n",
-    "        y = np.concatenate((np.zeros((len_sil,), dtype=np.float32), y))\n",
-    "        if ph_seq[0] == 'SP':\n",
-    "            ph_dur[0] += len_sil / samplerate\n",
-    "        else:\n",
-    "            ph_seq.insert(0, 'SP')\n",
-    "            ph_dur.insert(0, len_sil / samplerate)\n",
-    "    if random.random() < 0.5:\n",
-    "        len_sil = random.randrange(min_sil, max_sil)\n",
-    "        y = np.concatenate((y, np.zeros((len_sil,), dtype=np.float32)))\n",
-    "        if ph_seq[-1] == 'SP':\n",
-    "            ph_dur[-1] += len_sil / samplerate\n",
-    "        else:\n",
-    "            ph_seq.append('SP')\n",
-    "            ph_dur.append(len_sil / samplerate)\n",
-    "    ph_seq = ' '.join(ph_seq)\n",
-    "    ph_dur = ' '.join([str(round(d, 6)) for d in ph_dur])\n",
-    "    soundfile.write(os.path.join(formatted_path, f'{name}.wav'), y, samplerate)\n",
-    "    transcriptions.append({'name': name, 'ph_seq': ph_seq, 'ph_dur': ph_dur})\n",
-    "\n",
-    "with open(f'../data/{full_name}/raw/transcriptions.csv', 'w', encoding='utf8', newline='') as f:\n",
-    "    writer = csv.DictWriter(f, fieldnames=['name', 'ph_seq', 'ph_dur'])\n",
-    "    writer.writeheader()\n",
-    "    writer.writerows(transcriptions)\n",
-    "\n",
-    "print(f'All wavs and transcriptions saved at \\'data/{full_name}/raw/\\'.')\n"
-   ]
-  },
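> For illustration (an editor's sketch, not part of the original notebook): with the `name`, `ph_seq` and `ph_dur` columns written by the cell above, the resulting `transcriptions.csv` looks roughly like this (names and durations are made up):

```
name,ph_seq,ph_dur
my_song_slice_0001,SP w o m en SP,0.25 0.08 0.32 0.06 0.41 0.18
my_song_slice_0002,AP d e ai SP,0.19 0.07 0.28 0.52 0.21
```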
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Now that the dataset and transcriptions have been saved, you can run the following cell to clean up all temporary files generated by the pipelines above.\n",
-    "\n",
-    "WARNING: This will remove the `preparation/segments/` and `preparation/textgrids/` folders. You should specify a directory in the following cell to back up your TextGrids if you want them for future use.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# Optional path to back up your TextGrids\n",
-    "textgrids_backup_path = r''  # If left empty, the TextGrids will not be backed up\n",
-    "\n",
-    "########################################\n",
-    "\n",
-    "assert textgrids_backup_path == '' or not os.path.exists(textgrids_backup_path) or os.path.isdir(textgrids_backup_path), 'The backup path is not a directory.'\n",
-    "\n",
-    "if textgrids_backup_path != '':\n",
-    "    os.makedirs(textgrids_backup_path, exist_ok=True)\n",
-    "    for tg in tqdm.tqdm(glob.glob(f'{textgrids_revised_dir}/*.TextGrid')):\n",
-    "        filename = os.path.basename(tg)\n",
-    "        shutil.copy(tg, os.path.join(textgrids_backup_path, filename))\n",
-    "\n",
-    "shutil.rmtree(segments_dir)\n",
-    "shutil.rmtree(textgrids_dir)\n",
-    "print('Cleaning up done.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "### 4.2 Configuring parameters\n",
-    "\n",
-    "Here you can configure some parameters for preprocessing, training and the neural networks. Read the explanations below, edit parameters according to your preference, and run the following cell to generate the configuration file.\n",
-    "\n",
-    "> You do not need to actually run this cell if you are building multiple datasets to train combined models. In this case, please see instructions in **Section 5**.\n",
-    "\n",
-    "#### 4.2.1 The neural networks\n",
-    "\n",
-    "These parameters control the capacity and structure of the neural networks.\n",
-    "\n",
-    "##### `residual_channels` and `residual_layers`\n",
-    "\n",
-    "These two hyperparameters refer to the width and the depth of the diffusion decoder network. Generally speaking, `384x20` represents a `base` model capacity and `512x20` represents a `large` model capacity. `384x30` is also a reasonable choice. Larger models consume more GPU memory and run slower at training and inference time, but they produce better results.\n",
-    "\n",
-    "GPU memory required for training:\n",
-    "\n",
-    "- Base model: at least 6 GB (12 GB recommended)\n",
-    "- Large model: at least 12 GB (24 GB recommended)\n",
-    "\n",
-    "##### `f0_embed_type`\n",
-    "\n",
-    "There are two ways of f0 embedding: `discrete` and `continuous`. The discrete embedding takes 256 bins from 50 Hz to 1100 Hz and embeds the discrete f0 bins with `torch.nn.Embedding`, while the continuous embedding converts f0 to continuous mel frequency and embeds these values with `torch.nn.Linear`.\n",
-    "\n",
-    "The discrete embedding has been tested for a long time and is guaranteed to be stable. The continuous embedding has not been widely tested, but some improvements at the extreme edges of the pitch range were observed.\n",
-    "\n",
-    "#### 4.2.2 Data augmentation\n",
-    "\n",
-    "Data augmentation improves the performance or functionalities of your model, **but may increase the size of your training data**.\n",
-    "\n",
-    "##### `random_pitch_shifting`\n",
-    "\n",
-    "Once enabled, the pitch of your data will be randomly shifted without keeping the formant when preprocessing. Besides, the number of keys by which each piece of data is shifted will be embedded into the networks. This will broaden the range of pitch and allow you to control the frame-level key shift values (like the GEN parameter in VOCALOID) at inference time.\n",
-    "\n",
-    "This type of augmentation accepts the following arguments:\n",
-    "- `range` controls the range of pitch keys that will be randomly shifted.\n",
-    "- `scale` controls the amount of data that the augmentation will be applied to.\n",
-    "\n",
-    "$ D_{augmentation} \\approx (1 + scale) \\cdot D_{original} $\n",
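> Editor's note: a quick worked instance of the formula above — with `scale = 0.75`, shifted copies are made for roughly three quarters of the data, so the total grows to about 1.75 times the original:

$$ D_{augmentation} \approx (1 + 0.75) \cdot D_{original} = 1.75 \, D_{original} $$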
-    "\n",
-    "#### 4.2.2 Data augmentation\n",
-    "\n",
-    "Data augmentation improves the performance or functionality of your model, **but may significantly increase the size of your training data**.\n",
-    "\n",
-    "##### `random_pitch_shifting`\n",
-    "\n",
-    "Once enabled, the pitch of your data will be randomly shifted during preprocessing, without preserving the formants. In addition, the number of keys by which each piece of data is shifted will be embedded into the networks. This broadens the pitch range and allows you to control frame-level key shift values (like the GEN parameter in VOCALOID) at inference time.\n",
-    "\n",
-    "This type of augmentation accepts the following arguments:\n",
-    "- `range` controls the range of pitch keys that will be randomly shifted.\n",
-    "- `scale` controls the amount of data that the augmentation will be applied to.\n",
-    "\n",
-    "$ D_{augmentation} \\approx (1 + scale) \\cdot D_{original} $\n",
-    "\n",
-    "##### `fixed_pitch_shifting`\n",
-    "\n",
-    "Once enabled, the pitch of your data will be shifted by several fixed numbers of keys. The pitch-shifted copies are marked as different speakers from the original data, **thus making the model a multi-speaker combined model**. This also broadens the pitch range (possibly slightly better than random pitch shifting does). **This augmentation is not compatible with random pitch shifting.**\n",
-    "\n",
-    "This type of augmentation accepts the following arguments:\n",
-    "- `targets` controls the number of pitch-shifting targets and the number of keys to shift towards each target.\n",
-    "- `scale` controls the amount of data that **each target** of the augmentation will be applied to.\n",
-    "\n",
-    "$ D_{augmentation} \\approx (1 + N \\cdot scale) \\cdot D_{original} $, where $ N $ is the number of targets.\n",
-    "\n",
-    "##### `random_time_stretching`\n",
-    "\n",
-    "Once enabled, the speed of your data will be randomly changed during preprocessing. The speed-change ratio will be embedded into the networks, which allows you to control frame-level speed or velocity (similar to, but much more flexible than, the VEL parameter in VOCALOID) at inference time. In other words, by applying global time stretching at training time, you gain the ability to apply local time stretching at inference time. This can be used to adjust the texture of consonants and the proportions of different parts of vowels. **Some audio segments will be longer after this augmentation is applied. Please be careful of your batch size and your GPU memory usage.**\n",
-    "\n",
-    "This type of augmentation accepts the following arguments:\n",
-    "- `range` controls the range of the speed-changing ratio.\n",
-    "- `domain` determines in which domain the speed ratio follows a uniform distribution: `log` or `linear`.\n",
-    "- `scale` controls the amount of data that the augmentation will be applied to.\n",
-    "\n",
-    "$ D_{augmentation} \\approx (1 + scale \\cdot \\frac{1}{b - a} \\cdot \\int_{a}^{b} f(x) \\, dx) \\cdot D_{original} $, where $ a, b $ represent the range of the speed ratio and $ f(x) $ represents the PDF of the speed ratio.\n",
-    "\n",
-    "---\n",
-    "> When more than one type of augmentation is enabled, a cascaded, joint augmentation scaling algorithm is applied. Briefly speaking, the following rules hold after applying and combining multiple types of augmentation:\n",
-    "> 1. The number of data pieces to which the $ k $th augmentation is applied will be $ scale_{k} $ times the number of those to which it is not applied.\n",
-    "> 2. The number of data pieces to which at least one type of augmentation is applied will be $ \\sum_{i = 1}^{n} scale_{i} $ times the number of those with no augmentation at all (purely raw data).\n",
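-    "\n",
-    "As a back-of-the-envelope check of the two pitch-shifting formulas above (the numbers are only illustrative):\n",
-    "\n",
-    "```python\n",
-    "d_original = 5.0  # hours of raw data\n",
-    "\n",
-    "# random_pitch_shifting with scale = 2.0\n",
-    "print(d_original * (1 + 2.0))  # 15.0 hours of training data after augmentation\n",
-    "\n",
-    "# fixed_pitch_shifting with targets = [-5., 5.] (N = 2) and scale = 0.75\n",
-    "print(d_original * (1 + 2 * 0.75))  # 12.5 hours of training data after augmentation\n",
-    "```\n",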
-    "\n",
-    "#### 4.2.3 Preprocessing\n",
-    "\n",
-    "##### `binarize_num_workers`\n",
-    "\n",
-    "Multiprocessing can speed up preprocessing but may consume more CPU, GPU and memory. This value determines whether multiprocessing is enabled, and how many workers to use if it is. Set it to `0` if you do not want to use multiprocessing.\n",
-    "\n",
-    "#### 4.2.4 Training and validating\n",
-    "\n",
-    "##### `test_prefixes`\n",
-    "\n",
-    "All files whose names start with a prefix in this list will be put into the test set. Each time a checkpoint is saved, the program first runs inference on the test set and puts the results on TensorBoard, so you can listen to these demos and judge the quality of your model. If you leave the list empty, test cases will be selected randomly.\n",
-    "\n",
-    "##### `max_batch_frames` and `max_batch_size`\n",
-    "\n",
-    "These two parameters jointly determine the batch size at training time: the former caps the number of frames in one batch and the latter caps the number of samples. Larger batches consume more GPU memory at training time, so adjust these values according to your GPU memory. Do not set them too low, because the model may not converge with small batches.\n",
-    "\n",
-    "##### `lr`, `lr_decay_steps`, `lr_decay_gamma`\n",
-    "\n",
-    "The learning rate starts at `lr` and decays by the factor `lr_decay_gamma` every `lr_decay_steps` steps during training. For example, with the defaults the learning rate halves every 50000 steps: 0.0004, then 0.0002, then 0.0001, and so on. If you decreased your batch size, consider using a smaller learning rate and more decay steps, or a larger gamma.\n",
-    "\n",
-    "##### `val_check_interval`, `num_ckpt_keep` and `max_updates`\n",
-    "\n",
-    "These three values are the number of training steps between validation (and checkpoint saving), the number of most recent checkpoints to keep, and the maximum number of training steps. With the default batch size and 5 hours of training data, 250k ~ 350k training steps is reasonable. If you decrease the batch size, you may need to increase the training steps.\n",
-    "\n",
-    "##### `permanent_ckpt_start` and `permanent_ckpt_interval`\n",
-    "\n",
-    "These two values let you save permanent checkpoints during training. Normally, old checkpoints are removed and only the newest `num_ckpt_keep` checkpoints are kept. With these two values, the program saves a permanent checkpoint every `permanent_ckpt_interval` steps once training reaches `permanent_ckpt_start` steps. Permanent checkpoints are never removed; they can be used to compare different training steps and serve as backups in case of over-fitting.\n",
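-    "\n",
-    "As an illustrative sketch (not part of the pipeline), this is which checkpoints would exist at the end of a full run with the default values below:\n",
-    "\n",
-    "```python\n",
-    "val_check_interval, num_ckpt_keep, max_updates = 2000, 5, 320000\n",
-    "permanent_ckpt_start, permanent_ckpt_interval = 120000, 40000\n",
-    "\n",
-    "saved = range(val_check_interval, max_updates + 1, val_check_interval)\n",
-    "permanent = [s for s in saved if s >= permanent_ckpt_start\n",
-    "             and (s - permanent_ckpt_start) % permanent_ckpt_interval == 0]\n",
-    "recent = list(saved)[-num_ckpt_keep:]\n",
-    "print(permanent)  # [120000, 160000, 200000, 240000, 280000, 320000]\n",
-    "print(recent)     # [312000, 314000, 316000, 318000, 320000]\n",
-    "```\n",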
-    "\n",
-    "To enable permanent checkpoints, please ensure these values are positive multiples of `val_check_interval`. To disable permanent checkpoints, set `permanent_ckpt_interval` to `-1`.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# The neural networks\n",
-    "residual_channels = 512\n",
-    "residual_layers = 20\n",
-    "f0_embed_type = 'continuous'\n",
-    "\n",
-    "# Data augmentation\n",
-    "random_pitch_shifting = {\n",
-    "    'enabled': False,\n",
-    "    'range': [-5., 5.],\n",
-    "    'scale': 2.\n",
-    "}\n",
-    "fixed_pitch_shifting = {\n",
-    "    'enabled': False,\n",
-    "    'targets': [-5., 5.],\n",
-    "    'scale': 0.75\n",
-    "}\n",
-    "random_time_stretching = {\n",
-    "    'enabled': False,\n",
-    "    'range': [0.5, 2.],\n",
-    "    'domain': 'log',  # or linear\n",
-    "    'scale': 2.\n",
-    "}\n",
-    "\n",
-    "# Preprocessing\n",
-    "binarize_num_workers = 0\n",
-    "\n",
-    "# Training and validating\n",
-    "test_prefixes = [\n",
-    "\n",
-    "]\n",
-    "\n",
-    "max_batch_frames = 80000\n",
-    "max_batch_size = 48\n",
-    "\n",
-    "lr = 0.0004\n",
-    "lr_decay_steps = 50000\n",
-    "lr_decay_gamma = 0.5\n",
-    "\n",
-    "val_check_interval = 2000\n",
-    "num_ckpt_keep = 5\n",
-    "max_updates = 320000\n",
-    "permanent_ckpt_start = 120000\n",
-    "permanent_ckpt_interval = 40000\n",
-    "\n",
-    "########################################\n",
-    "\n",
-    "from copy import deepcopy\n",
-    "import datetime\n",
-    "import random\n",
-    "\n",
-    "import yaml\n",
-    "\n",
-    "training_cases = [os.path.basename(w).rsplit('.', maxsplit=1)[0] for w in sliced_filelist]\n",
-    "valid_test_cases = []\n",
-    "if len(test_prefixes) > 0:\n",
-    "    # First pass: resolve exact filename matches.\n",
-    "    for prefix in deepcopy(test_prefixes):\n",
-    "        if prefix in training_cases:\n",
-    "            valid_test_cases.append(prefix)\n",
-    "            test_prefixes.remove(prefix)\n",
-    "            training_cases.remove(prefix)\n",
-    "    # Second pass: expand the remaining entries as prefix matches.\n",
-    "    i = 0\n",
-    "    while i < len(training_cases):\n",
-    "        for prefix in test_prefixes:\n",
-    "            if training_cases[i].startswith(prefix):\n",
-    "                valid_test_cases.append(training_cases[i])\n",
-    "                training_cases.pop(i)\n",
-    "                i -= 1\n",
-    "                break\n",
-    "        i += 1\n",
-    "else:\n",
-    "    test_prefixes += sorted(random.sample(training_cases, min(10, len(training_cases))))\n",
-    "\n",
-    "configs = {\n",
-    "    'base_config': ['configs/acoustic.yaml'],\n",
-    "    'speakers': [dataset_name],\n",
-    "    'raw_data_dir': [f'data/{full_name}/raw'],\n",
-    "    'binary_data_dir': f'data/{full_name}/binary',\n",
-    "    'binarization_args': {\n",
-    "        'num_workers': binarize_num_workers\n",
-    "    },\n",
-    "    'residual_channels': residual_channels,\n",
-    "    'residual_layers': residual_layers,\n",
-    "    'f0_embed_type': f0_embed_type,\n",
-    "    'test_prefixes': test_prefixes,\n",
-    "    'max_batch_frames': max_batch_frames,\n",
-    "    'max_batch_size': max_batch_size,\n",
-    "    'optimizer_args': {\n",
-    "        'lr': lr\n",
-    "    },\n",
-    "    'lr_scheduler_args': {\n",
-    "        'step_size': lr_decay_steps,\n",
-    "        'gamma': lr_decay_gamma\n",
-    "    },\n",
-    "    'val_check_interval': val_check_interval,\n",
-    "    'num_valid_plots': min(10, len(test_prefixes)),\n",
-    "    'num_ckpt_keep': num_ckpt_keep,\n",
-    "    'max_updates': max_updates,\n",
-    "    'permanent_ckpt_start': permanent_ckpt_start,\n",
-    "    'permanent_ckpt_interval': permanent_ckpt_interval,\n",
-    "\n",
-    "    ###########\n",
-    "    # pytorch lightning\n",
-    "    # Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values\n",
-    "    ###########\n",
-    "    'pl_trainer_accelerator': 'auto',\n",
-    "    'pl_trainer_devices': 'auto',\n",
-    "    'pl_trainer_precision': '32-true',\n",
-    "}\n",
-    "\n",
-    "augmentation_args = {}\n",
-    "if random_pitch_shifting['enabled']:\n",
-    "    augmentation_args['random_pitch_shifting'] = {\n",
-    "        'range': random_pitch_shifting['range'],\n",
-    "        'scale': random_pitch_shifting['scale']\n",
-    "    }\n",
-    "    configs['use_key_shift_embed'] = True\n",
-    "if fixed_pitch_shifting['enabled']:\n",
-    "    augmentation_args['fixed_pitch_shifting'] = {\n",
-    "        'targets': fixed_pitch_shifting['targets'],\n",
-    "        'scale': fixed_pitch_shifting['scale']\n",
-    "    }\n",
-    "    configs['use_spk_id'] = True\n",
-    "    configs['num_spk'] = 1 + len(fixed_pitch_shifting['targets'])\n",
-    "if random_time_stretching['enabled']:\n",
-    "    augmentation_args['random_time_stretching'] = {\n",
-    "        'range': random_time_stretching['range'],\n",
-    "        'domain': random_time_stretching['domain'],\n",
-    "        'scale': random_time_stretching['scale']\n",
-    "    }\n",
-    "    configs['use_speed_embed'] = True\n",
-    "configs['augmentation_args'] = augmentation_args\n",
-    "\n",
-    "with open(f'../data/{full_name}/config.yaml', 'w', encoding='utf8') as f:\n",
-    "    yaml.dump(configs, f, sort_keys=False, allow_unicode=True)\n",
-    "\n",
-    "date = datetime.datetime.now().strftime('%m%d')\n",
-    "exp_name = f'{date}_{dataset_name}_ds1000'\n",
-    "if dataset_tags != '':\n",
-    "    exp_name += f'_{dataset_tags}'\n",
-    "print('Congratulations! All steps have been done and you are now prepared to train your own model.\\n'\n",
-    "      'Before you start, please read and follow the instructions in the repository README.\\n'\n",
-    "      'Here are the commands you can copy to run preprocessing and training:\\n')\n",
-    "\n",
-    "print('============ Linux ============\\n'\n",
-    "      'export PYTHONPATH=.\\n'\n",
-    "      'export CUDA_VISIBLE_DEVICES=0\\n'\n",
-    "      f'python scripts/binarize.py --config data/{full_name}/config.yaml\\n'\n",
-    "      f'python scripts/train.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n",
-    "\n",
-    "print('===== Windows (PowerShell) =====\\n'\n",
-    "      '$env:PYTHONPATH=\".\"\\n'\n",
-    "      '$env:CUDA_VISIBLE_DEVICES=0\\n'\n",
-    "      f'python scripts/binarize.py --config data/{full_name}/config.yaml\\n'\n",
-    "      f'python scripts/train.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n",
-    "\n",
-    "print('===== Windows (Command Prompt) =====\\n'\n",
-    "      'set PYTHONPATH=.\\n'\n",
-    "      'set CUDA_VISIBLE_DEVICES=0\\n'\n",
-    "      f'python scripts/binarize.py --config data/{full_name}/config.yaml\\n'\n",
-    "      f'python scripts/train.py --config data/{full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n",
-    "\n",
-    "print(f'If you want to train your model on another machine (like a remote GPU), please copy the whole \\'data/{full_name}/\\' folder.')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## 5 (Additional section) Configuring multiple datasets for combined models\n",
-    "\n",
-    "If you have multiple datasets, you can train a combined model on them for better performance and case coverage. In addition, you gain the ability to switch between different speakers/singers/styles/timbres, or even mix any of them in any proportion, statically or dynamically over time, via one single model!\n",
-    "\n",
-    "This section will guide you through creating configuration files for this kind of model, so that you can run joint preprocessing and training on multiple datasets.\n",
-    "\n",
-    "NOTICE: Before you train a combined model, you must obtain permission from the copyright holders of each dataset, and make sure all of these providers are fully aware that you will train a combined model from their data, whether or not you will distribute the synthesized voices and model weights, whether or not their voices will be mixed, and the potential risks of this kind of activity.\n",
-    "\n",
-    "### 5.1 Selecting datasets\n",
-    "\n",
-    "In the following cell, you can choose the datasets and name your combined model.\n",
-    "\n",
-    "##### `model_name` and `model_tags`\n",
-    "\n",
-    "Similar to `dataset_name` and `dataset_tags` described in Section 4.1, but here you are also able to use `+` in your model name. For example, `female_triplet` and `alice+bob` are nice names for a combined model.\n",
-    "\n",
-    "##### `datasets`\n",
-    "\n",
-    "The selection of datasets. You specify datasets by their full names and set a speaker name for each dataset. Here is an example:\n",
-    "\n",
-    "```python\n",
-    "datasets = [\n",
-    "    {\n",
-    "        'dataset': 'alice',\n",
-    "        'speaker': 'Alice-Normal'\n",
-    "    },\n",
-    "    {\n",
-    "        'dataset': 'alice_sweet',\n",
-    "        'speaker': 'Alice-Sweet'\n",
-    "    },\n",
-    "    {\n",
-    "        'dataset': 'bob_v2',\n",
-    "        'speaker': 'Bob'\n",
-    "    },\n",
-    "]\n",
-    "```\n",
-    "You must type in the full names of the datasets (their folder names in the `data/` directory). You may use letters, numbers, underscores (`_`) and hyphens (`-`) in speaker names.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# Name and tags of your combined model\n",
-    "model_name = '???'  # Required\n",
-    "model_tags = ''  # Optional\n",
-    "\n",
-    "datasets = [\n",
-    "    {\n",
-    "        'dataset': '???',\n",
-    "        'speaker': '???'\n",
-    "    },\n",
-    "]\n",
-    "\n",
-    "########################################\n",
-    "\n",
-    "import re\n",
-    "\n",
-    "assert model_name != '', 'Model name cannot be empty.'\n",
-    "assert re.search(r'[^0-9A-Za-z_\\+]', model_name) is None, 'Model name contains invalid characters.'\n",
-    "model_full_name = model_name\n",
-    "if model_tags != '':\n",
-    "    assert re.search(r'[^0-9A-Za-z_]', model_tags) is None, 'Model tags contain invalid characters.'\n",
-    "    model_full_name += f'_{model_tags}'\n",
-    "assert not os.path.exists(f'../data/{model_full_name}'), f'The name \\'{model_full_name}\\' already exists in your \\'data\\' folder!'\n",
-    "\n",
-    "speakers = []\n",
-    "raw_data_dirs = []\n",
-    "for selection in datasets:\n",
-    "    assert selection['dataset'] in os.listdir('../data/'), f'Dataset \\'{selection[\"dataset\"]}\\' not found.'\n",
-    "    assert os.path.exists(f'../data/{selection[\"dataset\"]}/raw/wavs'), f'Wave directory not found in dataset \\'{selection[\"dataset\"]}\\''\n",
-    "    assert os.path.exists(f'../data/{selection[\"dataset\"]}/raw/transcriptions.txt') or os.path.exists(f'../data/{selection[\"dataset\"]}/raw/transcriptions.csv'), f'Transcriptions not found in dataset \\'{selection[\"dataset\"]}\\''\n",
-    "    assert re.search(r'[^0-9A-Za-z_-]', selection['speaker']) is None, 'Speaker name contains invalid characters.'\n",
-    "    speakers.append(selection['speaker'])\n",
-    "    raw_data_dirs.append(f'data/{selection[\"dataset\"]}/raw')\n",
-    "\n",
-    "print('Model name:', model_name)\n",
-    "if model_tags != '':\n",
-    "    print('Tags:', model_tags)\n",
-    "os.makedirs(f'../data/{model_full_name}/')\n",
-    "print(f'Created \\'data/{model_full_name}/\\'')\n"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### 5.2 Configuring parameters\n",
-    "\n",
-    "Most parameters for combined models are the same as described in Section 4.2, except for the following differences.\n",
-    "\n",
-    "##### `random_pitch_shifting`, `fixed_pitch_shifting` and `random_time_stretching`\n",
-    "\n",
-    "Please pay attention to the size of your training data!\n",
-    "\n",
-    "##### `test_prefixes`\n",
-    "\n",
-    "In this parameter you can use prefixes or full names of wave files, with or without a dataset index. For example:\n",
-    "- `xxx` will fully match one single filename in any of the selected datasets, or, if there is none, match all filenames starting with `xxx`.\n",
-    "- `0:xxx` will fully match one single filename in **the first** dataset, or, if there is none, match all filenames starting with `xxx` **in that dataset**.\n",
-    "\n",
-    "If not specified, test cases will be randomly selected from all datasets.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false,
-    "jupyter": {
-     "outputs_hidden": false
-    }
-   },
-   "outputs": [],
-   "source": [
-    "########################################\n",
-    "\n",
-    "# The neural networks\n",
-    "residual_channels = 512\n",
-    "residual_layers = 20\n",
-    "f0_embed_type = 'continuous'\n",
-    "\n",
-    "# Data augmentation\n",
-    "random_pitch_shifting = {\n",
-    "    'enabled': False,\n",
-    "    'range': [-5., 5.],\n",
-    "    'scale': 1.5\n",
-    "}\n",
-    "fixed_pitch_shifting = {\n",
-    "    'enabled': False,\n",
-    "    'targets': [-5., 5.],\n",
-    "    'scale': 0.75\n",
-    "}\n",
-    "random_time_stretching = {\n",
-    "    'enabled': False,\n",
-    "    'range': [0.5, 2.],\n",
-    "    'domain': 'log',  # or linear\n",
-    "    'scale': 1.5\n",
-    "}\n",
-    "\n",
-    "# Preprocessing\n",
-    "binarize_num_workers = 0\n",
-    "\n",
-    "# Training and validating\n",
-    "test_prefixes = [\n",
-    "\n",
-    "]\n",
-    "\n",
-    "max_batch_frames = 80000\n",
-    "max_batch_size = 48\n",
-    "\n",
-    "lr = 0.0004\n",
-    "lr_decay_steps = 50000\n",
-    "lr_decay_gamma = 0.5\n",
-    "\n",
-    "val_check_interval = 2000\n",
-    "num_ckpt_keep = 5\n",
-    "max_updates = 320000\n",
-    "permanent_ckpt_start = 120000\n",
-    "permanent_ckpt_interval = 40000\n",
-    "\n",
-    "########################################\n",
-    "\n",
-    "import csv\n",
-    "import datetime\n",
-    "import random\n",
-    "from copy import deepcopy\n",
-    "\n",
-    "import yaml\n",
-    "\n",
-    "training_cases = []\n",
-    "for raw_data_dir in raw_data_dirs:\n",
-    "    if os.path.exists(os.path.join('..', raw_data_dir, 'transcriptions.txt')):\n",
-    "        with open(os.path.join('..', raw_data_dir, 'transcriptions.txt'), 'r', encoding='utf8') as f:\n",
-    "            training_cases.append([line.split('|')[0] for line in f.readlines()])\n",
-    "    else:\n",
-    "        with open(os.path.join('..', raw_data_dir, 'transcriptions.csv'), 'r', encoding='utf8') as f:\n",
-    "            reader = csv.DictReader(f)\n",
-    "            training_cases.append([row['name'] for row in reader])\n",
-    "valid_test_cases = []\n",
-    "if len(test_prefixes) > 0:\n",
-    "    # First pass: resolve exact filename matches, with or without a dataset index.\n",
-    "    for prefix in deepcopy(test_prefixes):\n",
-    "        if ':' in prefix:\n",
-    "            idx, fn = prefix.split(':')\n",
-    "            idx = int(idx)\n",
-    "            if fn in training_cases[idx]:\n",
-    "                valid_test_cases.append(prefix)\n",
-    "                test_prefixes.remove(prefix)\n",
-    "                training_cases[idx].remove(fn)\n",
-    "        else:\n",
-    "            for idx, cases in enumerate(training_cases):\n",
-    "                if prefix in cases:\n",
-    "                    valid_test_cases.append(f'{idx}:{prefix}')\n",
-    "                    test_prefixes.remove(prefix)\n",
-    "                    cases.remove(prefix)\n",
-    "                    break\n",
-    "\n",
-    "    # Second pass: expand the remaining entries as prefix matches.\n",
-    "    for prefix in deepcopy(test_prefixes):\n",
-    "        if ':' in prefix:\n",
-    "            idx, fn = prefix.split(':')\n",
-    "            idx = int(idx)\n",
-    "            for case in deepcopy(training_cases[idx]):\n",
-    "                if case.startswith(fn):\n",
-    "                    valid_test_cases.append(f'{idx}:{case}')\n",
-    "                    training_cases[idx].remove(case)\n",
-    "        else:\n",
-    "            for idx, cases in enumerate(training_cases):\n",
-    "                for case in deepcopy(cases):\n",
-    "                    if case.startswith(prefix):\n",
-    "                        valid_test_cases.append(f'{idx}:{case}')\n",
-    "                        cases.remove(case)\n",
-    "else:\n",
-    "    total = min(20, max(10, 4 * len(datasets)))\n",
-    "    quotient, remainder = total // len(datasets), total % 
len(datasets)\n", - " if quotient == 0:\n", - " test_counts = [1] * len(datasets)\n", - " else:\n", - " test_counts = [quotient + 1] * remainder + [quotient] * (len(datasets) - remainder)\n", - " for i, count in enumerate(test_counts):\n", - " test_prefixes += [f'{i}:{n}' for n in sorted(random.sample(training_cases[i], count))]\n", - "\n", - "configs = {\n", - " 'base_config': ['configs/acoustic.yaml'],\n", - " 'speakers': speakers,\n", - " 'num_spk': len(speakers),\n", - " 'use_spk_id': True,\n", - " 'raw_data_dir': raw_data_dirs,\n", - " 'binary_data_dir': f'data/{model_full_name}/binary',\n", - " 'binarization_args': {\n", - " 'num_workers': binarize_num_workers\n", - " },\n", - " 'residual_channels': residual_channels,\n", - " 'residual_layers': residual_layers,\n", - " 'f0_embed_type': f0_embed_type,\n", - " 'test_prefixes': test_prefixes,\n", - " 'max_batch_frames': max_batch_frames,\n", - " 'max_batch_size': max_batch_size,\n", - " 'optimizer_args': {\n", - " 'lr': lr\n", - " },\n", - " 'lr_scheduler_args': {\n", - " 'step_size': lr_decay_steps,\n", - " 'gamma': lr_decay_gamma\n", - " },\n", - " 'val_check_interval': val_check_interval,\n", - " 'num_valid_plots': min(20, len(test_prefixes)),\n", - " 'num_ckpt_keep': num_ckpt_keep,\n", - " 'max_updates': max_updates,\n", - " 'permanent_ckpt_start': permanent_ckpt_start,\n", - " 'permanent_ckpt_interval': permanent_ckpt_interval,\n", - " \n", - " ###########\n", - " # pytorch lightning\n", - " # Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values\n", - " ###########\n", - " 'pl_trainer_accelerator': 'auto',\n", - " 'pl_trainer_devices': 'auto',\n", - " 'pl_trainer_precision': '32-true',\n", - "}\n", - "\n", - "augmentation_args = {}\n", - "if random_pitch_shifting['enabled']:\n", - " augmentation_args['random_pitch_shifting'] = {\n", - " 'range': random_pitch_shifting['range'],\n", - " 'scale': random_pitch_shifting['scale']\n", - " }\n", - " configs['use_key_shift_embed'] = True\n", - "if fixed_pitch_shifting['enabled']:\n", - " augmentation_args['fixed_pitch_shifting'] = {\n", - " 'targets': fixed_pitch_shifting['targets'],\n", - " 'scale': fixed_pitch_shifting['scale']\n", - " }\n", - " configs['use_spk_id'] = True\n", - " configs['num_spk'] *= (1 + len(fixed_pitch_shifting['targets']))\n", - "if random_time_stretching['enabled']:\n", - " augmentation_args['random_time_stretching'] = {\n", - " 'range': random_time_stretching['range'],\n", - " 'domain': random_time_stretching['domain'],\n", - " 'scale': random_time_stretching['scale']\n", - " }\n", - " configs['use_speed_embed'] = True\n", - "configs['augmentation_args'] = augmentation_args\n", - "\n", - "with open(f'../data/{model_full_name}/config.yaml', 'w', encoding='utf8') as f:\n", - " yaml.dump(configs, f, sort_keys=False, allow_unicode=True)\n", - "\n", - "date = datetime.datetime.now().strftime('%m%d')\n", - "exp_name = f'{date}_{model_name}_ds1000'\n", - "if model_tags != '':\n", - " exp_name += f'_{model_tags}'\n", - "print('Congratulations! 
All steps have been done and you are now prepared to train your combined model.\\n'\n",
-    "      'Before you start, please read and follow the instructions in the repository README.\\n'\n",
-    "      'Here are the commands you can copy to run preprocessing and training:\\n')\n",
-    "\n",
-    "print('============ Linux ============\\n'\n",
-    "      'export PYTHONPATH=.\\n'\n",
-    "      'export CUDA_VISIBLE_DEVICES=0\\n'\n",
-    "      f'python scripts/binarize.py --config data/{model_full_name}/config.yaml\\n'\n",
-    "      f'python scripts/train.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n",
-    "\n",
-    "print('===== Windows (PowerShell) =====\\n'\n",
-    "      '$env:PYTHONPATH=\".\"\\n'\n",
-    "      '$env:CUDA_VISIBLE_DEVICES=0\\n'\n",
-    "      f'python scripts/binarize.py --config data/{model_full_name}/config.yaml\\n'\n",
-    "      f'python scripts/train.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n",
-    "\n",
-    "print('===== Windows (Command Prompt) =====\\n'\n",
-    "      'set PYTHONPATH=.\\n'\n",
-    "      'set CUDA_VISIBLE_DEVICES=0\\n'\n",
-    "      f'python scripts/binarize.py --config data/{model_full_name}/config.yaml\\n'\n",
-    "      f'python scripts/train.py --config data/{model_full_name}/config.yaml --exp_name {exp_name} --reset\\n')\n",
-    "\n",
-    "print('To preprocess the selected datasets, please make sure these directories exist:')\n",
-    "for d in raw_data_dirs:\n",
-    "    print(f'  - {d}')\n",
-    "print()\n",
-    "print(f'If you want to train your model on another machine (like a remote GPU), please copy the whole \\'data/{model_full_name}/\\' folder.')\n"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.15"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
diff --git a/preparation/assets/2001000001.lab b/preparation/assets/2001000001.lab
deleted file mode 100644
index 57de698aa..000000000
--- a/preparation/assets/2001000001.lab
+++ /dev/null
@@ -1 +0,0 @@
-gan shou ting zai wo fa duan de zhi jian
\ No newline at end of file
diff --git a/preparation/assets/2001000001.wav b/preparation/assets/2001000001.wav
deleted file mode 100644
index d06f0998e6e8e5be2c315eb311056ec8b2074985..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
zAK+zQxWW^7`!)VO%jcA5H^W2rCytEhanNa+{nHa-!W{GO}#upHCx*bN8 z568#P*^HtApOcu_%gyzaWX)CC*_e3B3v$j>;%h%wF2z&U^&J0?#=FP+_gU>kJ+3V$ zpIL~Vi7Gy_rsMXKy^W3I;Wrg|x8Rj45k-V@eTDI^FT`Wee+)5|6Krs}+90C{=IEf; zRAEOOu>U>rmiAnCarQSR@9>BCzfAOgVdWt_GaVir%JYk`FTeQP347R{1Uc?wn@pk< zev+P?|A{NU%d1`yIg*HqBkq#b4)eROJZ{*1{B#zt_{JkT>(9g$p2SZs@;MQ)$qj7( z6w5@z^Kx({p~R0<+_{`Pki1*3g^R)#fi1_&JPMHsa^T#By-o zQ*fn^xz0oO0DFFc6+K|zkK@lPS z6&}OmKl99U_B4L`92{V^&W>v{qS3_2rnX7F@)OIF+ z#g;>4L77Izm0HA=Ux}!V;*_`m&Q0Ok=krs0Vl2R8zc?zaNGW5>*W#*3CX334;KWkA zv?Hr)$G%J;$9Cskr-2+*uuB=dKAXg3!8_)b5jLR}HkwUNrpZVK$xj}yV=LN` z?AH-?b|X9U*DAa(6o1blMsR(5#Ru_Kd=u$pQ`ug2mfhrFIabb8hgYuZAk2zRMKKEC0x->Z^n)s?_R^+%I>^1+s$-lQCp0>EZ=7v1uLr{GvTb zR_j9UF-F=nT-{yM%d|1gjMbxcb3IVE(FwG69sSz9>TYs}xf9*xZb!GEThy)0Ba55f zt?D*%+qrYxz3wA7f_7c)wt!0%)M<4W?bEf)7Sj-W4u@y%6Td_S*y}G*OTWz)qeF> zC3c>w;i{<$W6j&-NZDQ1kQuSDlDmlNwBi!}*9<P>FYF)c-|oNc|Km>R-dFhOG8aWl5%FY94fM<}Wu_wFduQS^*YK^*~zN*#gvFgP$A~}y?r8873 zW#kCiPY#uN;cV;3a_vPHBJL?(liwaNwas7MO()h(^j3XAf6}o{KrhffH_UAuIOw0@ zALFm#|IdHgKgZwRzs|qKpTqye7lq&M^e^y#@ZS$SavSSWIx5~<8D@USl*9fD%u>Ag zl5H=Jh~o08tgR}mNGgh|qpmBL73OsgI**+zj+Zqi^L%$gJb69$omtLYr=2s!`Rsgi zJ~>sfUsBIfCzo?x^;SuEeMj|8Nv9UCyzY!~syd%pT^kisU67aM8M#KzlG9mbeHlmI zgtZI>>-xeWZ{U+j%vbF(aZM}}!+7;iy+fzbhuqXT2uCxS>${1TV$1g z$l$SLsLHI8sXywgv&yral@0Nv35go=#>wW?bBa3&oKtF-YUmtsCOIRWi%x$hg42%u zd9Kb7i;*reomZ-~N1ZmML?%6adXj$Z-gVRJb8arTjeEm=?sj%}2Sx_^2mbgo_&56Q`JVWy`9666cnkVc z`JQ?E_%8V?y9M=los5d=5xaCu7cyT>HP~!B(McAQJz+K-sPtCb%2W&sFx1ESP)XtQ`ytWbHJ&reu|ayGM@NOWp{Quf1SKeX4OnRQ4LjI6~ekxdDb|w zoxaW{=a_mav*8o7RSGJ>I4ZH~BVUTL@&K#MA+yVwMEiSN3^rZ^-hPE@?+}$;8FO4O z)-m)-w>m4cZesn=J?$QL|8uVeG6YftvIR!_ zm<9U0&R}Ai)B1}3W*UOoGsPb;B}A^a^-VfE8#V_k5Tmd~Ewx<6R;jR4AE%q=V@UQ0 zBILDm*6HC?RfDkeW~Ywlp;N#~=^R$8&os{|C&YQDEHPbC?NiO2 zrH*oHs^3^Msq7_=i~4d2kslk8V=MJ|Bx;jo4a+L4nO^#=JJ6l!j&wJ>8FW)E zbz}EO;CEnlV7|YNf04hYzqtRQ@1(biw~O~pxD9uF&wRrJ<@9a+RhKrIsU|b>Rmd#0 z@2R;j$qm%d8DRBM#CE%jYB8hCuHrjq@Ua6bk5k_{;SBM#3AqzeJ7m4*ymLSmRQqIq z*-I4yZ5BBHIX%>5X<=}2AP%6p1=ZaDFsbE<}lpr)#XPI;%IlhOIAim8S& z6jbd>?wKaX;ICz68nKL8X*&7(2^{>iZDGe!%_h(T+`?{acOPr~>27e}g04A8yo3B# zeJT8xef@l{ufDI4_d)p9@TA_W-aH5=4fQh}*LY0=`;t7FpX}Lz46#*Y!DFt-d*T%O z%6~BL$7Jq-avkwDPIY!BI}(Ih?I{{ko)yON9Cm)IY-+VE0QSan3Of0na!z4YS|*e? 
z$oZ~VB8y=4jGk~Of#(>=bb}okt$wSNPEIGDBb;^WJ$bb|)ome}Q&y5oWjT6*$^-0)=ewDhcSTB;Cr4rENK7N{fYhf1ZQ$fxYpCo0HQGF;6DhtqrJIjfwL zSbdBNkRQu9$D9F9sIyI7lAU*V>v+?lr``}IqnCyOR*$fq-fjxqsDc!^!?QU_G zlIx$jDaiiU+!cY8fz|#@{?h(UzAN4;#A9J^QEyuB)bRA));<~N;{J61==Siwj_3iS zLDxd$r##}h$PPc7C$EZK;+%LS7Rz)h1Ne{wqs`T#7tC(W^%du=rr^+VL#G&emXgvjl^hNCo?{u!uf;uhpF{= z>2yAGp?oc$%c-DikU2&b`(T}dif)em1ZJewlig8n6}P^7#(nB$A!q)D(JTurC-eJ# z`F+WJoxDEq=4bei@TT5Y-d?`QZgt&^e38tIGD|?us&KhEVuw5@lcQ%XmaEB90d$4q z;;j5CTag=!VV_NEjyg(4Nb0QhbO|}_N$5%AR8k*gGjx=UTw`l&Jw~;YBY1Un)RJAI zolNgE_T2RJ_RJ)&%y90I8(*vZaD}hLbs=Y$ilwH=)^NblvL1XcBOdV+o|ptXen&&N z1fOn*0_B+1?#DnK_q==4&8`<|VJe$%dY9WZ5GC;0|Hqfk_s%=STixq;=Y%&7&m0~W z?s$9nPr0%6CS8cA>x{y=9>r#)km|V{Am7S%s;^2R$JyT~T=7J8_@|XeRcU80z8a!p zlO+;(E_DdBPSfzfjKb@%NkBN?~@tdp#u0N+b388`>4;ya+S50l=>TYDlMeZ7R zs(Z{`t)J_{`hgq44GlyN4DmPfeerJemhvtM5C8l8@7KTk{$37G?B5^Q<)$|~?0B)7 zim#jaES|}%Y98oOi!~h-8|*f!tti3)9Vf{Za)`>}yjD43)d8+LIgb^dtHfgr=b9`- z#j%9y=(?CC2g!F-vB}6fv9M(Ww7(zfvQytv#?#D`iB)YS66>Kf?gxQ>!GT`F1YVPk zpNnI@3-(|ULBsqdp!Jexb)u&WO0Q~XZqQx)Pzw`Lu$(tGP61;yO1k3$Q|gL zt;BWP9F{XqCRDZ6F!*LQc-9I`;OXc22t&{4_<40lGR#cphO^zd$FA*_Ddb+UlG<#Vd?nX`tmVMH&Qu+> zsY0uW*>Kkgwz+Ap=erBsX!@7?#w`v@8>pYS>Dv8JVcUp)R1>+VSo_G5%B41%Aj_#lsxE5uDjS>nDg~eZS$buCEOHKv z%}ACz?#%bJqn^m=>EonPQRQ%$W?yj}&9XD`beigHjjfKF@lp*)+50DsoVFRZ-n!T9Eb|t1E?nO(1*Julc8wd6WL@7(GtaWoav`W!U4Vq>pyZxSDit7-0yy$|C;}V z&*wek&FZV|E8|@lzT8{JSIyhi+t1(D&8=7HL1w3|h%)oV&J@GsL>TxYIUMyoDP4DO`htvnQ4}L~o2!Si75#zca;oST z)QZs@irdRZ+5M)i?gX3rTZPoS1wr;C`~)@A?R(t(PK9lL`4+o;WVlMS|^Kzugub#WUPT#qh~iP7?vTB=6K zBl4#T^=u-)9d?E|kBFX()VZl;39($HQZM9hbjVcV3Uy0`0a>qCCaPJ+~*eLu!c)Rc#;hlWr-O*J1gG~}Kn=a2Q z^v1gKFxmd3ysu8EcybgB;2?c~*=D4DCMVKwc_vz^Q)<7=C2z?*PDIaf=a$pjGs}s= zbH{<9MA{TN%IU}ohj{jT-ox2jI*zA?=aCcNbJR20)7p6; z|B5nH9rMIXV)++rsSDLdM7fjfGaVJ>5;b#YdM+Q$YQ5Y2<|-ZTHb>c5=BCrt-4cNZ z{t$m!-xTjzZwucUZ!+&TZ!%v(UsB%-Zvy`occ`ADkC|xT>3P|P$nFbj`@yohVmkh_ zkDM}Jj27|WS--?cc)@r%fPURdqBWnI=9KfCR&V8B7<_c;wX;B&uRI)jP#eIo+vNYA z)J*B=mhE=tc&f4&={y@e!<_NxI)_wDCzf*-U$5xwBf~-9e+%hb0X||Eh|1uZaD*| zJg+7=u{`6PW0kge0Q$GSYLSzRn~Vh zMrO}ZxcnQctg`q;VOg4LBRam`Pi`kqWh74eQi0@x3zXvD4dM*F#`kctuKJyO$bIjg zgbT#b)$~GFxQ<&WFv0)RH{Ltd``$a)d&~R7`@-ANSI<8^Fbe#==!TicbT+1f@~^}s z^s%|J9sED5tPOuW4Gu)7_V|K@^T<=8EA?YQzLR;$9xc(>#;aGvZ%cHvj^OTV^!%pq zYAMsw{b(gFg0b(2;IXPcip~=!hbJ3Vabew+(-X;?!2Qr{0mmKLiWL431GZwVd%MKellbxc^3Y)kgND&6Hr~vHkWi!U7cJI zS1$owBkFywFK`KUYLkDn?}s;|ua&Q(udFY%uc7a;caAsQSA`rJjWw`>H~oy-s4dZMeFuJn}hKs{?Wf zG;<9OCbV8Nj{ZHt3P2E?Uz#X`|vl?Gdp*o{mt&=O@5cA|S zxLhbZT}0hwouyHgD#;F_2l4pHBs6{WQoUQJqKmd z*IUVV+;_@X&llI1GLYEZ)yLiTW&{X7K#Zg_S&Uk~IT$m8YCOowyws1GMHUp5Goal) zy8fqS8`Vg~pzj~axv!F|Ug)EFWOh1!pTz+1ejog@0k3^QH@%x^i=wYGSCoT-R}S4%i|yBZ{bhukKk{D>S2AU{8a)GbWI&oztE#> zfBGlQQ4~Va8be{8+2wk2-9EB-Ssrc0Jy!dKnqW7z(iQaUu406|pkkwt9;ANAOFZ@m z{R)!H3c>;hf`<3;(%dp!Y#;-CfZ}UXR@dc5 zYPuJo`##XS7Lgc<$Z)|dzc@&DC9~LSw;D}e+@{;<@6^am^)wwp=W`DS>IcI86W}?+ zeR2KAeICf6{F_Y43SYQ4Vb0O4T%bh(PyY(sZYk*_yXyN?59nu|74Y6O+XCRSU}xd3Y_fhw{N1~6Cdl@U~ab(D%D4YkK?nGJOP$<9U-`I!W4*iZ7a=z;4oI*VwphhtZvqp7LXH^|%5DMiBG^=hZstVRp z$H3BY;LbBC)l%Lw7S&c0SyE<}7wCEfXE(~(VJ16@Yh|-ouhBE%aMrElUJNV@3<=Z- zyzt-fmqGRDjlX6_jojp4?Oz!P)xF51V{~!483nOdH~L%4sgH`Gq#TFuUm&~9pktp9 zu47?j^NG!^D5IId`Txj{OVK`J!);fH%k+6h@yhz-i!S(bSvsski0L-!CW=OBGQ}n5 zqiRiWWHEVWqe|n%K&NU8a<^66sBvCW>x|?5rsB7A!02b>jkQEe74!%<*q`O<;@bIO z>RA0v8$C#u)_rwD)Yj#0NjE$BaZKQrf44tYV4L6TtKzR0C=&SMKNgtcuG3cg-7z{g zsQislE7!!JcXpS)^)?ioRZM0mw4F6{7n_QOawxSxJgibcl~5~W6f$H#>YvBx1G!}# zx~}=CIwCMd^UFqp(M6WY=tc!pJg2M5r7EDm&g6AV(U=#ia?w8UqTS)C_G_~(WbjiPUI!BF{#Qmn4CBjk6T8L?T>obRQcsh 
zmBfkcEJHha4W_nK165WPs>-N$G9uM>B($I;Ja0F6eMhFpm)}t*51{hMDVNfjmE^Lv zOkgdiC-9OuR3PO$U0h#r+vuG74Z6p?KwY|0#RItm1p`UZ%n$gdVbe5$;emGUOE;+r zwLj4R_oMBdrHg#ccA!hWgNPr>D(eUz{l3cdqyGajkBW-uzGqN!s?lj20d^h|_eCRA zkSy4>F};KB)csvxM0cnotD??*!~>J7Eh>dm0skwc?x{~8OeVC1qI4hMqpCFHzemXy z&D3r5stH8-A{Ci3lkUf}1`>^*O#z$5E;PyPWKl?dV3I-LpRL6jJDKUS zEOe?_>O09QXk;j$;kE@ zRTliQ9E`L(ex91VpB!sdVD({YK3;vFeok7fS_DVfIlwT+ucXT;|3^(>R0 zd83^s7yjCkTCoslvxQ#x9Hx)<&|80G!-PVE%D~<>pv!g(E-K_jGJ-|nDkl%3ZD(d~ zCL6drkUsZn`a#=eG%AzYXm6#}G}d*P&R!BKjf*gck0>UEs9ECT50S|eRZ-Y06Nx2N z6S(hEFv-X#>|Y+a7*_w1nT2sSuRRN%b~fouxSp-2>sNY$?y5Ji(yM6T{=mJ!nZQJR zHDBOUO+osth%!s)lq}*@T&akF>{uW ziMi3_g6;Aezt2Sv>@~ThJI|d#$NUQFb31x?`O(PNF!#2ExrcRjFtd5t?E~|aiL~Y> z6?2E%^htda%&D&9XrVW|Ub>Hs0to^;0?Pua0=oiF1E;WJkw9a&lFq6p>5h2fA)>9V zsb%(qr%8$J88!`*P#z{qzEDqnU`i?qs>(om*ZzQP^GYNN}}K;gSdT& z)XZSi269G!)a%n+%`he_A27S{#!NJk=>I>^ZRuEb(3k08RdG!qhx;YaI?y$6IWU;6 zYb`AJE6~k-z*IoE8=E-40me8mRjUO=~ut;$o`2YK(7g2SX;4 z)4P!4+mc_~s?Jz)2%h?eJQPKxW0o_svg~Ccd4>1YD7#&lC+!ZhM_`iUvsppEwK+O+ zadP2%1GL_toQukH< zWH#f3JOtKeBjfi#iA_sXeu04wA>W(@buv?Z*2l6Xz}FOXr8BEMXvm{MrMJAtAaLv) zyf-ox<5gxkYB0l*6F%OQPRx5SC$ouaF6-esL@#q&f~K+PKaOIRnZcd^0#gEy1EFpL zVzw$?y1=w!6678BKLK0q&J=D_<``R}x<_YbH#0Mci|kD}bx~Hr6sQa*CyrpXZ_$wx z(aG>K@mrl0Rul1MRqCG=aOs0|Naw>|SAdpY{CWr(AuqWhOgSivD~Y&oGC7`Dm-*(H ztUC?se-4sG1AmOXjVd>c3i2gtYBhAYKVVHuJA~QFEp*j3m`UcBUa05lR@iX1o6XJZ zu5~NA&jVfQCHG`T#Rg7*q@P?#ynfd|^n7!i*^Q1S0dqPH@cDu^CVs!f_Qf~mV#_I1 z3p+rN>eM>@h{Y-YymS=U69t_(8|`T*Ka1Nhi{d@+KW z4L=x%{kr3UQQ$DiR6Q`o1#eRGZ%$Pm?5#@P_zQEZ#BTm#7AY0V_!aucK{QRrz8m`Z z56wC}^gi8dQa8ZBmqV+9yh(CpD!OldHt z4l8SBbDQ;X+9H^>$&=f(kEuI#`56!R{F^6s4v=KvtUex$OobVSQG{BUW-a) zEFF?AMC23@)1q^yRVS$j16b)fb>}l#l+2Kn+Pa8(LZk+3!^r9rib5i6_>g{8Dyr*4 z@Q0#wJQIi$?Du54IA6^yCZ`Tj8)Y^j<_%G%n9F(Lo*?UYaT~f#m&3{zWg|uQm<)hGz@Z>dxdJP>kM~KD?H~;ptDoUE#vlc&$|7Y+=xs5T+iHCRr-{@^#T3Glm)NyGcA}#)V6Pl$`aI8 zOHs?ClJS;NUqzNn#bYMlQj1(N*sX8G?`o6(m!J?|70u9Sdci5Xzy?+^fq$HMe1@78 zQJp2G2B1&$1!o@Mw=L+4^nnW)*8hZOM51@f)-6ai|-^=~0Ezo$KV5W-g!{(_)L|F0Pn)##BGTF=OmlZhlhk{KS0nE29hvc*afkCi5o zcSIsq)`c2qvYZ1OOAkJJ$qv1V-2aH%{V2@I;4fjYhSdKu{1q@MvZ_P`c81$Mg9)@G z8nzSJVN4mu#DXWu`TuckKa7tRw&hWeS}`-3qPyPAI<$KP&3Fg8<4Csw9rJT2R`Z$I z7_WEgM$9s#H}BAj`!MVBlw4@cRI2;j_KRtS9siIgdcdS}5?$BGh_C3AuBD=XM3;L9 z?7ARpP6q=Ygb&ZbeyQ1kibQEA*+Y&ZOKb;khtPct!`AJw`Dx-Y2W;UZej84%ImFjz zDy_0;8b#QNeX<4i`$!+XF1)chYcyQJVS4@*s1l=dhM_jrE5gKBG_y=+fwlG3FWrOa z!1s{VV(6K$@F@80Sba!OW%gz&d8H_xdQ4vhO;0lCSehC54b)5dm~PC#oaYtWMX znZc6)YwJ$bFNWLvX3}ao{lxMp$>+#{ZMpW<{M&@JP6kPr%1!ilB7zvt$SMVRUJ+Kf zjrfbtYVwl%9-~}bqZ4@^W)q*)HHXh^VZR)h{6bJUK63}DiS06R2y4hW1T>l8?cy@;x?MNkJya=FTdBrf@6rV)*$G5qU@+V ziEjTA~YMrEc#6Qa>X z-~_=!ur3|7YXp1LjAjmTG|2cF%3(2k|631&?X8h$7@b^oS|M?V7eP= zX7kfn6VGNMlTKiQaW+{Z1aBxsPAo(Yd?oVYV+r7A7wHx)LUVJ4pGQzBE(A7yVcx0{ zN>UA&bXIJ5ffWY(A!kuT&yqC~Q#t15zbmMR6R^H?)Vn`f-5lcaFuW%=Sko8_KEgXK zySEIMn3jp(%&?em%um&aAxF0NnL!-E8JqG{_O4#ftVT6mQO9HM?-)}jd-2To?j1M3 zj%iYw6FLoNMp}TKp-g!u#_A7E22PnIz{_utHA0zH$^Z+ULpQZCeU@<4(q-iPsVFj0 z>DinVXHb9tqBE{04s(HFHj^Rxn|NlI-U16N zsUx8%M%7W6qdHDcOdIc-U%QGJl4GXDEG7x=#nS1L+6%h&CJ7{*9 zooK?n$?x$@bRJLiFr6O-qYoaMgs#m`w;A~bJwD3sK^$6?T2p=m879SwzY~l1v zMO2Sc=uP*CkA(IwXR0Q^uUeYM*di`7VSn+x^{lN2s~gJ7;-Czt*5$F=C4Cybu!^Zp z_V9tN1*!b3sl-&lEYPSx#z>DzMcBpnpoGvE?%U(uL=vieuMxMBoLUU4mZd zF|^lysH=s^<{3Ei@yITws`){t8p4`dn)>kLlITs@K$V#07N;j?qZ$s><#cA9UZ>P$ z^nXlE?V#@+u9IWg3_QOq{#*)mdj|9JbHU8)JW_H#b1^el(?FEOT;mQ{$ue;ZWp}U0 zz_spxja((GO##D;p_FE&SJDofE#P+>$qcQz+6Y8yS=P`U>l~AJ;U!8Os(zHi57e|37+{sX&S;#p~7aIZ`>_(^+mcm=ey6t)WdH6nay1{p01J9-<| zybX`pFCL(i#w0WMN~fc$jnlf~Q@e zk2V?gEGhkasq>&yEabPEb=5FAh{&ka6Ddp+d^t5ay9sm0QLQAOq_x%X&3I(AedMnX 
z_~0S1`6PY#GE}fUCzLs z>xr{D>_um6)rpn0 z@U=r^wY6ZycvjL$*Pwolud`7HEh4tUu~T}J27My<%on-@6+txeSx?)8ImVJyK6!0* z_?Dl}%B+8N`7R=C1l7)3@sv8ilBMT!t!v0;qrvcntg$s$xClEvfkSzjj(Es3`cvz) z#mh#35s#&x{Cop!8H3VY7sP2uhVRRomtfDxOp1(V7OVo1^%CXy9;iHyeU4^tbE>Bm zJ)_i2)?cUN@R-=S2|^5`rfJK0l8D+Nv-Q=p^nT`=Q<24DQPIB8k?G&pHsjG8@`8lf z!J$&Dt0gO3#=2?|&Bx&(9!@)4V0!5%Rn8w&?1k+6E|B~&HaG=dG{Uzo6F*yttzWF_ zCq6l!E1ty427#B`sJZXKFdy)`)l@x0SWgY|LN^|bnK$;)$v%m_a-ufAV#0AB-K)~D zp;-1BXTsB&2w=uZFrE|A%z$n~PZ_0qqQS>OpLTREEVmZ#jLBNE^4w4QI!yB;$bS}o zoP|?SVc4q`3Q-YwWJ)sEx_>MqIoj+^kpCS_`7fER9vNTGy&2ANl!x0{s;?hGg5Tax1va%SeP z{)~1x8Z9s{8cB2VMQfre8{Bacb#@CXv*2V$E_U+|Xq|}4Bmp`?f2{CBI_ez^_au`e zJ-PnrSnn#;W=+)z_8yf?bqPC+X zBpcsAt(nbLWFaFTGh@g^Ux}Er@TO(d%=4(ASF^f7bl3~yeNSB1jj4-)h{K4sAmi8= zoWcSTv$Cc)rW^`aBciD(*=rPa=mX9=)`4N{rLsPRu6&2nf6>6eP;%-LuN+>v>xYR?j%Y}Q*9#vU!8yxJ?A;OZo-CCFP8yxbglKel-eRvx#KcZ` zU1VyhTwLi@eDOUsiDpG5QFQvCa-=|&+r#Q-f+ao_uZ*fNx=vB0)FrTUIC-iXDA5K^ z97fmb9KPL_S*;aRzHQ(Zzu-+b$)tm^lrR@Sh5|a^8s-Bt>vTFFXdYib<3z-H&MUoj zQ{sz1L5i398v0yG9uJ6KnT~z6AJSfIe+ZqHsI@ zSDpGM67ySo#d)gr$tVMF$y4K~au<<9C-GGkwvfn#>ph%wECNo{#ad~ooa5;mRG2s1 zhtw#USn+3_%|tVcK*+{SGwH*&jO2<%hzUVj=V;=Pb8?^?{|H^_?|EZJL14`6T*xS2G9GM8+18FFaf=h)66O0WMk+p)PmRU z2l0F&9~d=?m|samydv(6tb{G{sGwS3ldg7BD#NlUhn?`Ud~_)4;g_MPZ{1M+^Ps>* z;qeX}SOP& z&}VSr9va*ncRg16Y4~k))RJa&;36@7vKVVN_3tKeTMP6#?Z@lN;dckfPwUXY-Ceu-B31$TIYaYJq zMEAN0_NoA~zW^0)qJ@8@4ttE17lEE3at|of=wE;5GbiBJQ4M9VC%}bboU1)Sl(eJ5 zs6eF=mD#9gtZFVQcT~&BI1H$is`NQhw;Rs5BBD^2)pf9U_qx z1~DDemdaO=Me1|PE(DFDB8plp{+&)7Jz)1M(4n2pnz9ikgXp+-P*aGk_e=w=$M+iI zabEPn(tKBuXN*%5S$iDy8*hCJj`w4K`clJKagDCP3eL&b9zfx23oAWC z#u7 zxbsRzR^&qwy(yBR_vNG3iNvHy3Ym@7Ekj$0f(G4`-S17TS7in!2R=7~j`Szi<5z~s zieN7>n4^MS&8n<#3HJh;K-9%2Tb(9~CV}C-;IUOeO-r5_3?q60em8+{Mr79c4Cj_Q zFkNvEwh&)`1=UWVogH^~;c=VjL*#IiqtMrI`=f-0yY;B_C&OxsGSR&R6hDT#G1$z+ z-y+gk%x~+`e|XH@6zVZ;w*hP`OjX+gRM^12Hh?MY1CJAsVfv_t%BLP+nQ`>QM$%m$ z$-KxDe)E!Ps2xmjb>*ydLwdR;c+_N-9aVPX%S(n{fvwXJmp}38wP?m0z`Dpxo{ojN ze4{&=6~5+}{ajyTu=)$kXD2HBG&&ANI8R!_P3fkl*ExZ6U3*-?3`HYOqifEl*N5X> z(+@dMxq;b`kLDZwqc~I?%eaFp%{c7w6LGmEfK1y;0*_zC8o$?jlxERTMit8d_zWoF9_d#=guN8TTvW5pAmbXaAv+506Yx7W_eki<%-#bgbb3)uKNor6%E1Z0Ui>?P%5N$w z%7XuFVSdizl;K>?8`g3}-Bv%<3!*3;C$@_)xpSEl7lT0dDk#r=vD0#}a5eqt4C)y? 
zs4w#~Z+Z8psL|VDkll%*FVqyd(EFaDW9Ek`#6sokz*No)x&}ktI&LObP?WP-!u=5V z7I;lBUDAW@iH*KNr$9Nu8cQ*Ke1+N9hpe-@~Gw&mBK=NC>Pc1J$E-J>B@8R zDz^KV$&OorLxHz}B=ljI(&@TKjxDYuGh25T?w235f2K=X$~>R`u^Q^~T#a^Vmv8 zI7)?7mTK%5vm1laO5UL!d|_Sp@jJh4&UN18G<O`QHe^Dn<1W0lRBQ*G~b zVv71J(>Ei*x*B{HWf!Kw1jE!HY~2CAy^b9l#hJ_c*rz>{YY$-k>Cw*zq4!)MU#`b{ z8gpMRLuY)GJAkfuV>dtZjb*^P)NXXJJDpnr6dwzEKX=o^^it9NDGO^@3I{5Mr88ll zgD{_z)RfWaKc-;!f}O@X)Zj^AWyxSKKgjZTWjrE&pNh&|3kEoIn9z*nTqkD6bKQMF zyVy)j*LAvcXN%;V7HR?;}w>aofob|PI+rsQlxc5;X^0AUK;9_rVv`yb9 ziq4oQXv{a^303JA3C<%HM=@Vd=O#G${e;ZV2{EoK*s1>|GZ5=@nenR2T@HFVshnuc zd(UHzC>>dLx4Mh{(sOD&5);xxIUAmV3gRZJOH9~bOe(~!U}t=K!(Hiqr{Sb+Mzr$6 z%rHFUzAbs|1vEy7+0$cs7oE3R-0!3bn0%WVl(Fu3=250HrFn%;>N%L*B=S>Vkl`)N zZYt0J%w%p{P$d<8ou1|#o>z<07=@{crZA74okG96gC=x~JNXQto_WPdzBJ||{lc5vWnmBgcMv|1<6o9N!j#lFc=r{eCoaD$ zpfllXrP1};qg$>Z_O5D&H8lq>=P(HtpL^UKW40|dzC98o5=?@hBB$Sk|I|TST}@1X zlUeCrXUAfZnb~SWeYArJ*$kGgU@aB+U2c?;4E#Ne%Ay3QSdvv`Q9%{&G%8p(s=Orh z8-LQ_4r+G|=_CH8!~LCl>?CNng3eK4CJOg43)Ki!_&IgYE@EQ{e6L>&DX>Nw25mlwnx}8;>f_X<~4tX9?beih*JeF!qZ?+RF zOv?Vo`d9h1!JqPy{g1<*52HnDSa>(qbP?v=fmMB{0%}5KHxUi|B7cw1F8m_1j)EC} zz?*|-lo#;tEOe*!>|RMG=)W+Tyaasc#Y9$DdZdNncXy};I??kC2a(T{J6jSbL$OC& z^pqR=BEET>x#7EXriP=2)z`UU;J5JAow}?EH!V=a^V@()&*WGS<{amcm)hfdZ_!Pz zqfKsuL+l|RUO;uIj<#c2T`cv0Rh zN)aU&VLF5G$6WlTKUh+kj=*Kzn$<3&lkr?fq|3h=uM19U{70Sj)Feeg=|RUg7Vlo2 zo=I0S;URinZOI%XQ8`Y+|6kIn3C@ljVMX_dl$$7x!(d4< zAb-$x{6a50J9S=gHmbzG*{0*%H>x`bUKg*}&D_m%daMCne}d0U%6%A@fPY0e1(XEL zsKi6s&fHOA7OMVPuC)eT;G^`o@1lGirqA1gM?G}ezMxBM?pMo>4-FA znq)P1+lmPu#bwW8GSMNpYgBLi@0|$t=tEgSCF-W1bV;M3SN^3Z5SeMjBN&ECzQvn@<Qgs0qM-#;*w5bQftqq@0AO_iEX*ih{G z9R^iiW@1k061loO@gJF;sez3TqrRlYN(K4bF|KzqR%%bjvkKn_r-!=2_G@6l$3$>s zrqN&1KW|LTonzmof?CB`>o_|fH!lzE#V;x zn6Ar>ZjppKDU?&>^-((Sf<&)i6XQ?^`=CF(`#1Bul2fQFn5Yllfnx^04c_B#|Gz2y z<8;3o!g4R+Iq~UV)}s3ymEAZ+{?5mm#u1D2s98VK*}06r<>BX`ayt*No(Xq-MD7@k zrGoz*PwziWo}gy$&0m8n+lqH~2d8QBpRwwU*_DDs6&_VB6p%&XLIx0R#a|R@YCLW(m4F@4W4zv zq@;G3i>Hi-C+tDVYQx#eb7b8^CIwfwg|lao(e`f=}4nGW4MC z$c-YiEkZ=!*T2m(dZ~?>6siDk{45HhZ+Yx-Fo4dcy(%J+tq+qm!Z>j<(=LV0PSvZ- z5)q5c_?+KvgH0V_$3mIG<#f0xEstZhlgx}8k&(YXM_(#S42{BnzVb;w#B}hqy@HQN~^7D2(AGD3c z*@m0M$vN(=oI(_0YHu;;cD>LG1Dx_1k&;X_wI%7b z%&^}?3G!9&rjKfVi?ta(P%o6XYtCJGhdtf5{#Q>9($)ol9t?9pB-lk*);508+pT?LXG}H{Sphs(G^#j zHYk7snZY(PA4~>THG=7f4@AL4a$a27j60H7=M{B9$&but!~z9-!MAoY(Y=UlG#HM( z#gw;gxkuJO?!9o6b0k^qA9~Q6tQTJ!h36jy5jrtjekz!sxj$1(@Z}qfYZcnpHTwXq z+6z+muzSUNG%7EeOKEaKOQy!F(yg8&TFDgRF*wne?(S*0L{gYUsAvJ&|A9-5F_ZN* za}^993)Ung2J6Fm3UTtk7U-1{eXy@ECMt7dW~$8+vaT4UuLXL$*G)m$T5W_Awouia zZZeHI>n@@CnQea9E>vhkh_i^YxQK5zn49(|Rh?rSq6b&7i@4G>%rCd+tb9e)$3zOO z@;}mKYT1CPj^XN}vsm_JdZ31_0uIgpupc6XPM~xvlLC_Rx<-Lx|2D{W6N2?v((sM~$^awBjBcRpbFq z5f?zWTTcx-OthA}$zQe6Yi@{#`dwhN+tR*-O%xXc#UHgF_{P=Vd2%^|5;m_f-PnVqnW2I{QoQbsosloLieEYukXkO#h_AG1ulsAD;@IMU%_bUD@BRp%Qyz z&cS8{IsFWIsUlU#C-Ix^!F0QhswjZAyOI9=3cM&bxpNpy+*Ur$81({-VC!N@%ij(A*Z@G5Rvt$aB@(qJ+epTB1d>S z%A}^Req_osgP+HQyHibL73L|R#?Wi}L{4mIo49-YOWaL18F$2);lxzKVEc(di&*-p z+X+sZk24R|l#fh#k*XoRjcg*C7~u4F7)_WxCR;ks)L_{LX7o#Mcem;7_Aj>20`sbk zUNTf_uwsBAtD#wK(FsjHYNUl^!^&if?@Z5BXFn>Vjx=U=XAbfF$h5;J3pz198`U9O zRX=dO^iq1biR^XN%u~v#$6abGI>qco?w^;^?S$gfMQnD;hJ5p^bdJf~9I>t!=*H>t z&n6~kqOYSGK0?ziCGY7o{#*ViXkf?8W_2-Sc1Uh@pIn-isM+RL^3QkE$x2RBo*7+M z;B?&t&QUzqyG%lI*=D#}RCyLovCp2Q22ClJ(`WcZwB85bd*j!uU=X zE-}dc{Qd)(OEID2w{MyH@|s#yrwQQsqnYjr*rsH{;mtj zyUJfm1wISZ?Wgn(ztJ6{9GGHn>bbwj<{OAzWBqO7i*iK1iK--AIC2}Y!Mp6#@!n8p zr3K;N1(S3GRrlXS*M?qlp1VJ}y0P?1Y?o(B()$b-j*Ujc3uTt;9u^-D9xq9n8weqC?^jr_n!X{ zoFv-+-HY^BOZ&m^ib~a~8CywbK~?Lsz$MDv60DJX%U6O)^pk5iaes*);8eqcEO0eN 
zg5oHw9!XWH9`lh;k1(gTONtYm@C4k|Y2sWFyd!eU8bHUpm z@_wfAo$?7O*yY{vtC6=`;`$nk-3<>0(^ae|_GC~Z<=eqwFPeViccuYq;Y>9X#l?QA z!KacODE=}(n^Z$Amv&L1y$gQuHk;6u)jvKggPlQUO&s)q zqj?roCx@KC7wiVeG`;9|x^q0D-DRE~B!!h)AS4i|`nYY(^&PnI zU0!Qxq?|x;q-cL#pn}9?l(PF7-Gu&Dd9C`p+#2PBM5a1RHjVUigRb=x5prR*h*D27 zy{$L`4|Q*PO{Hq+4>p0%r;-;*-IiOORQ-AZTl6E)a2VBl&y9Szr1 zs_|Cz=>wTU`9PmnimoXI=)z7h8^&pVAee7Xjk`&9X$Z`Dc+i&lkOA@pqG3_|FsK)#)6C>{6<4^y zG2GMNf{pG*XQEeG9;0Q_4#}mZq5fK!iTYG8nQ37@C$j3hzi&I zX0gl{HJYt7R65Bs#VYBoZ=p;%?-X)8&zAFP-{pTKCTFOL!h)Z6M?15396iG}X}nxa zy`tO_rGh(NZP?yjR2wmVM)2xK(j@N4I&%|XIz9CXqBG-Gc_cTPsIH0Y+FT9R!j5my~B`a9wSiD#lOTWk;@AM4t~a2P=S|=?Z2o` zPBV%6%6F*Ha|IW?h+r+=2M45TpgldLq##W?ilnprmj5{O-JVh^^*!EoN2Ld#(b0Z? zI=nV$z;1{d_|&E%i>-Aodjmlnm(#O#K^^}K?Nl^K#%$2ea3%-Zh%9(!HI>^*XHkb7 z2T4*n3;F(s4@_s)ZIF1yEz3=fcTK8{3aW#Y4@8Dpga38-iK$R3qhekx_4D?-2ZMAn zIEmj@YKK$RIJ%1dzJ?y`6G(q9|Dco**W7!cvQCf_%)WpWK{uU-UL^}XO=minyX5{s zOhDg8>D|_QWM}m=E2-7-A~)#iac;-NAi__=g!L?cCigOpQca#ompUFj$v%+JC;l>N z6{@GN;HUqg7~2U7k%G6l9pj!g z;fbWh{zdSotNt(gMoTKeC$K`i0>kUf1Z7ryNP796H(WXeYtoK7dKQ&*Pv-0*Kv^pA zCQCD!w}G5BjLZ;8eVS+#Vmvr6R`tYX9&g zxs-BR)CP5$Dc(wr{TXZ#Xyhj+PbY&*n5SG;mkOK{%xd;^ZxuCeX8D3BEFXb~92o3& zCOcQW)qKjOs7ua^>udy|Lv}~Gir*B}q#?*vM`^Z*;pYvOyGlRc2J$iAJqc##yy(srE{rsC{z;hwvR+5mXS@r9|YlI&d*{sSRoe9sOxRExC@8Snfyd@yh=O z|57CQlX@&a=<{SEb!9O)h~tgsdw!tz-xGYHavUjV5QD+^wg&6{7T!sxBavOeIM$>p zkd%--lR7k?cikTb3R0K8Hg>njW^q`~Ez*Eh^8Jax|M8!r*ibZ|4r^NQ2Pki8dYiP; zWN_RJ!7(D*dvA+hOL{4Cp#?1``hqFH1o_QIx4w__NQFRdY%2|8Jxt&WaR-btC#va6 zQd7SeT-9cfjq!Ml<^jPNjmyPWQ7~BUweY7?(T+uz*;1TW4EYNGw7w`SG6##D4fY_f zA7>Uv0oWyvY6ii5pQqMnYDY`-{M za1j>O)t7Gh>*+f-(h0}9!Bz~yytnpg_l}f7=^;Ju>;h#J1EniQo-O7^Fa812d#)Z?SqNn23uuC zX(n}N`@kHZiG9%$TPuC@JE8j#IG=q~*Ak|09?|r7}M1 z&m$++;q8u*YA@JIkC`HNurFbS>_`jf8}{34-2|e!JW^^I)grQHK-c#PtbL7llpg3KwZ~NdU)+VOh$Z|sE8xsZ@VRcL zrymtm-O za&>W#xVj1qY$aMM*Iye+W#pvcY4w#5SX^~t`FFMdCoutnjPiIJo>SoPn3PN;Z>D@Jt8rz}ksY`K|vC9{2OPKdq?w zZ~A9NOJ$+B%AY^tzZUPg;pO~rxtlsm9A&RXL}26B_`#pT?!sZXC%EuJ$AZxyd577{ z*%Lk0NUHsPVv01$AMbVcYx5^w;qvt|n2wt0E*T*&@vy32#!XLm^OsO@CTNL<`J|MP zh*JhdLT>V7WU!O|>^XRf8C2unDv0`ax5%jB;p}~}*LhTA;yCsE#^3^Y?O!5}d{{c? 
z6>@uc8oZPdMA8f3{wSa0q~wL2OXbhx#+Y90UB$q455cVUBb&y0=jCS> zGHy_lSQH`mR9cJ4M31|4eIwB{jrFhL)qfpz|2UA8bW(HiLfY%Cb-KIb1695&s!EN6 z3DP9=Zq23dsMO<1{l&RpENtRG!8oeDE>a{&@I;YK`Atf|^jH%S>m)iLsEclG77WB_ z`J}uN6b|JC5u~G+(Jta1kvb{q!~|6Phs7vWQmX|6tuX6jP*hu_{tTYDlK+;vp`YK# z&f?A&Rn=ee8`RHn#P;9<@#Tp7RO%}$;)UNps)EaBI&l#mcQ4Z?N9l@VH9+lLJM#c~R7vCE!c; z!9vs&+n5#YPhV0M1xCP(#!tMJ_rSRg1kcSUFO(L!?VOJO6|m}+e7D3RfxKPJk){V1 zaSf{AcY&in0TP=AMPP&^p)TDZe+DNR;TJ`3hLZ1aQ~S=sEfazOQp$uZeKIp!Qa}fm7&qy72MfCqtY_9Z;4@x&E@*N`X(|&>XQVyyFnH0D)YvJ4s(v20)%aj@1?2Z2MUv^HEi^lO{ZY;* zXSP&btt+eah--Z z#e3}$tR#=TVN1y`af8bAIvjXiZs#~~ zmEz(Ls=2bkaA$&(DtIU#RW68nXww?VgGD(nwX=l&oXvn9+djD^tx=hLq0X!otN{o3 zGx!7iy(MS&f_zYH2YE>%FBYC3t9UQq-$yy9OYh`%>K1uD-R2W;4-Fo^qd$iVG_JHp zxvPGZXVANrqlP%>uXpCsnPl!8L@tGi?96wAWuRU&iF60!XFZkVn)uq_4Ap%)}V4fZz$GnHoA3i++K z-eJ=L)kzd}&OyJ1TtdsC;!o->a#r{@y~s)bnA6Xf%fx+$fy%1`yN+Eac97kS?+ z47z>G{{+``M9!)eRob~1t!sW^`E0Ppo$pUoK4_)ot)O0SVMs-ghv>eBJCZ4QCCaGB zMQ4=twjaZ+^?koRb!P{3%QvWJdP*C-m(Dh)m*)k4fWa<9W4TGLEX8rV+LCu4|H+-w zPu}z(c@27=lOP$t*u@qV96~u(HCW}>m9E0Ob|=&2lTko&UIu&ToI37%9}f}b1CHn` zm8oKr*UbK6-En(y%G%1Slt0u9yxG})lVBU1b7}uLC-j(KUffcWs`2E%!5fDLn*WHY zt{+|{DWn`$FUmjtEa~>yuKfbV(uG%RzqnOCfm8aG;5&WUb@GOpBXAn3k(|_!N2&Lch`fFxFTXzyHZwkNHmh`wD0hk~?gi|^EN_Kh z5G*)c+JFoHlwdu*<$2sT5}St{!*m4#Ss?A<{yB!$hMfmqj@@_3m ze43-P6u$4+jD~$iK@q#7ZT_Hp?S+)Z?(dwAzR{oF7Iz44SdkMf6o+d)l~)pya5bP&V9i3*^I z`9XiSj&sTuaeDqZK@T}n878$yjXX2>C9T8p{fS@Kn~lEVLJ;Fs4ywy>z@Y}C#O*EJ z^ADf^uH--RGLS1y$z|1QN<%4jrZqdCLm^yB*ZSM!`f5L=sWi%M@BI=}m0`gayN5GD zbk+Y-BZFjKO+Nt@;CQdFlg2wD`l*%FJaRbaZM57?*%Lgsv)czf9OaaxRDSL8P#G-- zsilR(oJA(7|2pqzQ(}(e!mDOkfx&$sB*FSKhxR;wh)VQnLFMc=sz&cpMas z*>g5Sv<_O~ckFre;4tBn)R$^%xTnAQgs1rBnGtyE|3<%jL|O;p{}%OHa%yv&g`B$n zZDPl!AS+Y6iab}nDD`xgxDn#Ano(}~Kevj%*sxd#&+jCe8uQ{K*{l4k=mg(#l zr?txJ2j(7f%j@LpK@GR1(}KMIT|AI0h=&FI zjr5(xq|Wkg=`lL){2*mxK$%WUFJxR3&;Z@%e-B|^Y9U*+@=$$^7e%=R`GS@plIuYU z8cD;M*OXvWC3Ik?y|K=8ub~(t{{|LNgg@I(*&kfRi+8#II>EcY zsk*pi%>}W@A}>?Vf!kjK5tt|0ZVjiYKN|i0d8+89%pxrZ0S*1n1@4%aIC#%@$qJHH z8r1xr-;qse^XQEvIF|HkJRNq&&rj~pjAc{Zu zEYA7;q`UGA^}f6#xDA(;#jDPg;_x7zoP$Z4WAw@2xGlNai&cvL>mK^q>`X#WLc!UE z_=sa%u!;HQZS2fE#G>XRCe59}gDs zyPLwXg7sQRZ_C+gLkHBG4XR7sRHzpQ%1N|?$`&g1%c7jTJUHO!_5t^Vl!eIkKzbrq zRo}=ezeA!}Z+35!&qi<5=CON1zfoVUlL`k3{q7)iC8gG=VRo=5ur3;p+C=`E0)`5qg(XETC4M)VC5qwE;q&mc1vK)bku8N_O+ z@=K$8>3|xpgxp-*_vaC(mT(5MyCePYa%U}@Qcn6EjBOacJ7fHg@H_>Bs^TnLAtno5 z^8C)sxApZa;7K_M>}E2mp19IlFyx!TL+X-;D9d)j;r>MF)l%vTH`h6cbzWRSj%EuO z%AmqQ2eA6U--G^QiI^wYxDjj?r;1r4Lqsd^7@E zyyh+TvvRM(iLq@ZOVm-Wie>%?_kxp_Ec}L<>^YJrO7A_iow}Ukaq^nsoAzmQ$!kA&~G3xoy29d5n*DQl#475rVN&` z5~b@rm;wzYYDu(!OQkq4CpU2f$QdMwy)~do+hA7ep}@_AUs0@rc09f~eTlIZ#9X$> z<|i93g)JB=l|x%R4lU_R;?K8W8G5Zu(n@~sDZw`8>@WXMt9uS5%22SU*6c;kK>Xjq z3_~3{z0~NGRk0`d>`8u6(1&d>(CMWYVjgGNA|`zBuX5^I_>0iM7M4fI@kQ34v0nxZ zsu>*1Z&YA&C7qgjFjJb@$sQxo1y_^i!)^&`?xwuo^`N=Ocq3s{EICm8OJY1zkpD2H zR4IrgoBl?|&5H&qIV@)jX+p4xf1?GR^%~w!IQJx09TgXCYz>&cBOn%0{#zz%m%z>S z1{dGT+bE9)Hw}8sxBe%$x@VHNufwuz<0&(sLzkq-R8pr<3e6$Deh4};Iha;{$=q~Y zc+I7}Z4HKIWUx!>z`V?Krp;c{QP=Y)GNqFb#IrqfG@qF-iAJee68`WrmCgyOlL5Se z@2CkM(!qYCzIzS#un5gzQPAdn+?n&tH+#V%v}{$V25Z4F_VGi^|J3C(ttZtYKMv$G z+J!QG6WLX!!u{cQ0Xs{HQ{yD!>R-|}{|6f~&v`5Tr@;>5-!L@U@1);_ABkQ8N_JO|b7HZ)d)IW3FP9V8+i=b)kyZaElRWI6g9&G7&{j946|Qpvefh%eP>k70`hU z;JS}NbF=XF!ckr)776L8GBf%5ko`r6c!Cbh4Sxz25J&P6f$Pu_L=!vHNj*W2&j!W# zeCvj5Wa4Tc8h~k>xvQwkpP;WF0*_yk zNsE?XRPVuZ2k~^Zm_wPvy!=dfvuQy^G@=jD6m>&o-Hdl+h}>|2pHNHx!4%4WFj9{H z5#@RmjA?ZdjyIZ5ocr6a&rhj?GI28J_7m#mil|ga)9IEaLLBA{oa6nB0cm)Ry8b2H z`EmM@lOSl@P#nZgt5x9h>CPw2eld_fhX_|%y2#{SBz?_EKK}(MWuMVWcLGBm%zQ*N 
zyUnWc_kCeip2TkMfLrd3y3GNR+lC(e2U#r+-d`E{Y#;JdhB7NT7d6>rSk{cx((iEJ zdxw^_jo&XgDK!IS9U$&=Gv>h^PNlZ14}w0P)4L2bYAYUkc!;l zy5btsnH6CsS2A-HqGnl2-QEuc=`A!w3z=y=%s)}3N$5J7f;{J>vdfNUVJ@nbRn*0g z;3DSpxr}7qWgBNDiaAWfD?}9C>3?CT*Ezn&YE;3(zlQT)-PkIDPwQ_`{geELvD1HX zP*r3PGLtQ9g3_-=yP1Q3I*vO^=EIKGMr%<8ZP+_<%rtWLO}e!Xd=*5g{0nV&GAftzWMr5i@+3c%sd-qQ4UMTRLPfT#nVpmHPn|7MA6{;`WA!;%| zv;v0KU?RE+p5Phr>>1?EU|MQ4Z#536b6u*p9BA!Zpn2(sqP`~;o1>=GPO4?u?>(7W z^K|h0GuZp|NGZkcXGML&ma;|kA+hcbsg(vwPImOBU~f__HV<0z6{fv&gOFAy=EFph z7h^Z-)h2Srx+2Ere!653)_t%}C3z!1y@U8zZN@cZ6I*od+FR`*>|(iSuRwnra@N@Y zv7>b(o4gj;`EU^UgFeZ2FS60=I&+S%K-oIu(DSeNg(y;y6P=kH-joT?`5 z8ePv0uAfRg^$&JeZ&N0)18@wE6oQTPY1t<{Q~gVQrXEoRYT?830A|vUfj6dQN8m(u zJ{MQYfgHz`zw?eVaTeFH7hybaw2y4#dm&R-j}0#R3z^*;gnw=%@vIvz2qCWr`iu_F z8-9DwerIQMQaQ)$!}dZux&6R8V6C-QStqRY_C&jkv)?hC3CwfHPCG~9!Zy->>3?Q+ zq!1Ib9?|a=yz~QNR!$=1RUGa9t_1f44SRnJw8W zXT`H-m=Wd)vzpo5>~3Z;=a_TZUKz(8&0exA&VTMyD#P*c#TWcYu&wl{+bgq?vm~c4 zJ-tK(tjJo>69;daj9}HdyXrn_Nh#g`!{{+|-E|6NnV|nDhBTRPN!= zCpNT!eeA(}NFTQfJ8@;Zn3c%tY?fjR=WAo1aof0MoG{KAS=g?++dO6|_Hz5YbCc}{ zKT##8B^SiHA}>b^SC4MvGV1kcX$*IFDSaMX7A_Qtm4FSJwbWy3e3TutwMyC;ZMD`w z%cY%R`)x&Z^lDm%blSfch*GIwK5{K3t;1_qjn?&YS?qTPiebx$DmCOUiD5ICr+o)@d zF}4`8nb?dn>YKmJjn;Dew*A4m>kVN>Z#1zmq3%*WwWhXJyR7xna%;7<`)t#Cz=S{77$mpJ~IZ%(cYMrH`VA`6xYQf_np+C!2Y(w^Zs&h!@rU zH{dMgyp-;1+puz3k-VcdMwGGN*lb)irWiSlgM3Xg>X;p@8nLm&31=SeF(~IaZs%!w z!bWJ2a+4eP;k)q|WPdyOg@=PvEAZ+KAYBjDoZ0|wIB#j9Rz|z5o?=IQOSOnvO5KUe z&=;_YbN{OmVmGf&f)P5#E&Pa1)#Us>#apScl9so%f(&qvXN^GxlR`XWdb=HL_9X6D zC-nAxsoVCFcTeNHSkupfD?k@KDUP|5*{YS5rOR-Hk_P7}9hQl+u6kl&^u`Ej@SL1%GXRL1Y*A$q)H@^@vVT89kP zRlB04(RKZ-c2HZVUEu$_N?h~tMvwI=Ey+~s-~4`_l!-az15}Cw@fwn0VFnUG&av@0 zDWAY}C8IJDR&*~Y=0EgyV5ijY#h5ymsfOp{7%(Tzbdb7G1Z35Z(}F;ZsVAd(@4So^Yt-lcvF>Q_D0v|Z7nc5T6yg(PA6{^ zRrwy`TuWx8dcjRTV~!yp8&2fhyZBdiC3aO zHItfF?VY`zE`N35UtF#JRoxyzlkc9>nwYH6l6w;S(c&c?Ke z`4F8g=0^0D=9@fP3s{^M_hgX}XE>laW$=J9&i!=F( zo7IuL>yZ0%#=W2Ittad|frOxHI?Vy~K z&)}SRRL-j$QquEhBGkXM_S!-6RU&O4r*A)>MjmB7zDs3AWoqhTL2T`&;JLGmEpprlwQsurqYR(4f=stKgF+aIF)!@yz6D} z5Kd4j+&PZw)U;Pyy{*73YhExq8qZ_u#=PPUMMTeuHlxF0mKa&A=2imdqdN#?Vl{5l zAbG9QpV)j_jnocn*RMN@zA!mJ2POvw4 z9Dn9JQ$OpNhoIq4{S zzHPR$tLi-UtJ+6fO0HX^4bv2@i#l5AihAk|?kwqp$$k_r#;@GLZaVj!bHHg%uko+l z%dTmsw7Xk#*c*J@Txn)E8yL@G7Q{4+3B@GkuRbv^jVop?Yr0+C^}Pw;OD*Ur7Aq&! 
zXjq7yda6*{P{Yt0eI}70G4)SD&V6_KzcX}3?cf7Ol7*8i>xkb`a&~e}2Bj&HPbQ9S zVCpxV3AU~D-LL)EX!bq77*Vhl{6Ik>(L^Tvt|)_5m1?m+PxTjZCWF>heX7V*f`5t0 z;EDLE;@CIAy~;khKI{bkXkWEk*&+K8Re5czAGPc`^QM`{oMJRKsu?o5YHQ4%n8b!+ zJ~E@sqn6{GK+Dutx+isz_Yh}`Ym2okdf(9QkRP%_zlY9}hu&)mwM|q2<&=li1tr)> z`GAV4ApOusG<`Yc2V~~uC@&ww=U<~AUj&MKl^MEh!G7Pxm8AuUQZ>+uRycpwz_oF) zGD^L!Dq08a9B1ODHbX0~%~t zG%Bop*vHUZ-j$_o(+X&{)T2r>dR7VLSbI8&@^H1qQ0A55Yc37Z5W_z==S;m}gPBLrr zcNdHZNX0#SqKdcIW%(Qu)>br(=4SI%pJ{?VF&Mji}u|QSUn- zaP|08v0cm|=Bws|v9yv4DR0SgiK$2S@wH5=ujSBI(=`v`OeU@H4^iSw@=vnA)s79z(s7UBGv1tQ&$fT0EjGO69`l@824in@f`Klnz zf}Jf&^%@1XR}xm}Yv7_#+=b>ngxYHemG?Lt2qwc%zQX}=iPBD8trpQnXhVn)rM0i> zQ@a1J${hBxJ7_Z>FhB8%y0xl%#QARjX3w|cTT{(Mror<#MjW%a`GNPc$>>bp?QJwS z#u*!pJH}dbwe=fl#bLPfss2Q?__(IZ=}`-J(JJU!LzhC2LQ6uELc@8YY@u=bJ*|&c zl{0@r8KxZM)Z>OOMu1#pqCZ_l_1OmwXle$uUtrpXoYeTh8WWo39PBn!(lHC3hSMQ|Kf8cf5ym9U`?vn|YlE)fu z{zmV#+E`_@<|+CR?MCrb)s6p*bmm1PiCMwiWS+5Ru;W*_!@T=`V$Oz)V(FtYL(QPQ z(*$vRLg+(iPUvgs&(KRDmj0r|5qmgmR@Hux8#xkRd z89@hs-%95c^|t$iac3HaR?xw5!c~*$=kH2tp%=S;QX)3Qfz-ee#v2K8;JU1p2 zx&Jh78ZkV<6e_;)Mtft1@yMuXCO3JzBku`sD}?hRI-&qi|-2M_|g2JqIZGgCG}YFBW?| z15cS5q`WA%%|#2JT0E9|!U(qKCZ3Szl5=NJRTk#$*U_?Td9-*`_m$LZM4(Y}H~cE5 zp_zD%L&-$<9BAb;OR)x-H#l*oXE4$@YFI`K^LMMQy~7^q zd~m@C!NXT_cl#?h)Dc>0eTy!Ku7wJOWeQ6e_KQ1}IP|Z+Tf3xQ<`!JW?bAYYung_k z7W{e&qs?nhPF)MDXEOQw1lDL>@CD|fH7;-oK~H2NWL8k|djG@L*3pynQcA0f)N87x z7SJ*heF|$|)b&(g2k8Q)$y328Qo|-y^(WJ_8%`YOp;gfOWLDzdEaZu18-t9pMpvVp zQI$8<$T&pbo|Wu7-E3{;viGtx`=;}smji#TSCS**D;?FiT5+wY{#wrv+7qfBHXxW3{SR%g8m@LxV>Rb}?t@d5f&Cc@f4viY zub11*X=m@|O-(T0bK;g5t$3O-Mt_sKNUngz``R#Q%1JjZm#cn_Ek zz81@AbE5=xWioxFeqA3Qde5yo!pU0@8W%dGpX4S#SL1*@CQ`DKU-5b7qB%7f;JI1qp3HlK;N)Ry;aLY8j2ug`%q88X#0{W%=$`U23`a2z3i0Ao7&7&pN zj#8gLR$3`9z-Pn78ko*Yu(3tG#qI}Zy}i(?V#SzM%{#_fPT&AeTPvfC(Zs04UlCLg z2Zf}SIl_CY-dVtt>Ed6B$_ZaFt|jqUwK%)ANrjs4K7Xgx4X zg5{kBubfJx=xwAk(sP?~fFY$bGntQ!{NR~8%yRY*`-$y13aa%?%qvY13uH~*q0S&j zt~#8drUW9g0C9MyvqVS zAd1j|kD?l?3UbnbZ2N_|z472uYrv~dGYeP<2H_)G?SX0(inyohEA=~fb-&t_-m;={ zfZDh`{oxuqsh>C;k8%$=OYNyvax2QLY=)UKZ|jip+?Z#y1-X0{BM}Avr9KLb*5+FC zvUSNS&4JD?Q_)i}gIvM(p)z@Zdawsy?D^yNj1O9qeU8Wo* z(|m@T&drATc3^zpmYH$2}mdik?Tdu6};mTIyZ{v=aTwm)<0xTT%6WT}u<&pbtb8epX5 z%=I!}#k`GaY~(T;7{82c<{KlgIp6$brnh~&wezPNp!_TbZoOK}mE)*q)r?e^3-rf& zp0M|!0lcq|p+Us+ZhBuWo%U9(jNME#+3qoZN-0{!w8 zPz~XiVIqICv%}tQ)wAB43Czr9d2>BC=&dor=+3)(5u=f%+Vb9BQw^pyD_Fhk({=%; zm7B+3h%Z(->d=PD8FjvPM605|*AG(3rU}a%RyFKRXdcQ zA?H1z_8;|8EdTM4Y_Wzn)ll?j9(6C%R?WS6Zd<3W{kK)jIzWH>(fCMIk1!S!{eO2aLus{vpZSy-hLRu>^?6epLUF>rhth|o=T=n- zz1I`##WhXqK%__pPPPm#Hl5Uq7_kOEcrJ*_eUyTU(G|EjuXlw_DnzxHmHN-;zP7^K ze=oekP8=x4fXbZUoOf0Gt5?*MoDG#MH4jE1uG$S3?DS|$_Mz%efP!j_caA*u#ZF`o z;#RFR)0s)lBw$BNsmVth8F{K+JmGO8rCHIeVeT|LS*h%2c1x$JOK(Z7mKCn`pd3!_ zNTMCn9_k%Khlqcfh=8}L{gQ{K=%&_2TcBp)srFL$B|sNbe~F(3IVN+i5%)Ha)#EzNgS z6dmoruHf8pPx!}BeH{^bI2(od@l?ZFaGrbQDaQMbV|Tv4a1#ns*9u%*n9$LeV!f=9R{w)WP!E1aSB0jsRF%gkY7Dn+!o zL?`to=3Gp4%=Va{F>{T3Mgemc=PwEUZWmiPtsIkS0|{3+lZi7yjxS8dmOx*tPY-1e zD;_p0tXkNe(ALmD`ep67+DuKSE~e(3DsK}BMN1rNB0=m6^3@-eNm6pwZ{Uwf$-_JG zvh7U;{|r1LLL8t4q~&M8CIu9aBg0lslLWt1Cy~%##v* z+I2sz-{0HkZg581Yr(>Hn;Fd#W-?O(X*od`l^U$ij%j18BLem`ubQvT64pIyyZxIp z$N3kfd<^*cX|Y6Jq`XsWXmho~`g*-cC}UXpFrAxqDAX?WOV6gee6r2e8%jZCxZD~9 zqC7eBK1!q-u&F2gU;bCnm1o5F3Scc;$y6KAfu!Ia^@V5n3^r4UEg${p0!M&i79^79 zftecwS~ybO0^gmJ?At@R2kJ3X9EFi$vJoX?O>ek6!y=r&Z^f7KgrtN{7`A zyAWCy>KV$dAJSTBn?U~;6Dx-ClZU`uCS`Ww1aV>`Y-}?0QRz8>>4=IGKv)XV>EFbE zr3q2uHtu}Cm?EAEj@BI){3A*N&Rb)(8u4NUtoL~E&_Bpjy{ML}$xHE9&dCh>I@~{N zcun12oWQLh%>S8%%(HMbZ+N1%#0r%@aWMVJ3!{fQ)a+?qHnUq*?R(&8!i|TkWjKBB zGEmcBN=vnYT0+~PhlM(Y(uGwHOBv>dMuwzNeSMXdM!QHYa1{izG$_(b>1ePBy-5Vl 
zYNOE?p7r0LmpO|z=`?;1Ct$!Qk*A8_l>bm@sPVIb!xuxNutS-P57uzC6E~|T)x;Jx zg6Q5uji*M!fUjo%R#)^UiI})p?muC(MPqj=$jer%g_Y782v=Q_&dMS_#HIHrZ4~}r zp2|e_U1rWS%UKt#O?Fl1gfqaahl}V3INhak3M%F-+8(W>UNq!zuU=DEBnx{KY7ttl zFVfzr-_R1Y!=orI+T_z{*j(OH1NL^!{GTRf3s3Y0AE7AJw9lCq+Q}JAz*8+3XF(=1 zfbFJ1r#u?$@SW0xs;h?Dk;-;DIcq-eEj^vhaN>GM?vJu50s88_zKx<~3E25k`zPq- zQ+mZroWN+hj@48a&tpPH4^X5ebGP=Z$@;VU4V`_7) zp`J2yAe17kMp&M(x1s7Ff8lyE-c<&z8Q4o>C70X3aO zqd8i;7Wm~|fWJxTHFpO)BkgNe7psKz(X7L*$^+Ln+Ze{%l0jTf6D_)+qF82bGkx=x zb<57kw8H@Jmmh*Vj)Y@rpbS_4MyDe5FM5?wqOhV=6(Vd;C|9VTKAyVbD{*Th)zSia zlQ@W;r&O?lryAmSf&sY0*BMmL->C=_)2AiI_kBK!+2KU>+jPDPN{~--QKb`E_zFm7 zXX>NLMD=0nEKXn<)q=C?qMSmtG*5g7<(P_dNe;galS4P0^WbE$UC1%Ikg{fc@QOop zAnA>8BH=mSRc>>!c?zXLTo{~__DlPWGtWDU-hT-ZIKSK(Eyql{o$1e^;t3E%m2&vV;PqVsjY``VyIu~1`PL?h#In=JVKXt48QXqo^p%-jh# zwbRzVVb!2wA7Iurzf$qNHEJ1IjgdxeFr_<2dUL$_(OhWmH@8?<;pOu?8{u)D@Kj~t zt5ea%O#$VepbgSX&?VmEZY2tf3{?xw(U)r_wd=&=XP~qf#NJ`8HZOFu>tx`z7i5(=P1@|7`S0$9@-XFJwPD}%L$Y}=a1{uGR?7SW;yebyVJJu$yH z%+7T4h53W(_JqB~?ars1<9~j_6XaoVPvg}ET28$fxO3gG;bHy4mW4chk$!`EDWCR+ z`<+Z3uCxaItHyNTMNZsB&Rlvlt!Z#L{mwI8^kwS6Xt3;~=uV5WuWBpo_)2aTvtlrj z<#{fjis|kD;(H;YMi<^#PcrHI$2G4uNN(-Vsm#XPD zh)Xe;`GQ7G>cN+W0F6FuHZvnl*@|!HaN0UE-17cRChO9pNBs?+542S6p_SG@!V{Qu z_8UUkL(}vgdP03BeeeeLIQ2zZWujaN)cilX!<_h;#l-=m3=uI2yH1y(WeYK7tl>6r z3)C+w-k@!Grn9&Pv_auB6E=A?tZ#WWky?ejRZeYB9o7kL$ve2be9A)H9#)YzvQu4v zg)rm2)V&8<+u2TLPqto?l~#b#o--bU(iTM@vf5ZlPu~#6pg2+gnt9*aVrNH1*3El_ z_w5AKojaK|+Nz{fGsC)_)Mt{1e}(>q*D1vbtN^mfWC=)2PIW5P#XP1X*3e1CgNKUU z5qX(RY!D6i66ZJg<-yA`p|AW2?l+f=brv_gFg637mHWcd?NH)SF;!5r^PY-OFIVPd zXI6b?MRKEL${??WX=uWnfP#baMEJzSc%e;)b6devZ81ktxxXM|Z6^}0pd;B&{;GzK zEE#9>jXB#og5qGCQyR=-Fs`f%=vMbqUzA0ob5h#^wpoP?+@7xjp>_Ij{f8ETVrMIv zrMa>U?U6vGjC%z7{?fdsU-%yfyd{%Z_>Rf&Xf(WY*(8yhD7c!ObxTO>IXQ@)v5xWw z%zsYwfMMLNOlo2(mRE4+x2UEng9JB|Kj0EG4ez83I5oV(`*A*eK|*JSoxq+*g|W>X zY-TW1(vJ)?uEfT5Bh1Wis_>;n!PKf-HNn-(I?HfVP?-;IiHf{LF>ZE=Mhiki41 z-F*@?dLPhnr$HaRp02nd>Va71qcat5Hk81vP&zp%!ux;$)~DZX!zuK6syJ$hsQN|8 z4rZSijcR|CGb`9rvjR_%=BTyvFco32ovDsH5Vx@r_9ZJjyyA4Q%o@CzxiB9Oi1T-d zgBj`lv%@z|GDB8ZJH0c}iK53Z$c~<#-0iY*dOX)QQsbu7kL&G1%|kOnYrvPj!C%GG zS8Gi`7=BlmqK2s=k7uIvGmbOkQMZ(4lHvh#{;@8bm(YTbM)O@8eq<}!uw)<{t;kzX z@v`24_H-C^>_!;4H&hxwVEjy!s>x6{hUf^I)4er@GbqFq&?m6CU4C|dns?d#?u0u# zVR#nOlh~ZW_C#{cylnhq95y~12~5u@18Z8?+-N>E|F$fiE zPl@$EVT*F(VK^SOx{5A3oRhc+jHv;NvNAB^eUv?57f;YCB~(8v>Bz*fy-764XHMlb z6~<=fL2BZC5jPmhUSb@>+&0czdkQ#vJ?h*gaEpVf{$?7}!3NJ7iRpxUnPaKlH&ZuN zu$tSCK`*YjMVY;cjTx0hUhZ^PrJ?#s{Zp%{pMyOZ18?;MB;y`zNE_JoeMI92{9f1L zIos_F_8Ok6fOU!d)C=@^ zBe8HlJ^v@85V5@u|IZMr+(Bq60(+g)nu)voxM!ZD)9NXA;5KhmCu_Okw+HjSCWJ~Nu`$YH!I%63q^li{H+vsn0pj&QhE;3VE#XxUEPAT`icL>j=9I&~O zxR`cAwev`AtmV@~q1vG~q2-}`p{e>RUDDfVHOX3)$j0N)tDI+2pc$Tvdrpa&j5cXG~fmL~;-e`qa zn2W4%lam4x7ct#g6_ zzti=X2|dwU>P7U_`V!5gCy$}Gwd9)eH1UuapBipwH`6V{c}KB`Q+v>vKJ4{K2h+2Q zX^`s@6Lf5OSR&4gWK2-pV;f!;HpKNu;WU(*<~aI>?L65NWeu!|izcdwe1gy6PiAul zGF4c{Ux!~tw9~_RXZNsGo-32(fdRHRtD8@V7{7>zFNubwsp~77P0jD-D=QLK_$JEQ zz{&5%sr+rVbh zf<+~wH<<#D)y`gHYtBe_7G7-W(AGX@{;3(->(@%8xq4cCtm1*75W>hM>T)|&1z1#T!`I5#%lvh(Qn%JD|pqP29{O*fMG(UizIfjT3M z6FQnGzmeOOmFzVSj8I}T@Lk!WhRUS$Bl7hlZY){mk2{EuRJbea4K~ zF7n}8^a&NcA*kjCI+>mE_HApr)zrF(27d;T;$IM`-01Jia{_Bp7yfQOHpg2XY}xt4 zY2Y65I#8|dVKyfhUMCmi*!T5FjnL}p9rWhHUuoR~N49azJB zNjiwGRxV|Pu>W76>|6Z^%uUCZeub?U-{V4qLquOQWps4Iq1 z;Z9bjD6=_>Rg^Tu!pR_nH;A5}!5{b1l`h9)rkD3O8ijREF=w%z%HD4^CUWfM&CEh$ z@eil5ifI}@jne#g4znY=_3HKlTX#yjNxVl;8p zC+WHMSK4Q7JKWC}n1HwJyBta-Ur-!l3SksaRRFER4R#S7;mIa40~n5axGgvAZ?bYW zX0UppNBw~lV5IzvH>NTr@;lEmh-X?t^?R1gyapuWBhm18-dBHSGg~sr*qvFsRDL@g zbzV3#oLp23)tA|Be@g$L-Q!{j0;el3ug+RO_6 
z#1-QKlXCCS`6NTfG>}QKN%TC&(QOum``*gto~G!JkIU!KjF$kd9;htlu3knjxs}W{ zjeMO5C2?o@UwBYS^x%F~q&Lm%kM>Rz=Vvc+XEUEsR+q0WxQmS&vgGMwCwsq&fyHbJWq82&0rT0 zjTIoUH~%7o!Sl=LN}%HVsi@czt>E~b>!m3rfEw%8=XnH)wN|Jmfs zi!6r)x=&@D2)%WCrub(n%lMo&!byfJznHNoi}zMHFrYuAXJ`tqGd(`m+ekcI<^1N% zWp-kRHHve1kZL0bxb-h0{a3P=PQ_b`XT59gu!`CP?82~hY^?Sl_(SoG4+nca1NQ1G zH&Kp0(Zb1F)3{|F_5Au_G`Lk!6`ueNbJ&Z!nNRjK%DG(P5mOywsc%Oz1DX=v`#F`% zVAMa8c;54Hn>rD(6|ws|+MoXPRkPS6E-P`^aaxi5+=*K@oLJZ%wD=*lemT_2Z8-;x zsHYpVcd3HE03JL)vvA?g20Jml$)9vz)6LGP*st+aTaEMRX1>B7-v!w?VotOY*lq1% z^ph>vmQj)%_$8RjaWoVyhF);zQz%XLp>4_pX1K)4X-A-AU*}HnTEXG$W#+OYj?8&s-aD$Yc2P^OPv#63AS37J zLm#C5sWsLX!55}i7bzc@`#Z)INp0SR592+LZe%iDNh5ArUFN`#!>T^~Ka%bP?&k9S z1Nb@T8QCK%B%wlt1}RbY$V`h8^^<6)6qU#>Qi+B=O3O+KQ9`m;*|KFNBJn)W`F_v& zzt8i3Jul-~=X~$`zV2&$uFrK{?EK|;B2E3=>#9wD5-Z=K67C+A2@Ca(*_&Gl6UJ_U z&{Rvi*3{bRssl@tf~V>Edm|wmze5GtF#S$C2ixK`}U z`Ta}B`M=3aTe3D}=gAq9vpK#gSQGxNqen9NXJn&^5%=n=wFr-8Y(_hJ@afDOeO#A0 z3a{=_RXf9BM(b389i%rl^L9GmSd_tAT5Kh6So+=1QEMi-Vt>H{l?DBI}$e7Cxn*2!&&Dv9>koyCLq zv83;JS`D+$sPRkBW!<*GTX@x3&3A5ZU%Cb|Ff%JIZ?`z7N>D!hGi=886y8ot1@=;7O%;unQ=b-cKor*=}qJTy5M7shcH!A-TpToNopR)a$>-Bwey2q$iipx%xz-ek?Wtq5@L-db6 zjk$3x-b`K{Gs@uW|AdWpS8z1mLKS1_oIYZdZCUlRHo%{HV@bb}n+@710$X`0AAGmJ zYQ}qWK35;NRCkzLbff=6mxu0d*HAf)T@c;|na|OM4`(*cESWhxqZv$Nx0=$Xu)fn| zW*+-FP6XT>Yw?6Qzi>3E*Y2GW~dYr31w%E=p)2%|Dn?alMc2UA%Y8n#e3fQ-szhT> zr8RkMrQgbB*3(B`{}XFo8{Ot7W=4M(Q+~qRC?O6ms)qK?SOEy)vrg?%Iihu9vKgwF zhQ!LojzLYEiGD_?DjOoh**o!3cmykLIgGcry3;n@KZcbx&;R^pRPj>!wQZ3-`)9^WG@Cf_Uf-8M|Yh}mfTF=4c z&#SM!k&Svy&f$4~2J>_W@k0J5W0Y4H(hq5wPk3ds@clnXELZnDCP)w3sCB-M3|7kS zmvs&r-{sP|i|Lr{=dn%lUCK`qgG=YLMrQBKPSbty+OVK3Vt>q}SEDo48*Y#nFN05d zb!NBBXEK{*=46z~Jji=$kGnb@dvJ|-Vu9Nc@C)yzw`*Eapo3vM&r+2~r*n#%^u1Ng zmxtWzywaWPcI<_-P{SQoRY0$Srp~K{Q)@2XZ|lq+i#>z^TGLtnqSmRn4%0V4g4(IV zIthhZ7F-#$j(-cUnaO|ZCDSA7vj|KNuMGwnhh&@&b?{^TkcI~-)AL_ zMIiU+tMamy-7n5xE^>^LrDHIyxVr7sbo5_v{P$Z`e)Af((}%mG>!SZ!XS(X9M%MMP zxTHetyiO2PX`%8sqz|iu9f=WE1=slB;Qru)_zCgkI#}pOwEH`B#pa7kFYdp%!ryyx zPi^|853|Z=kI()$r;Coc*Q@0IMOJjYxpR5pCX;k*-=5Km2mi6m!WEep)tQ#WGx5jG6J56a02EzN0`a~S7j z1Wxh$vO%XWuDkf##S)h`seq|)sTO@dFRNVkj_kKpBGlFcudG_a$Dp7MNnrl;)$&1M z#>2G5lbL^I9K}7UA!AZ9<9%71>s3M2PkTd$p^o&zH4y3B)E|tsrlEL`PhnuZ;#{sK zPaWa)wXLcWo=;u3s0?QE^*DO7-LXmV!~OW~k=W%i^nCx@#}l6K?0ZLhk>NlU!R>lg z-y(ar);#o4Z1Gga{eNP~+p@pRI*Vaf{nCkxi!YwIcKlASr{HIjEgky3= z$Njt69h~mOdzqy30#5lT~Ly?fWN4S*`HM+YX&R)=f&^T^z?ULXD$xA z7{k`BOa4Y@kI%`9=Lt`SUz!f|0`H=UO7w+kpWqO7%2=DxkiWDgW4hX&7qMo4b58lw zzfEhw8-2$!9-z_h;JfyBTD{$*UoZtudcMnOw@xZ(SE<;3lMbju`s&JupTP^dD2F^7 z0?^+c470W+Fped($2lhzC5K0y;1sf)4~w7~RO=(z=sJn7!bEU$Fh-5~4LMt|(>lon zrRumNmp;GvyQ=3U7k6H)sB(DLrHxsiXJ=Gb{?W7O9_jf0+rvp7a2!U7uqISi|OcoQGRE|9$G zZu~IMbdWCV!S3|qFZ7Nrz$V_x$J@x0ZRy;uahrOuD;G?ov)sV z7PRWCc!)7r@m%LvQ;c)J%Arlk+f*-o6J8AJ>(>2wPUV~_*>U)L-K_n1PI)d}edz+t z-V8%Brc&W%dg2(aUB7sCFhi_gLzkwqV&b|`yf^U*YGZ2Mp3z9AID_8%F8vYN(4y&! 
z)%CW8S(X(!exSlB}JSdtkRS zFTJb2;_^$yFFgu@e_a0V#HEVr=@w({35R7_7MG>bOv*)7RnWVE=O34XS6px z3kUOgY>+zX--%nh^8V_|O;wOrNKbnb&*gdX?eqMDkE|jcZuuimZmF}{3o#hV`%TjE zGuWnyx-HBwiF2HDXvObn&6Ao@ z-Ljp;jp1__O-oCQlfhlk+LN)0klF9(iYl-Oy^HDTKJ?0VT75HZ@S(M(f9Q5PK8vl-YRBntllCs=%KSx7yWyB zxml;P=F1P?EsxL*R{bT8d!Y8YarWQYUtl@4(b4C@aD%?9wW0S%&7dkn(^lf+JPi$M zDrfz&C%s3u<~`Y{+UlI&64yoCp9|`Bra@)nB(03vi~0V(C9RPAc3R|~>N8TEp8Sa& zE=HeJA{sS>dw~oiw-zPv#Mm*%D$VQ^kvQkXlIk)W<9i@$NQ_J z|H#dFggr$;d2u2t>HE}BR_W`|N6MK5Lv8@hyqFjF(?ZFPW5e>J5GA!&=|UUu?p=y;fbyT%K2B z4Ay&f-n&*;m64`NP6|telXb%SMvT8Fr;}Xd{OoSf_aJMvE<8OY`MtovssuJP{kg z?|R6s>cg(Jb6(qM$;aiv2dgvu+|Bw&U!S<134aA$b+w%ddCwD{m~$iaeQWkBqQ`RC z*;#e1@Nwum&AQK8=}2hnSn;mA%O8FgcG2?%}uoo-F(E1_Zje`}LY9@5CC_an?6D zyCQDe<>tC!wt3ArU}kga!6rWclNUHmuBZ$)b17`n4mfUa>Ad_4pM5ns+^7%shMZ4D z@b}Uk_4&E|v){}9*qTOL(Y)-#*~M}i<*dS;X&L`5{!x$&+J)bRr^EXaZTU`V^n5wC zvKgQC3x4O%Jhkd}x4XM=(PWx3JiGe*qSLV$ZE?N)Mm_6#B<-!V9clmRow65_zDYi~ zCuZFXwB}!s;WhFEJ)tePuwxCZEMQ0K;3#yY1BdzE)7E=aEGyRvjDTDG2am~T+EN3Y z`hogM>@eLVE9j5Q)h%8;zL^exHK(3h=@r?7d<@QhN49H2c2;&gyH-xuobUAl>lr^1 zKN~z2K4$`NYtkH;WPcUhU<_=fWHf`%)`{($BKEmLZ1ff{_DOd17y5F6)4Iw{>H^d2 zp3AyDlXk26(#O*4!sXw#+Gm|udA9Cn)%qXMmxC~L-__aVHU88ja^1^kJ6OWh%BR9X zuZ1%0U}G9VF8Zlx`%=~6CbJ`c{Ki7{NDIo@e1)vPG(F_E4FN5_C%=J z=Ij+RKQ$rqkLc!hL#Llcmz_kg73YD zZ~Batz3LY!))MVUMt6WkKD^B_{B+Vw`USH%o*tbf&X9dXMB>? z^@=xQqf5oV&v`cI0rM~NLN=OPXCD>cFXenGSCSTQ9bX!+7d#hy9NcAI-tzG3#59bN zAv#aa&{yV4xz2&|jWwY#5z_gJEPPMc*s5q(JAY6|nEzsJtm`oxvmY_Cnya{apRa!b z>vj%X{o2`487jw5gmg{fvPvY(HP>$<^(IuyWUQM-f?#yYO^LWm{oB{gq zC38l^zlm>%4+&Q5Lt7|p6)rHfqC;XgAGilL$+N1J-qV?Qhq~Hv>R*;%>*t3&%kS|a z{))aD`%mrZ2Qp&c}kyrt`O@Li27)*FBKRQSH>W4{NRgI`Th zd4MmnD?Xkjs2R^E!yM$~jTemwzJ+52wqjGY4*oIvy0vqusc0n_H(Qz5_US(OZ_GwS%=1$*}nQKmm zG-tW%LO@nItIw_NEf(NW$XN}VvJStb8B6yNE!@&w%NN^^gSehgnZkRnF~@7Bn)vD} zB)-$Dc_c5Qu8DkKsHtkN{&TUul?_$EUkXAU9!Kduvw-y7!#dm-eCD+NgQ}&O*fBm> z9()!20OuWKCf~o|iEwkc*q@|T>V9W(@<5$itJ}juUcb9Zmi4{sx4I6_WPK*MnTJH$ zwZ&w;Y1J>uZ8P3`OCOKWJcC3iBl(7(i14QP^C91N4Q;r|&K#ocm-DtZ@Uk|scz>~Y z+vuw?(9{BQGL_lwt1&&AL@&p}?Giml!?o7ypp|UXQW?XuCM2~nDYYJsKt7$JUc`X8 z(Jft{XsvH#9?YDCuOGl-%5Q2!6`dr%#;sdKKh4$M>LI&SCQ&?5mPGW$tNw)jTauWE zeSA=_m5>kJ&MS^kl{-NGzqda<#Txy6^t2yO`TB?UeV4k1JnF=o@!MZh(LYPhC@tZZZkupwIqQula#G$3pVBI}_(H5U*4xUBc&!R04HR z-0Ma^PG`NQbMuQTo|@|Fb-$XYch$7+SJCh@*8fbaPSrD<(cAA9T$}P(u>WDe9?&p>e z;Uj6q=kfUe5(#XkS0-a-d?3$1%uO7oF1HiQdJ|S&IcrGp)s9)$QfL079L8Gf-Qsk2 zV=nK|yY`gto>ME3g=Kpok%rxzm-N)f>g(vF`?}XtWU2O8Z9G|=j=m*QPaeLze&J)} zahK~qxIyjDcKM7$`q2h%K_RD>>VdGv^B+{_d|E#0AXd%ak>fHu2h6fj%Ij-~>4znrK`#-Cyo=Fm3BcVM! 
z?Y;W(Ho!-`Q`}fjH{F!JE8#BG@>vVt>xwno#hRW_Eii&U9L4TTqP0H7lGuo~a|&M~ zRBv7ko2RtSvZ?pI9mlex{uK}FN7!2?`!(k=+}|&;^u2xUc^^;t=te>x*A1c%R?Gkz z;dNgh=(C|dM#;?$X4l@%Du}sf*^9tPdaZY?3e%QNEwXs$**KPdX_g9-LwAHHi>e9Ry z3+WineGF6XN}UhNLw54xzLb!aFKg;v89Q52wdoad*;VA}%jjisxgD?MdFp$bnm)hA z^Ih*WTH2R;eKnOGzR8+9@CrKne{b_+AEpr>a%%1D!^5gyTKb7*cJU5X@3qKHY41_c zI~MR9DH*=Z?23O#@-dQpz%BmUvmVuxW{>r5u+m?BWj_i3TOXm*Zu$XdblfSOc1jWZ zlpjK$E?=1!s*_>?UL>RDmn3s?znjN*3+kvAr6G^|%0V~gsQ1gnLrCa!k>&~hR=b%4 z`M9Y6aVf7@S_iCJ&f!XuW&*DN-Qb2_L*mQZiwgc<)vh$QcXj=#;Z@vE2PXjUm3vUhVPB zTX1=P@!1+Dz87!vi2wazw^mr^&t!Os?=L0OTS@hP|Nqn7_|I$q<2@og(!lFxd7lgR zBac%El7&e|X{T161I;dGDH%lXJN;l#%N%9Pww{m2ffw-PkoRaujChP z@%lfL?v=SKP2JE{K34ngR^Qu#qqWWM9QM0^*w0=5x7ELU{d>W?C&_C{!U|YhetpaH z`a6w$r~5m*pbk9=U*D`!Jn?$GfJ8l(6Ny{0}o^3TLDai2bJG^3Q#i?D(^t;*i zu9)9EYgbb@Gr!-yjC4nREyK-7o#5GAsoQT=DP6hW(;oLa$8(>57ajV8bK2x1wSsMS z`iS=mbESK?Jv`y({&KD-ywczL=p=LBv6MAMt%t+!84}(z=3TFVzLa#Y3i^LBCz|Cc zQ?m50XS?LLGu*TSRvB1tY8S5XzfAW%rQt65sZ5_2AOlyD^(*MPYshm^Ukh*rFM0oT zf2U3|f9|`){lzyn89H6Ol7H=iO4Bd8&D}b51Sh-HKRm$`+pSoe0S|XJ_KceBL$PUS|7h zhPzzQtv*b?0(vgBXDMA*z~BFR@?%zT+G`)P@*Vz8z3%0tD0QPwJB`#_*1Xu&sr^am zpd4Q-L{k>`9{PxQpP0`|(wga3T-4L&_xmYbQrLQ`(3y8fp5p7w#VQ(VGUA)$=37#K z!U}7Ws=TCN4x4q%vu=mE+#DMky-#HH0KcV@P6A_c@6tBUm6FVLye?fRN#-6_uCFsV zX(Gsf)<2#e%2dNNhCh(Hb(`@tu8hoO-P7!8%2y~xQ|$G=zmkolGb~{Lj*-A>^hj0K zEVa88>HAyQy|PwPz}j-WN_D%O^gd~RevdsYYB%z;e^vZO#2(~zq9yD^Veg;1nJKBh z%FkTxRZ`Ls^_`SV=JRhQKVQhtM*T){tH0UarFJUf^UM63-!sPjPLAD9-LJi#=@`wi zj~u4%)n;e3on_qY#CBQJ-_CWPpG)22U+hUr!=+9=ht%Yk=RN4P{_wLY2`K0pF5(rY zZc#~VNqDET-X*W+*yh&N_RJ}}dAXJ4^HfF1O(CmGb*nk=-WIe^soQnP%8L7m3ZD3! zwVv^8rM<>cyHdiQ<+qLkrb*=Yn}6d{Y|9pPjZ>65*l#c?r(sz;! zTq3&_$Y0*bV!Kk*%5L&1sZ+{uyKW>!2mNNmdTTkmQaruU#~vTC|2sYJ1wVg^ zMcx4|O;sb@9qnZ&Ud16ihkdy2}bnwVT)=q*`Wx6J)srv+cRHcxUz zWTnVtH(&gT+*|xPk9dZA)ZPu<8L0sUf0aBew}-#-<03l1^}v_j48^`*P2C|V{kL>l zZ65Z|CPR&izN|yl%UE$wMSk@8Dep0W#1wP`PSP+p^Sv`jMfXSvw|kHICQH(+XnPhs3O^zKk#~$~jPrmnhb$FM(B3Gz#J1QIWYU0P_tVlIX&_0l` zo;Yw%>a<;thFPU2>qvXDk{;-x58E5Kf~%max>kys^O*C}oXmU)1F5af@tC@d|4a`U z>ck^-#Gb_cV#<2X>mHq>2E*{i>pT9XxVa7gx<}+xcb(`o z70Ev*yUUce_qyFl+BGD2hS@-w(A^(nb;N=1;{YtyVepix2bERB_B3fG<(b~*>|5zu zGf$j(FC3x0+}9A2*pLUkP&TiOJke}%*%r04-EnY#)C-}l-Wrwc%m^{(l{yE`NZx86 zrjx$;iNBK%>-{s=sVq<20_9DQ4yC7m5EnId&V}5ahzg6F)a$HpYlo|sx!RPz6{^JF zWrJdnf~hzw*VEeXnvMI82~Zs)U6Yws_%>;)E^knr9Cd~woDJ_x+?rg-`c6tdm3TWb z-VN*!nXP8)MK$KFY2L5(*S{}JCi|O&H_C*!`;(*nY~AGjiMiq4L^Lu;7s}>ddrGn= zzQg6FptPs=@$&KfX7eKs=+Ad~)YLm&&(0(oNA}^h=pyKJtGPcLA`ir>r{9xyH=g2) ze&?czHcw&@ZK26Kk*y|rk&Tv@e~PAV?1ryrFM8_;QbUB>R^)%1S&h5GC~I-Ze2@ty zKn+TcNa%kj4=*xJCQNSrI?%m3W}@5{2+Lsh@n#jW z19`W-@V&>%77dEDgbdzG3*UfMH9z@39oN?Co(gthZ8BSMQ=VLs!Z9Fw@kh0 zB~rOkef%?#ld?RAY3W6J5%M*mksBPj*Im5_ORuf?#{@Mv#Z@A=5hOn|`MQqN z570Op>Bp(Eb$o+fC(xa!h3Wg<5x zw>=uf8RM+YbSs{WqIij7N7QBS&+E=VShn9Mag z+96Vplx$;-HsKELg@U|oV#RBA?6+h)2nW9?@teCi-rT@xwBT{l)zLounq2Qzu4m1P zM2bhx^64hi0k0-gJy{w>swUsotKcbmwQqEkz92vFp+1Bb4RBXWnjtdSPTirGalhyq z`<-y3C((A9IyX;pwqL<1BDCBo9#3A`t*0U*V^5ppv^KF7T* zB6-v|?uc}WZI4_?6i@Do+!>9TboVWN{7!DQUdU#SWe-=eal_fvx{(H~*#Y01Da!kq zbvdj?<$jj?IyU!x^0W(%caI$EI;di=$UAh^Y@Wm=c+qAz;eq7#NN<|rIUI(lnRCCW zE3%^`V;c6}Wyv&*3Y9$Ys^;cEK9^ibd)`LFrg}*TNZ$T!cJxzo=)uv^XjXXqWf1vKRwbwr6_ID$Z%i&7Dc+RA`B+KB! 
zDc|oF2-`e%=9b7es*kssleHegG}`x4K1qHYnrvF`Q4(E|=lCy6bUCSBgz3@`vKQpO z(`4S%M%FbGwzCh5ejRy#TW5&DCQEJ#2PIG8X3MWRo(*&=<{7PcY9Z9_8#(u8?xxcx7uaK7o@W(d1 zvrnvTOrl1lYjhhf&|31fie@jar_J5)%sJ*@taJjmILTU=8mnZ3diZVg;>d5DuUo*R z&sKgaN)l5T>VxKy^@Z-&k%{ z_xbd8E570u617idM~!H+$fM-vq}!J22>XOs=bp+Ka>U9=l?0KdKoQOgXe#*pL>R^A0q** zuJ^3WW}lbT7i-Ycd1N`l{#>f8B` z-dxQ^eXr2}hq0_@%NLx62z0k|4MYqT@mwas`=h=#M^6ML zw|t=*xt@0XbsrbWMaAfG8n=!2=_5Pxut+pLGD5|49c-5;Xu`e8k?d~a=tei>0=Z9l zGxbTrvm|6)@=kmE5#9TrUA|T9-GlZpNzlp7q|J9Z_iLT(2fBXDA&rNUtv&5?Ua6c6 z&Z9i8q4p+&_5PpN8%-bI;aR@pTQ}EMR%P^;UWC3iqS7$Q`{a>s=OO-tOqh?8yW9e%Nn zsjSfsb}WaaJm<+~%FgfSq-kazne7LS%M%wG?Jq!t))Y1tU&=C;j=Z zCmYM^{NdJIaEHfO&m(>(!4o^icfW#cG;vceL_SdCID%xIqz{VItf%1`<^5z$-|x)7 zD@0H2kkdWLmnsk~Pg=%^^M}h}Jua%cpJpk{H+q6h$4K0b&i^=HHZAf$w){Rn`-+*K z9m&Fda+D+B<9T^h4?4qF`7qb8Y>y^N>TcRoesi`?oW-5PW^4SGe6Ho8j+Mb}D6ZQk z51NqApP%S0vpt;Vp5hh^mV3OLCO<4zZ6q#QNJD<F^K+z!@E zeu{zfn%Md_HaP>2euD1D-5~?r=;ZF!kPjDP0Du1xv0iQVsV{x}mM2>v(mf$oTtV|x z=EHnRH;?K^tZDJk9 zO+~!Fr5Fx9d~H&SEthD((iWmMj@!YRqN)iZ|J%*RJCW$`2A3j7U$MV8xQ%6@F)ej> z?I%{vVb#0K?e@lJXl!Kzu^!q*|FQDbkdmvd;bi20cp5u!<}avdYXOC+A3Lr-B304y z76j~sJ*y-`UIp~u7~i3=Dwg%W-&m|O#XWD%+CD;-L%Q{LPwUUl; z_vbUec~#<2SlBH|gqNvOoeB>rEdzc7pJ2E@FOrtJJhMt9@Ey|e1&^z|r)$g~EQ8rG z8A~u#XZa~6(Q518!2+~V5wb2?TIIn@csj>1z5m7f?igE%F*`!We+HKDT&&5uI<&uq z$(X7{s2ZE@b^3ax`uJQ8@U52fId4~WTPB+1LC>=Cr|H{q| z_!&=SmDT6yrCHCKvdxc5T7UO&pec;Mg|CHAV@AGf0&N>i-9OEhsvkZXZVUeke+=LD zbB*1DSYl$>4twdtaDS*mh%DV`CqHDJMu@b=k&0b*`LDzLFwyci$#Z~7z@^&YqO zqIlhd@jJ?4>rBIuTaVdT&3wZ;csr?1RF7k~9>FQBi(j`AZ~Pfm!sq?;1NNpVmQQ

    kNsOXFX`Upn22vV%`}5vY6r<(O)FnRL(gI73P3#ilZM5KXVvlV z2xocz61XjW?L{jc3(7g+IpK6{!Mnq{>Q43sS6OF!E6?71Xdg?|^~+)!LP3BvFjbax>QdMz&Ywb2E%!SZM!bxQ?x zmi!oV^`yF`qtPz<%eKe3YoxAyBW~6U7D7o*SOe6`K(=A1VzBppp+2|Uc}>{fNm zr;d0~%YCh)pB{h@Qys$kP;|fB_$*tshd21OXm|ii(}9H;NEfbg&v&>1^GNmkI9{*n z;WauO9?lFqnz+^23IE}_>`$i`O9$Kilhxw z@0Aym^cU>I2ci$U86UGW!+q9@*Em5WILXcI!3P>lBRAzcK1dt9Vaj+}lkI*W@kjO1 zI2#t#^=qfIf5D`#0;cD_fyMS=FdF-7SMYN%J;>`$yoevBaxCl?_70ZC(}P*TGQ7q& z)kZXMC-&pcc1%Rvu}UU|kD?h5B*uzeUL*N7fM02FK2=axuS!qrE(3=Fwc$;7qKHy8bh_|)Ux7BvGi2ojcI@spizYV&C^TOBNhHZB1 zU~n7G;3(4cdiZI0E6XuI@uXhGL)fIkqKA2+h{yQryP<34aQb%R;@2h1ui-DY)A?&9 z_Is9opkLv+*Vhkc7A|!$HC!KJQtwoiw-1kXrOJk+d(siJ^#<(z!Z_9MI`anV)atPd zJ;_LOotm4-ur!UDmBXt(#OruiRP-ryxgRUA9iOWt{JNWI&r|UhM~9`uEA<)c;;wHB zz9H>T>6MX)|82$lgFk}FR^KVOGq^lB5Z{Ek8sM&;2(p5m!J#0(`~OqWos^6W4~A<^ z?W-0}QF$_nH+5B_qF$vR@Te+_S}KScQa$;WiBoI2^~-oKJ7u`;QO7WWeD91+SI>Ko zI*b!KmfVTyKi0}WP_1&mPOR6ae}U_IT(waH6)QJbe@~sccHyzs!Uw&eD!h+<>=fII zJJ?dCad}+lFL_iKBWa?iVf^5CY5k|L5^F+(Ca~mf5(nJ$S#JHI@MY}e&I(4qv5uGV zug_ZHwc(-Qvf%chYm}GLRkE3df#>qQv zm*H9~LTdzXsV5>V@7`C2lAP8}uuEb|_=c&!Z@KFw!=hmYz1Xf*HCj2yaNga*^{Q9S z#NP;>559^IiSLa^g6hHAAk)|91~-IV!`;?BF_;uQ8$`)YefnZoc#!Yh+Ib&H3{75^ z{LyC_GI=Q^r3!rJ7E$#};=?A<&)K@-Ect0Va3h9&yV#G>vaDN6xBaEuZbJ3(nEy+g zs>|Nn>UolG#a;gGsXJsGM}4B1PoL-|`a53sSSMZqV}BJU>?yDNb+lJ56TFU2Sl^1# zJ3v6k@yhbZYmVmoeUZ3Jp6q%S{u|Zjnbus%t?v^QHCuA4x`+3J=Wv(5rs*2$mpvrj zA!rcZWD;PJpmmT4GE5(@6Z8!>@&~>m>BD$P<>>cx71GNwN2lrhx1AqxjlUNs+w%A? zhwgu8QrcxKejCx@rbs%EWh{Q@;MfG({glf8b9!vulXh6<@fO_;^$vH-_v!KVf>pl5 z_qklv*(`n8t2y(l)#yKq*FJ>B@5}>uAX=3*{YPC~7Ov=IH-3c}up~623q7Y7FD=(Y zG&e)P>P@+`C!3PgU2N5pKAHy2oNABoxUM(@<0GxGa*#h>FkY4x%NMo{-q#E42-f%8 z!S})C^xHkbk|2X8e3tCo&AY*XV(-c(A25~mp+vPrDK`E~+NE@406n~(SDuisItc^q z68TGy**n$u&C)3?Z`v4j9xL&?kFerR^jkcNk3T!DoOw2%`r4&fe?4!e#Ae2xOxtZA zQqz(@R4;QHL;oT5S?`K{GWopIVf7<1qL1S{KY&kgIgB-BZQF~2e&*FyOH>ubRA9?~ z3kq6wx1dVUJQx$Cg-=-D!FZc^7@r#a6Bxj;{cRTxMXI~ija8G}tS?C_dYehVzo@g+ zTy;+WsJ9rbZYxJW(DzjPTuLjhf@Gi@K3zYpq>g`i)uR(Uo z8jth)5sxX8XR{n`-dnxTy&~l4;+nF_y<(81Z20wf%ENiE1N4|3XToSVzFMB3Snl)B zKk)_b^e!TKdMe) zB8~PHDgQzZZ!NWV6;wi9V8PGP;bT<_O|6KxD*3I!12n*^!RNkL0V3UBPIoeWT@6;&Nqqh<&uD7$Gz6h1O?I8E zO_}Hz*i%+yhBarZFZhTZyIa-IV0~B?L~m7he5Gp2Z|SqL>S6b(n|zYhn(b8Qig2!0 zJz3D5zFDQ$kF@*(HQI&MC%&e7VJlWdKj(V_7VrTp)&Ms4yZAjn3~H(_mH+XmN{KQS z^G&>-Vk{~HvI{fdWMSM7ODg9 z7sdCdlZ&Mdrir$yY}&70bcZv1OaI8!6z~mcYxPbWq$}6uX+JvsI(+nky!fpq(Qdc$ z`ZV7tHsA+x{Je_T?P{~Pi3Hz)2|NVfh+rC|tC0U7@gO8(b$E&gc76Dperv*C$X zX39@#8Wax3^1(iex5}xS^LNg&cmY0IK{n|=T53o*+Z^P3^qpTAAFC4ay6{bQew^N{ zYhY`uv9nUO-Ce~2^;JV}OZ+c6B$8sKY}FTTE8i?%v^L!R1${EE7Mr}v%iN~sawp&N zA)dzhSU>((B~@+zrS;MCegesz%4Y8sv#nJ5o2o=zkXA@H-VJ)L)mCx(h)DH-SbnrM zcT*?WLX#~p2pv3peMi?r}a-TCFsp69pr};^Z(a))J=4BnCbrJ#M|X` z$-X=1uAnskV{iOj)1MNF%M#s!YxrqLdFEFI>z#0quwSCNy!d5s&EFy|qE#b95*_93 zYGDnu#!dJjvP8x^DiWUwd-x!_PR67j?X@I&o2uP6Mc^wMUX2F(Jgt(>-ZQNB9WvfqJhI4{Zpe*v{uSqxi1mx^fyOoC z2dAb%e3E!9QOJC#B_wkTZM8c-*-fnk&Y+FGg^D#I5(i`)gznVo2-e{CLE#ERsL*gsc7 zk6zS+;R^oXWwdk=_}SR#^{PmoXIXxSt~{*L>rI`ZcJi-Ny-^qFAG1o;-{bVwjcEtf zMDLGfrrlxfNAIC=}$4*UD*`^LV_Uc=8Sr=caH(qF3TkG8_vd z;TtBXUP(Kb2@izB5`E-G5>`8rhIj>o=Hksp>X9g8%_I_deL{naS8>rHq zcJy0ootUWO?iNtL(-(rCkcaPf%CeE5c zOD%@vM-r?z!1_KOlnc8C{d2};|CV!#F77R^ zsuivmeQwo*tB?A)_BwxNmcXd1a(6c|~V>4#kU|kY7klxm+RWFhC zc&r}p`b=6H30`Qe-_p=i#NXrSw;_J7AB3$Q9QHrGk`6)-isRRg<>$6Hzo2mPdENPn zo60^bI2V5|{#U%Ti1*eo87~P1Y8-Zr&&+O_Ga&c~Z|d*(hrxAxry_~B*yjITSI6CGWbydKt=hnGj^!cGaEti|47m-j9)N_ z$M1)Fb`R?3{FOZ?ewR3Er#n47JSz_9$o5oLvwDmA!8ziEB4ULH6XkdxnU`O2CzaU|Bl20CM*eJJDdxV(E`+|Zk8(G|RbH}r&lLlys1I@+y?zDcI* 
zi6jo{e*KYX_(!LnPv?oHzEZ_nXQoB<%DW0q@)arW4JY|l$A;x>^kSB*iyPb}wuUy` zMT)D-*u28uNvLJpNGJa)l6{qiIv5`%;;jl1%+V?Ct6;iVsZKnavovQ?@Nzg`j65yq zV6xASa5JClBeCR8$Y7?qP*;az!Y>jd;r?yZQ=P`FxHlPx4IE0$WRL!W4yHw3flNN< zOs|Ja%*N$AC`RbTyE?|F>d)i5WYS=sSSK}-XQI=f;seR%RXnMx=2Z`}nmrJqN_?k& zwDT`%b%NY9X0f;*vCor4OHzW;)of&%e%IX}lQ1#QWBXZ}YX(N3E%PIfQXXYN!`DXbS> z7xoTcOmtUIRV(>tqLaLACsWDKh3_PK$^8@-F$|Ye4&*}bhx}c^`u!pg^1Mvy1e3$l zV{AReEcWXD4&V%kN*-(gxnmC zUk>M}8?G~FqeR?&g?xp4sG&}9zj*gv{WcDU&$7@9`5V*2X6Bg0#k{-az}}>lrunH> z{KY?<^)!>lHskL8jGLP-=g~|S`g^B4lx6rk@{ztzZ;9Ocu^jWoGM78++2nDc`?pV= zunB_qtXo_xt(4m3CrIubNZ={`|1Z!^vqeNhL<^UZ_Er#}ODeO+v)olh)7P@J^I#lR z<=l^B7_Ku5XO*sbzp<)AgGa1)qWHB=I703DGeI-nMQ2?qFU0e+$VGzg@i6#AmFFKp z0q34CQ8=+PxGlInte&WhKRjCe^B?qb2?j*BWM^^GZ;5eoeo4OgpXB*lG4`i23!P!x z#ju9TM}MNXr(-#9cCuSB0eZ`6&4EYnHsx;~@34!f{+>O0R_vQ0`q>$)OCN2u-bwne z-=xp#56*R*Uh~8G$#>9ZFX|5T1)V%mZS#+MQ{5!P@e@nfQGAkOR_+t^6;Gu4SWFhP zFN8wo*Mn||^*qOd{-tBbmGO$SR^CJ`EDXQe6+WBj6bIJBR9`fUG%yRVfOYHk_437pf{?p(&Bjh4yvqhz4oi<{N+-9v` z^5b{7zwgOJ)Z|@!;1<5*y#9_|q5Ji1qJ-7CIL>18h^nT?#eF}E7gktxC!SU(T6?X0 z#spDhKJ$Be=&yKA{`4hT@M&VuOjf8#;yak!S#iMm;G~Z~#Q{}=j-t6NxB0txnc#UH z_3jI6ur9Z%po@DrS=bH{CSWK1^YJJ<$$Ad+GN1#Mc$qhD? zHEqR%Jm7q*$tljm>uTyb4o6DDRYzh;ROCN?!lVB-8q>k&K`3mhIKXkU5Q{#?5=lHJa5zOP41%M8w;{kO|hbdYbI zDmym{Zc~t__?@+u4^PuqJLA9VTzMuq94`WkyfrahhpHH?_OI~!@F{VBk+6Usi#4qF zy|8IwaH2k~R5Vdr&T;`g|23aw3hd@VETMaRb(veeRj#f*u0)nUhw+Nur=#o38GV7( zcNmA^OFa5FW$Xq+ce0|j<((SC+#hB+f8$S__V?jfBdc8u)#$4y!DDPtme@C<8`%Y( za_w+kwDEh`$vO9iFE z2@u-Cx}2iZE~J|hlF1`Vz8F@AEl@EV!=D-+A){Oo>BoRHQcvhy(W2jt8ei5F_gAnlE|wAys} zr5>>Ag|yaRu~KBY1Z?hO-D&2gEvKiRG4bSz^#AmVDq;r9(p*1dWZF$4-?A{fclChD zfZ2Ty$vDr}zavAQ;#*y3%Fxp$8cz-%3;zoi2mi9UbM=rY35lAaLsu&|`mHdQc$-x& zCO`2s>+`61DKD9r5`GXa;YUr9tt$h~ew*dZ3g1-uFx|YXyWx?AXyuGVL7i$oaqa<5 zM?IP8FR;^6x$;GrHGS|?r^yoj4^G}+rackSdrA)ePP(;= zot=_a1R_7kl)lzFq4Y4%>4N+FlaJBxo92AYw6ssi^Nm(K4QACeS_*6LdK{Yt;**WA zs-#%pRcG~`juJ7rMLOC22bSJG92dN!YtSxv)KAIwouYwW5WMDgp|A6tBfroY#(sq6 z+A2rWR@QDRHo|*}0E#sWF87X(*z+(Jj`7@5Gx}1qfBWHiY{330!)Grc2l)-l)ZT3_ zO!g}I^Df&|RQ9wnJ@q~;V>c8h)EDG@>hWlYiCsehoW( zuWapgrhfJp2~{=;;dMNPT4Kzhkl5|K=Cato)#M0f@QwDuP!Gf1TI#VrR*W!?xAlEc z!5Quh^MqFi*9Dt`8(G`uAyN%=beR*5gxZV?SLo2Vi~RS;PPkXsh0k#}ZgH!B#bwEK z7pnXJC-@2b6Sq3wx}u|bP>w9@gJU>#pIYlf;)LBeIBoC-9>pp+4Rx=_^Bu!$JQFLV z@7&qkXm7DuPdnvY>YGwH$t1>lK3bSXRy_SATzU;u`bjZFT|Hqwk)yrC3D1+gYtDQ5 zjTO8=ruvAv`zCT^630QA&IDV6MG&eR`4WH8SZyK6r-EV5t$)~`7yX(OF6IU|lu=rR zgOEb<2H+qhobj7sOIUN7Eb2^1P$W^Fjr!G%{xVL!eGlqr{(gPmhHbIv8f$+1SL+}ABby;{ISPrRrLpQVU!}Bb%c3rW-Ru`pr5JrfRd>zdUslEaX=c6V_TaStiEU8=UR**% zlj+YW7|jtV>~Qg43t5%1e5oAg`xK4!zub>rB>HB!PPz1o&iNFb*2#pWYOwgXS>dka zH#rW(+ z?DQ)*cHiJqTxY#Cc-3Fx#Q$N{b!1hJvP-2s@dC)jLNmQj>UL9JN4T#bHCLsV7XJ+} zdGiI+9PgvIx{~T^tvLpx`^a>kZla6BVuu0b`bzhB0bWj38I`AKu3dIJJ-J-gur<{A z87G=WBRx(pjSuRO*H@scLu8Q`SlRNR3q*MpZ{%~i)N)q0UKI5|-x(2pO&*%?T|NtI z@G&~@+b)IqX@?@>t`Fs*ufxE5D)9o%wMCstIgH_(QZ-cW@xQpF*U;0ii2gUja}Hsh zPm=YDc-C(GhgDWQpO!ApXY66cfr)K3%>f%|j_MGz9(&T(9n+iXt9h*%e49+{7$Ppd z%CyXP#Yj)+QTB&EGlOt+mWjm*;y=EO#dN?OYn?bqPAi#CKUNN?RWO=w(H<}4dDiB8 zD92;rL7Bi#!2lV-DZx6dm~PfNp5AU{tq=I@b~(Z!@~rhBL*?w>AK`^?ABNRvvK}Mn zy?7&^iY$(h>%4G|4m9>Gd4>Y;vpcX@rpYs`$AoQ1s@Fyf==WzPwJ7i#xs^-M);d&|Rct@I?1b3U*7(X=bzAfM^Q*2{`7@KdL-PDgmLb#X26 zdv${O7B46x@s62xr?4NM4|)Wnf&(IgXWh;AR`UW2bdCjXEaU&XQyvfPxh)(^1}e&g zHqjI4W&J|Skcx9*6(@WKI`X@ZU&;H#aDJE_wuRO`hkLOU|Ed5FERkr0Tl%}&%pY+^ zJ2>Ol>Fi_tt#16wBl3H9^Dg^X?Cw%K|MNr6n4N08tf*!@x0YmltryatGDy<5*?P0=Wzv|)DjkcHM#%L zoyf8eqC~lrpG1xa_}*!#_gWi3}K2@f<7D7BmVVSLiXvV-DO5?9oWq`UdYQ=zSolkHV}>k6cHDDGQp+5JqDp}N9cUltc`gW_Zb z^|7O_fq`7TfbRq5K<82d>$Ix7D 
zobV$wb~OEvc;Zw2_R8vpxYm50-u&^FxGxK3X|hEqEotj{cn{50YK>tzkA!_>0_Qv5 zS4D!0@HeWqPhhLoq(R$x2bmg}d z!2;-w^YJ^+F25R`5#}81U_;9$7Sg-tSf0UhWK(!vX>|8Jx%#>_hED+=**KWUo9^%q z+=oCX&X(8^`JHh!m_ZX+^LH?0F6cXd*ouF5gNM-QKfBHA-Q-&_=O*$_Hqq9p4pN11 z%TmZ!ss~MV9@kJD%yW^3e8O4sde_5MXTmMM#9TW>s;}2ma0*7xA87?3SbfspOn=YB zp9wl2ze01}OJCPDpYozP%S%OzO(1+Lobgah$mU|>pW!^Q=;yeu`EYGhJ$+Z$|00Q* zR`4IhZV+oc%c{-?W%(b?oNqF7QAP%vSd{NIPK=!LtQ)AOnF$9-lSfKbPhQYv zus^TqGg@m4JnlNt!CT_P-*fqE9n)VnxBQ*-cXhQNCRf@c*GApxlrQl#7wCsvidX%m z$oO^G?yb0=llbbVp)2=bkBO}Zcc~mCd_kU;Zl98Y$f25I%7WoUM=L zy%*r()?#(vp}G2#?JwNclj4R)pznp9agNU)#@BjLHt<(5<9@82+eKUZt++YeR+~JG z;zQ44rN5JPSSm04f{1IQ$arBGgil)WD*p9e_Bf8|{d=OSoWutFj(X7P-&v+isLT*} z za=W?Jgh5;d`x$G+#bwvVsTe7NXZ--4;#8U2>qJfC+{9xd;wt>)hal|Z#J)%5d+U+n zkJA^W|KNpTrkW}%y7ad$aGDbtt7d4fIL6M z7EiRw^=$CkaI142&(~hbQf-9iUd5-Lti#(Hh}Sv&{&H}b8qiyd4lt!A)GGt+00?7>dArFgNLDaeN)U5j}t z`N{N;Zt+7Hsd-{+>B8zN+NVQW4q~ESC-?q>==~E~Ic}vr+2U7&Pw}h&49eiAJ>ZVU za5R3$V|m0qp5yCVXs&dqbt78q4;?hyi=P^>Vavi}PIobR-Wh&Fru(|fXPovy7~p55 zdAYdxcec0`@V$L`~2XIy&MMI%Djt#74N1j@>{8VOdXD^JgZ{#?z8% z_1xh_R{Iuz>oXDKb9}D*+~aG~{}d}Ol4&cE_AQ+^7N7GDmiRE1QbTzBCnBf6)D)FS z9=G>(6I&E@g@zu4g)_llt*16JU$ zOoR5Ur}5s%&5`D#vu5Jp6^PD<2E-y0WR~_oCYmI6@~b+!sf(?&D~mhWU0n!yzSXL_ z%DWV_7qjq4oBA_KC(q+5Or5^) zo=uqLl@qII=_00Z4i)p?L#8LVkgk)m5htMb6V%FItJ>0)}HL^cRByXZ1QRrt1OKbxWRw&G@H5UU9EDj8k67L+gb3k*G1Kno%4RNV@G#FxvcwW`LuuK z2U2|}QXRI}^OVvt7M==jh1)e|iNC-+%TG>k4a>2$i`?30Aq04EmobRtNxLVXsS-Ou&ik{c;0br+CsV0W_Ch7%JZ=Pu-@am%GSE5_Yp0B8r?>j@vARIJ+N5WsuMtxX)EW6F2*4OX?4(U29mn>@cH z@4trjEQgu&8p(~w{yvU1`8Y<_3uHLmEv+RcDvJ^D16=cNp4MCZ&S`wLQwYhv?}ZIhfzA@hm_3m{@5)%~g(uKIQ&CDso$^b3-wD zA$hp(B|_?TPV8Pan~ ztkf^2n4>m4ahDzB2a4@4_2|6j_JeW~kjX?fBmGKOccVr#R%eR-9`>F2BE zlpccNZAw3${u_DzSiCeGhS`bV)s5G2TrOr1wDyqv;Q*|ThanyZAPSF@ow+=Y<0?0A z7xpXOCgQ1wxj9_~&@olO5HeWle-aC?V~{qKbabY_vZ zL$zL3A@ZGG^P^RZwdY0u!y-M&o0%pa`$NS;LHGB6u;edzNyW?r7|I$oktcnQ1>RvT z*H}qet6J^z<~Y)KI{N|sf5cu?cjN!$P2UyXAsTKh(n|SOyLgg+I^p-ikIDBRPWoi{ z7`?m!y7Mc26_qnTD%;jsH-S{Y(5~u;^I^{3OqyT8fjkV+dX_)AHJ4A<0gt1z^-h*~ zD42eW`}$4#HZxX#OJ8E9%o1MZo2*jr^mg=Bs(1U-qNLTLhH)Z~hB!aFp@I+MYtB-K zy&Fz@5#PNr|7tLaEEKMHH(w22lMnyM?LE({C@ktfAujro^{pDzaIfDEo)A@BgvuY` zXPyeGk$^Vhq^D@*rR>mmqQWiWqxWDPzw@?^!m}ppjhyO^ygP9Uz7xf3DG!Z5Ap1}U z-(hZKdnAu6(Em?OD278h3?4O4-ZQU=;SrJE8*FPqH?#$Wc9wOnO<(LDkKtFQ(D(;< z(WUV17O~Wo(~j~)r?5s3V4+2DS-Oh~*Fb46>J`u!vZE7M;;5U}Ok6OHKB^Pca>o6F z*=m^zi;XLVhgqY^koXc-(a27oo?}PAGxkbvp0nPJppt7lO(UP#<(?qC~L8w zt~zEu&Nn>C0?9KpRS`F+P;$Sl`6AZ$1{l+LJp-;qV&W2ie^Ap;%+TA7m>Q;&%@ zTEM1X2ikuLzWqZg^?AsTX0p3!us!>+%GIr!c%II&2u~E}_6>eGiWA(PXtE|O>00>V z1N{Ck-bWm~b0cu93dGanuvZ(f#|8-H8*Z(inI3WIZ^#Zddz9?x1zfju;G@a-stO*y z8x+0_T<~jxN{w$Q6^AX<{=bTau+CFKDn5hgv><*MO%!pQ?B@qH|Ca3NDq_iFtT!~7 zYY6u?gjTxr_U&i2rKqHxf+NwIN{tK#zXZlza(ds}(6u^Ug<_=1{v0jj0f*xQ!ivJtV2-f#G#R@j=8 zRFy=k`8~*fL%!%9)+!a1m03iR*O~9U6|1$4)s|2)f#?XzIsEeo@E=V+%w1?h)#3&3 zZZI}M6_0^={|2)Yn7Eo5=1xa!Qh-XrGR|;czIt$``{8?*u&48>pY5Wu(uMUlj`hr? 
z`5SwQuD)Q)8ged{5mj{u-#E&>-2!U$i!Z;O7VcvA*xJfuA`B5PAKKeyRECGaWID)N zOQP!7hpyqg#Ph?6tM0<&O%21f2UyM(vO$yT*%)GtiEtb~ad*=zg~2FSv(7d6-E+z{ zFr9zM6+)brBj8*EsD#I(i+34(uF}kJ*$zXs0`>Lfc!0NHvZ6bI)!xR+B!=&L7&K=X z9%nY*W)bVzikF?r`p2^JT2>~!&|oSN^@%q3ThGakUUF+Y^1UsnEX`n@JE$+-#LotZ zu)fgCP?wym0_fx=@`9g4kg2Jibps_Fh?>rKHw~4QS|A$%I??))1KuIa4~ULfjy!H7 z72yw3G1w_%Imt`GTXpVjS|a3$*U<6u};Ci`0iYW13(jl&GP5OQ@LWKy8sIfs4iLX7cBPE8dwztRz3y9Hl6 zf)#Hj*KbGdtfZnT+sG6LV!>0=lO95!D}hVSr|+i}eDdS4-rf)~i*8St?NM~5#062` zOgzMf+_5kac5(_m_C;=IXV%$-wKZdJ*Aq95W#v^ky&gF4=gmKG zvl0?hHRFtLi>*K?*yC!fwE*IaMP=wK*?l&m$E9Su z^Qe7KV24L>f=l6pQz?guE4spLNC@V<1;n{5Q zRl<`nl?pjF9=8P=;t`c z_g$mTR+ck-l$uOpII=Uag7@f)`2;4Ki=1FQ$eIUqeJM9}0=wIjn;ObyC&un;b~rlA z=u3G`uW*c+jT63xzc&#S9;OyC^dn-(#^4~ssF^r+F+MS;QyDAV(y4~}=WTH1dhF{e zEK)N1q937)(*xGVVwjW(@UK@my`fBCBKy&wf41W(LzrB3@YeM(QI27Ue{y4=g0E-C zt2V^{^uG;=-Omiv%a=jYmlIWSxIG!yc+&UNGfw>`{I8brvy(sGNYOQc*UjiJc~o zxJ`820^juvJUt(?d>4V$JVpUTKu@ra^A&WZ57E;dcC`=dEChnlh_f7lhpEp01jVRiWP zSJ9>%faRTq>cDrpPXZ{o45#NeKH4rr>06G1rE!r;=TF!;U%0Ck!D;${OKoOPQxW|t zvFi+DZSl$KTVsj)vBUYPl`IFL%tIFNhVy%fJiaSa`Tk(nCxNs+fII&SJ*@cjq0~jE z{KXQufZBe;(4kN3CrBn|IFsh2ODTMV{TC|bGv!WJZJtgD`Ls!Vx=c> zt9McN z*Ckf>9v_p7NZupsOz9gI8w(AIwMn94!5GX!-cuCrZ%QRAwfl0hETlwAs6sN5zUTz< zGxl{GH=r8*4iE8Rov^`=;5j8h`>`zeTRS=fpI|>*a)Jdo?aR4|-NRwYI&;Ec_}Aw<2D9d)}-$P{880-6zyx}1Cv@-VXClTieu>@SY1!!5_r{DGm zSaN;Gj}7iewv!PoZ5vi;3jeerX0Vv^c9H2~Ma&GW(=norKSp*lubG{_ol3{+esd3# zseJI;Gmz!ZBt}jFQZtb3Jq~Pu3`8i~$!`*X@C}BEk%BwZmX!wK7FENW{tKr&KJ~pd zuxu>OYet^8HJxzPi5}*%#^dz09wo<7$z;;9zRE-lp_m~TU*)mIrC4cd`V=CGBAdfC za)`2S5?f9oJM>DSdE9-8m?EhJrDXDFCK&tw62U#9ax(%OngQP225i%O{7o4vwe{0H zZbq1;=+};C{xlvN4-Lae!QQ4~Zd_+`kU893V(w<&eP$9c*O|mof4Q;4@UY)N;PMhJ zodl0jIlr4xMJxzfzlxQXVoy5~ss0kv<6#zXZqtJ|)nE^oU~?nrL0E%FJuly;W-Y?0 z%E*1Jfsd(*H7d?mX;xa6wHKs9pn$e51h***|Kus>cMEpB5fckPVeJO8(hH!NA>I8F zE}bE8el3vhrS=3OpX9{D7l|GQgC@FWw0VF%EyPMQnttOnYmLjSF~j&pk85tTq?v_D zo2|?l<}UM_nVO7X6TVmlU7bgDU?sk_3w_MHz$Oa9)(_2B$O@YHS166GXi9zSyI7Fw z@+tVdrSKHJSzQ!N{Mqc~d#XL3sl9u_N^7&Siuljju#4Kp)-YRwpBCbtHeto8a)G1JJwvxwI z0;ydD%O*XMZ)moLjk2RD)W;rmp>L=txy(fDQ95|%ojHXEKoGCvSAJ9FDb21G20NWb zOwv>R}Nv6-B+{MksrO0$~b+}ji8Z#eyRITQC_ z%!W}H*&JKh%tn-QhZ|d(Nc55$&M96C*CI7>(oTA(6H$)}F&^WAKrAQjS&A+C!U_H> z+j2>EuRUkD51Hx?;{4I<>`KZlcwEVtaZTet{Pc z!U{*jOUn<#cPsdS4)V2`S!v0g_n?7?sAQC4SCWIP?E_tEhlTQEpZ+!5GvzV6nHsB< z*i3*`E^JmZtFg*L%tvg&zOFV;5j!R#zM6sOdCh7oz$)7VM)4O;XnP|6OSJcmi0BCTVy;*Y`(q-fq3q!-CS10}YAs-mecAVZ<{WcA_Bs+9l^AWE z2E-z($P3jgN|Z(@i_+}3W~&`z)}+7THxjAeV~g4de>uX6$)HHe##nR9#(Up=3_ zEk>sPT|7;HVP|j{gPLIkF|iDax`mvwC_O-Tu$;q*14X$i>maD(Px?7(VfluGmOr9D zqbAYuMevv!SjS6LYqOERE(ASIM;G9~*sSUA`(u~gxK?)7k!p5WSVoq!PLGn0*gCiEXvp$y!SE{ z`c(9{b|m7RNoC_2e#VDaeL=M^1#5gwca@*7kHm^c$d_i4PjrA$Qxlx65EZV>;2kk! 
z8b_EWIu|@OFW7V(v=+|6KirRhilDP8qz;=E?d31%!X84;wl7wu9jw%%uz!3Yzt`ab zFD4%^PKKVH8~==*JIz~IOXfZf+^sWMV;yqiOhk~CKrl*iEAnzTI^l_1P)XYWivAkD z&2O0AO}ImAS@~09%d%i_GvVF7gK5)%2xtxr_QzC5@=*(FM0eyy_USnpz6TY7nsjs4 z2X*O!-|ohL+e@K&>b=SALK=v5iO}*;|40ds$fTz70;cdi_$%ko0@{dw8b<8g1us>U z_2(uAd4j_5T|6Y6r^J|bsG4ikcki(Jg)onnvHm{fGx@+CLY=@n!N4bC?K<#Po_7!z zcE0Rf1!p`8>tr3Z+mY}QdT=JDQ1e zFEJj`vIqQ5B);;CfB4fI;wkhoBZ=lB$anjr@lcvR=^|7~lAt=7nzerf#XbP1dLBP> zFz>w*-J5lz30MO-B(9&acftF8+^*Ow@7BPZxMpQJlU>EdgMWmyMCcM-i=4fM4f zn0H_L%vOK{+=^9WPQ@9hjn!+5c4KEw!ekhBv*Ca5WMw<>N(bo63jO*t-ToK&_j$gK zpn|lDd%YZ=H56vmKyrv)R6B?A-;Ug;I=opA{gZb=R2H!E!Qe?o@QPPEtL24DKAC4u%Zesei!jhGJtjgU5y@7$%~pqAW_z?LY`7V2f74 z&^ZgU@Qt8j12d38mm`m^&WUJ7=G6|&aUgGG7^@n}8R^IW4W%c30@3wkPWmXY=7vP` zRXFFVnSYy>HM?~2z2VIsM{VvL-0vkY-eH>L6`qpX9=AtAx+rT>~s`w>L^^v zU-&W?etQONX=x&*<{+2dIa@RFsr&g^*RePj9q^&aYlYZ@%JlB^irtyzFNiu;NBZAe^8NFv=1j(_Y-MK8 zLQdCdu)R<|;zzY4VXQWFF`gnVD%_2TphL4$Lua8SH?u8q-Vk9THhKZJdJ)RrTgky! z#-5VR?BZhnZ$1678&P@LjW)zXp@eu(8YET}A|y$yAxspz3K!j+;zih4{}DeG0z0k8 z+g{4&Zsn&pL+Sc7R*z{DaQ|Mj_vb(#-^T9FE_xDnqnzBI)xV`1V-8sUO)O(C5XxQb zW)1$dBkStK??k~}T*TX(#_ycr?{fE@(mll%W&Q{g@st*v+i$ z5-VFtE^JWwD$Ep-+~|m3=6T;!1#3xv)h9BVm8>909Qq&s{*O0yn;Q}n>nT&91#%T0 z!$R0*tFRvpm@AW-X=Le`2yu<6521-s7MAW0svb#rp50iy2=;m=&yx#QQC?+{{Yk9;H-8pvFhr4hd_r!De(4iG|p^Ja|>+2J9f8* zy9I2o5EC8NaYD+8-PoD0_{Pt?<3XIgI812}c-v<=af2OxruddO7n-P^h-C$_+!8>a_zVBVOQ6b>KM_5X}zM zu`-LRsktZRC$%R|`xpJWLwx@V@`L?UZVflclmEk-2Qc65I^Q{pc~Am-gwixmU6QVs z+5Bw><^FuU^UyvW80^Ah zeA>T6=ABXcX~_va$$tLJB!l7Xp}<{9FW%+UtmJpbxT4rvxXi?rt9ba4Sg~n5VIFk% z-!O|}o>)-$>YU-0zvT&Ko;;D*ox08)rY_xMj#f(UMqZw`2Uw4JZUM0(^U>Nc6;Bbe;GcUDxvzjP)`y>xjFY5@S%i>fzQ^tO%!G*&{ChF; zD7rCsXB6{(#tRG3b7(HM!Y@@8lL~3wb?i)k_cFa#`N_Sua3T{>oqocbI_LUOVcW`D zD!3)7iboK&b;16!A58KoD(-gwb4p;d9q!pJ)KoTLYd>_;-&O`!! zR%SfPSNn{+OdKidymV ztysv72b=K{Jn+_Nq~;g*N$Z^Rb`R?A_1(wLRJy>1<0s3Jg_h#>^$~5drKZHjWAMbU zcsEJFW?Kr6sra@KUWid{6KkcrQko#FcAh(M=!JcFzT0*+;i>q{9YYl&vy(~aA#Mc0 z9zfrF0&y5;aU#}qEmnCyN+m@>24A|ri9IWGf|j{g?EKCbc!sH*`}Qbz6sUAP;=Prw zA?VUAVVkWo74C$1Ld?cKG{Q1OGt2O(+fqD)GSnLM$NCAkJDW~k^@<1od# zG??jiYn`$79jyIlGN38$7Ac4O##Du((h9e(y@vPH7Ms(RSwx-S7i9u@fx`vveFX)P zEilN|uvck`kR}k>uXKLdtHf1OgixML>J0kcdGJ3?iH~Hrr1M)UuT*x%*%gGVRO7F> zm3gn*u*6MxM#TwQ(LyIVo=_US9$6YFHX#Z*?D*YI(hT>nJz40<-T7=?b4p6>xEI~v zuZTAk`< zbt(%%sia`oQJ_Hggvrt-H#*&Y zB{{Cs6D5U>=y+s=tEo!Ag%NHzx!5&#o~63mrBPU|x9)Yju251gDl~`rcb2#-KC#XR z=ac(GEJAiW0gdS>{K|&d?7oLH(LE#mf(zjh^121VVr#;yTPB>5Dv23+!>G-;{glx% zDwSqpv7|gpOzV2=@opk%hqyw>01K-$$i*lj+Bs+E6sF5f#JTn&V$6-gPAP}*8=Z^e zPI3@>i;jR2P9k#R2Ou_0i36sIhost02CJ_VAzPy2PIhFWqBBWoBgMGm=|X-3zj+_t zwy!hS{weK|4XeF*P@?0(EkfOEuJ}btA*>DcP1--)&LCxJ>?T%zag%mINUe`^7E`_J z>t?ko*fXVZN+I`&xzF7p-Q_1V!ZJ2@50a1Ew`*HDTL=2#6y_hksU=Pl`p@YD`UBZakgN~@n(OD!e@%x6|3 z;gnoYy=rAJCb;S4q-q;CDmc~2t7MctQY|Zv*+W?%=XSb)i!YXnD$DKu)>iU`CFBz} z%+njrXrZ`rK)6U>?^^eVl1M&d-n1{^xz4E`F}DHo>2@{SDj$?mRylK@AZuTRwq|Pg zs#peVG1mQpYJD5>!U#O>Pbq~s%xvoX6|$n`liEsXRuI}NnUyK_YNMM_SsCueIIpng zOWftoM=?zHxbLmGc0qNcHqr0|$4U9r-eQ=Q-u^9QR%QsF%*sw2v6(biOl*7Hi9&AY zp>k53pyzi4se@uTN6d~=LVS5!;)H|BDEE*Vnj_Lh^pRz3)v*o?(9?nm27QvF;aOODe2sV5eo&TM{~6 zPI_^-ph&Bo>6R`gktetM|>D26QPIE4~{$ufxGvC@F%|jhIxlq;GFSe5x+8@jq zF}X6stq)f5&Pjlpp5e5&d%JriX8D-~;Sm(EK01CWQVg>#=b`im?TNh98mfsytORZ^ zC5?E+9_#cK-l5GKhZ_44F`-mh{AraiC(6;v2|Cxty4l5XP6E-D)7WVYW?QL_>$R(k zErj`YQg^-&jF6l{dvICHzT^HQXRy;a{gtiGYx4k^{~zJ9w1BE~rBJ0%d@Owe zUyF89OC#kb&OECqx90&-S6#eFF?Wp8Pk3s+bS zi#wo#eva^hX)ymNs%aEkTqIJNwN>Zc^pGkl4C`bxbG5XO6~mNfX=KCqb(6voNBdxby+uT2A3QdSD~42y8sttEve)dHyGco zs!Ehp2Mw5)(qnOi)tPgjl{_#)eBpkv=LuuP^;8Nx#Kmz%dib4tZe@9?;G>r&To@oP 
zls3X)e=RJP?+YjFRb;+pi7;1)JKQeRf%gkxa(qFzX1EjNhTPmA);)Qzbk6$YY*z+~ zpUuSXKT2LsM|&%;I85H)46zovbx=FW;6^xT4-$D)b~lNig`OZ?`QTuWcDfSx-{G8{ zAty84QO-kmfsjYsE6%LzqIGohXNQYl*v2%1`u|h;C;%=k{xS z`7x)M-2qSj)$QUO3*|OL9ylv<$l@XV@*_i@U93_w6 z#OPa`WP&FB5LSS#IiQnMsIqy{pw3B+aWa~@<>>{v1=5m~tR{+lI0MmY`A~E$yae&O z0RLt*%2H^WId?(zx{-;_6tbZYGZgaZTxQH==IJ*|Qw4#iTR_ek!(7^#WKECoG_9lw zPCl!;6Ag0Q)D1c(#nD**8$vm<${$P&Ez52DL_~^CGIJx^f)M-y&xm36?h$?!bk@7) zn5Mc+NH5*N|KtFjyyzr#MhSJrD^5dq3j4L4$oD_^(@DrAsipA@E5YEqqI&$wJt7V# zYQO0I1>b5cbVJ9fFuf4JK~k3qIb=ob#a#^p{dx}H<-I#gtVdt{C*Il<_o>rbI4vC& zP|b0|IX^Rnc}`q+1XW@SHPYV=gH`IAC-GNW&N z(mMQk4*oQPD0RDgo88Xpyd}mx@0J4zdk5wi!gS(r&$f$`dBOtB(^iS1BHaaOmn{(D z^Q(W!nR<~`uXXafWnf;V5Q;fIp^(&roWB^8>X)F1WRQpaOCH&kwVV+Kx_z7ldbNb_R2Fq~1hM-qzaqcej)WxaiXMs(7g{OXI8$`1a;3Na6hcHWyDRTMHS9tC^ zoUgI;Jl1g96M0k?zBr9|dV|if2&POv1YbNv_8Cw7O9niSSopO25w1xt6dKOpf6rqB z;=;>a3GXHj`QsBX#7nF@9rL%Fiz%@;ld!e(nD;&0xk9YyCxcz*esS)A+x>7C3f0Km z>w#@2Cto`2PRHVZ6ec;JtrtRe`7+a#<2XBn{bagWZ>vyf2wD=Y)UekA;0&u4*T_aNJTkJWDi+n^<=oJ$_`)O`<* z+Jt9}K~=0gH|~s+2E@4-SdHIav+fLNg+hvE;n?DB8If1Ny$2tS1Vr=___BFF0coVJNuzB+$9VAUrKWhG$?8K4Ixm z0wzb?#`?Z+!m|r~L1!MZH(5aO_TweXlF_cftHi*4n9nI)1gh7E4E`JV<#tZQPR{>Q z(6IhI>1Xb0a_R+J$@F)Eg&hSMPfVoRkgkmjU`LCE!s0|ST69b~hi`cgQ}7~dLHZNo zOKJ&Au?-VBM;&-mH9^degA>o?E{0}{`?zT%K^F`s4_Vh1w~8|q3t5DD>gSvR-FXybY$-RRzRy;be z!|4dF3|Dd*6|fmp%8sEZVS=xpaVEo2{Lfj#%^Seq9blIuCr-q~#CF7S`qk7+0wBv0Vs9E9V7O;r-M(H?yvKXbT+0>!|Q^qNxQJr+?pq z#a)gGXdA(B64Aw5OgcbU(t6bS@=^!=M#b<87{M)meH^=&4E3;C;uE}KO;F+ZRG5m8 zH`E7hRN$v|MGc|^*w{y&{jog;mQ`LfVh_Wz{7h%u94g@!om82K@e88MFo~|vD@;ua z^*+MfV^?=m8*fBcRXRF(_JKsCW)4zP*4l)6(rIv~XMBaYm1~61gsQ}#IG;pOIwUUO zIlr^kL_GI>*!#bwj`V|Vhwm@~w&7FQ4y|CkM8V0riFU|GIPT@;_ozU(g=@MFe#a|% z6gtAs>W{KUsQ+Xd_)S0NuO4SQ%PKIWN$^T8xMg7>JmNe*a-PyBGZCEaAra>?y62|a zHk^&L))zA_Y}h!lv)!W2nK13%ngifnmV@V#6qZs3^n@zV;kuJre`9+&Z)B1!I4|f8 z6zHaE72C&LhR&jX*sGSTxDu%39O`jvsbc>?*=h*AVy)P>#%KX~SE{rnl0VOXp#MLO#Ig^6uH(haH6b<+Z-$RZ~h^`buP z@J?z0gF%VE2n}E%tfOw-iq5w8Vk!3Yx|A3WbVItu#=&3tmtO6B@MK5OZ(=J3iVL5V zWNIljjT%tmsK4oVy#&{?9lW08uzd2<$vlRsK;iNodhH6(rG9~4o(^pjN~I>FgI0mrg2JojN#z!zfg?%)ygfLl-G zX6+WMz(g4n`*nb|o~7d;4~(m_D6n;v;7?hgB!~18>SR=oKvwPk0{M3x}E25YiXEPkf$|eXax6)(C9$C!I1| z(EaF7f8a0}!YApHJCC+x5cY3+<_i3Ry}t#PY<+flkMU90^=KSW%y1sSW4VaMO9>k$4mI4wR3K;KV*}jPDDk1x z7smd4`f}sbG4zm=+!m#_@7%_%@N%A_rFuomfEK|!IR)KuXB40ETgk1~RU4}_)jF!H zcvK%;#B9nD*t11o;@yPPcNGTsF*r<{xzXKWf|rBoaTPlemoCtH)K6Lx?T)1Dpbl)( ztFY*%)AyKxu8qmeK=D{Q)A=G{;FU7-z!P3$R5Q{Uzw~~(r03A{>#u@U^h7XfFB{K{ zb!IC#OFp{SPk?!L;q-KK&eM}$(z$DohX=Kdb3VywjL*G_m98gr2KPqaRhobe8VUcd zx$+urY!S}qMS8_{%NJO)ARpm*hhe+s!B%Sv_wq8z1BJlV)~K7*=ITG{7`3N*89ruL zn8WLt4-}4`$Ode7SG3>f!$Ula?OOvUJO_%yCz+CXjEJ*8F;8|fiQ>*E)Y}HqKlPoC zg-mp!EHlfRAB+lcfK{0Tk#w(T{62i1x3EOlJ>yA;rDL?tYsr1p+Q6gbk z|AcF)(O=~QDG}f>-G27I`5o-QW~@0Xp}e=6!g&7HF_%~rsxTMb9HfGL=7 zj3Vr;U?$;Lsm+7z?izilUP$jAOvTspfEv6KxE|0r-?lC>u^^m&$_NyNP(gyz^cl9L(T7P^Yn;WJ;BCZX2U3Qd9P^mQgvv#Rmb;p$5Dr}BVq zu}JwFF+~FOt6!k3a#2bqw})@|l)X-)oT6KD4pyxVok+c46W3H8!q=-K|7M3DijT!4 z(nqXfK@_C#W4(QF*jn*BTVb#~;a+{FKH0*(3lsD(S}?28`5r?z!(Zco@y>8y3#Two z8Xb)5Mq^{BUQj=}F*`0J1K#|ab(G!7IE?$N`I9L8~DjaeOLW;Lw#4?8DS(znD? 
zhrt^96Hgu?Q;~?v8d4YPDr6ToptDd1Ui54fx`x5zf2h>sUS3iTsYA84+9S2HdR<{6 zr8J#B$2C~o#UghTg^?VwdeHspglq`o@(cal7vwH5ix0{lrPTcH1a`6zebUW{$tn|# zEJEkpOLs^fd98Gc3Qbb64?WIRVLY55`1&+$L=U-lOblnZv}F5CJ>V-)r)H=TFYtaQ4#D zJCPYK^=xK$6ciVMbM%JslA8Ll!Cf2T)Ux|oC*gfRz#nZgbfX-5{nEH*BsU`UV|sGk z8=M^I6YvHG`Dgmy`3D6S1~2Ocj5 zE=IV8CS6BqEXo;q=||b8Txxp_6yEG-RN*d6N=-gE=R+@ zT`LZgYQwZFh4$|^I`Q(*8M1{*XK~@0en#u;A7*bBBF_3m?`0WSSihKb_e?6oT(>w_ z@rPg4F(Z-D)x212zguJ(y~h8QYYBoWl|Ni>>K)Du8;$TRPUJ%lqhP zpUU(2Kog?y80*~Du%jwEznC%D8(Y23XlT^n9(OQ*8#avp%*IyTAAA%@6DaJj>C5O# z?W^dM{F?&Jbj?^|=%xhI_?=aij-B`@Jih~Px+OKD6E`KXa!u+wHua<^khO3$a68Da zl!9t6wW@YQO{59z=r=5Nch!&fz-V=~nnW$6d?p)sPOduzmB{q4eKlc|IF7h%Ja*|I zN;7r9_wI0t&a#tj=^I#!l{?P+SkC>85sHHHpoXiH)spHUBITrVBC!G|ZZk^phK<_48xW?@+Y}FU`4u^q4(@nL zm@D>`ilFWmnx|L)R7dIjf2z`COFvua*5|M&&ro>4hiY;}e~cyNk(kP9o<=XhZS?Nj;RzBbBe3f~I5TNL*Fq}TE=a*3_qa32 z-c3JCHFFrArj4=J2s2ZfahRsL))=QZ*JlUQ2XguI`DC9HQzvFZOhy0nUJ zi?D|mP&NM`^uQOilTRyc)!E7oddu?5$E9;*x2Ncvnj%@Q24w;r6D(Zqo#?M`rVyeRq2Css^}reZ1B-|xjR)s86}Gd$*6OgsM)BFu~WWwWJwkeSA_PrH@)nd#9FFyDBT?QPMwfmVd(JZ7R)>-*H>pbDm3U(b`w_p^{1cLvPt@ z=1M$A(P#*f0ADx>ariG3g(8*eYF_mO{St-bvQk}fHGG@0FvT8lhetC%p|f4wDrnX+ zPU!9RHhNBDg)ztIMt|J{y@?)Q*Zu$bB4alHz52Ih%($3NzTbhzdLARnNMoI~nYhP_ zw+hpxqw-v(k$P8?J&Com%1bhzNW9Y`l)J`AmE~GWU$uyKi>&9R+Co#*wDb$sLof3I z>)ej&Kn8g?k^VS0((Y;%A~!i@=3wqc7AAtdp_8~M9`*`Vf_1{bFc2D{uKF(-RXZgb zb??pe$3BKVUz11@RZ{tiqNw@Q>O`ztsE&7(_RtBp2|G0l6fzxX;~M9PJ=~%r*0`^q z(&y{4@x!RZSQ!v{#L%HG1X!k#`MqUp!qH$(RBd>YL-h&Qk89Z7R@K`T3{3~!yMC|+~BG3VJab;JhDoN$G zWJ>GE6EaHixVL3^i_uDQbvoHpVLEB!N%44(Pr=GW(7T;X%zJB(v__FBBrtmGqxGkH zdn3juXsYHcW4*pQcr7s7AMU#xvzFE7jF}k|?ynPUM5dU*oM9(}E4mLh+DXZlFDa?n z*(lFZPjzjO`mX|e1RqiWpFfN<_MehaT}WT#JS8hJRUhR#y+`+%k&%SB{~$P3f7l)S z-Gg=_>!#6*?#36!L9>Yc$r0hYp5`2vCByA3rlYefTE45~z;91h6RLC2*Gw*tr@JFQ zdGHuAjlE35S`2z{l1yqNzA7UXp_;6B0LW+|Fw7&)L3^52(@bTQ*Hh_b^ufAnL>jf} zuN=Tl{)p$@;$P_d95W?mTFjZ4ZoXUoGQqlfd1J0w&tBz}1TpF)zL&b{V98-wDi3`m%FjnYKl(fbP9I38yYtkm>_bM`0o#X3#}I?yPeod zX;w8x87G)FJ9=Ed4}vgK`=ReVVSy&&)@EAvFOX zDv1VGd+hQI{$wIZUt{9T_w=#kf|WYfo#m9{-ZnGK8N>A|dVPHbnf+ZlC^MTYjcd9` zKObo7Z{gb(Gcl%JOpX}E_rm`@IF~n))%pT6B|BBbPhuNos~S&>=V|Xf;jQn@?1@re zDXH-h%|I5n5J|?9N8^!q(Ek}1Hqml1Bl{*f)d0zO3PNuB%F6CE7V%d9Y>$IWTb}Z2v?tUM#$Is$4eA5ulO+up8x2TUq zGM#b>v1exSInTHg?&NdkA$+h=`DL}s^fauuwEkGHZ#*#Sn=6Q@X6W&QkNiI0xR}l{ zH@Ufw@VJMA)yNpfm~HL<$a52lou%B$Y;BO|wnqx97`D;d*ONt))ROW?F%|b|E=Xw? 
ztnvmjm8;TydgqoB^$q0-<4a}e)jqrM;S>!JwgZ5A2EVF z@VAf&Y-b|%*-h>~r??%UcR!oaSN|Cl^u>B}w!%16*M+e7G<&@#f_0 zolv4`LmW^KJGl`2?kf4sRAL7-%&6RyW@mqxONi35gQ?sxH(BYZ+Rh`xs!f*X1$+0f z)}pGU?bXV7YI@#iXVh(I9QZ&scGG|LTO5H#RtVW0jS_+&FQIGcD7jKwx>vm70Wjel zR2p(P=v;w>esWUG zNc`Lg*3Kg$>||v(_>?8W)UkGxz1wB%YwHAtpcR+>q~yp*?E7=61-C^p8C2ZGQ3CTHAE|I#?PSc$|4@-xHv zVehaO5f|&MwY5G*zk~mYHU#scaZfL&9}CtCr}b{$VfmpiEfliJY&V zsK>qGCN1Vx#uY}vs^4ojw0>cqI)bE>GCNuIoZ)V55QCB+Qst%n(g_*Ga&@`Z!gIrO z%2UBJO4ok~XW3--zc^N48=bUF9F=Z!OCItL`V(*VfhAHMKKDj`$^d3I zM}j_H&=2W5$Z}>IUsz8IW0KxkFBAOj|Lgl66W>?T_b_IX@14I^u!Aw$tY9%8171}; zxxey3UFb;^Ryk~L*w!#DESop2mX)3DNxuCO-gY_udtPiSy%cA~_D7YXODL^43YNK_ z+T2;{bTMe_G;})I3(OP7F!1g7#ztm54ss%?aSs8@`OJ4WKm%Zo@-%Q_YU;RLG-D7?>r&x~Djn8f<$5(r3 zPdr*!sjzKfS;N+OBDBKlB{`=Q%@Yg-=?@nL-f>AOHxmH1!UayuJxU7;H=~%G&X{D> z=!Q^P{Y?$(z1ho{g>{@|hFiDnY*hGXpjNU7-F7q?b9TLu>ZJ+#F4}c;p#q3^zQa1 z3+oe>JS?#{hi5lW5g~cRGr}+WCzJLMs^V24iaTEwZJZnO6QzrqPRr`iJ?XtuJaIhjGz(vQOtLt| z7r{*`(>b`Ellu)m-c0tpSZw!TeSW}<8=2D4^GttKj&^47GhQmsfJa4$CS@|9GSX8&26VJX2 zlG4H~2I?KKYNA9L;)3O-l2(>mD=KlpD!-_P_Q6xpI}3|EOmmffL@DRV*;|6Wl>>R5 z2d6nMw(ub5=@!v-9vD<**yBFpJ63yxGqe_j-Q?_6zzd~Cu_4sQKf`nl*=VEx2tEkx z_22Z>@NM?(_SNxu{nrBh^gl)i>!$7Dy|0p*D8G1ujNWJ7&S7oCqP$BzJ~dhyF24p* zxBzajovQ9z_H;O1U3;-PrKtV(W7_Xwcqj|qKGe>#F)w!`U6zaOx>jqHiQ0zm*?qdo4y==XvQF;OV6;Q*SB%$fjlA zg6p!Lq9*6594PYy{Za5ha6afoO)|cF zz83zX{;~c_fscW%Mre+k;WQM>$t#JCuc=?P5@9vMMtQ4v_j`_O2i3AleEF^jGl`p* zQ)~zS3dJj`levX-bYA5V^TA*kL1nZXeM2>1-}Hq=cp6Q#nO3N7BU4viPemq+dqfyV zmG8Zn5=DpeN^@eUqMqrVP1xk@oO{X~aQkvvtBqP9a6rjs&-p*y=%gzdbfHqcxGu6R7p80U5Aynl`~vIY)&6n zB^U~?v5M*Gb8Ag}-WZLWDdb;qu-Xosh`3Dk&fz?>mY9porW}t2x_MGyO%I66 zr0wzrWjz)mqo;)Di07lHjOQ7cPfDe_TurKhHmEMdBLn^b12Z3|I1YNDX+)Xc*S&bZ z<*Ynhya{&H+MR)NK_{k3)+VnssAPAh&f6aU+sc?vZaXp9CD7PE*Wbr~*O$ZJ-T&4f z9=xruH9OjOoS8x&>4jWGjaH+y>z*^-ciz{Ye!TVbY7S+q6oDRkGMH(L=yz%g=J$&( zhU@NfYR@YDQT6G$naj=mOi$imerFX*M%U?%FKf*(Pn$ce({_IiSPq6=xHBBOfdqVw~KD~Cqy(o z;6sh!L|vsryNSJw`OUXk>j<-{nax~FES18D(#z`QgOP!=e$Btmf7IW}|J&a-Fg)-l z*w{Qn-1x*DELKs5fh2BM^LyTUzj=Fk!|@I6Jd&1N&L$aj>CS>Jbd|coQ`l1V>GEpr zmL==*(fJnzyC8xNss(u7ThwIVz^dpTiZIZ2Txc~y?ZR)jgEM$nv{1?_Cm&OGso~lL zqQ;WmcHVKGF(B0aK^I38Bk$xhrr^83l097JrZm9Er(r+WP$|p644fjM8y&G*Nnscj zat7P4sZ)PuUt5^1&9B(yRAz7Tzkl?{)WMnrHuwklANa5PZ~FTOS_FsaXN+sscB0s- zMUQM`g1yonwdKy}Tr9$;)nbAeWejNBYU`M$*%*gG%ddj4x4K*6XcI`>0RZK>kGG!t@&DWs%ml3Z6*ZHZQ%*dnbblP8@gH+f|u zS#L5J@}H@PH3gZf9IG+k(AtX! 
z8itQgV73EuoCBJgJ-9CL!T-gdAz=Gk1hxk*2L=UO8-J`hPAYme-$}Z9R$HiEBoF%6 z+uM6jGu3Ql+Vhm(mUMYD&s(jGvce}t0-X6YF*wUnD2eW2Ph{wVm2kwKSOhI~YhM#kR2A zdh%Awae`tv%^_X9#o{$Q*h}K<8gLoLqQbS0H~bvEebg}R|KMsgV|sBUd)vnJ8Lf@Z zdL=!xUJV?*TOe^D=zl>SY((H^V0Q3;v6+66C9vfV$+y+)+B)^EI@&YAJJAzgYp=D` zt}FHBy6}PWh+ANdwFBWi9IIlNk(~M~n){dd-Vnd;DC;dvO-_Wfw#D(Hj@Z-=skkHu ztNsl)C>booyqw4}(tq+jWjAl~gSOPu$n#E1=-H^XR8uMKnV(Qrs!Ba`5{S;k*y$U8 zu~$jOfAF?ZutU7)lBSYI5#3ye<=Binlg5ccnR_Jsss7eOy!?24fnlUIuIOi}WtR(% z51b6l4jBG5{sMtg!CApX`W+*o{lz)wy24<&rMgj#QvcJEc-wn(dZM(4+7>NLEh|@~ z>gG}zDvs`gLM^5=ETHo4UMCG)!f&j%p_?8rV{q#xa=-n|lS=N)K#{kpl@5LXVW|5& zv16RNpvnWN0-cdX^_<#F3-^rlRQD8MhdXFFL2D<1lb^=lRUsm+$(t*JenWr`#YA}P z6YTB}u>mZnl+t?E{|Drt3g^s38)Yx5M?KL=8jCv8d~~4ZVVP%ByFVKgf)fM#0!E-s zpnPC`;6q@2P&9U!$9eY$g@;mmrIgxP&CM!{da8Ono&(w_Ztqluxd!4|kgGJpH~Le4 z!j`X1PJP<>MbGCwtW+Xcm=-%+mEPTL8K`(%HE)e_?*cn(vwR1*bN#K5< zRe{^n8!?zNG62Dr*Gn^lMYtw zQg;)K(=_BUN2xGpff3ciUBG(V*iT_V^?(bX5B;i@ZVjP5{gF$hCvqJ1F+V4-r#rcj zPiv!XQuC{?L2LU+YsJU#v6IkIToC5TKitze0S{X;3YB$WOV0YT;5&l8NTm zDYF~)d4@T~c%&cD>*`;EyMkXh&3^)Mg4Y9U0~Z5_1Fi8#{jIogUz3Ohq<%p!It zl1j#7cc60-_1ZD$j+xe1I|~)3X!o$VAC}xmkiXB=gxhMTv?`uYT1Bm#_FLVqRD@rZ zQ4+=J%Y+mXwF{1DvNsRH}uJzcG|^QDp^$iA{;B5D(yN=*Xt22j z9s`SYUfrW)l{ZSA#ij04be^ZXb)C%gCf$Zf_zk6@tuRz>!$w-=RtJ;IFEj)1mYo(uao9xNL;q3T-dN{u|7!EIV;eYK!kL-|GBJ~v27B4HUf z_&jx+gjm4j_}{s)x}NEXBO_p&rIN;SgI&~+`nvbfs8Z+wv{9DtZQViPc)r<*%20jd zgML#F*Cz(8z-Z2M!@whdB8+|7;Obzs5yyVyd~vghapf>|m3EGZJ)siaVe+$4On2c`De3d+aZ$nDi13%ayf+T1sm0%{?E9D9>vTw0xe8>N@7$R}gh) zKNEk3kOvGx)AR;;>~DHUF48LzM@YfhF3cH)vm`F#rw2L%?4BXzWh~ZbCKdE_ zZNU|XNDA8Qf0Y($Bw1&5Ew1)W&8OwoimF%S?DB5b+fGOO#8}w_Q~dM% zOZ{;JF9HXGyNt{BE0jjAizSrPs;n(n1L`Xb0s7;i5RQKvgUBy*!5Tb}^! z)PcsK^^+SPRflKHhZ@f`YSeXv1@!n!^p~b54_s|GhYO#GK9Eh+WD>ym9tpD~wYraY z`%Ar~u2K)e=;*Ih#t*!Km0yxBpM}`{XYMc|H97BnF%DL_0kLLI=_gpuf85?8;&rO` zmx=ObJ8xiUKV;@-6)OT&(ROe`C*pwz>0@QRFZYiOObHg$8=Ci= z_MDvB(ikPaHWpiXM_uY!&$~aiZajGdI@>>Gf<7X28No0ZXtYI;($_?P^->lUhwp zsrLULM`r;SMZ1OZotd581-lCyySoDg6Kw3l?(Re}uth8s>_8N;P_bJuP_aK160FU*Y0O~Yqe7WH?rQh zW?Fr0KkR<$cc#wD3C*R^h91URhWE10*vC}blx{d`sA)VQPp58~D9+Ula?PN>66<#0 zg@Wm`G}KvL{&Xw{daheV9hi)-nhQ$(gg5<@E>jWu zsiVOML&^58peuJ9r9l_Ep%;ilPB;k7>2{_&3OV08ZrU%|a!@VwvwpNhSy$L>worR< z=K!r5eUFE_dlGpDOt=_%mC?uRv~dX)OnqatTvO_TzTj4!$b`o_R|qpIZ2xtIV7DPW zmjyqiu*o@=Cr;x==VK=2rn|GN2D>K&S3z{fvr%81Ld9x^uug1Bmw&E4hx`~0`x5!7 zyh*;yMzRTDpYil|$MYVWG0m$mOEwl%C_C6q4*H7OdD<@YQvIp=H-K99M*XD}laE#0 zp{QiWQ|~9Cs(Fkqwm|*+(s|jj$`R+V+x_g5ZI7%Ih(^7w5g^VnR177QY*^bFs^7+P zapP~WoH~XVv+RIP?oE(=TtI@MAv#Pl3cQ}wj5+8jHnUHJsW~$ePzBkJ2+1a zXx%pO*~?4_y`r|*!u!>unjPoLg*s2XPy`M4BKnp*Wlwpayb~^(ET4mA7%1=8$DmAq zK}^LvpFnxQNIkHeT%#4UDLuvc^o$yb>*y+sMW=2B>{K0hsf;@o_G=RL?;fJS1)g>v z9Ny>7>CQjg-D{4#j#%=@8&+TIMN3`lCF@R`-q8`wfgNxVi;5p5*3}yV4DaLxhSsL> zru;^?;Wb#tY5hsMa7T3i;2>SnW;6GaqFr<)p{;d|Sq(QzQf{_cd|;;WEb~@P!18|U zD!85O<(tTC&p=gE`=U+JTX#+f;@-~FEAUw8*~?4u%jcL}ctGyqFIUtzk~WAtLD*Bd z2Pnzl%`uraHw;;FSsW%LlwXj-fCDh7N!{d;8OlW)lH^KUkHz2F+0({ zwS7!6iCFV;vNt1qz>U&K`kguD!LWxys#VoaS+27a6y++t*v&I#D?zjcE3|4oT3ZZncV7#^OR#8Y>nHt zBi3ow_m)+bXO_0sa<)v{P{#rFGPSQqi@+=oGgdK9ph|m9$0)++ZHhDQkl(?gE{|5u zaW+s^MM-uuxXg6anr5Q9v=tkDPS%*8w>*(~$@}gQeAP^M8P^W;jZi1sMkcUAh(w9%J<{}vbU^H|1qBHtZ;ztL;`HWF}j<&vmhZ+Y=GKM z)pU<7vaw9zC4s@0!Ea}0ay*q>AR3kKX(;j=)FtrO=}jr`ol~3{juCXn`q)NTcUs~t zT`Ws1$#k@b+0z|?n%(_E*G34Gw$i=1K%Ej`9PZW8>$*XcZ&JDB)CW*0jMv43BLu^7 z9OjDAy1T2w8K}!tp$|Hic94vRu2d@g>fGOUyj!$%n8_2gDr}81YX>(k zdKs?W2wBm0mt*BF@<066K-s0gf$sGg(S|KQVD@@+7FIV!sEC$sSFsIjMm``{`munwY<_PUH8wfnJLw%KD^ime!pQ|{d9WUviL|H3a7g=&z_FE2EN7&xm zraD5@x>RV@gf(J)xu$WBaSiYGs;RWsGAh9ULkMhz(o!*Wg<|pe+u1$d5#^*ls6!oq 
zJHCY>&L=du2cX8gTJu6dEnQb$9FDU4dHqj)Zep#G z_uF1>LLAD_SC?P1SN;$6^%j@_-b`V#GlNOq{Xz#Uc?VIbsrZU+#Az@Ef9kVa(IIae} zm+7tvAEX(EPxLc>8uZ2>Q>rP=xDiY;MZT-|k?slKP?YoM8N8Uj*+GB68w~sdkzy1` zPBY>`CQ4c($t(_{VAT`lo^B|Q&ZhbfQGG!rmyu)pl0Qc47m&jw%4>PfzeJ+*@X*Gw zdo4o$8m>Zp`VD1-F8II4AVoiQD?weip&|Sq*gy_(7|cL#v;e1q?bjv3H*~*st)ebn ziY>2KbE}7y<>VM6vi85sg^T0u_+g92lK+~W=FOHT^a97&{y2WBRZ%%W4@FB%$7O}_uZa3(oU!=9|dEKp^N#5PRc-7-Y==ef>GaC0SZuo z?_m}GePQm{dMYt2kr#ko;Qe`XERJWW-7j-9k7L&y>jl|yw$qA%}cIXO#e!#giU28DI3~3 zMc8>Af}%oRW(j|Z+0pAisdwoMG8_1ki9v6sQts%x;Gs{#Nc%}-T_m)ISNJ+hb5Tcj zJWZU08cJPy_<4n?Fais(?Z>0!C^LyLnSSy)_?r{R@D?y-(*&RV6>k0zn1NMc+)Qranf8LB(S(kiOFY zD~4uB71tNu(ljQ-*Xla)@3*BUX#@Iv5dGl#D8+PQFJ}SR8D@T+Q}b*(i$F2^ySCIF zhF;Pi=^#%QEw|u}K4LO5lNqv{@-Q&a{PIYBl(ZVo)?u)l-Mqc(psqLR2tO4iP`jey z1mPm8hF6(IUw{VZ3F6OArcfn!1J_vgF)d(*wZGC@d8uUJsRG$Dk;ggN@yjM!-IfeX zHS-yB7wa8s75g2>G9}GbhF)S9F-?EZ(3mHDYw$O9^y+0AYD_V_;`=Hnb;BFx){Q5> z5UFCLQ9}tQ^8BS^*NTkFqyAjeUD_RmW^O(7A#ZUmkC~ROK}BX_XTTQMS+Jos;(qvG zVfx1MWVtS0$(x%i%4gY?_>t+?Z)9wPnQwgvdaL6N&ZhspL+H*#z(C%tk9Y`v#Ak7VJS4o4RzXxQlu5GY&vIPdDxsPS2^?%ks zw(ZWLS|9guT@7&{T|mWPH~1Ov7>Am^8gGF{t}`r^Tj(cA2Jx}G4IHM$u1vJy_28`2 z=`DXj;jl8AW1E=ouEFg0Jl6v@jnJtl{+t6}_#ggwm3jzdH(aNOZPT4Rs*}80p3So^ zW{ci_IIP>4iL51GVQ-)xQ9Xq^H@%vQJYu?aouineguT1o z(OEvg*0$Q%aXq5bBX)00pci>r^LC+)uQ=5~sH5oU@w|f{kjj+8B>5S$p6l7Swu>#U z)!Dg`!TgP0>WMGf&y6{R4niceCpO&)rqPZt%0b^EZrV#`(i0*vT`VY1jB0r}3|$j$x(YxGBwR zt;x?=(%8db(~ptXfa_0o=Vi`SKw0FI>om$EWx*M*;u}lpI?)Hr0g8T;tR|MsqKtbx zxWy&vu$zjDZfYN@-tD>(VzB-(4B#aFa3;17>mM-0!Y%=KxvYcO6z#T+2TM^xc4e&8mFdMpr4Si*_NH&B$ zS52;gV3RL|;-IY4pLAC?X6%?tDy`=i|A1}g|8rr*O$3?kO{p(5xmJ)`M-KIg>w#{9a9gY(Z!zQt%|B)cG3D}VVQOhSX?P3=e>_)j%qFiu@yt^dz+U!oB-k2RJ2PPsW}awv zn@d>#vsHE`sutG;-4J-wn|RuKM%g&nIMYxronz3eISQ~m#>WMKrn=yI+zSnCW| z3HN(lVetdeU>;1M6=*r_M3a4_%grXfP1J&6bX(`M(Rn<*sX6KhbRWLClY|k{Mi3fb zLnp%%Cem}uPxQfh6LU9L^cQ&My;2M55-M{j(4a+K2j$p~@THfqOC&}71pBk8uz{K< z234h0R|fn2ax%Aj8|9qtY)R;=S@HT#wmIE~b5n`R_JT8)bF;mZ&CeRd{7g}EO>>Cl zm^Iw?)A37dLPk_yETF$-sAkG;Y-s3ZO7uEo+F<-{$fR#jU;i1lr56f)?@=E>b)9(A z0iB)zCZ&oB9l5#9;Kub~ntO`tjYr*-s;9F+;LRzmV;LqnP$04#80N7I)LL?nU(eqnivnxD4~`3j0TLh*!X8#-mc^*;Rjy z_ZiMs*;-(%qlf_iV?)t3Fvs60MqEXQs{1&hS*QzEk%odJUy{@046>%H%(pgR z2TpeOL#}7~$#Ia%N3v#`6|>AF=Pt`wYi{tj8z@WGCd2tFrO8v^ zt!<#=xYyXvw3}*v6;Yy)93ef0-{OF6GMstLboVx*>u_#iX>l;K%U{8UCXoBbGvgbp z)kAA0jvDDKsMS!dJU)4c=5W(iO$%fVbhI=UHm znKgIA6T6Q7QZ+o(TEUkMj8Q}%8@StNbp2Y8kEY@w)^cxtxt#33h{7X!lUbyrt5A;a zQ%NT7cQ}ta@;PSO3)=Ls@W+@7m|MWZ&t^@q#X5GW+ugN1g?V;y5>5Y|xB2>er!p^>4m ztkXxJ23Z8;c%$ne*y(roE^3F};3o0h=Kg3ypK+j);&G6s}(p0P`02nv_?V0%tc zS7}j9_wCR%5QFqboWs(&Ya7f+uYCc)jG;#(tWHJR%Ibi#UJ`DP|%;^6I_VWOfX-svAZ<#Vz9sSsOlhf;66Q+_x&$Xpotf^lfuXdKa>0Tx13qH_iz!2l zF9PFsAc`juOj_Jw-XO2;0(YmAtE<+T-B;OA%sB=};JR8^`=xf!ijZf{K@BKVETsQJ zw4KYHZ3DYwAbKD+<{vk+N%AK1_1D=-7{;yL#*_X5DKACr(a<3X2gh&;{b2eZcjrZC zbSo%A0b<>B?0FkF-~@OBub8yWj+*5{7&mK_Y0Otlb8NCdwl)Tb^0V|d-~ac@+{_wl ztL=EM^l J`1~~Ylfz#2{5xS8J?SFnj#HrW9!tHxsXPj$T-j+E5+NcAV$b?$^~wt)cc^O-8NKUsnlSDOvi?gj;7h zU49~eLkp$_o6;}R3+YPdU>=i44|tQo+>TJT`RK_1-{6toFb$Slda$->$dqvFtGSFy}O9H~(iYMP@O|aYPN*HKW6m463@v zwAZ+TSv$$(_g1Xq!jKiFRKwTi-U* zhnb}o(Y~NGG@QL}mGG0L;fkSshQ?}&XaHdxCa;qpf~p=z1?32LwhuF(^Q6agIY%IJBbc0GurBEkZ#q_5i|4t5R35anG zcwye$)v{n1ortv8;5#bti#mYbPDM{@ul9<3WixN~JNtD$@U(xlUG!cms7Y{oKY-2T zAhXDBkFw6Nd}SWDf?4@@(A>|O$F6g}QE$3W3KOIi^nUW1E`rH)G|HyIOdEvKbKIf7 z$}Dt2*i+3x00TkjnC4MUTB-iTf>TwOT1N|Duh$W6 zFnQEoywp@NTDn5#V=8(f`Oz)vs=tQXKvQ&0da`?NuGE`tzh%iUW^#%(dFE!+?$wF3 z{@C$c<~!zrSD2Vit>=2JJw)qZEI0Q6Z}t$m<7>8LZc!hgmaxIu#@WX)j=o+W+e&K{ ztIKROuQpGzBv?jS3)=rVZ@D_*0Y^$#}et1ROFu%!-5DoK31f(-dy?rTPx#sDtVd-a#qw^(*Wis7K_y 
zDV?G7Qx1<@L{4P7H=ezT+u5x>42_@BXjE0ECYVmdt<2NTr6&+9_U37Gp%GjTBs~yI zHd8zI1lRkm)dVrPO9a|aUe(sMAIzYadP6CU-pNqs9LF(x27HUY)?iDJ`QyJO=82ZP z*6g;*jt1&v@R>J4F)2!32M@8a;e*_o`Qzfo6zZ}czg0giUF!+fdW_u<1(?~@kv*M3{oo?V z%xZlpYWL&hP)~_MtI;dl$-cqg+|p@qbpu6TroU>ykt#u5y8=w}5WLH_D1e63Wyyn= zY6B|vij$1y?e;*E>=x+T46Jy(^PR&B5A~nTGx1gld}f~6*Rp|L*L0iJ@mpQsnd=vJ zQV*yGCv&3<4N1li#&$4!2FjCB9x5OOl99V$*3RN4-Njc1QE}Fg+ED*|Ao5;-D>jS` z*f-HjvMbk607_N+(j_l}uR5l6hE3B)*H@?lTR9sswhP*N_w{R0_4tXZpib}RiPx~t zb}rSPl|H~awk;muB)7w%N<`DAiujeh%9Ax-rgk@zG1detXhxs71)T~%y3~bSndESn z*Z_JFj^{L<_LaS%{kUzObqP;f+kEfeA#+0uU)+Acc}w%wjS%ulgXDC>8_>3qhJwa* z#$>}JGUQeATJX*U5WpeAGLVF!M8W_Vc#+&$e{R@$v8?zNj`A;jBfD?4ZESvMO$>TK zlpo3Vd6CVGlh`g<3g31|2#{Xrugb6RQWvP_qxFy2V;he?TLPa8=qe6JKkx!ya{ykV z7&DALc*kw2?Hj>UZ^xukc{UmBblKsjO=fHK5$q;A+_OO5bU0gHJE~_CFQq?vV#6I< z>}_E>hFkAi8d$cQ519*ELM`L1^X*BB4_jEy35TS@%+E&}7a3LbmASS%zEUHxJ3)s$o==-0#MT8uXAawf9kL7*z2#J3MkyDuR17NwCIt4@MX7KaM! zCgFovjA^KXasxP{Bgv8*P!W7%C-)%cjH`27-gA1>K`6d5k9!v#0zFC)`#9%qpmLMx zGj&Dx;xJWv6nW`t)Khx!38sdh1#ebIH#vk`)D;zqgO0|ID)y30fIZ-8!%+=bYfiKb zv+lB`IuC30+_LaSG|JhHI-@TiE;d+`Q5&F;X1EUG;-$(2iX3AokpysO!T9ht6<4s=+z=1XEXVeWrT$;iej} zrXT3zw4?(0MpavZSo@vX@?4Ip_R_XC*4q}*GRK@^-euWtJ%c|MwB|5j#tVMZK=j|7 zsF^%5{50M$K81g1;T<&6k4D?A4(!t2AlL<|w70@Jd?SW1`Cge^r!Z5U(Qxv&kgHe2 zCoj|LYuDicykmoR0QKHA^}P1Yb>5w-^JtbNOM&{+=!Mjy8?uqU#CI^sic&9B-^+CUn4b^q9o()nl>Yrt7cntzYt%$zd zNZ#@mEPE9_%tr9A7o$iu81>;i@S=QO$>59sgV8^iJ;UXgb~~Y*QzpR)%XH3mraJ1w zpGmOQvJGJZIgmNkD|88dSc4r#H9vc4hq!&kOlh8ei+-S-7sR|RJ?aj|5W{S^A9nCh zf9aKAK;>bZWDf@WMN?g$q%6pZ#b2n%vm_p;jWcgUb!jL!G+Xe>Mg3E+gdX?G$I(_*zK(1;cMOtkoFdJX4-*kmBABp#u6OZ1gB#Fu^#9 zxugc5o4&;KR_Y$+y{@1qHW;1S{OBKkL~ZJneTF^J_SZVl`aeqn>tyRS>nGa>M}H-! zI!9{@i!d6d@X~p2~H6p$~M9X#?YDT}v zsJ`>@qwbrIV#|L_T^Ym+eAT1ORgb6FxC6C-O#YS|Ju@GA*EQLt*PSj?1@P2a{2Oc4 zPt5W?afUhTprW<{eS}3QoeW@#@wDx?wS%>Wbqu^@m$iodyW@?rM=h(}0)K1I-59`} z>O8p!JfUgwUr@4c^xIm|yU4F^&ut6Qo28@Va5<>U94IAqfRQ$WT>S&A|0{5~vVmFc zWY^Phu((v>zKL2dFB{n>G5fWVT^U>0{;(f5+etAORX`i?n(z9zD4Yawp4mVzHW6?4 z)5Xk59lM+u*8wJDZqxw_lbvrO>iIB<8woF}AX=v7;aC1-Z&pn$1Qm?dbn=d)$?(gu z8FivNC|edm+j6umgm-(zy4`x+dJLve6D7C$N=?yrFh6=*HxL}Wy8aZZs%K;a_)LCo z>~av5diuXoVSH6<6uojvL#6qs72Xm9(3YJ;rPqKM2@e7lKZTj_Q*`Ek!@w?!Zyv6- z(7d#qRAFa$+BeM9Tw-!_lTaSZCMH(QO^7m|(lra>hGC9GUjTOb;BejfH`I z3aQYP8eWWq75h2Ltj<_ zRTzWX>^e}D2Grkqi5Pptf4r%4@LM(rr^)Tk!u4qj?_vje>q9bt5g7vy1o1}))SM)uaF#WTK$;BLO?|T88Rscu;05{Y# z?^~TW`;#111%+#ZE$7$z(g)ef4E=eh$+^lA@4zA<5K#7@>SL$R8IBQL!UWW&0>oPKnrePNQs!MK{qQ-+|1H=a$$4!Xz3L6o17AJ+lF zoejPo!)=$XBdo)jp`UI2Yi-4B+ji#+<(q1Ntq{n3Mtxzt7>wp( zPI!j3^U8y1I1myP}1M;GYHI-HGx;Y*_~@t+jrg z6ov9+e<_+e)(bRMgc*DW>;lXVg_V_JRnDWA6bT!suKqbl+8FZ8YfL&saP!6!m2zX( znb=)7qDB{{;vUfTeF95U(;a8Jpacr6Po+Zi<DpuMytbFhAQ*OIJbM7ffYfYuCcy@)Z(nY!O#XP( z>a^ZrnlIfR<5=vRtN6j$O4n+mxbj!nDz=lFpgLOsRYMO`t0`DbGQOmMz9&`gEby@^ z;vhVS9~;@t%(3O8;{AjgQ#iJ~m%Fz_y3F?<&zrwUb{T-`s)$uaa_@T4Ez1w`aFe>f z7_6=|_E+4|je_A;m%HBsD=Cb=_&{)tfn?7YrAcHQ`^lM;1Ye;a+jPC?A-<#MQjl)X zP44Rkb%(k{oun>P6ZrKj)cupyvuZS|;*xrtocR^%!z<8Zx?=aW*RsE{mw@T$<4AOD zLG@*hQW)j%>&#Af)0Gtb=ts1n>!#{g!3Xb12l5G8ee1DrU-VK#rDQT%^xR;iZP&?6 zpE=+&1vA_5m#I5ZTrPeC@qZ&0Vw%Z9P415p?NrpMdNN_-BkY9_S%-d(hgFdm^_GaN zNNt02^@fPnpZT@tqE*aI#oHK6PDHQh3_0X&`Z9+>SdTKX_zc$08O>k&Ks{NH>hhv; zOxXd;d@;)D^-y;iq)g@0i8%box!CFNv^mZ?l9~NFh??aUr%VkTMPA!fJ*@_6?OcVJ zWU9@3YmEQgjq2Te>5|kKTz{EPp+mV*A0xv!Fz2VfyrgZaIo%Rq4uHGe;{-AZJ; zsQMDA@^A)sdCG3WQYPVodE?{BjKjzy#?n)|%ZWV|G@kb-HON=GU;p^ns7q{QsU3uW zOf$}*6LSf?q&KXZjqoi~K{7Ou-IZYWZJ$yO1rHTL09^!TAdDTEBxS6W+lgveRPAfRE=7%2Z*4z z8k#q7Pt?O<|)5-8;t%EZ+xe)SvZ7t+geT~0+vlv>dc+Yr&J^g??XqsE*odo 
zgFwC}zE|N*uTrO~RjI?)!cHEjv;~_>#)daJ>p6`~_uX+EL`~e}j6_lWqf^hElmP@U zl&pTRR>F1GRhHdE`O#T=M=hHy=&`iQ+^^HT@58*mKOh-@#kcgp_E4jBI$TuC^=tshIIQrNR`3C~xL%=G{hR#v?9RCuIsxSQ+L z_GlNp#>yj@@T;u6ZOt-IP>r53m12>q{Ers&*M`az8U|wXL zi&6F|L`Cr(m137;lrza0&23klUHB7k*j+P>9_2T!KQoJYVQD3^>25Rmoeq>PR%pvJ zHRP`D5u?EhxAR>ONC2ag;7JjOBZ(>Cl!Qu$iv@D;}*wb;}e8B%-FlpI#Zc#=*g!SQ938O zQ4Tnmcc{JwxJxj(x`8R3LEr!ioS$f_GaWT)Z|=ESNn#4UHJsOn&QR3VbHQLRv-2Sc z{kPuELHM+sN_izrc>%6?MxDjqt^yb05i>v^=!uPE^06h@WOZ!Xk9)ikdp(1_9|7}N ziv>^QDa+%nJlY+#$yWr?N(E~NulPXZ&W%3{!B$pqR_A!zY3RLLz*-C79lO!r4Pwtt zUwVRGWOBDTkvP!FkxZ$&g>+7PDm7D8LF2}~W%j-;JhGbPp1+xGb;91eOZRystjJ%k z$K1{c^mV#3jsA!1WH>CKbZmYjeSHH6R~)JX*_ggq%WjV$Sbb$Ugv*_Q=yE4GFLK+v zF?*AyOr+n{5)8ivbzcUo#uq3e9D{u`mDp5-H`xMExzLilOWeMWZbZ-6V=3Wj+G z-~Uua9BH)z`>3Z?ef8J-^doDdvGwC zg6)?gul40VWw`tD?yI3*n1Uy+42G~6|8)bzY8O6j7uD|!Y`i%3-4tZmn+{Ma_{A~q z>uMn`D`Wi%(%XTaSVwGNCwOxPn*eHf`paZrOF^6Vv+-ss80$&31QD^*r&CJ=uy3x5iArO`0W+|=S{$;D&PHW-bU_l1Y`lHEg-nRjhY zz5bJaKnPrm|0xHR@o)fS<$*H}@4no*ffHy>$89~jgb&#m(umnQMH#B@0~hV6J*C@x z7FFr}@R2+!CN-Fkze;p1g@^J?CMJP7F6GC5PM|;0syWZs3Y7pCHhY`rJ`bLgz~>P@ z`!%!IKhOg4#lO0Zxuvz*u`BaQKnPl*Z9oihaw~e)G^hQinVRyI% zU6^GrslMVJkB2MliO@mV^;;rwbLccT@RoQ=wTGxwnpmKDp!kZ$&zK)hZtem0n=9}9DQA3LD>scY3M^!K|`<&>oc3Wakp06+7cshb(h?`5LF z+ly$lkN0~X`}FMQ`3q*J!UC-T249BzP!=zklb@BKXIPYI(VCb)hPU67oG%LOG!E41 zH!4_F>819j-knO%aevk)5sWpO&n_%u7z~MuRQR`uGegLaUh{9xg3pzyJIUm62X>dd zgA+cEd*Gydz6)N+L~7kKuDtZL-)l!f(AI&^1vBej2UWS>R4=h`Q`*x_$y6?o2ZW(y z*agopopT7{|L2eyL?}lTJGuNc^`t7p-RVo0FDKpGjnwuP`L`}Gr&}KlnJ~J{#mT_i zlABH@|4QTYnp4o@3CrUT%1~ETA+)b25|p zXgZ~lpMD_+@Wuve@WW#bg?aLdoJ$^Tp$IppK6#xVH>M9cVK^Iab`wqS!B;De%{K?H z@-Qey(s!P~iA>|O04#bcHGe~L1(_&$mkMJxo*_T}@htypQ}CS^_=XWg#tfLiW61vm z`lE5MpaQ`bbAzVdBqr{`H-vKvV~CFJ$&3oo8%b3U(G~BFP2@mtDvb`wMQ%tK(|cpk z1?&k2eIh4vmK{2^@gn2Zy;&^8XziLN!IgXhslsg$!#5S%@G$Ao_L^%91PIL?f{S^BZsI$}Dtua2I0=_XS!>ONkiO zm>~Vh?KlU%vsGI}%<#|3mfx}!Zx3fN1YW!k9l8hD!zF%fr(&83Ca?%UF`fV2uRI5r zt%7RTX#A_doYM?$nh8&Q5k7h=vWJh%QwFk^JQ)=~olqSOp_y347P6W+qT~b4B$=wl zgKNFx*1X4B{t#P4^5(kONPQy82&`fY(e^$WV{z_=KL}?pKBKXO_0m4Pe1ymVg&{&LoWAd#%W@ z(K=HzT~NQPxv`8LU<38|`VWbr{kWC4$yohC!na}>7AA?h)A5;)2J0Rw*?ahnOza~Y z(K9zc%JQ9iqDVEKp)2`if8Oj&GLDU8&>!#=xrhvbWX`?m|IQ$GY?6{Vg+y|QOVTO& zFN-;gu3!#j(3HPV#0jC}oXL65K_xN|z9Js&m&W7~xB0$C(6!4?5Be%R_8Hue(x@B# z(C%=<&*3pPung((vXm#Ohqv#A%?l77+b->*d+<%Cs6Y<|y*mO{pcp6T>YajLe1FYanR<)lm z%UjmKW0v5%FT}m@M9~mx%n8Jg8Q8@U@aiw*Qsuz@dyrFln0Ec)Cv7B)iXwh)$3nue zm3ClXRixaU#XU}NEV-%ybVM|9-=cm9v06(R0>w{rfSoXc?(;12X$lzZb?3P zq#w06SjR20t5_{e8vygKq-KW~mk6FPTMZKPQ@SfG+b4O{O36)(;qO|7xw=f@H8ST#Xs$(RA zmLP)EA{xEf zF-~Ry(WRl(1ct4GpWQ<(>qmF_1^;3<>a4$1xgI5z%DT_k#zs1iP2df@1y7A359-ML zej!fd2bTSiyB-BwZUNlo3UFMV;G+v69V5aMo z_exo8qdiKBYq%*@v<~>-`{45XIgL_u61I`wH{mpP^1T>fSWPGMyGbUU2JRzq5(PMm zLS&NxoI`mrFF*Iin^vR#_EaIQIg4N-%3|26mpBgt{F>^VM+>}1U!q3@`!_wG>vRg@ zv688rM@Ra-<*3LXQhj&B*1mK0DsQ=+d_Yo)Bt7Ty)rNBY{p*-PsEGE-9fo+Tfg^6GT zXF+(D&DlU0qyAM3@^^>ArK$#M{|mp-mAvLJlP-hs8ZUME!FsmQg}X^CaZp=&@~kG< z!7%cwA)safJ z8q1BDiPvaDK2{bDfNP*O<2Y9fk#z>BOD_7Nm(Uq#O7(e_)96qA;dFUyBLa?Y8|n`U zuGCkiGwy&Nc((p-pkqB4-krxjj4%^C`KKq>sHD2M6DIPFXJi|f`EeCbvKG$GEyV}5 z*oNqCOapT>!pQOGz8N^}%Ps}&q@gG}L8e!a)F!1()M3_@x zQA5c#+H$XMsip8dWAVwA=Hi}`{?eu4%6 z0+!E3{OEb^$`X3aZM3rV)eV{hr122bQ$4}y{$))9oEPcWRHyh(r;;mHMmzTx zeY7`Bn7(04_-p*%S$t_I&(jUuq9QswAK-s&=1Gg=J8lwJu3{rA$wJ4Fk9UEaRtY?7 zopJ-s`&!IFO@g8QRjp5U|5_X5x<|h+95pm6=+!p-MkD(B`^m40!Z{yCwCKTEY#@Ga z=PB2L+fJb_Z%9nZ3vOqC!v!q}6zc_%_#IsZ8@OLz^3fxBgr{OAyaAV3k()7zZrlw% zcj%BO5Mexf<3T+J6`TCx1G3P<#gQ!mKpmyF<=f!D+5>dRf;%g+;NiY#57hT2| 
z+>!fujTfBCFFxt`kkfe7Xl*eXOG7|2iZJPxL<$yBqea_MPR;cCI`+3vowa&$cuK)OuFV4R|=Da8<x#FpVBCU*E%_yR6)S6FP~itvf0X zWk7U%v5(`57dd=8YW_Q_H*qOJtIoe8!%T33>MrHGaOf71gWH9MSVSDv_yasZGI;!R z@R`5hw3o>$CSx5H;fOxr$zPGFrf?1`=|e1_3$YqscmagBEnZ~>_}&TeKQeVUo!Vje zjO%QFeu{mZCdO>Vw|2pk7U1ii;jYMH0`H*)DE?6t01HvK#uHJpvu*x3(@LSt9Cl(B zx)8JOzqut}$T70OZ1-yymtVhgjlC*{F> z^C%h7JAMZMn(|C$E%?;0}F1B!# zTk-?Fl3&tSP9=u(8A4s>4deJAkt&av$lfjmbZH$saSYs(S+KcHXxW7^{V|pYdtxJZh1%HG5^mTF&Z09hb_AAj zThcSrR#NY!{{#0tfqJDAUi1(D)&$P?5%JXk!>&K<*uQk#hPmsY_3)7TWF+bbf!KgQ z%-uS4d1?}88!}E|96W=o{n~ti+<{u*u3`Bb~qu?LvRN#x;6B4M9C4_?(cw zFl|vAMpZ-3qXY`AN9hODrxUOb8>&Ptzl?1~TVTc&XS4YxcQ@kd2RK<5sMUh7i?K|+ zP34J)xI*atETEcQ%53lr&^kL>AaCJ*Y^J8GLlyH9Z`y@>@|>9ffejN4*ev{+-flWO zWji=$(G%;#Hlj%8?`x5#cA-Cf3I@Yn&f*A2_))fCd%@Mq5So)g&7^iHK^EDXYHKq) zEBkS`zcDGW8kDmc)zwREVk{dOENpw&02;E7b38-#c%N#o7Cr3VSVp=q0$=0e>jaa3 zrmz|K7v3Ttb5nhpv~L3M=nhr=P;!k}I!wjIS7hb>Y$qA0E3Hc*!|2DXc_vSoN)^A1 z3~?$y*3i}825Oy(7Rzh))BocP@)L1t!`%r09el>Ku>TJqdkn@-CG9(3QDNP=mh`-2AEV<}z$&A$a$SI;g_ogr3 zX)o}*U~+;|WQ*g;2J(pSh#YH~Gx{Gj|0z6fODd~YbaGPQrCb9S$0GS*Gj}H z?$EVMqOv>-H%X>DRKi_@X`B({r!(mo1XFuQQDIG|FKz(|+Q(VkgAd?C{dP`mtR4gn zKhG(2LtnqBv!t`Qv%0f|vjKgw;dG0el9L6iA`v--X`!dCHmE8!fMGC8C`WF!2aZZn zvZ11qUh?6l*CUU-PK+H&t(HQkx)DlRd4=C(B<<N@^?}Lg%g?Lx|3>)zv&lk_lhJ+RZgdy2!(*93XYf1IirwHo z{lxPBxxP_9>D?8uky7qP*vcH9`T`aHb2^$&_*pC{&^B0I>**hs$(jvr?Ml&tv`nzC zsi1(Tm5Rz4=Tx5D;7maEw}`W)vk@$TF>H4F>nsKKQ%rrRZXo*%;pC^Iz7UM=#XsEv zVE{SAd(eFy45eIThUf7RG1%}T`Xp|U@^&C|MigB4F!{ZfY;`d#ie6}k&!op$1U!8l zU4DV8Z7LHQC#csJ!fVW-cS-s267}@C^~qWNfxTHVB$srHY^4kq5k!yh0%toKy^+%B z7;fc_2eWyl0Z$+7o((RULZmxSZT*$4Q7mgfv&N&i`Iq0{Lf^bA8G8x$ck0ey5Ykxq zu1~ld#ng1F)v;JbyweXo?>~-@4lieC`ez&2e^-;(5`nUi4}HdQnh*J{9$WkDUW!gc z9NQF63QdU-c`?E=D8fSBY2*$Lk6<&&~6x2ydz+`Z@->=)c%{eU4nc8JY{3Ow}HB&4OW` zLH^lXleB2^tOw*3>(H?%i{8NyXD3)PJvfJ3&g*c3{P{1>!8Lk<6&4^m@5A?G=S1`uNbd$rP%QeNO=0_29de#SdiUDo?l>57CGIL1QZHkG#Vh%w!tCvA@B#aDgC) zkW(B6|MO^u9MU*?l&zW#3TGLH^;4BHhpeTK$6cF+$>S{GzU8;qmqEH4OG!xCF zJ?yT@$3Cqvp$;`jQPC`vr$Vhtv^YxN*iCLZ5ms6brn;A+sQ8L^Xw~&5qfj|RiE~&E zR-6PAGCO%kGos@fo^>0RQ2@kZ6lj-@x>XUq$S_xtkB-Lo%p!tX$U*kuU5atKo;~Id zVNlGc5}1cZT0gi^I&7d12umk!a7#`j5U#R;D)ua=unLXCCSapyUF+x`^dgSxT!+Xg z8qitZtM)+?LQ=mf@#LCqnL)VEj^5Mo2^422-L*Mn8<#kVlT@lNRr*dY1HPsX$a93d z5$HuWbRcpO(<)MnR;9;#UMNVO{RSQOXXI1~m{_fVqQH_rC67s7qAU+cN zeflvaW~ZMv8(;BENmuSG!_ka*#JxD_jN)DmcZG^T#L0E%$}9m*o^&Uob{ z7~LUQyrOGC#$YB24g{SjBYN0=B6jf_Ja{iP#s;3Z4H;B@@LF&3Dm_y^AF+Vq zfwq&YJ{B%gtrq7g(MQJ{oMVo)qV$U@W*0Z+Bs>*Q|1}tVFpz5Z2XS{gS!y=?W+ohm zi%c87h6TNg9&cBoNn31V6di+gOr$NrR=ThwBp(yvm+&KDbW-Z!EpNDH<1L%hcg_Im zssyT&&QxA!RPtX?g&ZfZD9t9o`_6ODOU^Ga43>cq-%x%l6{t_If&}|(SF~}&s_yKI zItZ>6#_p~+Y@Hc`jXwdq@TQNh!0t<hi3f`#);+72sJ7sK);1G~gkw!8O>1O^9??x4}62p!NgRN`(_Gp_G2h z-5OsJOB|@d-8G1I!HSI+0dJ}xdQ>33QnNlKC!Y!%;Q;pBgS%Z;Cjf;8_gd`1h$n{s%(Zi67CcRVqS9>8vDMf_FbHvfe#aL+^?lHvUEHb3_>1Cv8f4kr9q#KkI!9a4 z;%cYs%c*Q*GhP!Iir?H1=wXa=S9KR86E8{pNCY28KOg)i2}VUDY7fu-*p3F9AJOy- zSZ@(#pz>wik^b1kS8m5~G!=VshwiBkbq~DE|1ncJg&aN~|IPz!UB{dGMio*S+iie< z?=JoiKX{s(zZ9={iTol8izrMslmSa&n=TsE=^4EIl0=bI{&q3mrVp7(OVDg5e)GGK zlhbUE4fukPcL5I^N>rJMy?K~U?eU!=efDHB)VDmS3gMOmUc^tizeR#~x zoI@PGa{_r*Zsywd(^2zuTPvXTXQJjfl9gLkK#h7gdFBLdAihEd*V{n1xgaMo75+s4 z+?@Vkn?vchCx8GSgnv+<*@O&s2!?Bwvv@h}(T2>2f7-*p-Gp~@18?~o6lxQ9yFZxp zeQvylr(D9%e$!pujgQZThySa)MSR;p9a@z)c?$gg7FnmA`|=b7XgKF~n)7&>#cZkq znp+u57(g~Y6U*2Da_r&jcH%Vrm{PgMtvknAZ^F;?#;)qHZ}Sn)zJ!Rn5&tj*rtUOvGg6H(?^gG1;Xo{z}N1ZVhjAjtH zW>Id6hmC$)`JmLHhxk&hfp6KODcTFyCRkctAZWXY0v-m+G@ja<@Af=r5sYn=2Hnpi z-k|q;n#_ACUcL|6lmm2PAFBJOd2@&HZu5yfC&-J~56o!<@LmUyft=zAGsw1z@O8#> 
[GIT binary patch payload (base85-encoded binary file data) omitted — not human-readable]
zrr{UPO7>|hGVfY5rLiD0qINNZdNQ7g*ialpl7({v;b@|-BqW}q+)Jy>HN z*e>+=NObrp@=C(hV~%d&_GJ4GD+}rvkqJSA7;x|LP8~R%*48kuj-sL8Ic@=3W&GVdN;tk9)ssL z1=QV&c;Fp0j&P})uOuX7PJv5Kj@`H(*j zJQ59ErChhbi$_cvZpZwrJ>X1o{1N}6?W{$D=fW@X2L7@Pon|RkR9C!z8nuHxI7ec* z?kjY?dq{z0L^(_2;VOZLqBx$MI#^U&$l}>)-iQA_((58~D(g;s<~gwyw&TnDgatVk z2;|n@Xs7i+R-Tl}V&#mfSQ4GI)LKsFj{VYxk&j&%e`q)C?ghp^{PT73q-Tb^iyv(v z@x2nr?JQ(9B;r5SeP}r+$oJ_6otZ>FwkcWEmiThxQ*}HJ-kX$&`A=eVx6p$%X3;Ii z25QU-7ckRuFngaFZ&p?~%rGW3UuRlxq+m~HXOgNw79)?`@(|%}Y^qxQ^g?Rvf!&2;s z8q5tHr(M(9;yrfj6`9G_2b}pA9`oKk14 zv^SRjPiv|oUgCeS4KCrc$){d|FV7;Ut3P>W^Bpmcad;c9qxI&%uW%Hf9~@ro{& z#PtkMXKuLfav;-z`#~gkVP{KXFrAoN=MR^OAPzGLDk|?t?)ZjHHiC+(n(*9WYEJU2 zzhP6ZLg#FNgsef_eJlF-Vf+(6$!Cc{&sd1x?u8zzU&3yFtbf;!6LrapolqaIM3g%b z-q3VN>`K%^2jVL|rdP#hF#+rIx>lVz@fqPPC*Zl4flLpq?d5Qzyy$t^i6^fm>iY$M zc}4Q1L#chrPUJ2(am}w#Q9feZ4~V-{#fvulP05O{ZydT^C^d+_#3Oq$dHFYyf-pQ1 ztC74*9rKt-oE>}mk~1N37+>Nox1h2cd@>%WOl6wibs{d6;K!Mn-FFeKcQ}<<^ZDIu zFlrKB=Nyi$Xal}r=5p5GmKb79U_A^h6%7$ndkWd?`|^I zy&8(jp~q^sw3WbQB(arr(AiDw#vFL5y5ZR9#(cw_+6-)l7%iV3tmlTmJVm#25)Y0x z(xJ5sC(H0U6?^r-khNsomw}36h$5$=ZeAh2v5zRPbSi3sj7kL6KO_S%0eb0droEDx zOrL|wOm$qyn%AM%UvL->Cst7*xW{$k1^bZ4TY*q#ym!|fXNmM31DkF;VjT&wlJWs9 zfAkv$YxDXYP^sZ4>d54H2S*QtpOwUpj3(pj5d5Vz__-XrX*2mL;k>s5U(Hf7o#qhb zoyNiB|+UQyFZiZ=xiN(cg8T5f@xPPFTxB69m4R&XCyynl*ZzfT5 z^-1rF=PD67i4~X(zRFyNy|jY98vF6<#gYTxj=YBYP*itnyK3-UDvwsec|TC|J%pG- zHt2^cTQb9Mk$n?`H*^LSy93}0rXwf*?tk#355<$Pofu~fUWtQvs7`Vl0_XV+rA#Cu zz6C1!Mm<$g^gur*V8+5<)}g&@C+gRmDUpTf0T50(4y5)4ezgWG=_J3|!9@7pjxxmP&XN&57asE$ zzA`5jS&QK+3CWNPAet}}P52&tOSYh2?IR|%mKaMYPzhwc&9DRe8WoI0#&*36vr=QU zW7<9Ki}oAsZV%R?Mdb1-T<8^1;9A6D8tEOOxl_FQgtc@7&9^tc-bZFe@{7XpaE?UZ z-C>5C4qn$Mi)0R1(jT8kNupmvk>s&t8P&xXn?y~neg$t%al1r+xte%Relsm~Evwsv zB(8+UItpE67jS8U-ZtMk99f*#=|%@$3{K1iV>US=$N_jn-o!OH(o?)$tD(Wo;L!}c zghkP*&*H0^3zd$*D^nZzX9qiH;ydewb{7mhPGPzILoU>7qR{Wip7+I@@RHfmp~Ujf z;`z^S9M=QM0mw`~z)vkTwq;pj-!q73E+ryM+YenTW~ zHIbBVXt=|OO!UQ?E&}E3AzNw}()n+A%LOvDB`?^49yJnO#m&{+#LDgft-mGTs1ba( z3bJ<@p2?hen353_xk|pkT2}0brXZd8Rq0Cj)&Ln zM2{&7eh;MI^%c;U=?=i{yJ#aeVc+9g?*iM_c zt;QyrgS8X}7ut<%+6*o=V=lPNGk!;$J4oGxl{yk$7733HM(+IW!Iint(^6zWIeb+E zpnD(s^t8fHmBD-mUzzxSh`JMa8SD3d{BzG7`%+P|lRetB2q9Y(SqdpBOJpgjl+r?q zq@rk%6h$gav>{1_5EW?=p&}wnmdH70X72et@A3Km|K@Ryv&@;f@Avz9U(4%yUDpN2 zi)Ji;ASbs`zI#lnKQFjEA9znPhD~$G{SPpK`UR*=qx7Ow&(yW4VX`a-#nn^O#qe^v zIya-O%KP-~wy?ge%{1BU-(^PU*n>c}az(aB%J>~xfGx(j<`4a>0! 
zJ7zxr{eCruH`6dR(qGUQ6H~Xy9n_&c9&lRnQw7uirW#=&oo32%#q?1gP9Fd7>r?^r zx~~y8R^{Q2qg76_zu%FUSz^Tk*v$7pdltdNcJlCQWR8dD^J03^DjWGUkKmAP_Ys3J z9?+4gff!&B8958%>LodYYR*uBc+u>gzY@>EnW>8jQZuoa#`&5T^OAEin^t*~#vN%A zabN3jpJ?G7mT9`*j1nzgEj#cm4L(NJwkM5JlOJ6)SOgdAj>Xqf^xB*6au=U&7j|7g z`}P=aSyggUM1JXe7Gw!;tOhjqIVY$S8*>TXNE;EsJ~_8rY2q=YWUfe{j{J8C%&ZJw z?k0%KbgY@nA(ne&WDj7@bha9ox%)QG&Puty@x1*-zPp-yw&ed$6=`1}yYv%pi@{V?b&W-TEnyw{kWXyh2s~Md{1Z#i0>#x~EOL zW4cB97QGxMld}q>KF>Wr>Xgi) zKbDf2p6pB&{Ig7O4Ow|H-d_w=TYl;uD1ypj8vPp{%1borXi{-nR7K%Yi)M@Fbq`gg z_$fwOf5=+F@Fc8$n>^4NGT2|FMqr-R;n#Oh^<{I%;8<01Rw_6(?ZwW$tVHM>mDNF} z4Vn2logihMRGqzB{wLRv0O)JYtU?_h7HhzibgE*g=8 z^WoWjWkXu~=x4Do(^F&3Du0-@c_wu;ee!DRJ5$N?VEtE#cKUf{%k=Z< z1@elst;T=Wa;r6YfJXgW4be-m;%H)3l%rb7qaSDOKeamlsPSqB9c+&mR~iSqHJoQu ztN>POO(%STxa~Rq=44%N&cR8j=MLN1@p6!ng3iMfnzK9{dl*gf2dna`RXFNwwGa;s zbAl$YIJe8yZ*<~T(9m<(t}^jT@pW|SBlxk)oRDZ@?WL})pm$cMh}}>?6n0Pq+W-&u zTpXwVVP!pUwm2(g-z6GU9xz#n|qv&hK; z*^i3mML(#6-!5A8Z#K8PQ?Wy|cR4QLpVnY8ZrQ^!01NQA9~H50hL}D=vyX=JBy{)v zjJ;`yFZ8dRNF6!Sg7E{sTZT_nnpe`6_5F}+R7k`VJ5-pKm-+b6=@^SO)7c5RO&l=5 zV;qm9i&It-A8~tdLcVg7x%N>niTiOpl5A{4a&eMA|CpC_p1ru3Z*~Vips*T~cCLIR zxrm3$WOSZ`XkUnhkYaToO0`Xu5#{Yk{)5?Z6W&-6k>5Mkp}oFN|B0Qa^UnTBjZ7EN zlkAo330Z2o`$GQ5#;~3{ABXc^+lg%Mq>hrydZ8~yZHtOyNP5=yuW ze)=yTc6aP5=s_5a!#?V4JrG)1 zlLu+gzRpc`D;fD?Z(@&4^cajO)tFxT-I;s=*Y9$AY@L&FN9F-lF>~ZQzJ;%s65m{u zd5>4N7~dtd|LgeU#WBjhPc29d;>B-E2C0gC)gh^ve9vC~>q)hSqIEm)ay zWMVxly9|r&FInb)RU1DJGrE;!e-NVehT5t2m^Q~qMN4wpMx=KKiM`K91D??|_O?oP zs{H+G#2>+hdk@R*T#VD3?f6J1ps~DSVQX?5TybywLg>k@GOU-eFO&KAh2uq7pEzC7 z2!_(%x%h?;_L&UrK#!L&8{c#WiwCRu?AKa{FR(<1vLHiA>brcSleGL9@(W9J``V~> z{@idm*2+0np&;oW30Jy0Rhs9%6^8vovJri8HSeSx+fz3DAwAl&-oxST?lfJMek#3Q zPo$P|Ukg+Y?_p6^>Q!?i-F-;Rzu#GC4)rXliuq02s)Fxc6;Z7fSAEJmokupBV&lxA z>E3naO=PU{RP|k{A5?_GE{?Ah$&SDXx>CIM5lwmljP@Bw?d6HDd14hX!M>ww@lXweqHk?1WS+?*W-JKqCy?uC% z>N5FDgPic9PXAbu%p-K;15QAF$ooW$vGYZDOJF5uCN|KO5#RJJdDiN*=UpC8_=`^L z>q438r)BaxiCAvdbKrNCHJw?G>5z!iF-R`p4Hh5=jcCjcqPf;^(c?Opy(P~+ot4}O z{V0aBJR&(no@XYcXkl`5vIr}?MSka6e3WNJeih_#`p}t=(j9Zt<7k*M>DDTgU-a5H zK%DDC#lGNOej--ObefNeooa4ldu@;Zr z;3E7IRCYpcclR&zulk72RAo6D3gOg4rwH%;FyGr9>n|EBeNTa-Ibd|Lqa@qM}>uH>EK`SP&BT71lo zn9+Z#IQ-K6-3FsN&fj|+Le_+ImV@FgmDj3@|8PXdvmA9xn=v8sWp9R(=taD(r8MNx zpa#v*k`C+-Cmcanv?KMOi&n0;3S(p@y0RpT`5!5NaWW`HSGKfTLrKrIEXYKDTMIaI zCHdjfc>jO$WSYa3yW-{K(`Os-^D1Zk!PdO2&1xWChHVwrO=zGzUY>iqUZ(gSywP&W z->{Mo<-eG0klK-Y5L)yOZSWFL^NMs5Dt)c*GraHW^zChY*C5=LZmcKf9nRhT`suuY zG4!(dXgkcZJ$uoCWHz-PyXBL1TBGOb@=`$*ozxb;-fR_y(3Z1F`4SAKOPz!2;**B_ zirNsaZX))|VztpQ)m54InQMqjZ4rLD&IENZ@*#1?eq4oJst(*7gq*d26-W^97} zRguk_A;b5x`0N<_u|VePJyvxAzdMS^Zh-<^L09(CGjJbyI!yOg43;>V??F&%sD$|e z*3v?Ek7c+p^Kj}f5hJ`!XSS0wc}Eq<%hsR_%{MXCkbXRzyf0Z^-sQpM+sWr(V4ulV z-6?WkE^b@QFFz-}KUIc5nwz?SXK^3h`F9#?NzQv2Sr{Su*%*u4i`Uc>ta9JOAqd@h zmkmk8EMD*Z*|@Z#%+d4w;%`;NoI$hS791qG>sZp7BByid%il#KC1}W>V zS4?DI7{msPEg~=^hZ{dE(Kif?<{HN)rG4oBV_Mf9DUBe;SH6-HGq@ zx*Y#19>yi9#xldtIvHcLjPo&Yi?4avU-2&&==<;sFR4EnALQ=V!>rHcLzZB{@3#hP zAQpw3g4;#Ux7hjd&P@^C)PB}<8ohcRt@aA7cP1PoN9N>Gn&C_M?-H2$WzNF>EdIF& zCj7iRUoBdX^ubt<+h~)EXp6zLLsdTJK02neDC{S;X&`UzB7XHeI`PcR&*EsEg=EV* zilWDX1cCtJK;^jLvl8oN4C^wEw4TD97 zI>d)R@{1Z+hta9?WVeqbpGqEAukmL7%KXB7hwoCe)8*m4U&=q8AKvCXtPzjwf?aG# z*QXsbqONAxW|!&p*w{L3jn#ANr?Yr3y1#O=15s^!OYz?r=i_TNQF?E)=FNGIXY&=C zI5!6{Szcn(cI&htjn+b)u}8j8@ZPRMs?lw^pCEjPwjg+2~AY^O@^rS=3_VKSLN~; z?snFHU?)qZX3HwgQFrl4{;Tql-{;p(PD=Jqb(U>hnXVIdfKzQ^CHBHQ2(xNoRhOw%{SmtTqDLRdVhj3dq4RN$l^AXn4v~nxhWWYfy??i{dl_dUrJi@#fCLhKX%6A-W}ctrFn+;bXH~oTiG_f z0Wxz}DoDK}BV8o93a|BEmU3xwa!L(L_=%`t4rF*4D=|eq!p>A*+=ljKtABW17V|ou 
zt^}j$r`tz;n*Fr+VKHB;;4>C+5leYE)0~~04TtE?$KR)3xeHFiGjbU(Iyr~1l8^DN z55)$^nfwLQ4B3c3ppn;!5t6~JcD*(x?{~Po6=X^#$zbQ{L^=Rs^|HsCe$v}Xs78+z z=2>2jXS0)+J(^!$#;Y%YVn%Z+Zc@284L`m!++h)>LhZ*Us6 zv5j5j?4QL6e=7gq{I$tdse8ylip*c3YGsI6^CW)RYp|(%>4lGUyG{d6A~9s4=f zQMa*7P(u7&#D3P2ckF3*KZnHpD*oQDp7O8wZ@kALt3Dm7=0vPx({Fa zDiQb&mby6IJjH2j2zl5FK^(wa(0LwS*;PDPnIGK*E3>=v@RV~shGZ`jD;3mb`*J*= z%k!_w{|qPop5*A%7kuQ4oTJ;-Q{IUI@-6;hM-lidkcS*n5b-?*)3^`GHOuaj@jG!U z%esd%f;wzN1N!h;-4gQQ*^BWwz90#I@*#%u5P3t^Cc&lR^Tep!?UslSK(9^3U{V261P1pzw<||r>J1FovsuN zw7VOe_DT38KUjm8$!5N8&D&##V>2O5GqN7jPP*|s@#b+Wv%xvq=1jbfQ_zCPk+Keh zMN?O*!)ky>`K$<|sr$Xz`JW=1SSlwp1s|$5&3LOENEexcD(uUTJjg3CH8NIwvAE?D zy#i`xPGFobVDA^pPdx!e93g|ZSBHfj*n!<((?_8A&p@u%BnKrc<0Itd2Ur>ZBsZmA zOP|!c`W)SjKg4N2ocdM1{b8Bc?S5L^-Ty*s-Y!=ECtRab&bQVf#x6vBtM}=;$K72e z{=lcS-$G2&rDXnRUi9!_FHiX?9#TP!lK08V0lA>t$iWYOvjt+g)OwuGOMaLvY?6UL zjG++K7ta+l^_`mqyTX~!r1u8FgehGgC1wKKfd#r*X*t-|McMRA;_ zaWImJ@(_J|-%Wh^g^KXKs*J8m=Zh6qL#s=r28kdJVAy_~A4~REvsY6TJsKwa6diCo z%=(0?ktanEXNeoWkuQ1*C%H6_suv{xbQ z6q(rXOk61YSCtljjJ<5Yzc^d;_cvVbb!G5MAKjbYh zq9HzW=a<4-qC7}#mb9@Ba(A&9cUyx^PSaF=>m|W%t3H<((po=`A>!$TUYOs)iN0|j zhGIzcVmS}$b67t7!x{gA?>tTY^QX!4le=(if6CvK-y@k$E=W!1DV*RtFLlSaruV2Y zFK0CtLj7k#Vctl;D&8I>Q+yfpv6VY6K_A^k4}1m(Y{z5!L|IP*Xa5ktX{Y$<^x#MS zbyNBOdHma+e!hmMR8M{RmC%M4tin##VJT#FuCwsG`+CZ5PlwibrwN|0`%zAGH9o@M zPC!`}v6T}L2Gw~^&p_ik`AG)yKSsVj$I9k<*LieK5k1VCy64~7i09d|?y8cc6o_ZR-@-gf-pI=ZR{b#B=nV1dPJ`Rig zO7?H26Vb{m?++i>6XzbD+TY~+HLU9`bin6$K?`6@mx}AA^XIq3q7H5=WOQDD&VK?$ zoumuV4;Y>0@E1>F_dmn`s~C^s(SL&`cDf4R@CVv4%3(#a@B*9o1-V#hb=p9)BkQoA zT^`}*xAPZz^B6WkD(+(?TYK%}(45FFr67Y1byeF46+~3W-hJQQ*WDo zs!`A67tdiMrr^=G$2;GaUoN>qg~#z!9oh6h(;LO!*J8N!QS&Bh=Z{* zWT7J4+9Zq1ZlNFc2K}7#D_Fe`$p2lkSzpHke69uYKk-uXM3Qy+R5f@8`{>X|#9VKY zhet`vnem}c#hY4EAWqR^BD99x3YB;aq*; zR7LoFRAso5mL34*nJ#*4VfPPXfnTbA_FGriTVJHTu00KBx)`Ssq~T$<@kQR!CRSk(&2WI<^s2kRg{+j}FP;yXx`AGZcuNqOC);J!A(G%O>syunKJ7ej<}=|&#apK85ysP@~g-+yllnm!dTJ#rMvML!aCZ{z=7OS08e>xp;F%Rh$-ocZ+ z<+pTMte;5olm?UhjWCuw_y?C;qi229iN1W++6=HRYxxT|sDz2)_qrZcy~kLceD0KU z?h7HT#b3HfFTIz2ca1or5dF|yb~7)V*UX5SPtujw@~7MDF)}$hGFeIX;!+%nMd@7L z;v_Nk7BOYSCaFa?7BgL;ze=B*;L#7$G}FSSVydam+#;Qkt`bQOmubFJo@O%-yAy9R zA=WJbTj?O5*#u@hp3ksUUb+gjWwWUAT9$GypP@E6kH-)28g3v5^J#>g@hdPmizL?K zLdm`gM(D!uvn*zm1Go|f+gzq(DyGCYdb4$B zOH10Vk$k6#{H4vX!KbZGWh}-@G}~hNnnE&GyI>8)M2%O=Y=4*KCbmpmk4tmF>G*`D zxQ!G%fD`a2uVN|xdL_jAIY@p-`I|4rt6z)Qo`l=HXf+xIQ6I-N)alV|wx$E^__}KC z1eVos7^UY{p=+ay-`E`n(!!wFW1HziEmVB%&6JaEp^rRb2fGZ?cx`-V1(nm7m@P zS$NwzM7*j6&dwt|iGS$X+^j#|1NQYcz1xGXNU_y}AmU%s6CKIHgK{-HAm?jE*$ZVU z{*|epEM|B}?AgUR$>$k-Oe>7Wep#%i^wgRw`#qJWDj=fy!?`KN$Bpb7m)^7=xJ_-I(thasbxyznDBKB{WRb*lE7Z>J_aYhh)1|wt z(3eikO-{yX3oNFm@mSPdc9qQrI>G*X40EIY5T+C zg2Pa>Sz?b@q7JBP*3ZGQdQo1l3A_0iuVE}^UJLg3KJnC1`g0Tg`8Y3P0W`LX3|?K5 zv0LRyG1|9EI6$rFObpg`aw5@dm!YkzsiZs8_pL;CkI9?uGx4P&3F*wYI1|IGq*a*= zjaa54_J_c@VGAS{A!xAU-qPo<#`Tvo>qz1 ze`1AKvPvIYhoLy&^~HN-Sn3Th;rrEF|BjLInVk3UPDo>W(TA4*hV-tL^=<4i+({Zj zkLHDyM5aqH3GWw`w33C$xZh5gbaiDVK9aM12a~LU>hi9#(^Eu@Ob)S_2%Wn z)%=W`tn5>MIy}n+I0muZVkLA%SIzaiO2}<0QOxd=y?HXF4 zDHtuE$^rf=^V6P(*i|(0IbZW6oUSMzqZKcrCfT@{FTH~d6oTUJvY^ zag^09Z4_3^auOcpfJbBJE(&|&LJiIAknfp}t9w6uFBFCUYW+fe3i`=~b#&eu!9V|z z$+*U;>C3VV#ua&3WO}c!&%yJ3G2CI5ei8Tlu6}<7oaH` zDF(qaL5zR?fDHM^;?-!f+L^L*53<8uNq>L&p3$O~cllB?XL-LJR^8+5x zZJ{e0(378Z9b9J->+_F315eR4b7`LWIPS%C!H)iIgx>Ab8>W=JK&YR^AhFB@ax~Fu z^sy2T(MTO&xlckb#{0i>SX~5O-Fu}Obz$ZlLylcGIk@cCgu#D z?(=VK?^fMmKd}NkoVu@6u{|joSfQiUqa^Sx*^m!JNfDRxb&TkzNmA4~d9BWLe|i2c zIVv)4Gh*vGs_#DZ{;TmB|7Oo)rUPHf%Zcji@4&dc*UC*3{VXDD z3+0S{Q4-*~gzefJl6`PNr^{bZAq`8UqmKA&^L 
z80m}{N~c@v5>V(GdXQg83%0jTJxJ65p5ItJhZ*vCA3A?u^L=)x1U^K{Q|bt-*vkgA zY6o_$FDV-VW1ql7d^P)c!(A<-RhEhyeuKc(5+4kURg@V>_}CmPMv_X3AI}i!$`=k{c+i`--z42CX{$cFB|jEc9~qRYx2 z=UR(%eRl~w;tH$WK{l(6=k?@$59Vd{&@Eypuj>&R&L_kPuV6>a!s%K{KStirZytZ@ zW@$`{zMJ{@cO|orXwt&jIy~2L5~8Wd(fp}ye1e-uU2ieQKsxyz`P@IJ8Hvj3kTrH^KH;oa8dcCQh68PU|`_BcdQKJ*eyyK32<(q+Uh z=7H$R5?HDKXsbW1li7A2Yh3kex_kwu-pAH0nngat%DsX|IuY;XX_EJ}$9UgGUp@Q3 z|DSf2p7HxgGAGm2uQ<=qO!L<~-oSi&*DJlv#=Xu{jO1v(*PiQV^L(D||7eQ&R~~EV zgH`%Vu9fHC;;OdsEBCv?V|0EJPbvp5t&l4}!zrz1x9hvF%Vhpr!>_J!r|q5FE?Iqi zGu?cPkAc`SgR>|3PG1kEt#7i@-Jtt7$kn#-msdJxS9*ir(Wwj&sdR_=y}#~p@->Q zR%^Xgi{AC?>5M}p@T99h=DJT!s{fDYQ%vVs{h?Wj=&oB>fhNvT z+w6+8^tp|%uePGsdR%RFud+gy%4kN9de*+S&lmXnnlwNq{zGXeuMn+%>R+DsOWCtL zQlIjX?*ycMj#lvB>{a|@Uk|&c|6JP%Umf|bhAmwk=UPXW7r z7LtdQ=N7H{b2gUd^f3J?>pX_i@BK9AYE>%KlaK%F(s|>%C8Sh3Nl7ezVtK9`=sWJ)H2a zG0c$M|2^Zs>{of2z&aK15lx~$2j{VtM@_n*4hF?J*0M6$E>HSO*`W$Jl4tu|&gaq| zXJt=rQ5dN?x;~=L@e#|mNOtv&L-8+?*9`j~KyIRUDnN>h`drwa7x%pAFVFHQ<)flk zKGXjdyyF>O=`^3C_bKN$RkDw2*=s6GW0mo@g}r8ucPpH|gXoo_Yl+TCbUyM~&_nM2 zn8y+SN2_%(yXM=i`ESniHmhtNjE}uON2lkv|Lw?T&xocK?DZFiy-u`Rx%M~u_|NMd z_VfLI9&wlB-m|ppKHHU_?wX6aj*_mTfUlyTNBZsb?9ZZ~M^}5AXP)T}&URl_t#El? zm(D(?gugrWFIuUB?lD@I==?`(erEPr(JGhr7nSYPxgPaso@mBK9j74LwW>a%JYrox zyNFJ&XGd#$ZZ)q`!CytI8(nF%!X>gRRxG=M(L3gN^GK#ccY4&r%oOXkH@iN6TAwYn z%QhcBvS(X(zd!njbYoP5@yq|D_<)s(q&3o#f%}cLR-`GT6)BwE@q)f9X>|(wS@iQr zlSYyd-EBEnScM$c^bwuqGM-tO^hDAWo#Xufox+&YUC3)ke^tzD1mx`h&S*R<717F- zaj(%j)v_khqo$Rpn_ZoV4{@PYI?s1?{j_#gdsO$X=UBn0KYg@L6@66lsFM9{)$DI8 zXZNDCcP`|b(yr-W()Ev(ilkznwc0~=wz0UIX}+DlKjaya&dG7T(WAJ>ssE#sP|RKw zvrf@U74cQ{yHjT}$8V$iJI&rlS97M{g~*e*WJ+Z1;ZqvXXw>svq|CZtuS- ztLwkFTf1CA9xZELrPnCs^-lA3B&Q`v!CCJ0-0Vu!r&FW8+EEYuXi`A*)AO^xiL`P< z*61?7jpV7iH94gNqVsx2b`PSf)`G$#I_=R{M($-bW2En*Jud3KBRP*gm-1JqY=22t zb5?du%lJP!FU8z<%-`mb!V}rk_K%hQ`+sY@JNtihUU&NVJ-gCBK-zv_$JYD2i6s7N z<@b8t-&x%k@%)cDxp^cz-$%mhN7a#$d{lBSYPi=^vJsu2=utN-7m;+-$x2H-pQE3j zoBbU9te&syX1}ZL6(YH*VvnL-D&rB|$EiDw?j||`1*}SRwj%p;GOJ1cqd)%5-rfGJ z?)lYO`q@Y1sYRCHKa!qtf(lr@$o@o{C)YhhCm@!U{Zo3knAISn(MWSf zS5hW>CI7cWk<>*p7U`+z8Kk8E2cUFE%ZG#fqAzLAEBbjv9_7U`5q+3Shc>r8tQ*{)~}qF4Dp zZ{w7ok!KH%dqh^}NcItVebGseJ|b@;($di*`fa4MkNHh*b|oWkA-We$$9z6@8lycv zwa=%#q3Dc7uM*kFgC3FoKIH$%e>v)v{>%P8@{A(c`@jA1lJ@A-K1JF&y6!kxjjUFr z$s%ifT2{kES|*avVjhviM*p9({gFM4eiuEWuTDK9(u9$fER_9-bYLR;>d`w!_wj#w z6YWlPJ&|ROt}Z%F(IZ-kXnl^j{!@NkWUHgc-#((RBQG>ssmOnh)+)OH=n>iX=n>hO z|Fbob4m$OdXtko1jCT3|{zZ5F|9?cjK=fVo@6hPTBj)WTiSS)0o%WRo1PmDv=?wgr>Gclz%^z9Y@H@HP>brbZ4bgcm;+fl=<}E(HbeeX#--9&I&rbTEwCK;C zS76ul}}#`zkMox04pG;Jj~jeKETqO+JXQy{4XdhFI{VJ37O8jVwXrd7fr%|8*uK zOOfk!4*Kd+XX+e}81&{>SDx^@(>yzxoc*h)@n`+S%bV!*y>nZbhA+zhjU?!Vd)n^) zkNa*POBz+BA8xX zpW^H_Ua1^}@_Q>?jOP`YeDs~YsOs-_WNlKUJqwEPdb{=&BK409Au?;Lf{=g=(wSch6{bU~Bie(eqUW-$cQ$y7$Y;+Lc!RX~@L!%s?^x8788Q zbww@Q<;NoB#U^sR>o59PgB!@vJdtxX=s;^Ly8`kMMVI4DlqnR9G9%*&k@+%cdMQ&I z4ynC7H#XP1Mw2dXh9y0dIUoKpSVpd+c~`Hy(;MIp@5qzvF|8#o{_W&EtQ7O_#0iM( za1C6&an>oqp5ma$a5w7$Lncrt^zVf-5^Vm(!IcDfZA4;o?awWc>HZi^x50;+n}?Nx|NU+@*G#OA*0Fn` zK09@wdDaAz_D<3RUVFRuevt$Xhpudg*se7@=_gW9%l>R}S50CygRf%$KpDTtyoeJp zBQ_dJRsc(&vdNweGDl;hf|p}M^kSQz{ww?;$dBJ06wlNNJG-uJP@;!3KYK>oSZx`& z$(Zp^sBFr`>$t-7pkCoiriD!l9x$Kj8PagG-QI$Y{aNPDuz#i~T&$PrWL-%78tb(z z{4G;0n9aI>V`rl2=?gPQVg=)$$MVAeOf?=Vuee8kXSU_{7-Wbh}hM z?2Q4p*jacbyaB^#YNkW*vsF&WF?G+pt=or)AwD@MZAyJ#^*;qO_sIMm$xIJ>n0^QdM;!MZzIu*WMgPw^s!EoJgj)&{%mzwm}rNKbEUMRCTeLX3A!il^hD43WV z-8E#E=%WMn2jPD9MhA*bR$`p;)%iNSM z5E~hPEr>k&KmFZ5;S%$o-_N`hj)*;h!~bxmIfhIEyYOHvSIzyZ*uM1Rsq4d&=1|@j zZ<$yVdoU~Kf5hGnzQlN1gt53SH7;2p)<6D4=0N(|;BcZ;d_cH8bt2pa!LOiC(X%k= 
zv>D`&XJ+YYaARh&iq3ZO4ham8IVLH$iIv58=n-42GgJkfo}chhpNh4NFEh8SidAkK zds9Z^EIF&cVf1$fzsDXACuAz9N~ja7;Os1nRgar@oViaQoMl$JU{EJkChVT>mx*Jp z7l~Jj<Ff}<{J9sJnfKz^EnBx_i2Jhm=&B|ON*V`gkn;0H63NHy?$Si@y-y{ic`RB%gCW*6H(jmy=lTb23fwmLCm1lhajes+zc3=EdNb_~PIh zeCbJB|6HEI<7q+3XzZS(7r4!121md7MQ)UzGj&GuAm?G}veU&Yvpq<_Gzsii0sar!IU!LhBD?TZ9dH8esNpdiNt^Xk1dbbfF+?3A9L=@s-KS1nZv zbf&#VxQ9p0tWCJH+eyJ#KJ8xmaFEJ|*|NZU{Vu8*e-px!YvQb|sO;tF*p>0F!LUpv zmi%5ieG_)UqnRpr#iirt1)Vah)oskl{DouK)_eEig>R7cE0dWV`&u<}hfHJB0>)&j zs!x30{hkbO%hYA7N?AV(s0d$?iC!7$k6kL(1!lo>tU)RR& z2%d;dmUaIt>d_G%$qb55PaF+C2~(++>5VM#Yfj+^zS6HA>SR+SvFQY zQ<3#D^EuW-@3Aj~`@(lpf2GT2K8yVrv@%)qVrK%5nOVJf17EGTQvzv-`8E|W}Xk*k+~TxM%lzw@fK{#=jm_5>+l=;#NN8(?meZ3bSW2%T?O0Y*S*ov9LYV#sb_eMP6Jaw&~;}+K7d%n%@+QPSKBm@0<>}q_~ zhhkmA_tS~Yq+mk4d2nH7QTiQP;;>1cw`Lxf$7>r~rhcPbkcht@J0P#$J8NHe$DR*5 z#$Ld~YsLd=8{DO4tOn~dJKZaDnry+Dq7tjX{fghMlT;z1Xa+xwIbdR=z>npi&` z?7!ygUZ2Tx^)SfNs1XveqrM~oV=0gmT zX)=~iiSxFZXMG|zAX6)RC4F01P(4bW_`5*^-pz0FYu#jQJBSsg(KfyCRySvwlGV#( zy1IyCyJV(>E1k@q&Pi4Bn-U3>3FgPz@;&Ba+TWK+1dYtAZpbgWJ=l^Nu7+riIjb3- zZoQxxzc(2^=u`5R7JiJOj7FE|YJa$q1{GeHQ zB6UWl4YtlzeB;++b>ou)SCT3ozGhs~fG^H%I%dGe{w{y$=w z;rhrmF+JxJESBoQ^vvD(&pY@UYr^gJY7I6=-Jp%`gfHPp49twi8o%5*suVFC*qphU z!gz7J_{{x8YV$L1VV`u*q+_qrqHnMaTV#~SWd?`kT|t<+42NfRY%%sn2d@fO49n@t zSC79mPt8n!=XY0lI$qa-%rH??b8MvFV~^pOKAJg~%>F44JWTh7DW)=i6VA^Rv7?^^ z*JL)Q+NRHojnXaaZ##39=xsPY(4XmZNcwegyz0y)B<3E`L^{0HoxW=>`m159SZVg@ zW2f zbyc1pndjM;x3DCed)IjK8~!UwVYry9hUBP^Pu+{dEI9w=;Xf=GmD4BW&?q zu!wUr{Z*X&5MC3D`lT(Wr4CxJXg<~Bu}j_MyH5JSa5glikiWo>4f=>sHiwszjD@ni zA9~i!v1X#~!ZFp(8SfH*GWNXQG@C-0Q>=}y1f^J^o5F$V2mMt=RT)W~r@@(vaQ<(~ zbPPsdN*;}w1I$}G9A1kp&{B27Gx*A{WG)xg&d-d((m8^6S07doGo5&^bFoeQ@H$38 zC9|W4$uSoY_g-et&eIp|_slIK<^HCk6%AI3PiKi1pEF6fifZ^H)+gcGo0%h>5Wkfm zNBOZQ_(i`vBh%TfUhMn6@OBZ~b2QZ5wB!sCSdKa8C28n=Ds-RK;lljQ@OSe69R5Qz z-*&TIX~7cK$1}MgHo&Q9%7<(zzN_Zs)WYd174(mtmRX|er9ba;c+?v)6=W#a?E8 zO%UVBEfu@aV|2*5nfviy$2pk|VOXZ4V|?F|%>|T#?EFC2Prw^|Ky>;K?{*0N)*2JI zz5Q;h-sxJA=R1&}^8BcqAyg%;*KYX7K>lJI=e8NGE^mZ~RX27v%;Ir;**mS>R;PVF z>Cp2bcor6Oh*UJj_3kVByAZGFfOFWJe_jFV@C$8t)D=wBacG}<)<(R62Qe)ov^l0a z`*e)qJE2+&_(UJ-JiecPtr--+!LLM;>t+_|VNe%haC6Ww{z}l;+D@Wtw}vgPQH~Qi zPgT=Hu`1r}GIAO9D%rwwxmQGcBi7C$d%6L>^E-_5TDu%&1@GkB?GW44f@^FN;Vuw8 z)feX;U=hxRJT1Z#>OnfAjwS~)i*-QUnVHU?zf`^a1lnjZ#`{P}%_z3+B2@tE^*Eb} z!*-|p`~%zm8t0VZ9f# zvr#Yf*4gelZ)5yj#)B9JM_Ooo%{&)5ugc7%UmuOl=cAv;zp6^x)po^ivWvsr^FH%^ z+rg5q!a!=z%Q%~j9Ehc~*6#k~Du07B^yQW9g6~9AXV*c0XL+rQRUSS`Zl>V<%;Ev8 zQ;T!Cm3#t&`D^&g!)8Vg!;R@rW-r9j8$oVz@Uj-c4$Qh|b1ULM2f@o~2dgne|KT5G-gtj*ejWp^ja;l8&{d}x(bd*aSvOo6Bq_HeUXm-0x~$-K3>C{ZvhuM zop1M~yNR-%bKSuMeA2(zPC}JzVQXc50IoQkK_IA0K^0AMG6mXU=0k_wy}ELLuKb6Xa@pTiI^(hYX$I zMXz*Kb9kB$(W|dv-F1S02G(qazkJP%w1}Zv)Na2c<{w3d{)J_ZAmafacrN+97v|X4 z`<@fK7@Ki5$@&j-@+tcEU!LyNSP#+9MX{UQ=O1vFh(}Y@x<3T*FV0Il3iWHCCh}@$ zu{R{4pS!H+bbbYkxI`Qjb(-h~{chuQYz))<{R=$vRlZG8oy3;#-a4`V?I6;B<+9vI5< zMbqzk(fVKbejv$?axoog+E3`A67DqW&Ro>>?pK5KH1FzG*FA#2`7s?(kgo6P-qNIV zo$ds;u>ofVy)h#DIw=)kuPx~3KU`;P^7IgFx&#@2RFy;>wL3-lmmdTd2lYhESKFV9 z>;S*bdyEr>^c3sC56Q)D+UqdS!dh*Oc9R_woBtla&>Ff zjy8A#E2=3^{w2A`kt%7s!(Q|GGOJ_VvK;(6;*0Zfyk}zrz7JP_g~xol`#;ADCfTwx zuz)wvr!1*=eGoR^n_L_f)!Y`mX;+K!$oE4v7Qww6ndkKmPja+cy)W3j#TlA4*edS7 z2tqy2T1CvD4py+U{;p;C09Dl{Ou`*(;VN(T^TtrhB@p6|bgx|+oqWu$T@wsI%I2w9=QnoPC}-%TG7MMXwWq{6mZM%H~KWI`3D~ z#YxE4TllK;oW{qz)*`5UQ>*r9A`|Lb+Ro_+zSK+tJ8B;1br4 zJZAj+3-Ir^hR@J)m&KOS3oo+xL%nJZt60a0%Hy{cFcE%^^Y00)w&|6B5v&)|Cl_KRde*Ndc|Jq4>qW4Ef=3@?pH@2 zPm$zLRI${KpB;aLrRl;0o24?Y8*gkp93v-q!45~+|Jr;WSOg0fb$L5tzoxO!x7vx} zasq=`!|gcnRYVHS_;|(L+uI_rA6d5j>X0tgMSE4Mxk~1xDliVIX*!bXqOR+;)bFWE 
zYBc^;yYqpLQazxYwbND9zCN4o$0m+(iYsRx;|bLg`z{PGWRrG7COX;eH|1A}LI1@`<`jICcubG^gOI11 ziI(P;U2KZ&1-b#}=mtDq1z*~F)lmt0Id9>FsO=D4a*JHy5gujK`FN7kb~}6dg*#tm zZ*PIbog%2s1wMYlmp-ohW^pT3rw-VQw@Ai7m13As~-@jki ztn%jPOf^%etSPN^%mjQ@$If+XdFdfar2 z=E)nA>yv}^nX9M5_g-B9>gjzvM8}w(=G^_0D&{9o>it?$m+htLp?Z%LrhS{kP9olO zdA{9Tm5wq$bW${FzBId>zzQv{Yi(n-d9CA{q0Zl8{m)Zn@a$a0B%M zSkCiV2eF&aZr2oL45cH^V9{S<<1Xf*{e`uiR;5(YjF(wzk`6-Ezt=Zzi0Z9+dJv{` z!d|6gMmqU^vP1IBWHHn4s;1Vas#%R;Dw>)?Q|syRT0XU0f1&I3X{_q6&Q%#7#pW~A zaeV^c8v-p0!^wP?J~-Y_$rtxxKW6c8quQLAV&7pZimuQdZ-$z)$LN6jXn-%E8b9d= zP(%OGj5%yG1>k` zjsJ@@#5}v-QJ<~#Ix_At(d6l57k#!G=(bftSI(!Lt&e1!J~#Q}M_n}zT-nx#?|wx7wFFOx_B%t{=bs7?+N|A)yhN>X_QZ`4m-J$d14*G=VOCEaqG>FawF?XW|xd$2S0l^*Mlq%PHW z=VFt(mYR=qm5$*yZ=k#?(B@w6=dTf zh|F43l)g>ekaJ5;BQsem=DeRcO_$dlR&$D;0$-cyu-B`+sD5;$t}|)Dw& z#I2pIYX`P^G2cH&Wl#rwKVA*q;;~&Admyu$4|<8Zqdst%-tO~&?yQfxqg{0D<2uD( zq4QXAJ)nO}l+5`k(ZRHiYIN`pT|DO`MwsJUu0XW{Kbj)cBr!rSM@nD zrBYi=yLr-_-rsatdBg;%x6M9Wudl?_>0RoFrzgKN*LawTk#$YzdMI_9Ze)ejOg57- zzb1@k8H{6XHmg@!uexukD$}!N4)(LWlXxV5y6Z=D)R$J7%?f`&j|?#3`73&) zMB*j2v0p%f+US@3psLwYv`!r>vOpdF37FAZo_j(+fqwkZc{oN9Q@tXuwyX0L!oa5C zKa384R}tOFbicl4R5Z}hw0vrkNkHxNs#{|&!^^2>_0w3OGWLm7iByl|gZVY|W2vpz z=l{l(zi`Hys>ByN z+ogghF?6D+vJLylGv{9oV{Jy8eYrxnvD0;A`Be_=eAV;CO?%wI+B~f1Z+)8NMW>^r z^;ty6G&9StMb10A&L3uX3+VS6RTz)pz5QvTS)as@&ffXfszWdV8WXTDFNp~v{PRC& zrxcG?j4o2{E^Zp2o_=F`pKb}U^h({aJLzOuExFEQ%mT?eCf!U)&Btp9RTAH(ljk$Z zXY-HbJ(z!=KDcFN9(y3)hMj)GeXF6!>E311i$UWa5{bZDS26ynIN*s zz#F3q>6!4*>w}NrKSfn|EfuZ&f{C&z=&UBXr_O}4$oU3x^seU~3|jKwI`P-)FEG%y*@~b+rFYh zI`IXDiFVI}u@@A@y)4@N6(^{^cx4-GYyk~$r@HxZsi>Rf8OcTYHS=rd*GRVGsb=)- zJRh^;(sZFzXWC_qY3Wr>WIL7|pxgYN7;n%nF2dlF7 z$NBMZWj>b+_!GW4HfW;4?s}(sli1=dy6qbt|GA`hZ|ra|QLcv16_+R~PNOx&7c9_Y^|lig}{>hR!?^3A5|!_mR2)N*R3 zTEFu2$RO*oK&OhC{F8+&+ip?T%g*x{9Le2qu`#^Zmt4yvNa|JOx=4Cj>PoYJ%Ib4D zKu^E2`6W%2-Dzg*{i)B~@lD}{;XCP_sqb_+8<2lI|9n&Zy6I3cL~pOkG-u3ezX1sv zLH>8^3ROm~H4SMx0r#9nYmO2@T?|eBOZG47e!M^EL_0hv?{WqFs#xY)Ip?F{@{EjJ zFilT^;r4&K{t5-+({=SLMIYt|#hrt@usvE~Z4|H?ubEK&OCoI=Z=NaNPtd8g6MxXW zm&CX04f$8lOr`k;=0XnQv&^$5A30@9d1`H9GkMbGAS9DT+jYss8&=~c9YLSrv6ap| zAv$?S50+78L=8&DlgslPIS`T0VLq}N^6FE=N_?_2V8^$yaUDdNf5g5E(rUn4JLl8j zaphv&c#0=v<)_GoRdZgZ=-u>Xys;BsP`vh#J^hjX-Q#Z-It6vDM^n1x1v%}9`KmFK z;rp02_ObI-HL=W!)${m|&wHGQH&0Yp!EDH167MIm7QAp23oskyF3f@ISjkE`EitZkZ-3Z(M zU8VB!V3|1HG!^*pz4{U?56^(#E+spS`18^JAJy4xkCm&X@5J5Ww&lFf({*rLA?n!` zd>X$sQC{RXK4)&uu$*7?!dygK+~b)e_33y@jP#DM)CBmH7!bK1A%kB_^c5-4H@j-s%r9PfonQX(dtj({RKQHh3yrTK5^XDgjPVGrg z33u?R?n$ps?N9DDm-!}L@cv5HF!$y%c6OHf@aZ&X4upAuyB`E2JX7AeChgcoKo7%Q zJ#?GJAJc)NzAWUWdU*YyFX#r=rgqMmIq{sFoYkZ$nkhDi^!&`f=?^RUR_CL8`S#zL zpY#i9{6OrqmVP?uTu;-7;x(T86MTRizTwU6%r@HfZCy3qh$hyg{t(xml^mA;c3yRl zC-Zvh8C}uT!7lDhKNhIKOtamSdAH|ZBa$hhPjDPtse_8aG^Y3nd=48W-v*qn-fs;})9u7%KUj*esh#`0YH1bt2co zY{3DNuuJ!a+Ptz2vcwbM@HOd#?eS%a=W;&E>29)lot$N&y8Z0VSop^tvlmW}Z`Ji{ zb-b_M>&wm3U98{BUAS|<;2Y%9>fL!eLqvD4^GpMXQEMpQd62qWGsE;<8EXDs&(vbx z#-#j?`N#6cwGH+~dAaV`@-K_K3c0D)V(kU_zS71f_Dc*ZMUV3>EUHzQ(H17#( zIN6Dc#ec(IyNR5f150{b51`tKv=!chJ-J)VbUoCtmH6s%S-!_H#~ZS;ufm^~!$nSq zfiz9eF@bP=vTt&4{@%Rn@@nUm&U+~Dk^JMyVdpXhV)W;!^bMdDd!f37unv?f$)TO^mZ=D`X zWFF+1+>H~qSI+Die#r_L*F;*NlB=)IpQ;r<#G*99-`eTitPINg-LiN))7>lPoS$=1 zPL-UqayFYqP$O|}Vv`849fa`)NZSbgu!g|ZM)72CPW%IX`^a31IppybxGH@ z7yf&L81pl>B^Em)bDPd9r=<@i@6s3a!ep=f^?BR#j^_QEH!p8s{-vp}(&y#=V zM}l&!Zoq0xjbD}cGjU1IgE`YpTRu&MH&C?q72WwPl3T;rTnH~+V@813x*b$PS%y5yD18=E&f z|LRl+$l$8<(Xc{hXZj6uDvG5lqz)z@OD;;jmYSrGawA<@u9Qp2<6SM*E3<%HPE?6G zC{`Z&vXHf{7Mqy4k6-$-ti+p8^Ets}Xx;#b<^ssj{ZQ7A^i?>n%Vbfw-fwaP*E;i$ z@x1oy4fqgj=R!8K0Zilk_$=JTv8E6l*L!{poUC5XPH{~1|6&o{mx-4}Ef44t_X?z~ 
zB^(&%uj4gyD(wIkCmU z@j50iPWDcon_QY7%O9B6G_QZ&n|X~zEBB-(*#8P}jPBtZ>0znSkkl1s1W!#?NsUTB z;ir#;PwS*xgkM`-1b=35gFT-vW1?0KPdcB!_Z_`(05^3eKYV?xFn?o}IQth&ipiKY z!{GP}VcTcRly!slzJbkIl?5#jeKIW|}sU zZp8npo{Xg)G>!5+ow$Dr3uLB-IdJP^v61pa^J3j})omq{I$1_-vS@i0`M-{}D8%1u z$s+XCo2RX4rABZ*=5u{<|3X~--1GzTc`fC`N2%x-8#IEg&w^)t>HnctWT{x?ZJMMc z3py>{p4HrL4t>p>&@lD$b1um_0)JkX^(&IPtqvzmgISj+Jhzy6$pb|<>z$y(`mVes z2iGohSpSnfY#)6O7cQCWIoQ~GgCzEA0@BkSq;mdpZ9w1b-71!`{ccncW-_H zC*>+$<=xQ4A?bHh1yV1-K=viyPIcFL`z%q;JbqViJcTgKiIs!wgenw1ihS>21^Ii2 z^}Pu9VY54(#&2JP6Me5UPFvg{`#v6KHy47sO|Jc645m(StnMbVj)J1!2?_lmzd-)jyra1r za;vkj*W_xrUzf!o{@4a1m?`T;kiY{4UZzsta7;GoD&5UAr-MlT6YD zv|t^Q@G?%*Ngh{K9d$=THpYuk-hqbQW%6%^H2jz-oKrd{mtVTg*AK}_RZmn&>}7S{ z)3N)A$GPJDE&lSt_!wGrgv@?pdZU^62r`LZ++f|{35ABaKTq-{O7IP73mSFm`u#MCeGg~V^@bQ=)m{BT9mp$?!Pit z;%l+?_BAa|j{1|FAR8ycUNARoV|w#(x%yl+J|$(P2F7|ohTdRXAAqO+jHk2+59(5E zt?xweBTepT!-J~Fu3oQ$;2g65vG}$eyrN#>4T#{&?8VQ{%;v;r@^^=<&hs9#Nyi27 zpZB3;rC`jz`9D|8(w=?oDx$9-7I}f*XzhBI;BU=mTSk(MX}aLuZK`#*^dJcNN*>iK z`7`qx=2gx+p1UJA%-x?iHJM4(hM`s!vyTiLrhA%Kx?b`12PW&?n=Zwkbme(Pm3^x- zv#={y1V2KQro{S+oU8LE9^s9)h%IB?r(`?F92TqehMtdsqBnMmuEa?A0+YI#yeO`- ztGobDb%4p6SDMs%FgQ!!yc0n|eOKGSOHan{7ms!lb1X4ad^Z0wj1NjIv>H`%&dsTs z(^xF~r3ijq{A#}Ir~0>l1mS3CF8dE6ww*AhcV(P@fI;30-M?D}-=DJeU%2+?p$bKD zuN!6)EVh*RWaY`(lreC;B7wNRQ7k0$Ylgd;9eX(C(ste;=uoa`q>%|)`s%q_ixx6y=!ofKaOrM?&!u!XC>6c^zok>!O> zcZv?}ka;;gt~>XgI*;EjmamceI(b5T@?c)iyhXX&P98d0);T%HO!cS|VK5%-#`JK{ zZIN1%td+W71YainEj=szR9tea*KWm@G~i#F=19iZu^(HpPg?RuYr{Shu@`x&D^+3i zgGXKp<0&XoxQ++(w}|aL+Up*fs+Zu|GqRrONz!pt-OX$<{q_8>m(7rm-|WXjJ&=1iPV3o zg0RjBVQC!wo9WQ;sq^`!U+YHNJRBtoJdX3OH$JXoN-fA5Rf0+ElPmncoY%bt;K=@j(jb^<;az3X$;FaE;?!9q#OzE zr7zy09agg<)pA~jiGIRvHgm5dW#-!E6lZJOb2#N!@W3YU(5B{8jL?~QlPRO2DPU!jwemaUZO?rk;nfg@E{)@$*GgYLG(wn%W>|-mvt@oyn!*?DN%?-8lf8m^VgG;@D#df#< z$K$TQ#;^QaeMU-+&$qnJyLe+0FzaKORd3*qZ7`W?25+UD%D8G#CqgWf3+3HwiA9RQ z#a6?4Izu*^%cnIHh2BOA@}XUW;eLmll6ua?XNd&Nd}SgpF;rx~4MsXOaZO?;WGG0~ zlyTU?e{agus0u&mp?h~%xcMW{rJ*vV)A$%CVNK0gmfD#s!@hbGH^$qzA+QCf6TijxiR&8x-&Lx9#+d@UCwu;imEzjuYYwV`&ksfy)v6|fCfDq zV`Y!1>~x;y5wcP_I`6n&?RgL?;4{~|-)C6ir@Vg`JgQJEJ)AB-pqu`a>0V(A(i`y& zo9bUaUk%s)Bk3&Qs!F#uzUyGO0ycKH*olqUEn;JUqGO>V7It@wttetED%fHtHg>nz zXZN}P|*RKCu1##2HvEa+sU3vQbIv(nULR zx8gfyl(I^oGE50j40#K8C3(pg*vYyBtt!1d3_dvxHMA|5z6eg~dz{Us{Pe+`3u(!Jmm^`i4PxVkssdIQ!YN4v^vL^FJXU47m!aBr# zq_eokw=_53yyi@tWaF5dX??(3xB%Z|I2wEczMb8i?VjR5SmEpc%X^i}8W2f%_WB9i;c^(XIJ3-@$_Qh7<0`G?EvuM@Hon z(|H~4M&Ct+IE0F=!+sT1MCB$beh0YGfv`Tg(Cz9{Q@WzTl!p&J&BW79{D$i&86L*% zN*zZH-HzMoJUQcPcwc&;$d1F8af^F1YnX?)AuV3dtryb#)naO0)t9=Fs&zEZkQ=Iq z#*m)+VBmOoXV#-52jOk-wQr#tD7 zuhxcfn*nA$O{&31aBrFK#s62F+13khOihr_Cp(xbbCy{rfW*ZqSkWBx$-T@S3DOX7 z)*pWIzc@S6DR219C*%5vW=6Zq-^~KQ;r_p5nDaon0q;-_c%N+ibr4pI)ISX*IPm>TYM4v#{!+-O@%I zk=B1~eM}<1(ZPOzdarU9Mm9T_ogQt#-KqP4x;*f(OvO1JDR!k&`Et?^z_yeUv%$$% zr+WC<->qus<5Bead|>VX-WIoFy9w2xnLUwt@F_aoZ`h!|aCDz=;EU9MKRjQ)Fys5E z8s73Ls%IK%@#!xV5U;1m95eqkbgC!YvoV}b+ zoQu^Mt(dXFJZbH<2l9@$vCpD2rEnKYe_S3h_Bt5Fzq}na+1Gu9Pdq2SiDJy3i|DW) zIO%Dmo$#@hc~jE5&TzJ5JhCJBcg~V+3job~a~i_z-1ZZ=q^qcNX<_movTvXq@5m>7 zJ2yyy`Z2xLhU+XP*HHE;qm>qPjr*X^CcFX5QDUz$$#+zm^CTqB#WI{gWngulahFgV z>PIZ|+!c8gv;2Jg9R;Wwp1dhjnN7C-`(#$5*EgYN{Dk552mO5mZ8W5c`kGf@Z>BS^ z4c7jsL)DJzc=eb%Qj6Dna7)k*ZV7r1wq9b7=dMlUdB)LIY6>%txp@K#!8<^-TC>b~* z{&HsCke5;#l(#ONpUR3CRd)`n``)LPCm&arB^_LiO2e>Fb>MoQ1C|k!n z<=|a;KsNFkzFV7wzjXIIhT>$l`MjXwKSdMV1=4uN`_hNx*#YX- z4*M|*N(VNVzh(A#%Wb2}jeYt(t*UlXZKmOk1UgcPRsC+z-@gYAmJS-XMzz&4}t%;)x^E!+lD1N4`JTGFn?A$n*>Kq#Rh|lqAbNUd^!#3a+^@Y zSK@v8Wj6&8FC_DG8PCFI`ou`?4ys}nGJ5D2wG{fvdS`Cu73VRvu69NDB6T?r^qd4g zF~`niMVfi6c;21yWGG``elL^V@!=><{ 
zaL&UMuvkv;cp?mu+m${52Kg}l%MiZDrKtI}@EVUoW!NVUq^~5S6%1t4c|n++ctusx zIyTT}o$@AXhc8-mA3pmNa#1Bc_0=2n)q_(r5Ij;&u138WjrZdu-8n6umkLz)81#}G zOiZKcBfa@r$fwebS~`@f)`)ZZkr{R;`q@RJym{Ifqc7CP(N}7!b)4Ouqf|fbr8dSG z1~Xn3*V|2I_st}$%EJs^1aGW`jhMy!wU|k82(#f!I=B~Es?l`WO`=O|#h+Oji#phoaV=(|o;8G(3t|87dv2ZDN-fo3&!a?dsSi)X zho5Vt(p||!9qj-H8$drvr_{y;rtqtQQbbI)vj=X zWwn*scYQv4?`we;OTbd~f(>vw+u7vy>U5R5 zFyA);ZJ$Gf|6=85YKeu1Jzy_#&q;LbvGnQ$c;;Eu*7rQ6S1y4RY5~0TKZNt*TpaYZ zsG>vQd<80IEv1)Il%M@8r~e>TVG8G^hLTNbqO?-p;#$Z8%J_=2?u1kcCg=b?_@;cD zKHL-~Wd@vT4JN0hFbsKM@JI5tCNXUu0{OL}uDN%$7iW+AM=;f8+{Lxc*~T3`jlLQ* z+JNMhK?uwS9}ODbP}c+?kG0Er@rlh%iRO&*__a0Jef_slB?(H3`# zUQArqcq%vHt6GTzz&uyn=?hm;JJOm8AF>s;E7+YLq0ig=o&7Mgn@QAVge}`k<*0-2 z@c{F%H@JKc45}%flg7!WoI~jfzKDSvKE;G`hfn?;)if(g>|W-$!@MO&;Qw;LbbX_C zltfp$gWq%reiljgl~%gzJD=VbP~au%XaF^=BH6!rC<0C4s@qVRMaR)Xqa|qDo#?Ii{=JE@RB87O zDsAYG^ThNpG?LVZE4cmSV zO)0hB|^5eq6F$z)K5cjN=y^lhf3U^bY1 z<&MSa+}1hDT#mbZzwuaaOYP{djZzOeFE}5o*|pltZVjxz{Bxe*r<0r|9~`GQ?VR9B znToeu>_p;zF8V+rmp{9Vwh6PCP%6Suc)-&g;XNrv_WG5}hsiFEH>MfRxF#?jxA52f zu`VzTujTic_mNh!!oKVG^meqbux0|RxP_dAjACX(#w zc@W`LH1`Nt%}C~{UbtQtSx?M<=33*79-=>k8I|F``lzqeqS}4+rZykv$_g{oTE{$b zpIpZ{IRDBp6i3lr-PzV`An;kPDbh-cOKaxpL_NN24h7tP1tg{fEZ8nN@Ht{CT z$dh~rtLT8WT`_#Z@t}v1IMpVhpsqs)%VM9hDp{VUrq9tE>BF=oY7MoDdWahOo9E%B z&w>Yyh6Ouny<~EoPW5B^0y^$(YGWQa(FJ6#Uh||~b`YC63UCWl7z(2&jubB`BdW>~ z-q%2C>2)&l%}^sRF~$3WhSOOYnUtQxist659pURFiMG#hqv^q3TR^m_@PV!IwZ)>Y z{g5idk{<+jJ(tVjZCL>Y8UWVKB8tIv}IM@4vMK!w|Q_pR3UPYK+C8|Xd zxr;R1tdfDB=A{$_cGwK3(V8c5o@eq^Zo=6Zq=YH4XpkD+NulST+Qsujot&9hEwF3pZ@0B^sZ54G{-B${YWDI*sR^b7D3Z}?nmcZpS zNq?hFQ%^f9sAtrj>T&h3X6h-%JaYypdI9QmU)a9}bnA&!i@8h|TTrfMf^}b^r%t6y zRz-(@CEn#TItzo|OF-!d2QZ1z2(~jxejjm=D?kAgKZRK4^2g4JD3gq7AmmTEa zkp9^jl9hR6^;x z9?Es9=y%xi%S^p1sifuUxIS_vm_u**5W1`eC)W|)b|hTwUS^XIJck)f^0l}l>m;L1%V zwK-Mq3bo&)cefxXbCCS*LDx{SB7xjgSb=20N$E4)H3?70C)AuU@LD*EZA-EcTggyq z+!DzhvfOG!GR?q9~4+ z$|~NHwIn!h!iQCqpYvb5&;5t%VL5*DSH?r{0Mim^ICv1iFB)4}n}~BIh@qsck8|gbbs;T|e zc;|IzL(Rw7W_&S*^672i-j}BEcR%d6uKPj{=BpZD-F-X}Hfh23Z6|p#nN1#j@m39D zCa)-!5Rb!vM^m{vl3T9`&+!>_e+Z^)EW0u?;*JSJo9W8^!s)1IV_e=O4@Sc=2jjPw zaKJ6c|1}mwYf0nba?bJ;p1=|xK{44NFH=sygw5xK)N#B3flOe2?+7yQ$=%RXP}KU+@2~3&Im_2LbYjuYr&2*5vp?XyZQ8M< za|$wvuS7}JNcQc;?VU%wkGsAlzPNVyhyq>5c+;ja&qm@+P2lbGAw%`VO2BREV4is+ z6ao*-fgK)#7CRcRK`q|;Qz%6LQ9)~iw0FrF;rv8ps^U_zIC46&I#Q_{c%GPAOUW@H z(I|A3^QaC!|NJaf`Al+2Z`kcQi>_aSbK4#zY%z%7D);%NkgHijD&-7m-!Z%`1+5Nd zdh-p*wokbHs_JdEj9Rps?98DKQX8nfwMwYfE6p)X|5L$7H*i0@(biDm4tUvzy%}t< zM$mA9DX ziYPkjNfo(<>>|feQOZM?t<0pBjH>^T)Z7QWk~=_jd? zNaL`TzseEV(nX-~LR8Qw=GX1KAw#)MG!TbFZFCeBkLeop@;9LUEojAVO{WQ+F&Aw8 zA8zUM<-4hkI^WUS!5xp&NsBi&g7r*#8U2{HP;Em6-K9pV6V!@4i-P7%^!WW~Lr<9g zJzb5^7X@|*^k$CI$Wmm7F*o2AyAj)soY%y02LpVg!lmU9w*;rJ@cdvcuL#h?}s;2ywK^7utyoulY`OVCql zxz3VaEsVopCU;(D;$Cn+s%T!EZ_kZK#xFDwe=vGk8B%;kH+b>1qQG%u0|gi;!|ugTgF5; z1hm8bV!S19cr$X_#jVGvxrex=eigZGAEvb_uK#f3|G`_iiSIv?l#i;!dXaFloaR5rnvC%}poA%76XbC`&4ZYQ79YR*M<_?6;3kt8^@ zL2$LX#7TT6J*eZW*)0(B@5bLKdRsS~OHX)HHkq5ua9m4o^nzr!>ggx64(bKxY4wBp z+*upUmCWSoG+$Z8V6AR4>0JbCRRuL9iml{wQaP~bMDZOv26Mvj*2E3Jo_VB~G!|$3 zXEewNu>(wk%I@EF>;TTiyj#pw*J@_|#EtvPEMP{G<*3NJdjQQYI~?dSVUM_%ZcvD4 zp-UZ5i6Y?t-0Z}2Ng_wC@@u!;OL?U{R_-fajscF=j>&L8EtLyAjfd2RySRwEa2h7? 
zo_s-}@&#jDg%Q|+Ysybt1nYH!?DRu4tty-ihdtT~BCQ=6N!>`2TargPMBUh}Oji2w-dv-q9yN>9sMl|TPbzFI!ujpesjK66STo>V(>S+S(TaZvV$eg+h@Vd+@a1hkA&6V4gdQG z1U8T=T8}*VFE&6lVY9+7ILs1kLM@83YbL$DnCmpv<{1jaPn;sB&4)13-;LvD9!8_t`T!j-3=84RO^_c<| zTS)!6xj&4(CM7i(x7*DMsfxm@ht9`J_SGhn^`B2-JCwuu?gfb9TRn|6YX|!|B8_qE}MJ+Adcwl@nw}COz@;j_X zN%Dt7J4DU6NVUGo`KTa{K|xsr;+o8y{6-idwtz#p%G;BSTDFqRb{Vo!B|&Em>1nN9 zK6W2^Sr|7K&qB>gGDlhssG#@oY`jCSd_*p(3~9A9;OQ*55mutbKL>vw^k+XJ5hV%^vVFfj(4w;41>@pa`oKs7zPeqExnOL8i zF@}?R0Crkrldk(-=xH!bkvLdVd2>&ay@(|fo=$I%f}*KO>UQu}M|_dzw6w-{BfHs` zjV9^&R6VJp++QMW0%yAOdH$gN9dyt}?ETmU*1E=j^$cx1xZRwP=ayI1#Oz2qUw^GG%9%aR1_rhOPl`Qb%dBNmGr4yn68}f*B(|uUxV?2w( zs0OFNpPASy#2zo#NG1bsw!?;6uW@?5Wp2!m`)@A0CMp;y`g~o{4{D>(UTebs&ByZ< zpgEaYXPCKodc~<4>rr5<;=XVS2_(F;q3=#)&&4#j&YO6kpW}tShzF-9tnOPlnJQuw zsqo|U)c`(|Z15@q+sr=Tjksa%;yLW#bgVM7^MmY%h0Dd}s$KuQwzbHWB*64d#Sxwh zrR*}iW$B++4~Y`YP*;S zj+4l)&s?C3VeB5ZO!RMl96mE#zNg+x3pB!Ynob6 zJ?YG&2CA907uq#_pYhhL%86ockKLIh<0-P|A>tV+o%|Pdpey*!S8NT6nE`{DLsW$w z)YV|-lI)T%eKjMw*Ty7Fet?1Nk|X?Vf1{HWCquZ=$W13H2A`1?MdmUt>ZfdC$U`p} z@17^{CB(~rrRtoHr%YgtnZqX1W4ogciSRm$nMg`InmN{i!)2z{xv-%1IUD6bp=-E- z+Fwd9&44?6As!)fTaUM86tnX%oD|)0V>V_VNf*5G>DVCfnXd^Xaz1lXHyfFZIQ@^7 zK@Zm7Y4x=R+GTZ#Dru^-ne&u7O`l*CrUTm608T_p*ABddC(zxC!sd5_W9v%>v!?hT z)5%nPIxVuc(VPO6}O{Rlx9+j z2BpwVU@%6YEZ(GgI$;Q39Xa+JBG z91gHEd`CNBG!}Ad3xig^P{rK$OjiWkCgK^`j$W08oolVgxmM*|kV^xNJJ>Ms$~<8f z<2|`Y;^vm#N-w2rn!mP3ErUa07{17XYNFPVyXiB4%MM#%q-Pe|)6qBABa*3>l&KE4#}+9hLzk;@EV zGeaHj>wZKxT1tOCDQp&JONIE1C-HXJRE?UTtX0&H68yc2DD54opKs*$OlZ-{Ik=r? zN_!NywtPv-2A)!1*-2MYq+Ptd%fX%F`PG+dk%zwemzm0aTZ$lzg%9qHX3`9$^$HHJ zPv&`C35^V0FJ(N{BVaS8l2Ll0UDlFtM2c!-HCzqSdh3OZjyTAp$%{y+t0kCn+$X~j zw}JSRM41lWk=eB(Dc}e=Tv_zv*>9w}W+V5J7q5MKIP#X9nwH#El1kS&ZiiXJNjM1j zQa@0eKbSkMYNYK0*=ExnEba-5>1HrhxV{u}@eA;^rBE%FpsfB!B5ee&j32Te-DIb7 zm^uCi$fJ^ySvf~d@dS6hAo0}_{#BMflQG{6mpT*Xtss>iS_K_?5^0BqxOCl{y-Ja0 z9ELk7-aKe3@W4NfyQGe`knX*T%d4pNP}S8^+5y!|{q6juPSiF2{$^vEwE;guPdqOv zY_4y`)X{}^A(Y8-DL?fXn9ez*|8L+@cqMGZagt6Ng10}G{!&|9jk`(ZT-4!RZI6>_ z6t2S{-jLnodm_zj++JP)Jv;`Se+LD7B5z+hSlnk|$uK(WdVc;QFrfEQm>)?sm`*O> z-suD@d{%j?TviUi3P*7^Uhsr|@y~t7S3enl?HknVB$$M?xKu-_>7{YIAK~i;Ps4?d zYP_sPYWLLE@O@v^A*hVajDE&;y^MJf zF25M8c0X=N2@;F39TIPeJVcs@YJP?@GKcK@F5xWuep|qLzCkG+1lyk;emj}Xrc1bG zWS&R{34VPZ-otCA$ef(jI%+198NEuU@_;o;Ck$av`X+wAwrs1pE%{O%c2gZJa78+H zfQ_YZhVe|waaL9m(CdXqEM4QEqj&eS~K;p#Z6Bq;!t70Q0ND4szQ zPhtc81* z*5zXOAj^}e{`sG~U% z-v5}T?0iz+m>3tr)_U^R&SHa+dt=~mwtq$Afn3X^^aDQn25dq-nBd0dG-H$T%J@#E zF$2EP@gy&9scY0nYK%IXMBg?%_JaAq+-5ng2lfQlYdoBfT(<-RzQqNSDNA>)Cl+VI z>dLHESzO8|F&EtBg7Y7OPdD1ti#hH%yZVZeN$H3?G&>0Rx>c4xVh4i568vj1U2-)0o81u!tP(ZZ5ZKC0V;*<02MHLH~s zh(ne;Tg-ump{F&5=?ApUTA~)JJ;P%^T1}&s*FLM>WagilZ_KV%AP6!e*l8u$`k^?C zBrnXL^cq~(AAV;59i=(*Y92b+G@i!-_Mm#AJ-4TJrDZ<&fgb;XG}{{xs0!zI1ME49 z>rj`Q@jRX1R%g;^0V?5Bq(bJmfp;}3{2E?g(KpToGYaZ-!+DZC(I@<6N`!AC# z_eu$4X})*F$)wWLz{(w9YiSx()^%X64E*f#;56Ey5bWl!O)3k#7%nf1zI+^C*iDqh zwboN}2wsPHu<#Fa8Tq(uRN4B>?*-|s&*5-0P&st?oECV4-JZ2KIG1k0YJ`Bhdho7% zM>$!i{8S3U`pu`KR8@8|$9c+kIagcQy?F=rFqiZe4Q3hB`vkE%*!UrH;}%e8ZFbT$ zcCCWjdB+=j%j|B>W5Zj3S(7~guh{05f_G;<^XfhAqx!;mSna5NQ9rA7^;SkOJoYa$ z${GwCbreQF9t7nttQOr!?=$#28+<`eu@~vy+W6Gw!@GWGq77kRXkJcr3PBX2N7f!DppZWyw z)PHn~s(67@RKOd$5J&wla93|U678s(CHR$A zybZ?q&Ssw0Y?%rrTUrGKc#^qm0B^}>c&==)<9UtAdII{G9P!vQC?AM%l!q3 zt{-9#$)6m|LwM;Ad=CM(!49lWmITJAVa4&5dZ1|apq9z-Tpc;phsAhl5Hm(^@MetM zLU{op-AW(@6pr4Vj%{KS(K$QWWpTjt1Fzqu z?xy3c?dNP9k`E};NS%}bYcGNYt%TP#E0tqEPNw59pu>0?8>yXpNL#ET?|FgRvW6bq z6PCOyZuUJOj1poxCX?!7F)*kfe?5x{ejm)`4LV5|oNZC~GMb~pFUCES(JW|a`U5=_ z^qHc4)hg))^xE1a^?^E0ZL3D|r-&+X_NS{C;B7=RkHWG@_sNqiQs;b6Xjh-Y1}7g`3sQ 
z>Q)+jVoyjlmKQstC0=5uP9YG6%0|tc%01N8hxiee!F5&OEjftaaU0xMU8a+hOs{=$ zRafL}oP;ecN!}t4)5$tmu9N>Zb`B9!*?RsO=KU?WD+hCl2PmTnlfiyeB$aoxJ*>`M zJoTYwJGNG3HQMVH^_(z%CCC{Bp|w`WL*0u!^bc*XzR_5LkLiuIiM*Z*Ps%Leo7f-L z?+N(fjx=0Kr8X9X1N?SZ>Pit-lAG$p(AiJ^@TO;#CDF-@GYHqM{c2beIqS*gWB(n zTX;GCw$XfLM>(l)Nq7N1@GRyVi`de1h@9&ie6Y7U6K~a8>?eB3Y$9FSu?je5M+_nDvk>AjPI&~-G{p>z6mW!Q{R=`Uij&9Dr|UBWfeR{ zB5s{Ea2c=JJoAQ1dV_Z)BiRx!RK;c_JInB^o^l(<<6zv5`$>g90tej(0Y-{(Xh$AA zjmhkT>4?*A6R7y=Kj+R-dl@S4PIf~UBeB-s9A})>A2Y$PLQ{-k`^-AcQ)>dc=!L#G zNByA17)Rlxa`D-9zz5vXzHV`!Dics999wOrp(GiavaPfWJ?kg>!6;^BKM+`65+rPs zhvP^C2h}HTSxsFocWezb?^$oZlckAu+23mXV zn$}0FL-u!*nnP>ArjfqzK5^zJs~nEDgLLKBuC(X_nZ*R*tGI~D{*?{(E_z6Dc#GLW zvhWli!f+h3L7a$v>~+5*&5hTE zxnc;FZxnul)#MlRlFaLbLgb!D57JLN!0xm*EHPc)M_JWa3Rh>wBMTXQPj z;;26i9uEKkEo5%5{qK7{j?*Ct-g+fo0UsPQ1<*OK!1I>`eO|+@5{hGHAgp*Zwmt^2 zhvx+4uc2-BVty(Q-txvJ9M1%mo9zj~s00nj>>WTSdCs=Fb+COK z`P>Wfo{Z2xUfrQ09Hc33j9ehK-ZOnrGOpjvHAM_eRKiCICOoMi}3SGFV z^c1$Wp_GjCqAx$oH?bMdrkqd*zuzlvIH*e=t3K|g$#4e^(OgclFQU7>4z(uDzxVVn z3G*Z~J6`tk?1nr7W0a10)$Mp5!1Upv428qe`Ru3AQ_?7#!R7C9;;iBvTp)R|mYvL{ z$=_I1j~Hraa}e4z@L5)rh3!&zRH)5Vwmf_u=kS%?Vy{INdfp9`j(a#in~;S{hU3b? zo~j9OJEzI)jb<05rYCEqwBK4@{k2+{uVA$T?5Cf0ShtM}?0@uNwsPO)IgQRbRlJOc zrWCw^JFz{Dp7FzG^C9oWJhu5&1&Qqw52IJ*=D(awp05YFn#PY)v+|RUWKL_vUo8ir ze}fzK!xP(zf36z|t-)+I+YKjJPoB!8^o5yyDjvBBu%BbVTi59N^O+wSpuC^a4rq7O7OF#Qp|v4paa3~}wfPKxnNDjB zcu3){3umx+`2|HMr39r^4_Tn!)tHQww z)7Z$<4i|MA7`8s_pi%8r_^JEb5o~H`Nmq?Grkc8$mOWU9tq=Bo=Jv**@MSOpyWxRN z&P8kGj`9V*H?wjDKig$yvrZ0Cdvq23&YX zd|PYS!gGszInR>OS_%G_;0%s1sT~Gy_~Y0a#M_w;PxdfFGknQ&4j>=t?^=U0S%Y0? z717To^V!GCHVZF{wY34%Ie$a@y)iFl^2J=6laDmq93dc`g_nB`VHIWNnZ z%RJCX5_lG8nb@XLH=lsD^6@D}&_k2ZCr+cA4aJpGo=tNK{J~syh20?a-3Of{J-eo- zlBEbVuNt%1G(C%rJMXmW`YPR1|BsBX$-b^sH5;?YHWCEA%yckOZBY@M;kWc$JohtTEEz*^KM-EmL&1$VNWXOV}klL9+kTT#6) zu`{MMSw#<2lD~BH)@FiH+BnZNwqDzzWg^d-RXd_)Qaj;SN(<}RNRKwQnugWY9>8X? zEGR7=?DNir&*Pie28HSudti6s*xC)AJB$aRE)~k3l*|=y?h5ABZ%l)}cw$DmIdry? 
zc4qtK2P@dxZI&{7vtzNNnSgiynYGV;MKY-p`+d8K`8gH6n69$JaKDDzodK_z9e0*H ze;5hFb4soTo*2aJF#!KT7bOL*d^U>w73SCl{AwWI$Kz8S6( zfj-O%v+#HK2Jfw)cE*x_;`VEJzIek#E1GUJ)vs#h^`Uf;ziLLcH%}r)4RnU6lZ-`d zd(2{9w~|1Oi#SnULK6Gc#-i#goXD=yUttCuNipWWe%y%DlsV*yuvRQ2)etSF?;=#q z{qO|)ct>BkmXX6aYcEG%K43OwOT|v3ni-5sIvv}(HlQPR0+n7yaZkXn8X=ETiaY+m z>n5U-l~&vvb`p6Gy-*5T;X1g7d#yCjsSVRwXWU9|j&DCts!U=vDFIGkWOrx*Cx*Gg+}R6V7GcA!Fa!|9RidQGmRHVn=^ z^vjpjid5#1-@KnW;K6zd?i=bFQY~gtE#k~maHtizWo5Kk)RM>x_9l^0mXjQWLnjlT zZb=xLw2q>V>@d6aLDxByexQx?V7peFg2QlFn%oy$At{fT(PZ2h&6wRjQB~ncodf>)4_fvcyxFUmPut*BzseS&actMj(-ooLA) zv=yj_GsvM8!!7S}I^ZoH8}E%zRy|HdG44LRZ?_V{a9#xQc1*&*oLzoR{+8PaxkYk6 z&pr?C{EB$~AE6JOWot+T-P{Rge-9(pk)08W{R0*57p^h4FKCxpp86HSX)VkC#p`^> zu|f`M1?MdSO`s!F=w&%S^Lipb`zLuRnDQ&wAz41mn{$n)QI(n+s9dD4=2T2P3c6_34g3NGhw%wL2TRBqiw**~$z^y+CQlz{Il)GAd z%G5WFH{=cYsydE~X=u7JY!_;USI-|^`yRUif6y;sPh1yaE;iUDtg~pA+06q?dmTvG z6rwM$pgDE|;GLHH% z8_m@lO!^bn=Lsk0CCP~}u?(HG85&|2X0fkmdbhznu}nlQUGC2`5+Ck*rjdfoBCnVR zKH*)P%qHav+ChAs(OL!VF`Z)^jPGFQ9p_xFm{E>A$x*h$6vk817ktu^)IcwMBOYM7 z!&JyoFnmwo0S4k!`VHDNK--O(PL9Lb)M9#@0v65S8pS)Xf*n6ynF)*A=SUEr!69|t z$cA2Y!s^96_eJPUZ`d>37B|f}sR`U(0Va&cc=zJym=fqBnkm)o2fiJ=!^XVb|a5tW^_QAi8 zw7xQ*<~2vdZ$Hp);&$~#BdVrX(yNmiD2d}U&Nfn{% zjix=)+(}kJLxGA1S?pkcNG#lNl5m(A=bm(yZaIWoE3!KjM=ZR=1F*v(>f=B>;yvZY zJb~(*h=I7|SF>SyJvgf+uD*9X!9*r@iP~9Gx(16-4Aj|^>E<)6?{9e54)oI$FitSo zVh}0XH1>Mjjpxkyq?xxGNAxhxMKk>oyCGsxiKfBg495#Sh3z4!>M;GI@xd%ia(_Oj zI4v&TQ`{R4dK0IR;W^5$?{JCUIIaV5hTV4U<^AZ1E)yi)WfICQ7QmU5KyLOIobCZO z+-!kGT5nCnZP3&3Vw2_`wp6!6S)R@mauvoc0?$ScScftwWRDde$9J-3-(YGsGMOeY zmDXeWx`%dnj_ITmJ>)R@@)AzPB`Ria^4^8O8IRDHqwr5n;5#c1;tXQ*Mli~MBanDA zxV=;OKu*~M`Rb0J?-^R|ZY#IdglAFNIL=&}nO|MVT&*SX-brhw%~BgWpF1CGrHt3c zQds*B)@qP^3^*yybzX4LzYowc0(pDaf$EZQ;-!bJr~v{ij2E4oSE=8-*kISrm5q+x zi=F)ya7!;?pL-g6FSl@Iu-0%_RZYAonYmfyJ4kCenDZ0!z8BokQE7tQhM%=Lev_6s zEKY;4!r;Sy!T467O4Q@Nt;sN2g~_5-p_8Umwt`{<0l;7hAb*YK8O}=mU`3dT{7MgMu0r$`V0Lc zyD}DN5xgh5np=J7Ows~y&^$68SUvw?gQsldy3PDm7yj)YT|6&YqSp9-f`kz+Kh8u9 zc>g{qFOu6pr@>luz$rG`6%Jc)5jN~0x2>jyMV|umJ;Zv*ZnSRZDN;{&&1kC(DbLX` z3iAZKnasUQHJINi)VJ&);7GnZPrLvbc-LpMNl>#5djzcSJt}A*%71gSosrB_ z=%Z)TpMt_SX>;hQ`N*K1btXD}wIb|OF3CI{WxwZV^JSmm1=lI6<2>}ew&Y(D;g&;S zdzZk&h)fc@N#Q1w!!>zNzv9)Yf=?@uZ6&`zC+%I|*nC>h^%Dn*0j{Y}s;83q#Y}6f zc4M~r`jS;!CcG3%qLwXV#;63h-OKTV{X#o94+~@$(_j!h_ZL{tS>Vp^D9aUiU$Q9f zyVq)hKs&*fKLCq(%h{x(Jd1hg+-u2!CGs_xdU2m@O-^p_oQ+EK3P1B|G{jH%K$df_ z$tv?DY*u~a1^J>V?St0(AMWfRQ^d#k^7rFvYeH)Gpevoooq`#i1%AFCfX zh-5Q|nR|?M;Cm0a`3z`E?uy$1HTbx6Np7gDP%bL<9e$3bq}zvc9^Q~L6!2rsK_jXH z?-xoxX$2mA4(6zVHqZrB9zwsp2wxng>MrMTXNa~N$3X|CNl)&>@_@;X zfsLsvkeZN^cq98ukN+_`UN}1j;qRPG&$7@*9#BE@Go5~-O1z{$ykW~-7N*#I+*CE2 z^jn;@ibT&CT*nUlgp2WD2zDoWNmjJ!y!eKfO65rDs>%dM2}cZWy$i5cCFE2oie6F? 
z{IDAJr~x}=HljHD!``^}*#+X$>5T5$jwz)xdQE5YcnP=-x^Oa<;WvHH?6?@``5idM ziF{RHPTd99*&PnNDfc0@BZF7b=%+W;J5e#VX~{U(R>1deRO8i8&Zo}4+IFL!^%mFP zW&0*Ieu&FK^;`|xzEgN2+?2}7MPYp3u|fGT8etHu-yztezkI%X(OGx7DwC?2!@blA z%*DTKU!ko_!WHCgXNB9lM;3O5QOnHCbGXMelESUbi|9nP(HCOStG|#+t>~~}Y&P>A zEJZc)L-*(fi#H$UVi&tHs?bNP(?<{EPC5@7^@jcOWfRd>p4K2{vkUZ(p7fDqIOqwy zvyH@OoY}|lIP*|4Z-P6gvNc7cS|osevyiE&OLjL>|BC_=qbKS<>}pFTHBnTJaf<3V zGDe4(&5m1M@Id4EbZ7B%cjml(5q^$io>`+_iIn*WXNIw`R(IOL$#QTx})EY z2Ytqom`#T_;~djhPS*%j?Yk&LSGY0brqLf~cn?xL>2Vq!AtSv>=)#`Mp;CZci8tT~ zuA8@@%j}?tarD(j@Dj~Im!8zg5cGj@beU#wULRn6Ml-*UW@5<=?>AE3%c;1G`!@pT z#WMQIe|WY$_?$*CIbQp>PurwgOvX!@ikmNjp<`g%1A+jaxP;rLC^Kvku?l^q zp!gVe%Y6^~1v;@0`>Cp+j(lL}N(AWc9Kce^ zy`}G=?BNJ=tfVGP1zmooqk7Ag=`j208$SHZ<*5<%@foP#u*~EK5<%r_aHVuWQ7y{N zE+Tn>tJI2RoQl4jikBoaaxhDL#!u58w^V5y2W#+MY{ZGO5F}oQ+id?sJzT7R(~fAT z>865~k6pR%)pDBYtf}77gLqzD%yRZpwh8?JJ#}(@gu!S^UFwD}za2ZPL&dk~hc(!N zP>ZyYNN&oV!K*5sU{^~$J_~=&y8N*i8>u>j$4ihvo54TX zTyf#;T8GbTChoNa@tK$bG`|=YvvJdj=jjN z8fL~Y1v|OPBMc_J0ng%(kSw~SOw__-a5h;SNBE>q;A6`!7sKxwhM)1ZJQmzB9KS^u zn2QT^(KGm!#(+HAC?nV*5-uNL+kg|CH6MKOgT$yo-fB2L`CXih^Z0^>upKHLllL8* zd_|cS$FKpSHao_BV18S2E+kZCTN{9y+!4n?h#KUq;>@p2HMYSv=&(5l(H5@Y-HsP5 z++~l%NO6L+LC%dAt1NePHz1dN8XbQz-mu=x$huH~yQyC?sa(V5IUKxMAGT*1fAtp6 zNm`uHjg03glOxEv2jY9p%l0W!zij004QCoDb)#48=d-@U^EgO%*hi%p0FRSQqQX79 z4X0KFl1ogaZ^Ym}$V%lHr!?k`YDYEAA;)m%*l}?cnCBW>1J+S9CZVUM7Y~u}DapRo zQJjlgJPYouws)ZtwSjAU&Stib+=@0`FR7o<_GlLOrx|3JtJ6=`s(H1UdU||zb=dE6 z&5pz~FdSXPj7Mr>29^83${*L2%)?aNj#5!mE`R znz{4!{O`Fz<~?15aEYyiO>)4Qog=BBnrE%IRuArpdBMh=P-eS0afVbF2D1h4fJQIr z!4r9gQq&q2yaIFRG|;$&yEO-WwIiE!1m!*X=At}_?z}gxV9mo&4i@1Lc}Du{H$2=P zI?4pNoqM=`>yZ0Gso*~O=j`yA3fuY%SEE0>Sle-vaSgT}Or>Tt#lcry>xd%I7B2jW z(_i(`s_HSuUKEVXOyAwX3N}8>R#dkEFeD|&Py0xj#oKHcpDfg2A8`t&VmY&E132)> zViut}S>9*I+(OcG zs>FHt#>aSb=Ajui1BXVispJhw!h@iMCHU61qMV)P9VujuHOrg3$Xew$iW?L39QtPc zwEhs^?-q5LYN)f+rsRvp8%<#--&%3@T6XMSutQNXO7gjPCD~OS?@KTo+EXTXA13tv z?$ZZT=fh{vnk4TKa_^zksj7Gln$U$N(`PHY+&5Kjwvw3uB8^JiNOz9g`J(I*;0sSS zOjJj6_$e-urZcG~@uh=6!7ydUGU1TCBkj{W3Ki!SY~@*Y#i?NMt7xh!o$@U1AaB## zXrOmzUXO<3)3wrgf7@xP&T(o!EkF%We`==DldXNl*vV#q9N(fLPJ@qg3P;4c)a#-+ zk1miM@)A^ba*aYeOy<9K63@<8X2RU`$UW@tDbHjW42Kd1%CSj!Zf2uUXEVka4zs+F zjZk9Kk|ecF_Alg|ALRl!8bN!&rJyGMG_ka`nmGDlDJ#-0oBvj zj%N2*Jhu^)#>Y5Ne*_zzi92q*^x4`z^|JGzvn<~D<7@|cV76p_x&imphVz>N96e9C z44&J=Rw-R=ht0mJRqXxp zVDrRnVQ!|XIZ$JFlbt+B>oWL-8Y7Fk?4vzYc8z9NNxY*X>`K(Dg^ROI_ zC*uRTc7L`dTWAOyN%iLB4cX27QU~DXH!us<*nEyG-8cQ`rHLlTAXI z(7gQ6b9(Sea~G|+j)X@7{aO`##mYiWH2m50L=~*jf^)urld>BgX*us^Yi^jFj&fEC zcho(4>uFLn;do&FGdG(z%?$X=OQ9kKfyK59X_?wDiRYy2s70gs?5E+!;;F#tPQ&$5 zfGvf6z@+ENlV~7Q_sRH-nwAkiN+nK3BRXt#X8fY?d<{|BQ}FL4GCgepA6F7zv-_wi zTf8SRTePsVdI zl_bptuAAdH&$;X}dS zn|MPup(UQN7g}v`bha_C8LjE4q3mrb&V7pkRL%*o)$N#d+e#(mVEBt>?8#Y-){bkF z44Mz~=wBQrVYoZ{F^x24Pv02k)Hl3MC74hfz>F`W|1QJBca%K`b8x8zbZ4N;#@! 
z*S>Quis0;g#74_XcrO~;@vfPC`U|Xs^o87HvJa4$NT#Dk&;_qi@k-+Bh+#v@M0^;D zRMRrdC8c-{d4=Mx(Kv2y&__(3Ll%(db89thL00m@Mc{#tSn0v;4se+lPvfd^i-dtw zDo&CthSbn?KKm2s$&E-E&c~xQj5``?;!dees$>j&?{hLGOPSgIVYO<3NBV)*%gSw- z*sp=Q!^O+E^FQLx35Wfi2;xj(F5Avz)}HOJoym+fu>X)kEN+cAH*rJrBmJy?05)fv z{!DM6Z^NJDp{--bdXm~&AI@Y}1?R2R`~g|BYKIX(p!cOLKN6*il7Vq5wh zS0Ea0EY)NoH6sBxbFtA1Mqy0D{eC|G z(8zzV=xwA80$2#qpt+_TY(xp^x~3%(wFQ^TBjxRNS}4yM2vYndRL3 z3*+gzQ>5S2i3EHR#c=Oe#gjXSw?q=VpdrrZyE?~-$V6WoMY_EQJIK~qShjHAm3 z;o{4|t*hmA7kjhIX#>>G&QH!u+5*GZd| zfdAOfcCh8NJTBbNt`9<#I7b@6C%>Psi~Pj7mHzU0Zt4k!`K}0)nHSbK7Sw$P{Vbdr z{XRU$GBmaWV2x0wlC@-M$b#d|UJS-^XWJFj<+b3>-^@%3Uhu}SOM|K9b8)}KkZgBv z8P&`u#&tUUZ~duISQK{x)&%Q%Fb~dQS6wdfQ$zaJaeE`#=7vH8@Yp-{_Fu+} zA8AZs2gYe@8n-djqNaX9Qys}n7b3P}F3Z7f6kXu)rcx`~;#CuIt;!(IC1A_>REu8J z&TPybDq6uJ&PoJn{{;HTPjG2jlA=$U)o)VGs(_3elQ7zZ`y!cspB@fIKrPzB+`i%8 zrm1@HuT`y?B&({Mr{UU`vUe=h@Gv~l+COmHWDwi7@~{V@9m&;mMu0Vw&NBtKTT>?7 zC$5d`n_Pi!G9%b!E^hC}>{k0?_aM_|b60IiCfGOPS(hL8ZnA)9B3Bt(x3~yaTsuZE*(3;s=aV zJzTD*m=}JrwRZp=^&+0%mu3atSqJ_xAGY(^`f6@th}GtCcX5jPk*TbdzKD$OGF(Jg z?4#^SsKy^l;@W-ddc&u89pC02HrM||51S2Jl@HbHBs|P~PGJXd1$(f062qw_&-v7M zP%Dnxsmx_X?Ph2Zqj(NBU)!t{c3{*8&z?t_8-VV1N<1Ph#ch!d_AZ;U2mU4tU%z=8 z4PYtq<2I>5mZU!2)}L9#$+OAJn-al{I+V_Go7wF#9C!t0r9Al8&f(DR%hQ;^_OCc5 z#uliE1>q6i!=yB2$9!(m^ttWnWX;B#^-WJRj!i<}^}_ft3ZjzKCR3C_yY9TD-e5OM zJUPL7xESN{(kAM-2FUW$_d-C zft_rLoa0)sjT@%Gha`2Zy80^ff=S-R+zGhNsig9)2|qF$exV%<#zHibk1$Is&_|}?3E{pPvViH$38WB9 z=|xeC&Z+`=B2^tinkW|-J%B9O12&#laf$dh9>Hwqf}i-v)RDwn(1lIyFYSZY2zw2_ z!7kv8NHn8jq9^=K5)PJ)V9o@v>I9IDP4}$DO{Q_CgFc;rdU%A50XbZWcndS~27B>j z4+|H>d(s$co>{D8f^E4DTuqd2hb?GGzm+|bU*vaqjJ{_fSgCFIS*~R;j9mU4NYx=`102l6y zYil#Q%^RM@2X0|4ilQ1vZsdy=L9V|N@5nH^;zLpv$4GILrR6y^!M|#-OQ9osAbmib zckptjV|(a%Ce3>AI7lR}N49W$vl7`lJiu&3UUvsO$y?*9 z%LhZhpT4>rt=1ovuohL|B0u|8Q1=uP?mgH>Uh$tgBI9IiickJ0HKjCc_ME+ zHs`p7=IO(E{w+`+$f7hSI-$ZH=b#DHaMPtMDWC*p&4GaaXDpZ~FS zA7EZoSKRn_=}1we7b$`u3fQoK6;v$PjT&Q(E%p{$)Tl8=P1MA0EU|ZkU9neG6hV*< zqS6GBCejyn-_Lg@`}#ir=fCsp?yt<;x%c#Q?m1^jyUoeZhNz=k^&e#Cj}=Eab6vM} z|EfAzecd9}?1Stk{$|bW)u*nxdhI4MlpBJ^a8)iZ$ay=v`XL#w>#RG^P5juN@)oYTDTGdrq$FBl3A~6SZu`(>fAn@2^v-*X!|iN7~3jxsOEYPp_`hFQgTH zdTjmKY^}FMcP5jt|51nYz8#Vg{F+li^8i*j4J+gDGgALp`L9pXc02Kx>UomkEoyZy zrr+MNzTMVOR%L&{y30I&VY2Zs!u&_&2el0ShZL`lB`<&Eoo`Ifel0H8w5ACubDQU#ZY$#2 zmtL(>?b=E2i{12GeMO(TcUT%DXrU|RL>{QRYJV%=NtClu-C?+7AChnfnOGm%%TAU5 zM3Q3y?zo%nxg}ltZN298zp(4L2bt=5Na}l@uq*$m&in@&#*^`Kr9v$bbXnD^=-Y^G?-$Rr|{f-%Z@}lQnm)dBc9Tpm`R*Cu) zJ#Yr;M>(m^Vd7{L<%-{kOP26hKdXB=|7|zWc)s3_^@rAfTnzk3@xaY+$(wbS=~=R_ z&VFj~*3+v~D&J+5AIOhCizGd@@-tZ}%Xua@iq8KVHlJtvoaAX|U1aGjS5?0Kx+`$W zFtyx^<>|bj7u#8DCs*ye?najBkoCW3ZH?FMaZvS7HUHMJaznAq=hzoN(9>&F6<&qT zD^w&L%D!waI(3QsiwV^O>>OQ6e?89PYO4R*9=fZ3q>B7TxwfZ}o{Q=0j&j5gKpU+| zmhVaDr4@N<&9`9i63&<<8gUq};tXB4mgqqEW1TZ_(8KcMC-Bwtw7ea8?2C2JdBU#A z5#-3ObvKps`ihLe6?Hn>nVqf%udaySyQ-hIsoc^2!0!0v9&&Ad&3xYK`05JWaz8)q zr}ZbPAU@W<(O!1;22}Oq#jlp<*hHpIyS01Si65|TV|ixlbSD_ZMmu1A7g3unYZmGf z{V5tduX?zyDgS}p1FKJ94RYLl}PM$HU0U!7gX+m#>eoo4ks(@qN&F|Ls#Glbw-Ki9K{AZ0S^sR zb<~U0SSye3br#oyq$sUoC#5M}`nhvw*BK}xwxegkA6;i<e)qmil5O|houqr-utU?`AKfy*tMsv?J3e#Z|$+_x-XX{v5!3AAJ!eN z%lNr+mY1ngeVHVlDdzVF_S@stGd(kEBoFRmohY}nbJ*P2SMiQp%BMd?SFULlSIE4X zA^Z2%nt^KUULrAP*gHE=1ltcVuJFlYASO%+pc5hv?@U z=>f@Bo+^{=64^ta@ag-@VqYlR_yr!5hK%y->f14?Yp?lD&$eH@ZUH{|q3V>XnQMPo z+py~9wRP~zSyh8sWP7Z;b>02z|D-e6PpJD$ag5j5lKbgVzlU6e-;syis;k%Etq$aV z)bhA!$2qj=BA&{@vc+!`-@D#jtGphI1|B3UkFno=l>V`oiN9VU=D3Mx$85wR+g3%v zTNSTVp6>ber^(3MU%gI$d&RqYeIcs&hQ=ITryf4oPo~8~_Sah5U2h=LHA8-Qo=W?O zI-{j(TAsrtZB!!-sJl>=U}c>LMqWFb^Va)M`R%F$Sa=TuNlL}5Y|94>WUeBUvZ0~lJCl){FnF>#l8gJC8VvFv=XIUZ!e2grn 
z?d+TV+kVMcERg|X9rfzIS7)rfg&|@=OLf?9D--i|@z0M%@ZS|*zS-{Q?y`#B^*pjG z_>SA*<2^(nw>6g~<~2ayt3%iI(O3TDs-3HT5&N64_H{i`dfPiVY28!nuT`nG)GpW8 z)h~-~Kc>fX|C+PdTl2)Xch|pbb1}R#;flEvf8E za%^08EX{JU+QVDfA)o8Jwkv6Io~-%VJlZOC7Yq5QC#V`205UHap)TLGqRtHV%d=o| zzl@9}c1sRq6V2j3?aaEFDuR6@x!%e1LPqEpc*Od{*L_y?qv-d0tgqHpqpH4N_sqJU zbna3+%nR37nfpFw-%zKDz2!*0KqpS+b?>Ir$}Rlwra0$ddnni8kOg)g+KTlouyb<0 zIQZkJt8CEmy)D=%MSiCJ7j5c$@E2@&I;dxB|2H< zz?)$3nOODnqMs+q1Ky6mw!Pie&DqBHiO8L3SE8dXI=4H9ioZP|{x;cjIlkt-ZpGrb zzN(3?HaoAcNrvvakJq21V`FmtfA&nHu2y_IOX*0J@w>BC&e0e1K0Cx`u){vn4}Az6 zA1@DRdzpOquueDgE5{prpVi5typS+VZh_39T

    1Rs~9mL4NS zv&>4bv|rgsH<-WjfDU7e)PwE6io`Fp?o_Qmqk~WjaJ?BU&Se3g$@cq9cFr**?9b}^ znxWx5e`rp{N$BDa_82Zx&9rHqu{uI{h8Az_IGqf7)xBC)#t>OaP3@nJ@c&Y~jGaCC zEZx~YB~_wX?*)OE>=?DRixKH_J|DidSnWmj!B0;fyQ=hu<%ab2#Pe5q2_xBl?LBq3 zi~aBCMR~4M{dDQN)%FjEs#k0#d*o<%JHS~Nh}^to5AHD0&;RCodal-QKE;x1C!G|S?QtbY8J{E z8NfR0u$7;a3EWJ!|8MvZC)pdGEwkja%J0Mwe}Lg*bQY>dGVCSC?>_k{BV|JmwXe2O z-DdWoSK3#Y>x`+SO?!JK56RG(XWwYJ>e=ID*!^V3>u_0p4cVVtdNRP>asZl;orCNS zzK+VbQMY=Uii#;Ls~PL&t~-jia^(8;Mbza!@#@`CP>tuD-5^G_4?QpgHh)_@C+Z-S zF}->gc`+Ba&0sTy?{)AiJ!Yp-OrPW{vEyq{Hhp8W8y`R=!f5L_lYzk%G8)t>Zs z430jPMyM-CwI^1*z$&|hWzv|xK9xQ3g}v1g_-m-@#1-i6I(vUd(IP!b&TmE3 zP6ma8Sp!?x;W!_i_L9+dx#uw7qz}Y!`&qZCw;$z6ZRhf8hspoz&pz1Hxu2kcPszG- z;c|}L+5K46@5#mcCu~1aJ&3jWGKlnWZVjH9AS-m2nxCz=bIr!OJzM}{FN=L&f-k;A zJO6dO2@V&Fe!U>W=M~TLI8xN*jf!De1+SEeeK*?p z8a0g9G5M`Jqj~77*ys0>tT*bG(?_LMKl-C_onPc%+$s0>P|?T6VDTu5yGqo(t7o+| zQbVzZrFg9|dKQe9(YuZD z+cxCt33jobv%9iDR`~Dj8QrS4&!eF5J{-l9Ha5{2y>qU3VH5K4|SAX(ZeEVyj}e{s-I%bKcV_B@xoIy$uRcCC+zRZe&2`f zThPXpj$={RiKy*dFnNG{cpiUz2zpad!`kYe=uGctG4u<_?)}+(&x@C~#2cr|UcX4x z{vWz%yvg3Gugdf=lK)yZ^L^~bkFD)TdSq#3Q`YzZ`-2BL_7~|r7zBO`5@(Vk7uhe{ zS-tMhH0V8*7t^5wJQZXnig=Oc*k6UuX5tEC#nIkk9iIs+EktKGkpb`mjMt^{>yvNa zu#(!S>fI0rH>+6=Hb2AjNN3dW$d}WZ^Le)wHN()uU0}3*%>vMS2dqB8WvAl*tH9`5 zUe_rk>b_zVC$gv%AGSBnG10VM!&Gq_d>_^g_Efdkc zJ4oDG_?BbQ#dvncc-nC|DLO^w=0a9iZ*bTKJa#}Qd)p=I%2uBb5`Pub>&hGbi0n!K z()4HkoY$w~=Cp2M~=D8^Y zN!fliD|n&1TGiIHNed7gi}v%Zh->62p3MS1y|5tnVU-{3_XZF zsg~z^i5Q{Q5B4g#HCIBYCPM2B1?22s7fQtYFxx{%FC z%>TI8T=cb`wRr{zJO>ij`+eFdH_8xt#k_6@pVw4Sd`yxrAvK@0L%&SM+xBd#b6AK^ z;f^Onu*0&K^(1F+d7D?5Fb+=L-Pg9jopP6AFQt^8l<1seI%WT5+ zq~q%99dP*7WXV=mHH!cEFlY=!Db3K?ySiyyMSGruFIuuuyV@VwniW5k4xPZ>OjqEY zWUxME7xzD~auFW6oxC{J>-#|E8h8CfWK#zb*5(DW6GrH8*@WM;xBdhtkpIWXS-yY= zG8hz=%QIfi_L?p#+)7@_L11*XOqgf+;4@XrECrECDCrIU{VVwCGBT$#d*TZ@rg?(S zA3c3!FCJ+#&+PjT%PQS@29kQ4pp_mZ=d(0fLp0I|)V@Zw_v7mH%KMRScoEh=vG3I! 
z1a~4;E)&V!3jGWwd9D|uI*dME%bFdt0JjH2jfx3y)2AFNs$nqn@*x z88RNG&^ZtDy6?eX@2hpL$I}>ylKzNVo}g{N!apCAxo?PfKZD~Qpq1jU?`wa#hPFz5 zOHF&{mTwP0){i`yjJIcHVqY`arDsUK*q?3!{#8 z&*5=&r(w22V{K7O54PzKy!7Kx+o@=;Ma@DU{3m2+JoH6m_AK`I!#ukG@N%9)Pfg(A zaB=5z(b#n`@g&N<0$k4nk86yQei=`Y>ra8qR8%uZCg6!On4bWFXK05zcr5pW&KtO3 zp3JEEDCRk<8e#W;n*4_DXeN^7M7H~13(b57J(TB>{H@SLm!PIYP+Mae;!{!CIODv= z#=D4>)0W5fA^CPEYCalW$KP&Yr}1~@I0j5gn)x2o-jNA?F$vlZMOBdt_0Y>v;#K=w z+i9?S6qpXSn#a-pJyyGOL3_Wj*(Z@hRV?t?te39W4S#P`;-|YerzJzV| zB-wp4c>SH#da2LPX1D#-tEbSfa_H@J%nl=u+x8q zJk#rR{(1t>eZ)VRQ1LBT)wA|JNZX-k=`b4kc$(_>`UC$#%=Kus7JJ!)-Hd0nJ$P-y za@pFumY~+KvaYUG-|+a}cZbVy_0AQYWlc_^wcaqJtKl&=*;!UFj1Rs>e#%d@_h^#f zG2h>ZdOD-2Cj7$DxTOK8v?=slXFS-Pf0Ii8bDXt5sBZ*PO>64b%;!U{MnenP!e8RM zi70AjbshYp}AXq`n=4EuSoQUw0k?&Y&-o9xAZiVo!Pc~ zJ9hN03!17c&w3M5KH9&(%BtkPY*yLJyG_tsE1Ac$(b?m$b3C2aS+>G*5@;OGdy$U) zr&-@>C4V6o7LZVj*fBG}?R_+M6As%vN(r<)?a-3U$D9k z1wM<;Zu8lD@&@|x8Ha$^X^yjbxtBZsVl_94Ed16tyMXKdI((eSF1Q(WyhOTBBEg!2 z%Lb^Y2blHJ|7{yo)emGk;K7D^bXV~`o5_l4iz9nEI)PwE5ba!8Qq4&CAIPw0=)9BV zar9y%Rj?U9WuL{HOV!6!ejg@FD`Z?X&{JWFXRb|w@dvGPdy=U&2+gAP)>em_mZPMt z=*xlladY%~54c}O675MMZpSL9VqMqeIX3{eZ}^-)g57r@-i@3(25nx4C;v`{f9`mJ zEW00{-Un)bg2UVWJ;&>Njd?Y=eL`Y%lsnqXQvwD#hM=tztm915>#wM6UtahCJhYep zd(m6R`!>+~4~hQ)**zWvXXDpZB>y^==Sr|z%c7c0vVVn>Wu?%6jZj!85!((RJIFrC zP&|A(E9~ULGT+mun}d6IaBb}BUv&RmtcrXZsreb1@V=^r7gT!P43n3c+xB8vGtK;Y zlIIk@Qdhp!cewR!k%uw#T|-h#MjOcXMQi;~TQibu9E~`PEp?;l)t!#pc`Glm!``6- zMx(km^y1#dF@%kA0LeNCtsY7841z%mEzsBotgyL0o6Xi3t70QiodB-W z$^CiwdOCSF!IiJ0vHR$o3&?;WaJGUTe1`0}$ei{Ei3?df&#^dr&=A|mf9)c2FaTF{ z*Fo#e!h^aG9bM`5B(r}GU#;Lv{(y&GQDZ$6uD4*nFE8}$FFv~!4ZV$0jwbPT0LeZi zVHeusG!pGhw$z{LmV3-$1nN4UOud#~ID~C-6bd_l);ZXf`+!UsS$pJg7aZvxpd zioY9__c|}Xx1h?C zP|ZPnM3!*Z{1Sp|*)4e^ZL{8u_-9)%GXr_Jr%hVEhkqyU#~+ zCy{x_qP8PgFbC4e2l#sgEpirG`-|gm=zT?e!_Bdw=)c0!AC5f^X>Dcko`Kp(o&@*tnaJave#bbHHnu z@9qSXhspJ4SX$SC)*$w8H!<7gEaCPbz9Ff%2k0HZ^4*7IcmlV7Kw>_`${5ZndxS2y z!}a&F;}Xw0qL6ioLEZ`0ufpYMFq|*Ku{jQHf~tc3IcD}X+>iJDd*J*$EBQWWPvOZm zWG}P@!5zTw0G8g-?3WV@zS*&$uy(#}1IkT!%dNq>2d+63EH2{@-N38JGu`iIM?Zx> zUuF&di)DBoo<5DlKLy;*1-~nB_%&d9n+`HB@ljvLxp~Im)6Rc{#hrbhbK&a<5^zs) zVLzBU!0W+|J;}F2*iOfhp2yk~*#{5wM(^Rn&2a4u^su42m*j_kjB8im($V;DJ{tcV z&o)9asRCGM^4lX3XKzdE z;Jbr6bigfZ^+9U^Uy)9cK5NPDW$Js@;F%xHy^MN&$+rF+?~lMuc@ot-1%JMVGv7jW zuj0tp@#|auKTQw3hJK%Azx|iR{TvD$=G8NPpY!S^zr&68fveue;U6384X-{%ZLj%s zG`W~hMuSrk7LKp0wKwLij9wVeB2u)iyt;c(LC zGIrfx@yX?+&uRE{C`ySA-PiYnjkOiZh#h?d>GwArbqh|+9_5wv?*$~;C1ln~`1(-P zwim97X4wbr9Zo`?;=4ZO<(@bq)QXBy@LI+tkAG4 zcy{Yl@ir}_S3Mlm*ip}K3!mg&Q-8JNrB~PE(p7jnpVmS5&7Dz)mI(E4`n`St?5gz-={Hx$_kyojo_z{IJg2xr;bdq;#$e~Fnd#RPC>S`|fnvSBP^QNNY zdBqh|y!zHzlhMmuR@YKi)epY;!4)}chHGbfHN~qrsIcUVZ;Ua|C*S%z+f_e%Kil=+ zI4_o2_-3~A^M6snH<@jISDACnWtr^y11X zIAx}9@?Y;t>-?Jhh(vkYIj^x4o}xK#q6K5cUP`Z?;eU3dALKjT>WsHoz;EOJ&#iT( z1H$Zztj1rNMI}n;f&#Wi{ks-NyrkXH@zLaOr0~8tYYX^|2T)_R>v4Ifg0nZo=WSrT zf&X35Omt>{N9d?0&S-~v`uX1%j|?t!*iamDT%iTeWG`> z!eC2MCHqbbY4|muyh0{=H5k{ht~SOgi)qGft#PAbO&dC=O74C=e7K5MYvYUtj;8*v zhWifg`U@y_aBW>2u-v%8Q*w|Gw+^IrF?=06ZKoybT zHD*=mo=a4Lc0nP{VWb((-3;HwvJL-719mOAZ4a+ofp#R_j(Bxxwg*70s+P!};!)c;~mXu^tv{{3FVXuYN8&?;M}L=y;ee`~3}QffJ^9+D^cRntg0JC zs18DF*YKJCJ|H+}MU|%yk}| zu7~9x3uzK5x4z(?X0GaBj_uG>jWw*plhNqQyD8Y`{25!loNk>IQzdX3bW z=$Hv3V@S7&WXA%s;yZYp@0#yn@h21)-5J|1x--_mMDKnyr*DlP3AMm?bNw%6Sae2H z)K${+z>}qkT7*YXHl`fWHyX0?5UB~ z{RMg)iORpLOTnqJHMg|pR(=yd z*$QnO0yc+;FePRit9F2u??h@Hi&FZNtjB^`YqWDRS(s?lS-gS6&}S@(erTf`$ZlEC z{Q&%44?Xn&#d%tLeOP51~I@ifi!cdv#y{tI)_-ivX3FXvZ)K|HE8 z=wJ!Do&(2Ul0rYY#}ri15j}MSty9Rh%k0_QL1SFbiu()hjE@=GYGh`KA2dZzo3po~ 
diff --git a/preparation/requirements.txt b/preparation/requirements.txt
deleted file mode 100644
index 8c6c40960..000000000
--- a/preparation/requirements.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-librosa<0.10.0
-matplotlib
-praat-parselmouth
-pyyaml
-soundfile
-sox
-textgrid
-biopython==1.78
-sqlalchemy==1.4.46
-praatio<6.0.0
diff --git a/preparation/utils/distribution.py b/preparation/utils/distribution.py
deleted file mode 100644
index 4ac44e869..000000000
--- a/preparation/utils/distribution.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import matplotlib.pyplot as plt
-
-
-def draw_distribution(title, x_label, y_label, items: list, values: list, zoom=0.8):
-    plt.figure(figsize=(int(len(items) * zoom), 10))
-    plt.bar(x=items, height=values)
-    plt.tick_params(labelsize=15)
-    plt.xlim(-1, len(items))
-    for a, b in zip(items, values):
-        plt.text(a, b, b, ha='center', va='bottom', fontsize=15)
-    plt.grid()
-    plt.title(title, fontsize=30)
-    plt.xlabel(x_label, fontsize=20)
-    plt.ylabel(y_label, fontsize=20)
diff --git a/preparation/utils/slicer2.py b/preparation/utils/slicer2.py
deleted file mode 100644
index b522ec164..000000000
--- a/preparation/utils/slicer2.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import librosa
-
-
-class Slicer:
-    def __init__(self,
-                 sr: int,
-                 threshold: float = -40.,
-                 min_length: int = 5000,
-                 min_interval: int = 300,
-                 hop_size: int = 20,
-                 max_sil_kept: int = 5000):
-        if not min_length >= min_interval >= hop_size:
-            raise ValueError('The following condition must be satisfied: min_length >= min_interval >= hop_size')
-        if not max_sil_kept >= hop_size:
-            raise ValueError('The following condition must be satisfied: max_sil_kept >= hop_size')
-        min_interval = sr * min_interval / 1000
-        self.threshold = 10 ** (threshold / 20.)
-        self.hop_size = round(sr * hop_size / 1000)
-        self.win_size = min(round(min_interval), 4 * self.hop_size)
-        self.min_length = round(sr * min_length / 1000 / self.hop_size)
-        self.min_interval = round(min_interval / self.hop_size)
-        self.max_sil_kept = round(sr * max_sil_kept / 1000 / self.hop_size)
-
-    def _apply_slice(self, waveform, begin, end):
-        if len(waveform.shape) > 1:
-            return waveform[:, begin * self.hop_size: min(waveform.shape[1], end * self.hop_size)]
-        else:
-            return waveform[begin * self.hop_size: min(waveform.shape[0], end * self.hop_size)]
-
-    # @timeit
-    def slice(self, waveform):
-        if len(waveform.shape) > 1:
-            samples = librosa.to_mono(waveform)
-        else:
-            samples = waveform
-        if samples.shape[0] <= self.min_length:
-            return [waveform]
-        rms_list = librosa.feature.rms(y=samples, frame_length=self.win_size, hop_length=self.hop_size).squeeze(0)
-        sil_tags = []
-        silence_start = None
-        clip_start = 0
-        for i, rms in enumerate(rms_list):
-            # Keep looping while frame is silent.
-            if rms < self.threshold:
-                # Record start of silent frames.
-                if silence_start is None:
-                    silence_start = i
-                continue
-            # Keep looping while frame is not silent and silence start has not been recorded.
- if silence_start is None: - continue - # Clear recorded silence start if interval is not enough or clip is too short - is_leading_silence = silence_start == 0 and i > self.max_sil_kept - need_slice_middle = i - silence_start >= self.min_interval and i - clip_start >= self.min_length - if not is_leading_silence and not need_slice_middle: - silence_start = None - continue - # Need slicing. Record the range of silent frames to be removed. - if i - silence_start <= self.max_sil_kept: - pos = rms_list[silence_start: i + 1].argmin() + silence_start - if silence_start == 0: - sil_tags.append((0, pos)) - else: - sil_tags.append((pos, pos)) - clip_start = pos - elif i - silence_start <= self.max_sil_kept * 2: - pos = rms_list[i - self.max_sil_kept: silence_start + self.max_sil_kept + 1].argmin() - pos += i - self.max_sil_kept - pos_l = rms_list[silence_start: silence_start + self.max_sil_kept + 1].argmin() + silence_start - pos_r = rms_list[i - self.max_sil_kept: i + 1].argmin() + i - self.max_sil_kept - if silence_start == 0: - sil_tags.append((0, pos_r)) - clip_start = pos_r - else: - sil_tags.append((min(pos_l, pos), max(pos_r, pos))) - clip_start = max(pos_r, pos) - else: - pos_l = rms_list[silence_start: silence_start + self.max_sil_kept + 1].argmin() + silence_start - pos_r = rms_list[i - self.max_sil_kept: i + 1].argmin() + i - self.max_sil_kept - if silence_start == 0: - sil_tags.append((0, pos_r)) - else: - sil_tags.append((pos_l, pos_r)) - clip_start = pos_r - silence_start = None - # Deal with trailing silence. - total_frames = rms_list.shape[0] - if silence_start is not None and total_frames - silence_start >= self.min_interval: - silence_end = min(total_frames, silence_start + self.max_sil_kept) - pos = rms_list[silence_start: silence_end + 1].argmin() + silence_start - sil_tags.append((pos, total_frames + 1)) - # Apply and return slices. 
- if len(sil_tags) == 0: - return [waveform] - else: - chunks = [] - if sil_tags[0][0] > 0: - chunks.append(self._apply_slice(waveform, 0, sil_tags[0][0])) - for i in range(len(sil_tags) - 1): - chunks.append(self._apply_slice(waveform, sil_tags[i][1], sil_tags[i + 1][0])) - if sil_tags[-1][1] < total_frames: - chunks.append(self._apply_slice(waveform, sil_tags[-1][1], total_frames)) - return chunks From cd26ecb1435fb748aa602e81710ebcb69c3e312f Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Fri, 14 Jul 2023 23:29:18 +0800 Subject: [PATCH 465/475] Add MIDI distribution summary to variance binarizer --- basics/base_binarizer.py | 1 + preprocessing/variance_binarizer.py | 42 +++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/basics/base_binarizer.py b/basics/base_binarizer.py index e3e6ad872..41091a26f 100644 --- a/basics/base_binarizer.py +++ b/basics/base_binarizer.py @@ -185,6 +185,7 @@ def check_coverage(self): continue phoneme_map[ph] += 1 ph_occurred = set(ph_occurred) + print('===== Phoneme Distribution Summary =====') for i, key in enumerate(sorted(phoneme_map.keys())): if i == len(ph_required) - 1: diff --git a/preprocessing/variance_binarizer.py b/preprocessing/variance_binarizer.py index 61ecccf11..8b9dd5ff4 100644 --- a/preprocessing/variance_binarizer.py +++ b/preprocessing/variance_binarizer.py @@ -18,6 +18,7 @@ get_breathiness_pyworld ) from utils.hparams import hparams +from utils.plot import distribution_to_figure os.environ["OMP_NUM_THREADS"] = "1" VARIANCE_ITEM_ATTRIBUTES = [ @@ -82,6 +83,47 @@ def load_meta_data(self, raw_data_dir: pathlib.Path, ds_id, spk_id): self.items.update(meta_data_dict) + def check_coverage(self): + super().check_coverage() + if not hparams['predict_pitch']: + return + + # MIDI pitch distribution summary + midi_map = {} + for item_name in self.items: + for midi in self.items[item_name]['note_seq']: + if midi == 'rest': + continue + midi = librosa.note_to_midi(midi, round_midi=True) + if midi in midi_map: + midi_map[midi] += 1 + else: + midi_map[midi] = 1 + + print('===== MIDI Pitch Distribution Summary =====') + for i, key in enumerate(sorted(midi_map.keys())): + if i == len(midi_map) - 1: + end = '\n' + elif i % 10 == 9: + end = ',\n' + else: + end = ', ' + print(f'\'{librosa.midi_to_note(key, unicode=False)}\': {midi_map[key]}', end=end) + + # Draw graph. 
+ midis = sorted(midi_map.keys()) + notes = [librosa.midi_to_note(m, unicode=False) for m in range(midis[0], midis[-1] + 1)] + plt = distribution_to_figure( + title='MIDI Pitch Distribution Summary', + x_label='MIDI Key', y_label='Number of occurrences', + items=notes, values=[midi_map.get(m, 0) for m in range(midis[0], midis[-1] + 1)] + ) + filename = self.binary_data_dir / 'midi_distribution.jpg' + plt.savefig(fname=filename, + bbox_inches='tight', + pad_inches=0.25) + print(f'| save summary to \'{filename}\'') + @torch.no_grad() def process_item(self, item_name, meta_data, binarization_args): seconds = sum(meta_data['ph_dur']) From b658499ec598460758a102967b6fc4f29443ba04 Mon Sep 17 00:00:00 2001 From: yqzhishen Date: Sat, 15 Jul 2023 21:10:40 +0800 Subject: [PATCH 466/475] Support spk mix in variance exporter --- deployment/exporters/variance_exporter.py | 102 ++++++++++++++++++---- deployment/modules/toplevel.py | 13 ++- scripts/export.py | 69 +++++++++------ 3 files changed, 140 insertions(+), 44 deletions(-) diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py index 85c137322..0a54aaddc 100644 --- a/deployment/exporters/variance_exporter.py +++ b/deployment/exporters/variance_exporter.py @@ -1,6 +1,6 @@ import shutil from pathlib import Path -from typing import Union +from typing import Union, List, Tuple, Dict import onnx import onnxsim @@ -20,11 +20,14 @@ def __init__( device: Union[str, torch.device] = 'cpu', cache_dir: Path = None, ckpt_steps: int = None, + export_spk: List[Tuple[str, Dict[str, float]]] = None, + freeze_spk: Tuple[str, Dict[str, float]] = None ): super().__init__(device=device, cache_dir=cache_dir) # Basic attributes self.model_name: str = hparams['exp_name'] self.ckpt_steps: int = ckpt_steps + self.spk_map: dict = self.build_spk_map() self.vocab = TokenTextEncoder(vocab_list=build_phoneme_list()) self.model = self.build_model() self.linguistic_encoder_cache_path = self.cache_dir / 'linguistic.onnx' @@ -55,7 +58,20 @@ def __init__( if self.model.predict_variances else None # Attributes for exporting - ... + self.freeze_spk: Tuple[str, Dict[str, float]] = freeze_spk + self.export_spk: List[Tuple[str, Dict[str, float]]] = export_spk if export_spk is not None else [] + if hparams['use_spk_id']: + if not self.export_spk and self.freeze_spk is None: + # In case the user did not specify any speaker settings: + if len(self.spk_map) == 1: + # If there is only one speaker, freeze him/her. + first_spk = next(iter(self.spk_map.keys())) + self.freeze_spk = (first_spk, {first_spk: 1.0}) + else: + # If there are multiple speakers, export them all. + self.export_spk = [(name, {name: 1.0}) for name in self.spk_map.keys()] + if self.freeze_spk is not None: + self.model.register_buffer('frozen_spk_embed', self._perform_spk_mix(self.freeze_spk[1])) def build_model(self) -> DiffSingerVarianceONNX: model = DiffSingerVarianceONNX( @@ -68,19 +84,22 @@ def build_model(self) -> DiffSingerVarianceONNX: def export(self, path: Path): path.mkdir(parents=True, exist_ok=True) - self.export_model(path) + model_name = self.model_name + if self.freeze_spk is not None: + model_name += '.' 
+ self.freeze_spk[0] + self.export_model(path, model_name) self.export_attachments(path) - def export_model(self, path: Path): + def export_model(self, path: Path, model_name: str = None): self._torch_export_model() linguistic_onnx = self._optimize_linguistic_graph(onnx.load(self.linguistic_encoder_cache_path)) - linguistic_path = path / f'{self.model_name}.linguistic.onnx' + linguistic_path = path / f'{model_name}.linguistic.onnx' onnx.save(linguistic_onnx, linguistic_path) print(f'| export linguistic encoder => {linguistic_path}') self.linguistic_encoder_cache_path.unlink() if self.model.predict_dur: dur_predictor_onnx = self._optimize_dur_predictor_graph(onnx.load(self.dur_predictor_cache_path)) - dur_predictor_path = path / f'{self.model_name}.dur.onnx' + dur_predictor_path = path / f'{model_name}.dur.onnx' onnx.save(dur_predictor_onnx, dur_predictor_path) self.dur_predictor_cache_path.unlink() print(f'| export dur predictor => {dur_predictor_path}') @@ -90,7 +109,7 @@ def export_model(self, path: Path): onnx.load(self.pitch_diffusion_cache_path), onnx.load(self.pitch_postprocess_cache_path) ) - pitch_predictor_path = path / f'{self.model_name}.pitch.onnx' + pitch_predictor_path = path / f'{model_name}.pitch.onnx' onnx.save(pitch_predictor_onnx, pitch_predictor_path) self.pitch_preprocess_cache_path.unlink() self.pitch_diffusion_cache_path.unlink() @@ -102,7 +121,7 @@ def export_model(self, path: Path): onnx.load(self.variance_diffusion_cache_path), onnx.load(self.variance_postprocess_cache_path) ) - variance_predictor_path = path / f'{self.model_name}.variance.onnx' + variance_predictor_path = path / f'{model_name}.variance.onnx' onnx.save(variance_predictor_onnx, variance_predictor_path) self.variance_preprocess_cache_path.unlink() self.variance_diffusion_cache_path.unlink() @@ -110,6 +129,11 @@ def export_model(self, path: Path): print(f'| export variance predictor => {variance_predictor_path}') def export_attachments(self, path: Path): + for spk in self.export_spk: + self._export_spk_embed( + path / f'{self.model_name}.{spk[0]}.emb', + self._perform_spk_mix(spk[1]) + ) self._export_dictionary(path / 'dictionary.txt') self._export_phonemes((path / f'{self.model_name}.phonemes.txt')) @@ -132,6 +156,7 @@ def _torch_export_model(self): 1: 'n_tokens' } } + input_spk_embed = hparams['use_spk_id'] and not self.freeze_spk print(f'Exporting {self.fs2_class_name}...') if self.model.predict_dur: @@ -170,13 +195,18 @@ def _torch_export_model(self): ( encoder_out, x_masks, - ph_midi + ph_midi, + *([torch.rand( + 1, 5, hparams['hidden_size'], + dtype=torch.float32, device=self.device + )] if input_spk_embed else []) ), self.dur_predictor_cache_path, input_names=[ 'encoder_out', 'x_masks', - 'ph_midi' + 'ph_midi', + *(['spk_embed'] if input_spk_embed else []) ], output_names=[ 'ph_dur_pred' @@ -188,6 +218,7 @@ def _torch_export_model(self): 'ph_dur_pred': { 1: 'n_tokens' }, + **({'spk_embed': {1: 'n_tokens'}} if input_spk_embed else {}), **encoder_common_axes }, opset_version=15 @@ -231,13 +262,18 @@ def _torch_export_model(self): note_midi, note_dur, pitch, - retake + retake, + *([torch.rand( + 1, 15, hparams['hidden_size'], + dtype=torch.float32, device=self.device + )] if input_spk_embed else []) ), self.pitch_preprocess_cache_path, input_names=[ 'encoder_out', 'ph_dur', 'note_midi', 'note_dur', - 'pitch', 'retake' + 'pitch', 'retake', + *(['spk_embed'] if input_spk_embed else []) ], output_names=[ 'pitch_cond', 'base_pitch' @@ -266,7 +302,8 @@ def _torch_export_model(self): }, 'base_pitch': { 
1: 'n_frames' - } + }, + **({'spk_embed': {1: 'n_frames'}} if input_spk_embed else {}) }, opset_version=15 ) @@ -375,13 +412,18 @@ def _torch_export_model(self): ph_dur, pitch, variances, - retake + retake, + *([torch.rand( + 1, 15, hparams['hidden_size'], + dtype=torch.float32, device=self.device + )] if input_spk_embed else []) ), self.variance_preprocess_cache_path, input_names=[ 'encoder_out', 'ph_dur', 'pitch', *self.model.variance_prediction_list, - 'retake' + 'retake', + *(['spk_embed'] if input_spk_embed else []) ], output_names=[ 'variance_cond' @@ -404,7 +446,8 @@ def _torch_export_model(self): }, 'retake': { 1: 'n_frames' - } + }, + **({'spk_embed': {1: 'n_frames'}} if input_spk_embed else {}) }, opset_version=15 ) @@ -498,6 +541,27 @@ def _torch_export_model(self): opset_version=15 ) + @torch.no_grad() + def _perform_spk_mix(self, spk_mix: Dict[str, float]): + spk_mix_ids = [] + spk_mix_values = [] + for name, value in spk_mix.items(): + spk_mix_ids.append(self.spk_map[name]) + assert value >= 0., f'Speaker mix checks failed.\n' \ + f'Proportion of speaker \'{name}\' is negative.' + spk_mix_values.append(value) + spk_mix_id_N = torch.LongTensor(spk_mix_ids).to(self.device)[None] # => [1, N] + spk_mix_value_N = torch.FloatTensor(spk_mix_values).to(self.device)[None] # => [1, N] + spk_mix_value_sum = spk_mix_value_N.sum() + assert spk_mix_value_sum > 0., f'Speaker mix checks failed.\n' \ + f'Proportions of speaker mix sum to zero.' + spk_mix_value_N /= spk_mix_value_sum # normalize + spk_mix_embed = torch.sum( + self.model.spk_embed(spk_mix_id_N) * spk_mix_value_N.unsqueeze(2), # => [1, N, H] + dim=1, keepdim=False + ) # => [1, H] + return spk_mix_embed + def _optimize_linguistic_graph(self, linguistic: onnx.ModelProto) -> onnx.ModelProto: onnx_helper.model_override_io_shapes( linguistic, @@ -636,6 +700,12 @@ def _optimize_merge_variance_predictor_graph( var_predictor.graph.name = var_pre.graph.name return var_predictor + # noinspection PyMethodMayBeStatic + def _export_spk_embed(self, path: Path, spk_embed: torch.Tensor): + with open(path, 'wb') as f: + f.write(spk_embed.cpu().numpy().tobytes()) + print(f'| export spk embed => {path}') + # noinspection PyMethodMayBeStatic def _export_dictionary(self, path: Path): print(f'| export dictionary => {path}') diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py index 9649af20f..6d2a97cf8 100644 --- a/deployment/modules/toplevel.py +++ b/deployment/modules/toplevel.py @@ -132,11 +132,20 @@ def build_smooth_op(self, device): smooth.weight.data = smooth_kernel[None, None] self.smooth = smooth.to(device) + def embed_frozen_spk(self, encoder_out): + if hparams['use_spk_id'] and hasattr(self, 'frozen_spk_embed'): + encoder_out += self.frozen_spk_embed + return encoder_out + def forward_linguistic_encoder_word(self, tokens, word_div, word_dur): - return self.fs2.forward_encoder_word(tokens, word_div, word_dur) + encoder_out, x_masks = self.fs2.forward_encoder_word(tokens, word_div, word_dur) + encoder_out = self.embed_frozen_spk(encoder_out) + return encoder_out, x_masks def forward_linguistic_encoder_phoneme(self, tokens, ph_dur): - return self.fs2.forward_encoder_phoneme(tokens, ph_dur) + encoder_out, x_masks = self.fs2.forward_encoder_phoneme(tokens, ph_dur) + encoder_out = self.embed_frozen_spk(encoder_out) + return encoder_out, x_masks def forward_dur_predictor(self, encoder_out, x_masks, ph_midi): return self.fs2.forward_dur_predictor(encoder_out, x_masks, ph_midi) diff --git a/scripts/export.py 
b/scripts/export.py index 69c5dc1f9..3328773a3 100644 --- a/scripts/export.py +++ b/scripts/export.py @@ -32,6 +32,36 @@ def find_exp(exp): return exp +def parse_spk_settings(export_spk, freeze_spk): + if export_spk is None: + export_spk = [] + else: + export_spk = list(export_spk) + from utils.infer_utils import parse_commandline_spk_mix + spk_name_pattern = r'[0-9A-Za-z_-]+' + export_spk_mix = [] + for spk in export_spk: + assert '=' in spk or '|' not in spk, \ + 'You must specify an alias with \'NAME=\' for each speaker mix.' + if '=' in spk: + alias, mix = spk.split('=', maxsplit=1) + assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' + export_spk_mix.append((alias, parse_commandline_spk_mix(mix))) + else: + export_spk_mix.append((spk, {spk: 1.0})) + freeze_spk_mix = None + if freeze_spk is not None: + assert '=' in freeze_spk or '|' not in freeze_spk, \ + 'You must specify an alias with \'NAME=\' for each speaker mix.' + if '=' in freeze_spk: + alias, mix = freeze_spk.split('=', maxsplit=1) + assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' + freeze_spk_mix = (alias, parse_commandline_spk_mix(mix)) + else: + freeze_spk_mix = (freeze_spk, {freeze_spk: 1.0}) + return export_spk_mix, freeze_spk_mix + + @click.group() def main(): pass @@ -76,32 +106,7 @@ def acoustic( else: out = Path(out) out = out.resolve() - if export_spk is None: - export_spk = [] - else: - export_spk = list(export_spk) - from utils.infer_utils import parse_commandline_spk_mix - spk_name_pattern = r'[0-9A-Za-z_-]+' - export_spk_mix = [] - for spk in export_spk: - assert '=' in spk or '|' not in spk, \ - 'You must specify an alias with \'NAME=\' for each speaker mix.' - if '=' in spk: - alias, mix = spk.split('=', maxsplit=1) - assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' - export_spk_mix.append((alias, parse_commandline_spk_mix(mix))) - else: - export_spk_mix.append((spk, {spk: 1.0})) - freeze_spk_mix = None - if freeze_spk is not None: - assert '=' in freeze_spk or '|' not in freeze_spk, \ - 'You must specify an alias with \'NAME=\' for each speaker mix.' - if '=' in freeze_spk: - alias, mix = freeze_spk.split('=', maxsplit=1) - assert re.fullmatch(spk_name_pattern, alias) is not None, f'Invalid alias \'{alias}\' for speaker mix.' - freeze_spk_mix = (alias, parse_commandline_spk_mix(mix)) - else: - freeze_spk_mix = (freeze_spk, {freeze_spk: 1.0}) + export_spk_mix, freeze_spk_mix = parse_spk_settings(export_spk, freeze_spk) # Load configurations sys.argv = [ @@ -132,18 +137,28 @@ def acoustic( @click.option('--exp', type=str, required=True, metavar='', help='Choose an experiment to export.') @click.option('--ckpt', type=int, required=False, metavar='', help='Checkpoint training steps.') @click.option('--out', type=str, required=False, metavar='
', help='Output directory for the artifacts.')
+@click.option('--export_spk', type=str, required=False, multiple=True, metavar='',
+              help='(for multi-speaker models) Export one or more speaker or speaker mix keys.')
+@click.option('--freeze_spk', type=str, required=False, metavar='',
+              help='(for multi-speaker models) Freeze one speaker or speaker mix into the model.')
 def variance(
         exp: str,
         ckpt: int = None,
         out: str = None,
+        export_spk: List[str] = None,
+        freeze_spk: str = None
 ):
     # Validate arguments
+    if export_spk and freeze_spk:
+        print('--export_spk is exclusive to --freeze_spk.')
+        exit(-1)
     exp = find_exp(exp)
     if out is None:
         out = root_dir / 'artifacts' / exp
     else:
         out = Path(out)
     out = out.resolve()
+    export_spk_mix, freeze_spk_mix = parse_spk_settings(export_spk, freeze_spk)
 
     # Load configurations
     sys.argv = [
@@ -159,6 +174,8 @@ def variance(
         device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'),
         cache_dir=root_dir / 'deployment' / 'cache',
         ckpt_steps=ckpt,
+        export_spk=export_spk_mix,
+        freeze_spk=freeze_spk_mix
     )
     exporter.export(out)

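With this patch, the variance exporter accepts the same speaker-selection flags as the acoustic exporter. Hypothetical invocations for illustration (experiment and speaker names are placeholders; the mix string is assumed to follow the name:proportion|name:proportion form consumed by parse_commandline_spk_mix, and proportions are re-normalized to sum to 1 inside _perform_spk_mix):

    # Export every speaker of a multi-speaker variance model as a separate .emb file (the default):
    python scripts/export.py variance --exp my_variance_exp

    # Export a named 50/50 mix of two speakers:
    python scripts/export.py variance --exp my_variance_exp --export_spk 'duet=alice:0.5|bob:0.5'

    # Freeze a single speaker into the exported graphs instead of emitting .emb files:
    python scripts/export.py variance --exp my_variance_exp --freeze_spk alice
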
From 5036ab328c2f84463d661c81b2d95752b26dcc0c Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 15 Jul 2023 21:20:24 +0800
Subject: [PATCH 467/475] Add missing forward argument

---
 deployment/modules/fastspeech2.py |  4 +++-
 deployment/modules/toplevel.py    | 13 +++++++++----
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/deployment/modules/fastspeech2.py b/deployment/modules/fastspeech2.py
index a1925b8e9..338682aae 100644
--- a/deployment/modules/fastspeech2.py
+++ b/deployment/modules/fastspeech2.py
@@ -113,9 +113,11 @@ def forward_encoder_phoneme(self, tokens, ph_dur):
         ph_dur_embed = self.ph_dur_embed(ph_dur.float()[:, :, None])
         return self.encoder(tokens, ph_dur_embed), tokens == PAD_INDEX
 
-    def forward_dur_predictor(self, encoder_out, x_masks, ph_midi):
+    def forward_dur_predictor(self, encoder_out, x_masks, ph_midi, spk_embed=None):
         midi_embed = self.midi_embed(ph_midi)
         dur_cond = encoder_out + midi_embed
+        if hparams['use_spk_id'] and spk_embed is not None:
+            dur_cond += spk_embed
         ph_dur = self.dur_predictor(dur_cond, x_masks=x_masks)
         return ph_dur
 
diff --git a/deployment/modules/toplevel.py b/deployment/modules/toplevel.py
index 6d2a97cf8..35c866c5b 100644
--- a/deployment/modules/toplevel.py
+++ b/deployment/modules/toplevel.py
@@ -147,8 +147,8 @@ def forward_linguistic_encoder_phoneme(self, tokens, ph_dur):
         encoder_out = self.embed_frozen_spk(encoder_out)
         return encoder_out, x_masks
 
-    def forward_dur_predictor(self, encoder_out, x_masks, ph_midi):
-        return self.fs2.forward_dur_predictor(encoder_out, x_masks, ph_midi)
+    def forward_dur_predictor(self, encoder_out, x_masks, ph_midi, spk_embed=None):
+        return self.fs2.forward_dur_predictor(encoder_out, x_masks, ph_midi, spk_embed=spk_embed)
 
     def forward_mel2x_gather(self, x_src, x_dur, x_dim=None):
         mel2x = self.lr(x_dur)
@@ -162,7 +162,7 @@ def forward_mel2x_gather(self, x_src, x_dur, x_dim=None):
 
     def forward_pitch_preprocess(
             self, encoder_out, ph_dur, note_midi, note_dur,
-            pitch=None, retake=None
+            pitch=None, retake=None, spk_embed=None
     ):
         condition = self.forward_mel2x_gather(encoder_out, ph_dur, x_dim=self.hidden_size)
         condition += self.pitch_retake_embed(retake.long())
         frame_midi_pitch = self.forward_mel2x_gather(note_midi, note_dur, x_dim=None)
         base_pitch = self.smooth(frame_midi_pitch)
         base_pitch = base_pitch * retake + pitch * ~retake
         pitch_cond = condition + self.base_pitch_embed(base_pitch[:, :, None])
+        if hparams['use_spk_id'] and spk_embed is not None:
+            pitch_cond += spk_embed
         return pitch_cond, base_pitch
 
     def forward_pitch_diffusion(
@@ -183,7 +185,8 @@ def forward_pitch_postprocess(self, x_pred, base_pitch):
         return pitch_pred
 
     def forward_variance_preprocess(
-            self, encoder_out, ph_dur, pitch, variances: dict = None, retake=None
+            self, encoder_out, ph_dur, pitch,
+            variances: dict = None, retake=None, spk_embed=None
     ):
         condition = self.forward_mel2x_gather(encoder_out, ph_dur, x_dim=self.hidden_size)
         variance_cond = condition + self.pitch_embed(pitch[:, :, None])
@@ -196,6 +199,8 @@
             for v_name, v_masks in zip(self.variance_prediction_list, non_retake_masks)
         ]
         variance_cond += torch.stack(variance_embeds, dim=-1).sum(-1)
+        if hparams['use_spk_id'] and spk_embed is not None:
+            variance_cond += spk_embed
         return variance_cond
 
     def forward_variance_diffusion(self, variance_cond, speedup: int = 1):

From dc1e26473c27e90cb28abc2bd4ea538d615d61e6 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 15 Jul 2023 21:44:04 +0800
Subject: [PATCH 468/475] Adjust configurations

---
 configs/acoustic.yaml |  2 +-
 configs/variance.yaml | 10 +++++-----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml
index a08022d58..e771d6655 100644
--- a/configs/acoustic.yaml
+++ b/configs/acoustic.yaml
@@ -90,5 +90,5 @@ val_check_interval: 2000
 num_valid_plots: 10
 max_updates: 320000
 num_ckpt_keep: 5
-permanent_ckpt_start: 120000
+permanent_ckpt_start: 200000
 permanent_ckpt_interval: 40000
diff --git a/configs/variance.yaml b/configs/variance.yaml
index edf947e2f..b401ca087 100644
--- a/configs/variance.yaml
+++ b/configs/variance.yaml
@@ -93,14 +93,14 @@ num_sanity_val_steps: 1
 optimizer_args:
   lr: 0.0006
 lr_scheduler_args:
-  step_size: 40000
-  gamma: 0.5
+  step_size: 12000
+  gamma: 0.75
 max_batch_frames: 80000
 max_batch_size: 48
 val_with_vocoder: true
 val_check_interval: 2000
 num_valid_plots: 10
-max_updates: 320000
+max_updates: 288000
 num_ckpt_keep: 5
-permanent_ckpt_start: 120000
-permanent_ckpt_interval: 40000
+permanent_ckpt_start: 180000
+permanent_ckpt_interval: 10000

From c71c93a9b2c43e03e3386636f2da46bc89531005 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sat, 15 Jul 2023 21:50:25 +0800
Subject: [PATCH 469/475] Force spk to be None for single speaker models

---
 deployment/exporters/acoustic_exporter.py | 6 ++++--
 deployment/exporters/variance_exporter.py | 6 ++++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/deployment/exporters/acoustic_exporter.py b/deployment/exporters/acoustic_exporter.py
index 3c85bc37b..a2e5e4363 100644
--- a/deployment/exporters/acoustic_exporter.py
+++ b/deployment/exporters/acoustic_exporter.py
@@ -45,8 +45,10 @@ def __init__(
         # Attributes for exporting
         self.expose_gender = expose_gender
         self.expose_velocity = expose_velocity
-        self.freeze_spk: Tuple[str, Dict[str, float]] = freeze_spk
-        self.export_spk: List[Tuple[str, Dict[str, float]]] = export_spk if export_spk is not None else []
+        self.freeze_spk: Tuple[str, Dict[str, float]] = freeze_spk \
+            if hparams['use_spk_id'] else None
+        self.export_spk: List[Tuple[str, Dict[str, float]]] = export_spk \
+            if hparams['use_spk_id'] and export_spk is not None else []
         if hparams.get('use_key_shift_embed', False) and not self.expose_gender:
             shift_min, shift_max = hparams['augmentation_args']['random_pitch_shifting']['range']
             key_shift = freeze_gender * shift_max if freeze_gender >= 0. else freeze_gender * abs(shift_min)
diff --git a/deployment/exporters/variance_exporter.py b/deployment/exporters/variance_exporter.py
index 0a54aaddc..939c42f3e 100644
--- a/deployment/exporters/variance_exporter.py
+++ b/deployment/exporters/variance_exporter.py
@@ -58,8 +58,10 @@ def __init__(
             if self.model.predict_variances else None
 
         # Attributes for exporting
-        self.freeze_spk: Tuple[str, Dict[str, float]] = freeze_spk
-        self.export_spk: List[Tuple[str, Dict[str, float]]] = export_spk if export_spk is not None else []
+        self.freeze_spk: Tuple[str, Dict[str, float]] = freeze_spk \
+            if hparams['use_spk_id'] else None
+        self.export_spk: List[Tuple[str, Dict[str, float]]] = export_spk \
+            if hparams['use_spk_id'] and export_spk is not None else []
         if hparams['use_spk_id']:
             if not self.export_spk and self.freeze_spk is None:
                 # In case the user did not specify any speaker settings:

From 8f3b6db173a3569f6bfa6bd8e58c36343ff5cbaa Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 16 Jul 2023 00:03:06 +0800
Subject: [PATCH 470/475] Fix crash when folder contains illegal ckpt filenames

---
 utils/__init__.py | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/utils/__init__.py b/utils/__init__.py
index 6ef9d7a69..e9b1b0178 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -166,16 +166,14 @@ def load_ckpt(
         checkpoint_path = [ckpt_base_dir / f'model_ckpt_steps_{int(ckpt_steps)}.ckpt']
     else:
         base_dir = ckpt_base_dir
-        checkpoint_path = [
-            base_dir / ckpt_file
-            for ckpt_file in sorted(
-                [
-                    ckpt.name
-                    for ckpt in base_dir.glob('model_ckpt_steps_*.ckpt')
-                ],
-                key=lambda x: int(re.findall(fr'model_ckpt_steps_(\d+).ckpt', x.replace('\\', '/'))[0])
-            )
-        ]
+        checkpoint_path = sorted(
+            [
+                ckpt_file
+                for ckpt_file in base_dir.iterdir()
+                if ckpt_file.is_file() and re.fullmatch(r'model_ckpt_steps_\d+\.ckpt', ckpt_file.name)
+            ],
+            key=lambda x: int(re.search(r'\d+', x.name).group(0))
+        )
     assert len(checkpoint_path) > 0, f'| ckpt not found in {ckpt_base_dir}.'
     checkpoint_path = checkpoint_path[-1]
     ckpt_loaded = torch.load(checkpoint_path, map_location=device)

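The crash fixed above came from the old sort key, which trusted that every name matched by the glob carried a numeric step count. A minimal reproduction of the failure mode (the second file name is a made-up example of a stray file in the checkpoint folder):

    import re

    # Old sort key: assumes the wildcard part of 'model_ckpt_steps_*.ckpt' is numeric.
    key = lambda x: int(re.findall(r'model_ckpt_steps_(\d+).ckpt', x)[0])

    key('model_ckpt_steps_2000.ckpt')    # -> 2000
    key('model_ckpt_steps_backup.ckpt')  # -> IndexError: re.findall() finds no match

The rewritten listing filters with re.fullmatch first, so files that merely resemble checkpoints are skipped instead of aborting the load.
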
From e94e14e3ef8e4d1324699a39ecf94089fc5c1aeb Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 16 Jul 2023 00:28:33 +0800
Subject: [PATCH 471/475] Change diff accelerator in ONNX to DDIM

---
 deployment/modules/diffusion.py | 42 +++++++++++++++++++++------------
 modules/diffusion/ddpm.py       |  8 ++++---
 2 files changed, 32 insertions(+), 18 deletions(-)

diff --git a/deployment/modules/diffusion.py b/deployment/modules/diffusion.py
index bcca20d1a..128a26ddd 100644
--- a/deployment/modules/diffusion.py
+++ b/deployment/modules/diffusion.py
@@ -45,6 +45,16 @@ def plms_get_x_pred(self, x, noise_t, t, t_prev):
 
         return x_pred
 
+    def p_sample_ddim(self, x, t, interval, cond):
+        a_t = extract(self.alphas_cumprod, t)
+        a_prev = extract(self.alphas_cumprod, torch.max(t - interval, torch.zeros_like(t)))
+
+        noise_pred = self.denoise_fn(x, t, cond=cond)
+        x_prev = a_prev.sqrt() * (
+                x / a_t.sqrt() + (((1 - a_prev) / a_prev).sqrt() - ((1 - a_t) / a_t).sqrt()) * noise_pred
+        )
+        return x_prev
+
     def p_sample_plms(self, x_prev, t, interval: int, cond, noise_list: List[Tensor], stage: int):
         noise_pred = self.denoise_fn(x_prev, t, cond)
         t_prev = t - interval
@@ -77,22 +87,24 @@ def forward(self, condition, speedup: int):
         x = torch.randn((1, self.num_feats, self.out_dims, n_frames), device=device)
 
         if speedup > 1:
-            plms_noise_stage: int = 0
-            noise_list: List[Tensor] = []
             for t in step_range:
-                noise_pred, x = self.p_sample_plms(
-                    x, t, interval=speedup, cond=condition,
-                    noise_list=noise_list, stage=plms_noise_stage
-                )
-                if plms_noise_stage == 0:
-                    noise_list = [noise_pred]
-                    plms_noise_stage = plms_noise_stage + 1
-                else:
-                    if plms_noise_stage >= 3:
-                        noise_list.pop(0)
-                    else:
-                        plms_noise_stage = plms_noise_stage + 1
-                    noise_list.append(noise_pred)
+                x = self.p_sample_ddim(x, t, interval=speedup, cond=condition)
+            # plms_noise_stage: int = 0
+            # noise_list: List[Tensor] = []
+            # for t in step_range:
+            #     noise_pred, x = self.p_sample_plms(
+            #         x, t, interval=speedup, cond=condition,
+            #         noise_list=noise_list, stage=plms_noise_stage
+            #     )
+            #     if plms_noise_stage == 0:
+            #         noise_list = [noise_pred]
+            #         plms_noise_stage = plms_noise_stage + 1
+            #     else:
+            #         if plms_noise_stage >= 3:
+            #             noise_list.pop(0)
+            #         else:
+            #             plms_noise_stage = plms_noise_stage + 1
+            #         noise_list.append(noise_pred)
         else:
             for t in step_range:
                 x = self.p_sample(x, t, cond=condition)
diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index c49be05f9..0f7f68013 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -158,11 +158,13 @@ def p_sample(self, x, t, cond, clip_denoised=True, repeat_noise=False):
     def p_sample_ddim(self, x, t, interval, cond):
         a_t = extract(self.alphas_cumprod, t, x.shape)
         a_prev = extract(self.alphas_cumprod, torch.max(t - interval, torch.zeros_like(t)), x.shape)
-
+
         noise_pred = self.denoise_fn(x, t, cond=cond)
-        x_prev = a_prev.sqrt() * (x / a_t.sqrt() + (((1 - a_prev) / a_prev).sqrt()-((1 - a_t) / a_t).sqrt()) * noise_pred)
+        x_prev = a_prev.sqrt() * (
+            x / a_t.sqrt() + (((1 - a_prev) / a_prev).sqrt() - ((1 - a_t) / a_t).sqrt()) * noise_pred
+        )
         return x_prev
-
+
     @torch.no_grad()
     def p_sample_plms(self, x, t, interval, cond, clip_denoised=True, repeat_noise=False):
         """

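For reference, p_sample_ddim above evaluates the deterministic DDIM step (the eta = 0 sampler), with \bar\alpha denoting alphas_cumprod and \Delta the step interval; in LaTeX notation:

    x_{t-\Delta} = \sqrt{\bar\alpha_{t-\Delta}} \left( \frac{x_t}{\sqrt{\bar\alpha_t}}
        + \left( \sqrt{\frac{1-\bar\alpha_{t-\Delta}}{\bar\alpha_{t-\Delta}}}
        - \sqrt{\frac{1-\bar\alpha_t}{\bar\alpha_t}} \right) \epsilon_\theta(x_t, t, \mathrm{cond}) \right)

Unlike PLMS, this rule needs no history of previous noise predictions, which is why the noise_list bookkeeping in forward() could be set aside (commented out) along with it.
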
From cfea3ea52227dec2b98614e94e94891e0df5ed68 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 16 Jul 2023 18:14:38 +0800
Subject: [PATCH 472/475] Change default diff accelerator to DDIM

---
 modules/diffusion/ddpm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/diffusion/ddpm.py b/modules/diffusion/ddpm.py
index 0f7f68013..46c3eaccb 100644
--- a/modules/diffusion/ddpm.py
+++ b/modules/diffusion/ddpm.py
@@ -221,7 +221,7 @@ def inference(self, cond, b=1, device=None):
         shape = (b, self.num_feats, self.out_dims, cond.shape[2])
         x = torch.randn(shape, device=device)
         if hparams.get('pndm_speedup') and hparams['pndm_speedup'] > 1:
-            algorithm = hparams.get('diff_accelerator', 'dpm-solver')
+            algorithm = hparams.get('diff_accelerator', 'ddim')
             if algorithm == 'dpm-solver':
                 from inference.dpm_solver_pytorch import NoiseScheduleVP, model_wrapper, DPM_Solver
                 # 1. Define the noise schedule.

From bae626f174e33c5266ec87c7fd187218e11b9ff6 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 16 Jul 2023 18:38:23 +0800
Subject: [PATCH 473/475] Add configuration templates

---
 configs/acoustic.yaml                  |  2 +-
 configs/templates/config_acoustic.yaml | 63 ++++++++++++++++++++
 configs/templates/config_variance.yaml | 82 ++++++++++++++++++++++++++
 configs/variance.yaml                  |  1 -
 docs/GettingStarted.md                 |  6 ++
 5 files changed, 152 insertions(+), 2 deletions(-)
 create mode 100644 configs/templates/config_acoustic.yaml
 create mode 100644 configs/templates/config_variance.yaml

diff --git a/configs/acoustic.yaml b/configs/acoustic.yaml
index e771d6655..66b79c5ea 100644
--- a/configs/acoustic.yaml
+++ b/configs/acoustic.yaml
@@ -70,7 +70,7 @@ diff_accelerator: ddim
 pndm_speedup: 10
 hidden_size: 256
 residual_layers: 20
-residual_channels: 384
+residual_channels: 512
 dilation_cycle_length: 4  # *
 diff_decoder_type: 'wavenet'
 diff_loss_type: l2
diff --git a/configs/templates/config_acoustic.yaml b/configs/templates/config_acoustic.yaml
new file mode 100644
index 000000000..0291177a1
--- /dev/null
+++ b/configs/templates/config_acoustic.yaml
@@ -0,0 +1,63 @@
+base_config: configs/acoustic.yaml
+
+raw_data_dir:
+  - data/xxx1/raw
+  - data/xxx2/raw
+speakers:
+  - speaker1
+  - speaker2
+spk_ids: []
+test_prefixes:
+  - wav1
+  - wav2
+  - wav3
+  - wav4
+  - wav5
+dictionary: dictionaries/opencpop-extension.txt
+binary_data_dir: data/xxx/binary
+binarization_args:
+  num_workers: 0
+
+use_spk_id: false
+num_spk: 1
+use_energy_embed: false
+use_breathiness_embed: false
+use_key_shift_embed: false
+use_speed_embed: false
+
+augmentation_args:
+  random_pitch_shifting:
+    enabled: false
+    range: [-5., 5.]
+    scale: 1.0
+  fixed_pitch_shifting:
+    enabled: false
+    targets: [-5., 5.]
+    scale: 0.75
+  random_time_stretching:
+    enabled: false
+    range: [0.5, 2.]
+ domain: log # or linear + scale: 1.0 + +residual_channels: 512 +residual_layers: 20 + +optimizer_args: + lr: 0.0004 +lr_scheduler_args: + scheduler_cls: torch.optim.lr_scheduler.StepLR + step_size: 50000 + gamma: 0.5 +max_batch_frames: 80000 +max_batch_size: 48 +max_updates: 320000 + +num_valid_plots: 10 +val_with_vocoder: true +val_check_interval: 2000 +num_ckpt_keep: 5 +permanent_ckpt_start: 200000 +permanent_ckpt_interval: 40000 +pl_trainer_devices: 'auto' +pl_trainer_precision: '32-true' diff --git a/configs/templates/config_variance.yaml b/configs/templates/config_variance.yaml new file mode 100644 index 000000000..8bb2d44f0 --- /dev/null +++ b/configs/templates/config_variance.yaml @@ -0,0 +1,82 @@ +base_config: + - configs/variance.yaml + +raw_data_dir: + - data/xxx1/raw + - data/xxx2/raw +speakers: + - speaker1 + - speaker2 +spk_ids: [] +test_prefixes: + - wav1 + - wav2 + - wav3 + - wav4 + - wav5 +dictionary: dictionaries/opencpop-extension.txt +binary_data_dir: data/xxx/binary +binarization_args: + num_workers: 0 + +energy_db_min: -96.0 +energy_db_max: -12.0 +breathiness_db_min: -96.0 +breathiness_db_max: -20.0 + +use_spk_id: false +num_spk: 1 +predict_dur: true +predict_pitch: true +predict_energy: false +predict_breathiness: false + +hidden_size: 256 +dur_prediction_args: + arch: fs2 + hidden_size: 512 + dropout: 0.1 + num_layers: 5 + kernel_size: 3 + log_offset: 1.0 + loss_type: mse + lambda_pdur_loss: 0.3 + lambda_wdur_loss: 1.0 + lambda_sdur_loss: 3.0 + +pitch_prediction_args: + pitd_norm_min: -8.0 + pitd_norm_max: 8.0 + pitd_clip_min: -12.0 + pitd_clip_max: 12.0 + repeat_bins: 64 + residual_layers: 20 + residual_channels: 256 + dilation_cycle_length: 5 # * + +variances_prediction_args: + total_repeat_bins: 48 + residual_layers: 10 + residual_channels: 192 + dilation_cycle_length: 4 # * + +lambda_dur_loss: 1.0 +lambda_pitch_loss: 1.0 +lambda_var_loss: 1.0 + +optimizer_args: + lr: 0.0006 +lr_scheduler_args: + step_size: 12000 + gamma: 0.75 +max_batch_frames: 80000 +max_batch_size: 48 +max_updates: 288000 + +num_valid_plots: 10 +val_check_interval: 2000 +num_ckpt_keep: 5 +permanent_ckpt_start: 180000 +permanent_ckpt_interval: 10000 +pl_trainer_devices: 'auto' +pl_trainer_precision: '32-true' diff --git a/configs/variance.yaml b/configs/variance.yaml index b401ca087..ebfae4091 100644 --- a/configs/variance.yaml +++ b/configs/variance.yaml @@ -97,7 +97,6 @@ lr_scheduler_args: gamma: 0.75 max_batch_frames: 80000 max_batch_size: 48 -val_with_vocoder: true val_check_interval: 2000 num_valid_plots: 10 max_updates: 288000 diff --git a/docs/GettingStarted.md b/docs/GettingStarted.md index 59a7906d1..77d2fbd13 100644 --- a/docs/GettingStarted.md +++ b/docs/GettingStarted.md @@ -19,6 +19,12 @@ DiffSinger requires Python 3.8 or later. We strongly recommend you create a virt - **(Required)** Get the pretrained vocoder from the [DiffSinger Community Vocoders Project](https://openvpi.github.io/vocoders) and unzip it into `checkpoints/` folder, or train a ultra-lightweight [DDSP](https://github.com/yxlllc/pc-ddsp) vocoder first by yourself, then configure it according to the relevant [instructions](https://github.com/yxlllc/pc-ddsp/blob/master/DiffSinger.md). - Get acoustic or variance models from [Releases](https://github.com/openvpi/DiffSinger/releases) or elsewhere and unzip them into the `checkpoints/` folder. +## Configuration + +Every model needs a configuration file to run preprocessing, training, inference and deployment. 
Templates of configuration files are in [configs/templates](../configs/templates/). Please **copy** the templates to your own data directory before you edit them.
+
+For more details about configurable parameters, see [Configuration Schemas](ConfigurationSchemas.md).
+
 ## Preprocessing
 
 Raw data pieces and transcriptions should be binarized into dataset files before training.

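The acoustic template shipped above leaves every dataset-specific field as a placeholder. A minimal edited copy might look like this (paths, speaker names and test prefixes are illustrative only; all other keys fall back to configs/acoustic.yaml via base_config):

    base_config: configs/acoustic.yaml

    raw_data_dir:
      - data/my_singer/raw
    speakers:
      - my_singer
    test_prefixes:
      - sample001
      - sample002
    dictionary: dictionaries/opencpop-extension.txt
    binary_data_dir: data/my_singer/binary
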
From a2e388dd71033c1649ab49e9dacf7a1ab346fff0 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 16 Jul 2023 22:01:29 +0800
Subject: [PATCH 474/475] Support variance models in drop_spk.py

---
 scripts/drop_spk.py | 55 ++++++++++++++++++++++-----------------------
 1 file changed, 32 insertions(+), 23 deletions(-)

diff --git a/scripts/drop_spk.py b/scripts/drop_spk.py
index 332f8516c..6a19d09d5 100644
--- a/scripts/drop_spk.py
+++ b/scripts/drop_spk.py
@@ -1,7 +1,34 @@
+import torch
 import argparse
 import pathlib
 import re
 
+
+def modify_spk_embed(spk_embed):
+    num_spk, hidden_size = spk_embed.shape
+    all_ids = set(range(num_spk))
+    if args.drop is not None:
+        drop_ids = set([int(i) for i in args.drop.split(',') if i != '']).intersection(all_ids)
+    else:
+        drop_ids = all_ids - set([int(i) for i in args.retain.split(',') if i != ''])
+
+    fill_list = None
+    if args.fill == 'zeros':
+        fill_list = [0. for _ in drop_ids]
+    elif args.fill == 'random':
+        fill_list = [torch.randn(1, hidden_size, dtype=torch.float32, device='cpu') for _ in drop_ids]
+    elif args.fill == 'mean':
+        mean = torch.mean(spk_embed, dim=0, keepdim=True)
+        fill_list = [mean for _ in drop_ids]
+    elif args.fill == 'cyclic':
+        retain_ids = sorted(all_ids - drop_ids)
+        num_retain = len(retain_ids)
+        fill_list = [spk_embed[retain_ids[i % num_retain], :] for i, _ in enumerate(drop_ids)]
+
+    for spk_id, fill in zip(sorted(drop_ids), fill_list):
+        spk_embed[spk_id, :] = fill
+
+
 parser = argparse.ArgumentParser(description='Drop or edit spk_embed in a checkpoint.')
 parser.add_argument('input', type=str, help='Path to the input file')
 parser.add_argument('output', type=str, help='Path to the output file')
@@ -36,28 +63,10 @@
     'If you are sure to OVERWRITE the existing file, please re-run this script with the \'--overwrite\' argument.'
 ckpt_loaded = torch.load(input_ckpt, map_location='cpu')
-spk_embed = ckpt_loaded['state_dict']['model.fs2.spk_embed.weight']
-num_spk, hidden_size = spk_embed.shape
-all_ids = set(range(num_spk))
-if args.drop is not None:
-    drop_ids = set([int(i) for i in args.drop.split(',') if i != '']).intersection(all_ids)
-else:
-    drop_ids = all_ids - set([int(i) for i in args.retain.split(',') if i != ''])
-
-fill_list = None
-if args.fill == 'zeros':
-    fill_list = [0. for _ in drop_ids]
-elif args.fill == 'random':
-    fill_list = [torch.randn(1, hidden_size, dtype=torch.float32, device='cpu') for _ in drop_ids]
-elif args.fill == 'mean':
-    mean = torch.mean(spk_embed, dim=0, keepdim=True)
-    fill_list = [mean for _ in drop_ids]
-elif args.fill == 'cyclic':
-    retain_ids = sorted(all_ids - drop_ids)
-    num_retain = len(retain_ids)
-    fill_list = [spk_embed[retain_ids[i % num_retain], :] for i, _ in enumerate(drop_ids)]
-
-for spk_id, fill in zip(sorted(drop_ids), fill_list):
-    spk_embed[spk_id, :] = fill
+state_dict = ckpt_loaded['state_dict']
+if 'model.fs2.spk_embed.weight' in state_dict:
+    modify_spk_embed(state_dict['model.fs2.spk_embed.weight'])
+if 'model.spk_embed.weight' in state_dict:
+    modify_spk_embed(state_dict['model.spk_embed.weight'])
 
 torch.save(ckpt_loaded, output_ckpt)

From 3d1a857010103d49d7c5a0c52534e08470b87744 Mon Sep 17 00:00:00 2001
From: yqzhishen
Date: Sun, 16 Jul 2023 22:01:41 +0800
Subject: [PATCH 475/475] Update .gitignore

---
 .gitignore | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index ad1c7ec9a..cfffba8f8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,7 +5,6 @@ __pycache__/
 local_tools/
 *.ckpt
 *.pth
-(!/preparation/assets)/*.wav
 infer_out/
 *.onnx
 /data/*
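A usage sketch for the updated scripts/drop_spk.py (checkpoint paths and speaker indices are placeholders; the --drop/--retain/--fill spellings are inferred from the attributes the script reads and may differ from the actual flags):

    # Replace the embeddings of speakers 1 and 3 with the mean of all embeddings:
    python scripts/drop_spk.py checkpoints/my_exp/model_ckpt_steps_160000.ckpt edited.ckpt --drop 1,3 --fill mean

    # Keep only speaker 0 and zero out the rest:
    python scripts/drop_spk.py checkpoints/my_exp/model_ckpt_steps_160000.ckpt edited.ckpt --retain 0 --fill zeros

Because modify_spk_embed is now applied to whichever of model.fs2.spk_embed.weight (acoustic) or model.spk_embed.weight (variance) is present in the state dict, the same command works for both kinds of checkpoints.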

zv6}7uMb8w+lpIp47SFCG4SMk0d@=P_XJ_g_4==(Inm=V`KOj8Fg&P%>Vby-KaY8dG z*-8GljZ?Clq)dVykX4*jI#V7x0Ce6nKn(xzk1$7c&gAEzBslCUbHV$z|DFgM;Wg&F zQ%|mvMVSJFTiEe1IeIYxTLAEKMGKZYu?((JHBr(utwNPoS*Zhn-FFvC)K*_bORZmu8r%}vW~`qRCb z(ERVO>|=Bv-R-)M6BieEO|Q4GQPYR)<1})5I@u66^u1F@ZVFS5 zFap0T(ltD>2AV>|5|`ApQ>q8aYb^ejh6WL!4!M_+rU7RbaH_>lXBm7+7YoSU>oJv} zm~MJ}@kZ%|zj$+7wfl?#J+5mh56LD!sfO$AXr>wcLC@{ak?S&81gi{DEVf3gKFY0T zoC}x*d^fqGg-`|lv>jrqd$;P~6l zW$WL}%glE|N(vIlKk@EY&gD8aTw(#`K!UpM(@XOgUPyQLKhB7&pq`Dtb+>k^-pW&2 zffoXZT&b>kWVCE!?>Vib0Y~P&`Hi5fC&J!ZL%ELoiW_AYD-MbJN#+vn3@#8#h~%HN3^F{se+k#IM|!CO2|m;{ zukOQ7L-nsk)M@HoI5&C(BN6Sev4OTI8P#x4kmQ26Io^f&Bk4z`DD)Jf{ zPa=VE#1|Xrn^uGuFdo#Zi?cC7WChEA0%A6H{i)B4aCS*!)>4};Dh}g&Zr8`A6lY_k zrs>!8wLgMS0C+olxo+JSwLCQF%&_2qEzz7o|Ng>>u$DPcuWhw~slBh9^hQ(dTX3-e z%WzA*elbZ1M(UR5-zygc3~kOk-NPny=tw~l@GFU491L2*UVtXSSmjG8FAj-m#yYO7GvO0U+0|K_8Oe@*m}KB9%sIN>LugX}cQVtPZ-JK02D6kXGHSEM1g$It8U{dMA4 zk54C(l#RgX_~-h!neuZ@2nEO($RXHsZ4O5LuSD-1ORN+D&uc(+^s4bZa;!4ign0N)qSX^=A9a{VtpogQ8@|IfQR@8i>R&}!4;hA#)sKdSV121{2^<8_N#b#ZZ_){VJ;U*1N%@q5%ts42Aq9X zpy=dh1|g+AIi=KFhRzrSfB`pY$wCU`Gb)ib{zMLaB0iQVoB7`9MU^E}-v%5@ zZw+#Y#dF}ZZnOtdCLjDJxhuY7>^@9` zf7_QxPP#m#a~zY^kw!wr%vE*p2uO}A64=oSJac4fk|&{8a5d;&7ZD)ue96GYTr=?aKw1Ddr;CgUlpqkb zXO`61ZVL(ULZC;ahIt=eg9SyG{9^ZrBed#J@p;}WaM_`Pq_H>u)Vx|#jHo-k#O=>> zUS3K%<&jOtgkP150#|9T*c^f%tnHSNIE%!lxqT}r<@tFScyV`|eOXQJQi0whb#;Ho zpr`3wN|YRDsJ3HX*_+;oi0(WDG#YsIsf?S`E`B?xcZWmy&jX5k7;%Fyy=t&*3Xl-b zlOW5JbeO@ih1)*frNJV=s4H=n$7ASNIZnzQ>(qC!)W)S_t5(RJaM_b*?Y>v*lDts9 z8AQL{1<|w$9F6~a=k_68!kkvV!!`LGwgp?^76s_KhBfbQW~=8V|K5D#X(Hr&s}unu z+12wpP_EDXB49%s*cb6{Z%fbPDxaiht%IZ&I-#DLJMTh4(k^Hj|tG-adg;~hzhk2)U*cy0q^!q{|ds#N{q1oIW8c!f4lqlSu zxCwKE{fu|xhJ*z3x1%ZJ%zyI~V@7;=hex_QUq#>YnIf`A%MquJLx+}@& zD~EZE!xBi6KR|IjOz;7|AOo+sh=YvC17Jj>I@*0-F8g+g0?$y{j56yPPg=*Hjs`2i zzT_aSCCJP?yzZZ1z$em=rrxO!#WTcBh9563`CN?aOm#WdSKj2xf?d&ZUnMQVa9F#G z5QP&z^U-pn4ZiKe`8*I8nMivWP9Lf0u!BJVMotO`4tE5CM{6C@0B60iTlCx}y6LRP zo6UV0(J@`o9~w|M9yU>HB$=vwwHlBgbVT`antOTW%y4Ej?%QEwaLjkxEI6sA?W-B6 z%2aJ@#fST97iTJVxcXE{&zTxd6Y&PS^>xcs9{nbRv`Q8dI$Z+Y3N`hVy636M*cpkQ zRo?0-VWvYH;M))DHf#VKG2S@&+FKH#+veJR2sT_=?*uP_`7uVOI~o1**QSsA;74CB zN}uADe9Ez6Z^Rl2#;Rey{TK!e#^XBEuJ${+iwNyLZsR3iFYX&%^G&vdE8XqINtn;L zv4;7X?+Lp#_3D*f;5Js-#g2zVOHZz@?E+9tvtNrh`^xG79f=C9Xk=wB$3Vt3cZqO? 
zQTxkb?Vv`|;-QY|uH2`~>DZJx6DG22tK&kO(TR!8mc?+T;tY=VCR4nCs`0~fZgyT2 zWF~zhlSZXp1vrzeD?OBfek@FJN|2AwEsfCQY-r#X=9bMsBjxE-YdDS}yH9$14P0~% zF?nC&pw!frdqwUef^Daa5chw@HM}hg>O02&3bO*$q}gLM5yy8L)krgy__PS4A=kG% zCexY6y3})gf@MMqNo9jTXaYirPWqUU;R;-DOxC>gd2W6CUK{JWxx1*KiY6PxavN07 zgeHWZKfF4%>24M_v;(grrfYCg zI?mci)Xd9_l%4lU+alxe(R-MPKyuFDlL&u#gD}=q&1U1*G({gLq=HJVakfkbHwU4= za%=0i;*bssXTEZjV4r4#<}n6a#*3nWgL@iae{cn?GULzd?Z@HRX;MYXK;s5q1vuZa zzSk0I`VRJ6PQovpx7;+X5QvK!v3#@RENDLcGSQUFlzR@6y;`w*D~cY%TcI6YLRf%n z*|M&AKB8C4DX^TSrq{X2?sA!C==+NVQXe1gj?Sb+Hz|_;05txJdHA63oUy9m=BR2h z_pbAm)|@810xz*L$;&Wx&Z2}6P^P9y`0}Iur$KnwOrpmbkbpDwt)~^cOMaw$pF+UKbVUIKkd~P{~4@*C`=9@3>NZ);boOD-z-+~zLpL<>on z8mm3|aH=63SGIapTirUOC&)@f)!|gx>9|sa{?;Z%hI>)ox%rWMjbdaNSH(fnN%1~N zKfBp%@gt3dmtka11|xhu2SV>OK07VLH(eX}owkyPpBvbM>xb@(+uvp$8~?_Ou6Xi8 zh}9@3)miizbdywtGv-W$iHBX@`!479OU>0cvF+q2bVB7y5$V0guAc=n8npw4>BV)4A<@c7ILb+4#MK2n{1W zAnl+OHPj9#jzmBSW$4|3D?_;ldViOoWGb4w`95IieGt>w?66usoS`TkD%_@+UQ4)` z3`?bo9@$b99%hg-d&KEaTp1;<_PX7t3)jk0&-SnS3?f^gF)R0#;N8d^=;5d9uSaKsB-^>}P6RG`vSPHp zN|~8JYT|T{#uQY5fk%|FVefZI_U5_%!%RF?#wOB^kf4Bs=Sva@h2~?EkCM%w#%v{d zMP3^B(8cMZ@iqY!K(4boVy$Fm8|t^{0tA*ouZ``Nov#RiB~zz39S#DoPA2Y?F=ynR zqpcg;320ICxBmV7cB?li=ev#DTiSJSOWkQOz|>FYfMUdK9m=`T=Iacg}>dG8{I;S*#4wc?1!bEF2p_uE448+CwglpD&k) z0WEuy1<lJ9dNOSL zlCOFmUa~}BU;)G?T&sgS)Ia?5%4}TYuawgd)OyT#DN>cH7WjtMo0_ZWSQCtTf<6;! z#>a!T>VR3G>+{WFZs|!|hgrE%s+QG9BnZ40K}^lr6*&p;G~1;wEuj zU-me5g4QM|Z^nA|iE7*Q*-)sI5$RT`aA4qKBZ8y-rnSPuP>PLIt>yG1QAD3KU;(Y| zR@pjm6vmX5Rmjxk*+GOj(UM_hKCUnm&1t@0*-TaeJY z_I+TUF%0dWhD}OOX1gw4r#5eHz&;&YQ))jO13aPMHw6H97odQ5qiIqu*8bSR4q&R{ zQKh!wNqo%X`qLwYKXx#j?=gBFbJR^2$LhlHcLynOvE9cYJ zYZUEtWyBX$9-71Cz!@oD)2fob=R#mYz!b1DmB?usVy>FGpS=r~Y#$%#E}QB9x-BM@ zE;k5)}#X3oV_dm5VXf*yB@Z~G;$9lU_&JNy}a1jz8>aHJ{$_0Tsi4#S$N*+qBu-}ZMg zX40DX-suuS&;9L7#iNx#+dl_F&6iHGPOzpW-Vz7k1AUb1M z@Vz?#7&O9l00n;1z$O2Q! 
zZ_p1*I2@7N z;{X~oSjcA0)?1`n8J3gSdjRzeJ?PdKN;Syg#Hq|6PMWU0w_wa(v#2&^aQJinq?Vga z-){Y^DURW7X(k#xvMWBw=VfTqx#!Xm2xq`}ZwbUBA&!+~O0k5@F3oXul4{Ym74n5_ z&iZfuDKi-%*pL)OcAct=7~aZbFNrrCC2dd;2;tm3wiXa3ZY;{=Gi>5t$>fKFRL!ek z$JZ^Mg^{Sw{TU0s`|179UUWNG0FWve`cb7eb}%`tsKs&L^&#hiVK)q;2(kF7q%%P1 zAxDMKDC1XU_d0xI3K<^7p<1m9$awYd=S59iZD$61)mW=JFs~q-)DUxsdjI*6x8Vk4 zVhSG^WMyW0OWaB~PgASYMR;O83OanQn&L1adDqIY8j3DOWx^-osmHSW`Ia_WlXQ6L z*dFU0?a-|AUz+anKQ&3@Rsyn(+?gqcsgcd2Losy@`+Ip;FE%gJ6KiyD(;86!edEW$ z*A=tP_{=05$gP5$N`}+rUT_*Z7)Q8g`y`9wAHaKl*h`w3^Jv)B@CkV*HK2_MFJ?CDfMk477=nClNOaW%iKHdf`dC5%gY&B1tL@6AB z_fWr!xrE7g&Sriq6nYZ4g%?3@kO3z)Z2HtMQi*)TnCDA}h(Joh$8Wm4y(?Yc!UZ0a zr!b||!6xdwbGcJy%r`}(wF08RnSpqNS*~Xsf8*~E-VV2uSgvP@cVD!W7UZ4B+4ryj zA;;s9O*RY{0X524^HA2Ri$jq9(c*>o^+B$<;~{w}am7jYPbf`v_8Ycql5w>RZ8CR0Wbc0$=Z5>N2)F^_=qTAG~KJ z0|R%!9QWGsNK*J)-n!^|1Sl_1BFeXZaZ$5;1$}{*IHf0!DY)~Z zuU42bG|U3ovaSRBw|o=S2U|A!UDH7CMI;_hA;+=+&qO+f*%A*6&%So}|5rnp;*LL1yUH&~B+jDIf{YWLc zNmN**#oFn-Q1n^&Ew0ep+`2cqli+GKf19RHP*eQ!ZL3HTUO0Y4oeg+)yPgcPV{CX- zE0>3u>}*@X$D)glhELwsJ?US7cDzy#t7eaF0W!C0SguU#sf;jK5Br+dXb=Gwk{kyB5(qw`0j4gyf zLsIe(+q3OMQg`wTXe9oo#u8~K(f^;@7Iw!H?T^OsDg*x*gwg)WLEW+%e(^))A=$1p z{>yk~K@A0Gh+Jza-+O6leu1h)Y<9)qJzA4Hqocqk_D~p7roS|MLPmQ zQ$GDS$?Ga#UGCkFf?b)3^%(%}Ok$_=97ftI(kNuTy2g88fkpm{E4QnF=`2}%1oafY z0@lQ;hGc^w$x;B}S<=|NMMF#3yKD3>Obg#mh#?|zAExs*9`761KR$P0*p_XA(}7Dc z5wvx@CPdR_PF3en<0~AtsGz$27x7kVjUrlS9fghgB9b(V=f0rdkB*t$%V%D;b7u3R z(_1z`kpJl*Xmn{&DRgd0lXjfzb3YASy+EDG_xfEugP^_hn_1^AZ;7_rM-h0`5L-H7 zVzr#qAEaz_ketWettmSMpeb7Trn5@^1kDI;rLHp!19%39WF)9{ah-;e5;C&?OMTC# zGD-*$WRB`s*!D+>b?keyN9Ckre=d=EQxjUcthDadd%iX?it(f>Z6ulQn(6w%&zY*9J6%87?xqmJ| zc7+;CxchU-kqHnOXh!jCK{G(ekS4#QkW0(-F)*9K*-%0%&^%i;x7Ak*^H6uY?4ML% zWEV`GR2BLS{l*ySHJWyF=Pz!I3_S7ptOnTxUF_~y@u62|_1tq46+g<3WlC3eOVdxu zRaa=j^!XKqnCrN}$AdNS1aG(Y6r5P#trj5yPHd&V4tzn*sA=7dV=}A?ezMuDHXTst zni`IZ*Sr4F!oWa@ifCJcEVh=cC8)FeO(FM{_I#=ncI#)9Y}9-B@Xq9x4vtfYHKx5* zfl26X{6obFz9O;<>K#;YzuHBgpzjX;x4=lLpw7woukiiMqok0Jt+=4=)qtwT%;fwBTWP8rR_AgcH zG*>|T$|r-)_>=R8SOeBy(%zx;Yz~t4%4QF|ySh|il;Ur4NJ2juhWn8kYc5^SlBjbk zVB&8~G)JzH5e)uVh1mud4d)P})3uGObhL3hHTCy~K^zjaZ7yk22hB44nBDkeNs*)y zUYMI~s^9wiiHOpy6nvzr8DFD=7AYnPL19r=<>?O{x08qt4A$+d4rfbQ`3C#u%g``p zJ+M`hdnf2iMi>Q9Z5`D4ae9rhn$_TQ(#Il}Qm#|V>JKb>JhG@dp7n?Yp7m-T5CY|O zjm?Z))0#G)Ghv9R;DfHT`bspSb!FH0WlBDFNYzy9%;i0(BC4HmpB^lBb1X`h&F>PZ5t6nE7Z-lyxSE$dEw&(VGSso`uB z)q^5eCv7J>$QnK4#p3-~IM`Z1mPgi=8)kQ24{k!IFk9RDE+YKv!T}s?7lgWVzmKv( zul?R`{)h0YQJcVCix5MH{u zaggg>+<}^0clDs{L7j*NnM^zi**%(@KD(wQZ4!~f)!xg;+1s677e~3`RxJ_;8H;_{ zV1t}7OoD_Q&I5_n)(6-(#JkRx>6EG$^LJ~J8NzzEq$NR0_~Be{2#*IN?zJaiK>Twm zLV;Lxf|p5+B^6F=O*DI>Y*yU1xO?b|CO`U@xhFcpUM6z(CI;gS%ApYar~d-#N|7)2DR*WfXAEtGVPLP$?C4v?VLv zoqzmdkeVaP<1Jap08`aR*yZCwVg6EYMway2rQ(zP%SGIKbGf2cLHoTvTVPle-+R!{ zg!6$58i)jRWGTSsvNjr=YA~B=!3HS^HQ~#EOgWQyN(0ftg&k*$mGCJR#>%|>;+er> z=$UX;7?NxzKa-(eK8x>+bKzP1653QqBUxGbZqR;awmH4 zN$HwLk6$P4+}g_Hlfo}H_SOl&2k02t7n8Pr(^EzD2`KcbV!TadQ% z8N6qT5H1!hj!YrQMWRk(XrGTm>0|E)lW=ghGTgzn`*n?XBYk#COOD1r#V4{d`GPyS zG|HC#DOr;Mpe_%l1irzO9Sg9TU8Diu;LuVmL|x@fdha9O46PVStrz;A|19{!(Ik>S zp}imYb2XRLZCtcCuj8;WzC>#}LGIi`cgR`4FFs^9ZIR_Yi;RRDJRD}b9gt-o!8loD z!-(|2=5_6btbrKq`tchas+%z5C;a(|y*y=>H;ZpeNPWE3d&QuP%E^^1X2wotts-^Ugx^Vr=+GB2+C*DK6PmVg;OEllyYcNrCiqsQ9+IF?gxA zmA+gyH`JU*65jQLSL;>hjPa(DV^Ie65g%Wk$EqhKMGiU%-KrzirXl#q+t)1!eJ9D~ zq3V)uME`5RghK9z7=HuZAkA7>?PQ@!$oNi!`fZa*z6*fJt)G37cjjHel6E9LDY6jM z3dFZ`h)#a!T+N*i*|-06WD@Gq_AvUF}rn~V(wooP3Cgt8TkxR>)q$3I!h7iE1zLg2(wD>Yy&Fn zsoYJgWj(OmjSw8FM#Ln#6dJV<9YC5cw_@T@S0k_tGM-^(mh9a8?$yX*oa&IoQjy?X 
z?I0}L?K;oSc$+dx?;46$fS6~ZAgQgY|348dr2mnqlz5&4rEnwc>3NfD!Jk&0fTtqXZ1uC?5iFRJMW%<4;*|iC4DMR+c7}n6XG-Rv6t8`@BS2984k`|mieJy& zi_+_efN}L2vPz{@|PBgs%X?hUTlRd*61`wgla1SZak0{x&d>c@U07Dtv+Z@{nUQBey>IbKlJuc~p6my4DrKDw2 zG$^z+4+)_40cmGpSU#Ok{@<>^L`w&nPR4e)U*OT*91)Lu!jQ|76Vf>OA^AK)?zPdh1x0D|GYsZD(rOvXPgLkla1+{ zTm%>&n-+(UIQ2GoDyrQ;R8QvR>9q9jwJKR<(L22|LLg#fodn>GSvu}EMA;EyXV$Jw zMVT`K5;SU4cr=kwEmDQJr(`8{&X`15(2MQiaN2&8-$MU3DOKoKSO_(90hHgZ5vBQ>7F~CBnSKZz#;WBg~^xf#i9eDR$oxc zI}`jVIqF(Hh+yANqX?zTgQQ;|Ga_r|{B+XnHS3^UD;?lgxNpM4B|cYuK(|7eQb<_w znVt8-gxk(I5((9cyV4BJ8v*F}BGk^Vg*E^6ZxI8+&!;m}jE2$04Aw>j2p2+Ve5YUb z##EE8&zc2sQ#7uQ;#tmdrZK`7&>o(?L}UlcSLcUg?{>2qbK&m{9?@1ePY)&W?ui=@ zdz5DnpUxirHJ1a@yHuFTlqMy}SZPZ#qZJo9Mcz`%$fJ!bV(1p@bkWAa7h5*2YdGR# zCiv$o^9YM)zumPLK%geu&VTA51j?gQQbTv-coQ}2%{m%;lvH3Nn5Uy^X}gUm-SR!1 zIKm+zd5b)?)GCqctlhOhgaWn8B@miLjoinH0O>Gf2%aq|!TBN3XYZ7BY!W(M{vN~l zJ;0KpZ&;JH$+WJE!5p!o(z0Epwc?BV5F$#rR!wGP4E#-LqFS%c&MRT`Iz0<50^@7x z;VELJ=th~SSnF|Bsx!YuX=hoKD6{F;#M=3mITKjV*w#kaO;Fo(=O;ln-a|NK&&RU! zIc3Ki|9Ia-HS_E9D#xR#er6m2CyrJzliGR^EuBruDO;3|!G4BLOe20wb!Kd8S+suk z=G`}C}4)W zA920^Lg90%(2owdlgVLue)78GBm-j%`ER%K?|iX_f15ay@5l*PT__G~OHP?Ny&rye zbz{MI+@RvijmQYckfbcsILQ<3jZaPE$=F!LD~4l>%fK!=-TUETO5MZti_X5Q>X#GX zv(TR6xFT;Xd@zlbut95pSKHN#_%=WJ*S$Z}^@zU)y}g?A$}K7xJ6|~%=#;HpdE8Q^ zzL{pdSlUmnYUVDsh9LcV+_}_qGeZL2pLOHMRcW<@0TY)c3_Nl)$nY??K7tZ4#G_+d zZ`F<<2MjWG9jQ7R24h)lx2UYwRsK~>GmOlz)X+Li_K+dA9gp2YkS+~o5j{TK5bP2r1`sGMM5cwf~sJTRhNt4t^Qq?!+YNp{U9auR22W1T7Fe`LOa0Ar($&qym6abDi+zjK6O=1 zG^C6_3uipaIWM0;wEpkx{}re**&ENfo}u%O@#g4aNI!!KnfJV+MuKO_?o9 z-XXi{+tFC?Roikp(A-D0Z75Ln*L8N2D#P;oNQgk*u5ht zcQ-vOyeQ>Z^ZlT_%2h!~7pF1%^ib@GLw=(Q{E{jwZ-)nNbO=HD^cI z$0hdoOxtFb+oY~HXj75k)}<(-Rh3aS=4S7HetHXGT`ZgE8QgX^%h1TwsU6)s*Msgn z+^?^d5wXv(jU_i7ZccT%h(p|U4_y5>ue7ZSs zT|xx3-s^tTn$97``>1c+arFza_t4*G)rAZXYtRk!ZQnBuRBnm9<2p;4mJ!F&(&w}k zhfX5`yz(=@RulUhv^?;gy`;ZeH^{}}hm@FP(kmNg7ET{zOgTXT>i#H09_`%7ovP95 zY0vpQlMPrmNmh7YSX*r%qthZeslE%i9O&>4z8 ztBo8QF=6X)tYk5kg8$QydpG@JTokC;;A)cXcq7*2=CVXpb6kp)T$5}$W&;!m8Zvo2 zIh$)+f#m6P%un|bm0gV^!*mq+$Mrj6R)HPxJZ8;VeD?g|`Dvd*sumxxz>3r;suxaz zie5VFI;4;ku$OSL865m?@9`Gfc?&9JI?^e3=eY!+&X(9{ae?K%Jk=ZZ{-h38Zykh` zb(>t>o7ffDwn((Di#i@NH*xQoXd%5-evx2mj4(p_jOYk~6Z)PtLgJWkHcL=ug2S3# zypIcltkX^xB3IT^xX10{0Mt7|&vq+VgVN{n`m**8wG~_liVA{R?Er*Nzcz_qIQpcO zAl54Ff*pn0G>QpCqu$JTN}^Cox>=*AX1@9xMo5VV#kk8C@l=qPRW~PB;+rd%u`Zl9!7Va6sYiY-g7^Hj?)L4lx80B}T1HPF!sO*n0r%&CISCye?WHwj* zsi$CfI%Rc&**PzJsYGkg3y9L`^^v|Asq#owBZJQ=(+$^MSY34ZGz;G@wmk(lS+U%1 zVO^h@{Bd-ec=_vFF2c(9=0#byN}ML)?8Sl3xj~5Z^<@@X!vfo@9@ziH&mwg?3R*9;T2ttU&~nR+IKzLBknc8YAgN*3z-R+D!t zof&i;6j1|3lt&VgKUSybdVx8aJ}GWULo|TMNOF<5fRWSq_A`xqUKDyHD)1mD(;<@; z2htpenG#WyBTfS$beV8_Gyf1Rd{8s7Ci;;kp4j-lmUkPkUsvj=vn5KmH?HY1ZmzPc ziwdG+Bhw8hB&sxR9y%VYsXt`&_#8$DQL1t-T(AIgEFl!Fm-{E^kcijMmi!7`V=XZ- z6c;ogvXE^6hZ@2@<;&3EMyrgyEgsl^0oUFDLx{XsB`4E@2tjjua=L%>16cKd1s`D! z0vXFQSpfj^H~rM*?+K@yqTefH5^%56=~a!Is?+D-w5;&lHNhxyYf!MWgQ(N$J-fi%M+!jsm?O&d9e7~MpW5@zp0(lai&86t@m!=Zmd>mHFQb3 zP{xDE5w@kM!#MXojs^4Mev#l9^o2>8S92YB5{`TPij+@6(+qHd}n{BRgbX2)bAY zgn}{8S|5bn`LWdqU|RuOe_Gg^NQ+*y*&zUI(-1W zvSK^K2vUrTUk`$GE3;4wT*MVi2}un=neb;O?xz064r*&CMbmP~75#@7dJ-V(?z z7LLWKLWeR=XBDd6qzHQx8mnMh%cl(gHH{h7J8J49y=KH7bN1#(9jcH?9b{%L6&{b! 
ztFpoGHQOYpnQpBLw{SmmJ|sZZdOk6f<}0J)uV-JN5j?(lK~6~vdA~^xOUcU_D;e!q zqQJxCGVsMf(=qv$h=U(ud!3V6GN_$$^?O2~qe%myJ*X8i0Z(5yb{4m9#l)YzQaTUK zc2CMNh3E5(C@Xv`imfYYz*GgrIx_rF0cr&a7D7L62YiIbF(%Uc$j^@$nhYCn04*$K zoCr&7dg`#<+q_T!b?NH`YvLH+SVvcvJ901SDpt=$;F=#puh`Tc(l^ycdfDdhe+tuY zcw}hC@K5MoFyYCjK64Y>ma59Dy+>Ep5s54dWBQDz#880?7+1mwuTZDWH+vR73+C3q zcqQgKO~8SIuvVgCYL6WTk+<5XV`uF*YNkWMrkd9luF6CDjGAWjSWlj@kWVvtei;D@f2rA$r1#J8BKY9TF!gn<3cO@UKj zPDj1|9fqK<`q0SP?1e;Z(X^Hwm<^jveWVLc1B5Oo2!m6u9a)~ zR=G_-HSwx;cbXrXq_lmS*qQdRN*7*EL0zuCv_=WRwiFb;Y60UI-nC7O%xA~AKyz22 zh1s4xN#=-NF}}Sm31miae<4vF@v@iWn7M^%<(d>i6{3F39F!(7>MNAs?YRWp-s-x3 z_R?6~Hgxovb|^{|VIf+uIBofk{y%>b3Y4arKvd4XA=!^U?&w1Nu%Yj>qbZxb?xSFhPx? z?794hkx)Ekk7({qdzz__g}<5&o8CgG`47^B8#8XHM#Vm9)v$c8O#(Y1%BDU6%Bq2S zV?W?N*)hNyYw{z(P1%wH#6Lae@zU+atQ=s&VYefBL5niR$P?@6TMr3aA( z^224rFPpTLkPCH*A(GgoZtoh}Uk?ts zj8TDWt5q<Q zXR_r|4LMf=+^pLG^gwRyG={`aeHLaM zYlV(~fz9zRIBjy15UqF1$*XC@)}!zZR=^)NIFqRXl^fFWP#}!~U)#wAmx0VH%ws_l zc%&p|S^EC-+gm70I6vX;(a(D6rFvM=il53u@;n}zWCYA*vY6_H`X`&`tG1`zJKolE zDQ>l-%dh0E?3ts=vTN{^HtK zWGC4f9M&tE^PjCWnE0&$#@Qk=wQ?h0m7-vxAi)MUo`1y~=ul*nMpYnsbXsqF{9N~8 zS#w!u5k7}_8nYn}52p~bnWrg|I6ioJU&T}DThXocZrz^OFD=9k19|%?lTVu?>f{JA zK$T##sxOW_Z+DFgKja8+j7B-kp^6N>DXXoR86}=3pFuik0tsfROmo0S07;w3n zWSbacHDoK_4s#+ zPSht((3D3Wx*5JP7*_Hch$l~#bbxG(wX=@b2?lWgR=T8jjce-)WycK`(Ju+B}#gC?L5_7 z0Yvkx?0_`~YhHD?bckXr_oilMlz4C6joG7r4C)9rY zz6cGsla23Z%_);*L9mModDPT8j%KOL!P45UUhVt8Co6A5evOcJW>$UFNJ>;FH*&fd z4RM*4>~VGs?eE+2NaiS?TCMdWTIm^sfxINqoaFA0=c<>aL?rbs{)Y6WACKN~j7lfg zsLMeLk&cTho7WVq-j~@A*h-%e!>fK$u?^#p4MSZ(hHQD%lO8VhNBW=u*e%D21aV2C z;r+j|_bIOn&|hQnC9vMDQg^gCIqPM)WH9eU@?u*yeGErt(-fsOy>3)eYebe0{RkuJ zl;A!HmOO^Th0C<=7Y%Lz+_G1EUj1;0gn#Z*61{}2;Rpi8ed+i^g)P85S+uJqAjf8-}-aSM6wm5l2$ zSv8d3_v+Hk*wCm_o};`2PBU0staCNJ*VM5L-#tWL_q)6SYov9X*3?gHh-%&}&Ix@8f;Yb)^i^}{GE5OX+B~{ySCcyh$csbP3m~x6q2sM8k1wgj%eXyutv#UFH zI?e=>@-U6t`U+m?4}zxU8t1E>3PFr^Bh=3WpwtAJ<*65rp_jQFRzZdvwosEK*RgU5Cf9=v>lrkKQ zPcac>uQI{jwlaF2W@akQm)9gWbgGFfXUt1YQreRf-W^mLe^hX<>4DM@Nh@>=|w zeMe7-a4D14fSk2X2J98WQmIUJCmvorfJgZZUy4SD-bq;h`sVmYWcxk~Ggp+J{BVJE zQe0Zzi3T8FVxYX}yFEUOUf~)(UW}7a1Of~9P+tXgivzIEb|EA}ESL9U`^Y@(j4hri z2H4f&voL$!uqfXcAmv<@t_Kar?vC;kypqnF>;>#)2vJ|nbUVtunKFnFx6%E_Yjrf@ z{vdd|VLzWUtXlBVHwj=hpo6s`OggRiI z+ce8e%KHGg97)39+qkHIvbIQ0GFfuPaN>Uo2TKXRV7dGNdf@_BI^85A3?$7&v*-cK zSO`b2I1}IGCq9AFZ-g$P26h;eg_`g-qUXBYE)({yg=@qMbI#+#YHuL&({12Hfrk1K z!!re)6{c@TahG45yuhj$w64HdtnA3fAZ=aFCK^M%Q%9xqMJcmz6Uuwg>0(YEMQD;%OjGmtN^$B97JU@rPJ0mes@EcSUtIfi@%R=1%nsQ8-V^;y$LCI9QPjZB+DOq&Wu_rWh3iBNn z{#REYTW#Ha(3dUodb2|AhgxCDS!AeF3oQ~`!^GcxCy9+V(6vV^cbxD{c2U37@%cn} zIQ?z8&tN=CLj9UmsVdg%c@Z(g#?;`5tZ6E^1yFA#7J(;G} z-~OF=IM>*Pbcqcu7+mpSqVk(ErqTby&EzKXAd+{56a8_~Ih?+)-Ajds1dnlgI25al2tw}trjNs`^SpOQ!XD;a@VW| z@-b3*7hUUxu8&!?qYLMSAjSIII2fAQhH(iF`DY2s(gq$Fu2<7T3_)Tn6;8!1MWaFNvY!2#@(Twl4s5%HfLYtwyQ8IEEhJ$>VoTfUk;&lD1+rIwVPXaf*;-40w|i zQmPGEpoT2^4S$yd#-6!~Z3{JEYbx+5kWiKMIp(#DbpBjf^;IRZckl|(zlho5B{i~8<5m6dqv60^d%~B zVlgD-rKCm!<2q(lX$xH8$hxxcVpb0$-_w_l2EpEGD7yyd>3A|QT9#-9$ZyIr;8YH} zHKfB;F>w+BI`Lu^I|j}!i7=ylqkO=i3Ea@$`Zf(WuypKE>YZl@M)EZ$|E)C{G^ycP zE0M;Jb+f1asi35Q;Gl?-%T}aH%Dqd^q_0r~hO0>KBS@Ql4%BL$bY5~K?&ecioW3r> zW;`Psp!L_1_qI>MI@I%F(SO!QSy2yF8I0L`s;hAMRRCMb63 zN|P!~ogTmq(>O0+@|=szKUqohb+1PFq~q@MQ^B5T?_!;14v-&C8tu0^C*N4=;P-TD(Sn@nNCARiA6v$1_q2%+%lD|0S@u?Pa>0!YdYCzX zt*LN{DR2^FarLlUqhAwg^qElC63*6;b^-QSZ7LFH{}z^$H`NtK`p9lK9r%Brj9nC@ zznLr=rFIIcjc}>Hrm2%pjbOjnuRqz@xVKV!bM(_lq>0Ne;k!E3be}<@K#{D8R)|Bc z5Mf~Q;?CTDWh@^zbGoU0u5y{d&Q3*)`zIcs1Un*Y9$d6Fsz=Ed0oy%?Gn~P2S2i`K zVFz+r<%|$j5t}^~maT!k^G}mT=}tg}fpL1H8yqAxnh^|)Co1}HEazFUTrI-MT8jXo9cPKd^3dh@{(v0D 
zbu+i(4wR?^vltp7q2UTJCJu;Iw%*p96U|cGOa>)NX^T|~vk9xD5ZZ=vhqKTwISZ|+ zLDhmUFdvyEe@8nL?E=H9uZIVFG_LEh0&@QElt~WP$6a_=ahMADmUk`OfXkb6d4h}_ zxNxL8yf+2mdc%>uT8Ql4=6Y`hp9NiBL2eHE9*Q}MoS5eE%$kbpN~uYywMyMugkO*K z9Kiu)I-cBkBBP2NI?e^%boHU)UVC(y)HV7%Ff>u#45+;riR5oHauFA!S2ZJDqTbcO zyvfk}KInd=SBV7ybLx(xb4hZ0+J@Z)yRFl9hzQl`&HmoKOx6IZr8@r^^H_nR`gYEa zQa@ifK`)JE7<2q&W{PkNB{j)oYA@n3XwQsxzha+tflJf;7MHb)_$BBMDW~4LU6^lz>!;r)?MxkdO zBzN$L9+YRPUQ1%z!lq_wD~!<&$lx3qW>2|+wGY-nJq60D}|>9KaI7po5j zPjjs7L*54JzvG49_~iuu%T2;TBbAn1XWU#>Xw#QgzGa$*X!UBAc4|jh0FbESCx|Wj zaa4YsR?uDJzU7O3i`9c+Mqw0*V98J1pq$L$Tl;Fn`=fzQqlvDh%vd~%9LL8u^rMcC zlumB!RLWf$sL_`E6<}4_$?!?ug!;@ReT2PFYNU=mQ*zT+OnAnt#qtzAyIK$4h?It?e37K}?ai6Q15 zXh6Z6g9@DDmww?WGcQ1FVmO(E5oMOm4LrmKOUV{+`^>OK`F@*Lsn~yrIa+Hjzn@ds z#3XUKt2dORA&6?(x=`f8%46=YmpSsnTP<94v@+NIxbuwH^z##8n#0w=iT&<72v4IB z{xH?oNkuDjh5K1ipe7I#$23di>p>XSl1~IGPGZhv02V!;MeH&97Cv%ce|gpduVbR zL4|!^vrq-&q0GTM%fP_#*{|}0<*2}ylhQv?$69arVozeNa$Mgx-7+aj7ddLwj>V5I zo)e4KApoNnW=;Oo)@y$u z?l^{W?t6F8%sw{G#!>erR=#8MvA} zsmEcbnk;ELQEIa6tT({LLsF&0z(c&;)^+z}NBTW8*b|Tg76c%hF5Olm5 zrjp7ds9_H6sS0w)#NM{Rd+b?;>%Wa$+YWwxLy~ozC$ctPp%PN~%Nq%jxg)B&nZ*Z- zTBS_`w#Nxfyn^ib;--@qDLKm4j|QK9xe6M7gIJp+3b54a+moh=GirlxBE|QzE?qJ4RW=O$GIXg43kyqY7M^ZIR3d3ZXWY1PH#TAcbmU%j*w0=mWc`>H@$N>(bL#=W8 zgNCvgosz(PaD`CbLV%J$sJ_R%^CZ|p1+IGgCMB2dA?T+1EdW~vn=!7EbH z(>IBo`BFsI!y{84@QW-_3J?!xuHa$O&;?QY>3UFA;n+0A2xzhykzm!|oL7;2CdY|X zBqyG*2J${_mfU_bg~=Tm4$)6mXw$)^A*$#jI=Ww=ys zHx}$vwI$*X=HdJhN;>&&vC*IU6%T}>VbVblO2OkLcm$A6?Ec)PRjTGfnGlP4i8dnN zWiOsIEV$pJR@NN!mK>RC+UhAVi!m5suNQDVLaRazzQ8ro0@k#(oTEUjnt8BJ_%9|T zAcXsMT{;R}4(YXyXSnQaKW487x!YmUPDT)}lug+SVusFSTez z7V7meJ?NUuE${<_1*x)~aOqjHVz8X(z|PrlP9 zW30hGO#JsQNN6U=Z$yC!rw$rm^$XQl}PVr|2|9>g&&kOEVvzQPvrYy>%KCPkq z{is#^+vwr~^JNvl)Yj#6k?@Z_`#$Za!4noe+3zX6UA8nz72?)? z7ja^7e6jH`CW7j)1Fmq=J*t`m=rIO8r?FP%WS=C0$^OZZ2l9jYhbzk7x-a~q zZ|E?PV!IvVthcJk5s*0a-+{H});yVuI3F?uLShq7Aa;-HYwov5I!&Q& zBPwy{k!@Mk5)I2RlAi9CNy{|&`uFQhhy{qiBO#y;N18%8BZ@4JD3rDn!pX~apVezO zNp-{#hjqXNwD1(s4ofqEJ{H6Ro6V_mAz_S3M#AoBKI14asdvF7Ggo@rz>9oY{pWZC zbLUlJ?@si4&VP%(YRsWtB-&c*jmY5eAFI}cyw*R{(}{ust@aX9x}I_O_xAG=t5Wm- zgr&arCcY9?vro%XMB}a^nIQ*f`+Wx1iiQio7{^*mtE*gY%f3WaQ>{f^kk8B(j+hO% ze+F@iY@P45**$|KT==}ptY}4A@@!6&3`1`8*jF)1wqNm-eXJ4)6!Vb4h%HO$pBr<+TCn-uoTE| zUBT!gzWJx|P4j%)nctbu&rr_)Wi(nVI2ui)<#yuvPNZXp*~x2pdF{Hq zVR`%E^~v(YznuR$JB=nQJGSiU*?+dzI%2-<)ZG}oDEuD0FT4YhobqPP=WHF?5^(dw z$?xdw=Kh~|TSLRmg%h;cyEIR+NM=DJYM2Ywq@e&;K2|T6jil*KHz`nHiP+^<(N{Oh z=&2Y{yv-c6A`8X@a;$t+e!!!gMiFXp9-__KsI{kNu~;bN7aPP0!GPH)`QlwlpDpAD zDG$*DXJVKrb)<7u0Z#%gRR(ESuynG}cHo97fUp8vM>kRrCn_&hvyxn^T(YzCcb+-L z4+ID%9R`bVB_JIa@%PA}#9S*nd1HY&yS$r=7p3!822oR($K88y7JxHVp;~rnq|o6f zy?mo#{6=ff;etI(K3M!j^y~iJRT~y!SzH3ZQ}N4%qO??kR)&JxwYkI| zMlf<|6$96_myAz$r)+N=z4)qzYu%M}Xrl10B*9{j$JXaf*uqIp>)xh7)8=Xtj}(9 z+VuG|N0PxrqibxGtC3I+9YbT}f!M41P=`rl(r9Uz1roX*$uOyF)z_G}we03sq5DQu zW&i|BY-2*2R|t#kxLor-G3$-0_VU!-XKWPoUV3^KMPs?H_s#_dcIVsMl0}7=gQ5&L zNL7>x%j$6gvAHT_QPpvejkpt9qm`iel*LGc97ZZ-?kAL<`%VW&|Jz)_jlXSgqs!@ zx-ZYGBgdp|{+$*mZTC^2A(4v&tl#or6<-;lvJhav5@d$2pe9*NR)^!xr*MdS#Q9N# zxe_uXPRy4lAgvR9nM?e((7C%HIJwWopR4N?x_YtvMi3X?C?_JKi3&4q%G>&{C*-(h zGBIOM&9a3n)r8FS3Zj0~I%J7|;0Az=c@QgHxR{)AP~zORI<5zH4vfx*vgg~L`fWhAKU5Bc&uyNKl@>`qJoM_Imbeu zp*#gCHm9TFP_w)?}~HW)2`ku9olHza1nh{U``}gyT7&4KoiEW zWH`%d2Qrd?zmYbwGr2no0vdc285u`fV?Y09VGu{ixAGQgA(uiE)euaRUj?4(9~ZxV z*6eJ4lSIX;Jb+6jY7@==RO2(tJx~ zL?}JbIr;oibsTYwE-Ez8f*4hXLI3OWx4csh_KV*ESI^KBYisNBSrn{*Sj8K3e_Ral z&(9C*&kpO?4m&bd{?H@i=d7F7Jq;rs)E>5|vcuY1PJrWTiP5qfI2iCz9)3^NM4K3h zj)L$n5lNPxsFnc!aN@PN+Eun5I*Oz>?q$nsaSXcOL_5VeK{i4Nxqoh$kul`pdg-wD 
z!?Dvk;c5VCBH=djnP+{ml0itx)6d6v|K~uqXJLfgr)Mp6N_Bc~ZbpIRr_w;x9H7Ly z>x>>*iVhQ#e*Cq#s}Z_~^ra5E*~SXz0eCGC9daRS5)gzC*J}anr^3F18l!Zt)K;Mc zAxf*5Y3b(1X2ER#X(iWGMMN9Kb@5M#(xgO}goW3pqP0v?2686oJd=PXYvckSJ+YLh z4xAvc*OrRWdkQ{!%Q_Kt3=nI+42%u8tRsI$@`gO7lVoowACGSz)6zkS!Z8XlHyhOHxx) zjN!e)cM5{9Vj{RfUZ3^Ebd;|Y#8QGecCxr7h8<)I$rV*zwqiL9$48E~WmCXWfJf0a z4FZ(|7SQh8I+4Juh8iN6-a}uDQ&>`)5aUGt){Jy>a-n(ZI(~2LeAS%D~Hkm^CAp#4NF3K2wC=Nsg*&4VjCj_~e-$!>Xf*llI3L z_p?$LLEY7h1=GWWa$NG?S3bjlph3L4fvK10grc=I$WD z_|446Q@Kko@zaq%GeWV_VX!@XnwBOIx&kaMF{UxC1kI~*uhk^ZKNQo_x4utIM=QDsGvB9i*ECfj5PTLCC$* z#$t;~kmsPvU+Jf93#0Sao{O*P4`dVhL5^McQ{Y~&v_a}dRbfP)z?1TbJ+(nF6=p>~*Axp=NB5v?>jH)cVeTFgIpIa) z2&P^k<7Y8|)GjFp$(j)-pW*aQTop^m^WBomshOv~5BPcbt*hH+se(bh!lafh%=sZmW7!_E;8*Xb_FjD3Sr| zf#~nQ1U6Em)tUXebx0a|PPd%UH(1sn9nX4#El}BEfHJe42wyuwU z$cHbObV~Z|8)$4MX?5QW<);i$YI{TFU(x3Pr;Id)J-M|5xw#|1qQl=YL-!@ULsi z*+cu8&ya$V`ux8mqnqX%&>fig_emJ(*7*4-0D2$-Ep}RI6q6J=#}aWqZ!GiQ^Yq_) zf3q^pH3sUQ`jhRwfpn4UI4ST=wN{d^cFL+aH-w8oY`_7i-bdHsT! zzHra%I6K<+`6l2QFW?;SyLJbf@a@LLwS{(UDCzvFM{`Qpd5u@vxfD4tUpeBcr)Ov2JdPuf3akZ1pwlAIn5RjR`)aZWc0lB5OT_>D_6FSL@yQ9aSLLy{ z61r3gO%=JcCjGqe<>%3`#(%H)&~ZQ8ai!34)$x4u?0+|6{KRd1$vrXlTjb%ktn$8$ z?Dh-U?e9?Gt1dMEzbepD5%+b!8;6GFiy2*zD@fuOw;-W*o}Zt; z@Oho_L*2`NiQEmTJl#A$t&5z8t0Wc|jqMYBdbmD`OuZZTY{%-Hcz&EX;^NPyUx%)) z`yEUbX5W`)d++Cl`d`TexK-=;p56F6dD*6APnZ{hl$yl)P{ZnTnZ9oDl@$7h5E%WyFKFoNSB!f;~fYG8kCz#I@K5O}9FL)dq<#k$PQt^AC zJ=;3MkUvd?*Vdd!NJ!78pi4`il1-C7wtFdjcDV)~=}F{y;#mWwo4#Nos-f+~;l*1P z+3BKBnTUE>7HS@F90Oe&C?4CN-b-&kNC#=_d-xy-0CuO(Jw)wKM;%X_108?yp1O(F zZ>2ML_bxN#IPUr7kpDo>0-j&E?cyD02Z+f4aDSI8-ws_9LrJ~&cgNE4x>lB#eO8Z* zJ_O!@=Ad;zz;j)tpM%#S`0RVmju)OD@4sIhUR)%A^z=vo;BpV;8q-{&_>wJMPjaCQC#VhWI^dV^q>7 zi@|e+`tOInoI5KFFKHObr$J_N;v2EmI%xSum2V=sqf#e_A|W5zL@9Rn1H9f zfF~r8+ZP0;c(!hlfB}$>`;m^v5tV>XAW=rW1Uf`+zlz)re`voLqRoE%d3OI(pzi>^8 z_I+`e{`&nKh=8&WZNvBpu1}|DPp2<%O;JwHI#c{_z(;Xk`ugK8n%}-e$8BWCtpVD$ zYZkbNB(ld1q>1tIr;&g=Rwr-p1<+GZk@Q?uYz(gxJL3nH=K~RhzaZAA$?kuLK8!?f zetGr`?RY+{d^#lJbrhP1yIa|n@!yv5zYPtz{SOE)!r^5@ARTqw^+yv;Kl-k5%1MjR zHu>Jwu%K^Qf9U;x_>%`OH0o;ghk7>&{)|-@5Ejnbd~|C+C+ava_CGB4f5Al` ztG~0?_Wi#jilFJ~>BxDZ_gUGP))@lgzd?Q+)sk~bDJ`&1B9G&wL|M=<{7A!RYcg+8bu{TOj80uHXE*d9A3YTUccSo zf3aoOZ~J+Z0~9HsT#Jq%^p-pPCf?sDUS6i2;~1agsGLn-R)ebH>EUeP7lEJmV<*~U zC&*vxe?xc^$%;X?wOLNSD?RMQe@6*I?=ov|Y1V3!;NqKSYkpZ7*<%;kddm5dnBnCEj5H_%}04IU{`rtp(~{2K5V4b-Lne{w@f??s;Xo?Ag#+~Iq70g{(H zNNSrP_Ok1)TRI+EM4rxLp8ujf|F!3N$R)eay_f+l4NjNwn^n*C2LGc5VQ80)i!kTf zRS|F7z7#8i1Fiq@fdBEu&dBp$vZtPl8PM1m0C|0n?fK9cTK;g_GU4-VX?$(@pMhY2 zw&x1Te!k0oPGpsQS);Tp%=g7;sk56eE4zQeZ;by{K3<25o7>fk5fRyER;TB;I`6k? zRqHh_{jA%62K6{V^4~xj)-arhW_Db9zJ2@Fs&)}1?|>&zl(pXecvkW849jOj4>4`D z3)vJykB`qmZh;eb}YxxcRggQKsjSx>D?80oIjYhu#l)`lEl%XAgTV zULpKr|GXWRJ$TYjJX%I}E(|>Jm0Vj^%gVe-=0CbB|NAWTZNPk*%iYkaVfpgz(Kn&z z>Cwb^tA$Ju007}E!65KYE<6PQh|-B60|2@_IxGMLVgCQ(>w82F0B33Yen$ud0w5=) LEcsL1B=G+MAuU>{ diff --git a/docs/resources/model_a.png b/docs/resources/model_a.png deleted file mode 100644 index 65c6dc3938ab3ca44482e40ddfa2b054844848f6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 55023 zcmZU)1yq|s^EVpYJ-EBOTX6_3p+#GqLUD)U?(XhTXesU#cXu!D?k+cd|MK1M-gA-n4>nz0Ko% zEOVXw-p4BU&&1!a=ic)zfVh`HxNaqFBtO)AZQ$m9Us!bHS3?9S)C&NNHDJTOwpJ4F zK^8h4+G{b&IvwW$|4$1##p5&6j963`;5|2~S%-=fpq3W3fAz4WPl!JxAgYx`+o(E} z*1)4Y&Y>_)r4zvQDbeYQMbGOl-3&wUnkz_zaKveR%j{xjm3})~5W77p-jEP~AMv4? 
ztDVjy&{;l2yx(r8n$Syy55SPH%C_eOHnnCoNnqd#VBPirY=DLgE~eSFM<;MbF#5(v zb=k0KO2Y{0^uZy~i8zs)6BEk&k$s~a=={;tTbzsOggPXfjsWwC7agF74PgzH@S=dA z0EZ2$O*_M^@8_4ygnQSNFG0R%gwo^>J$G6@>1cqe60`cHr%)S9T)^jJD7Dtt&@n@H zKtxTu3%a)hS&P>fjovMmugpJJSM?R4a;+HI56+)*(S4;1ye2f-|DNv}xnCy=BsghxJ0!9>$&|BRx{0M4^;YuQ!Mj2A3`wBL0w@ z@qp3uR?hps2-F-%C0)8zSk`d0hN=RHF+uoULl;~EvuksMff-LjJL?leePlt1IJi=ZvVm*dEa$eSZT@uh=$KzJD$PBo<@lwoq* z;H`p`{7?COh2`o9YS}+N%zm6@_=bFor7UYzFg7)Q0B~jzBFv6tlhw-0oFqM{J;1sV zc*2Db%r}YRj3Mw$ z2`3!m9Z0?9Y~LVYpf92l#@zdHL)V_(UdtY*iLecO4f8w-qwo4+30xFlF5iQKVecp^lq%B=8;$aEdo$0b=oO+hJ-TF1yMxLr9QOJCTQ z)TVj_!!yHI1{2Moa#b^M^Q_CP?2*oq0Gbqc3EZTHWpSOIMPMBxZbU@3LY+d6LiDf$ zE+c6^@f6I#;SJZ5(UZ%Q@JH5%i?T?WHM`hdj4u= zc)EOAsPI-Z^Y>|fcKIKv=b*bH^p69|a(T5<`$gj>A;>+6@#8;CZ#YYwd0k#zXj^Vu z0$aGh_kUOa9{m0E`~3~l6U!^jE9ujpCosGaJS99FdLp(xiUZgh@uep}vlD@i^FejW2^L&q_o$uO5P_g|auOQ?Lc{ED1EIf=PK$4E!v z_FBh6Sts#S@FXMQBZ-x;XA;wUtj%I{r&$A8QCM49a&-u``}AyTj~bsWZ0xda|5|tt zgc##kfGuWBu76gfvo19B-!1E3>905c+;^Suw*6{5(70maHk0#>+_%IurP2i zf6Qols*`i7dj1kVvLD&R#ys0iQrJVV!c}PRW`3zl=frFA5g)%qF+(wOq|l@P`r(>r zKf3v-*-`)abFnRIqjcjRTMf6*ZZV#L&oCI?7&&r@13{%_U(5F;Ccb`||3X!XUb$fX z^-kfA(XMx8qP5p^GIR@P7kuipt~J5G)HvrhJh5gMyw2!b=NtFV_f8C@3$^OE=T{~^ z(3$M-Em4#QCow015ik}Y(BCQi)+0`+eg`V3l5m{n>@|G?~dw83HBzBLBc?( zMDist!coVZ>hNnZ912K*tEBovH9~bRoh~h3@VyYX5T_us&|tuQAZY+(Qemvs|0QB3 z45!y+z+}5{J9DcP|A}R_*uzBf9d4>`?}+czi!v(DZM4TUz-t)UmO!do{C}e^!nEG?h_Med}|q z7l|7o)5p)(Wc_0$Y?)zEpvm&Aq+FR?(VZ4wu|knqVcl-ew*4V_Oe@*IdpV)btZFph zdg9aQr_K4}mDNU41}(M;cJdXnnmWz_AiZzP5g!)UL&D84a zkGx$JZb8vn%AOVGrzOh^tsbqFCzp#epGrS!>E7E`8lI1R?|2%AFARzBPV*LgCgYRk zzw;e?YH5})EUP>0@!9k_^bdpa9*Y?pW6fYStFZrB1~fhsAp_dOr7`FhtD<$^@T+^NBV1mbIH@Rr!mK5ABf!R8V8C?w>?MB z@5yIGKRM~|x-TJhhZo@Fs_f-veMojWIy~H&G5MqT{X8pN)Tx5K&F;c_a_M^CYa;vb z{zS}U?e?V_WgI`|i|OZ+mL2;tr%s=|ZG>lXA$U@F;ZTy$7Ixv!XM{QYURx2*$unAi zDsx)#>`$i$U*b29)fep6Kuwf?hFTvFK0Q_JH#*r)tm-)7dpGXnO#g{^&bTM`;d!n8 zd_*Do;Yd{I!muk)*krYuUd-(M#;5Io^3kX-Xk7Z5dPQ!4&YPZH?8}4ui~M|IdTE~+ z^Sl1@=+2T`hx-c`2d*&hoBqwqOl79QkB-In=`*u)=f#^>Ey7CkFR5+nzD^sg$2RY+ zX>K^&&F9tU%wlOi2ag0dmuIC>s<}Qe_d(asCpJ)45HLw$0@6?h0KU@!0DeILz!Rj& z?*IUB;Q#=R3;_V4GynkKKC4At1k!{H75|pW8wJ3qHUxPCHxzjdvwtO!}_uPeu8_Z zq8#I@-v+zFge!1@TIyi&TiLiA!vCx;*K<|N z^l!{o_~?uDu3%)r4~IO42s!Zg(SN)#WCBplb0;L*#!p%JgylKP`P=Mp1KhU?LF$WcNJ9x&Ps0tE?`y-OmkH z9&qjyXxZlWva#Yjk*f`nN@bs3484le;(~~A#P{J<*NJ4{`cC?X>Ppq;zvKtmrJDBO zHkCD-i`S-lv!x3$deLoN$UO>2(3q;C-) zsB+x*a~H2KQ?7Af_x)-+$(|ny2~|4M6Jpp%6)??(_wQBYY7@dy=^vx!md*@hpT$34eQT?G$4j2XcZg z8vN%I`=<+T8%S_6Vvnd8sczmCs&C&jFE&HH?-edY8He~gJokgy+%U(y*E3umxG@6q3q&E z(LQ|NVwV=L7s{!j*e7uj4vwcid9thCd;4qr^9ZdP{l`bEWCV0BE&bnZdTTy^4vzPa z<;A!}k*5z8Q}P$s=!cw7!*%Oo;uq6TR4w zBe~NOF}wA>tsg$DbCWG67k%8dzqFN$ilFy_Ivd(}J;3n|(=zV3oUt8u%gJI`BkS05 z-2M9dqkZwNLWL-N@qpJ5x7U4L>^#jE=Igu9Q&jEW<;047TJ~|=R~yeZkE?HwZ5GI% z4uO5l3nptgRKC}E?fW7a&AdJqnHyOA53gJ?p|Py7p0}Pqs*Tki!wsT@-EIwnFEB(- zn6$3OX+biVLe1~}UQGg*If3NAFTDir8Th{JTYv8A4KF``xNV}gXrV8DjO!tL&~*dG z+Z~QK`wF-#p5L_23S5jglkl7oa+0|(&aiJU5Pc)7Y4Kdq^w~V5m_RZGPigIA$KL|mLh6rj_MJrDdLc7U_s@PNAVcmvdd4xqkl!W zuBk2(uu!G{xEn(H%FFI;DHa<+-@ZYQfP;}lu}Y5SrDg3hVRRn)Tw8Pg(!=P3-T}F; zVrI=(BXDSauWgaJ>pizzV!AmOH&*Uph>w*D*k^)Q{-Li2ed|u+&#KQ`x1?fE{z?7i z?ucSM4KH&>mEYVw2LD`0x!_@RTzQs*&X9P&w6$*w0%^L*4YsIqGaJtvUpoyh@v|h`Jyur9 zmAts)-p%t~<9d7!G|rpfN-usr`bA)nfhw4D!c(8nyk19=8|yxm#ci<;;H*cLo%;^E z?aIWC*R4P9FT@M9PlMJwmfrtTle;g%Jf7LS|Fz!P6;nkx)-!**7ulsG-xI~ZC>$v4 z58JBrc(uZP5gJK0aIOfj`#7=oc;?i0aJ&PdtNJeSWmju+lip)6K9&1&2IHCXOmJ?y z{Z